Example #1
        public void AbsoluteConvergenceConstructorTest()
        {
            var criteria = new AbsoluteConvergence(iterations: 10, tolerance: 0.1);

            int progress = 1;

            do
            {
                // Do some processing...


                // Update current iteration information:
                criteria.NewValue = 12345.6 / progress++;
            } while (!criteria.HasConverged);


            // The method will converge after reaching the
            // maximum of 10 iterations with a final value
            // of 1371.73:

            int    iterations = criteria.CurrentIteration; // 10
            double value      = criteria.OldValue;         // 1371.7333333


            Assert.AreEqual(10, criteria.CurrentIteration);
            Assert.AreEqual(1371.7333333333333, criteria.OldValue);
        }
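In the test above it is the iteration cap, not the tolerance, that stops the loop. For contrast, the following is a minimal sketch (not part of the original tests) in which the tolerance does the stopping; it assumes, as the asserts above imply, that HasConverged compares |NewValue - OldValue| against the tolerance.

        public void AbsoluteConvergenceToleranceSketch()
        {
            // Illustrative only: the sequence halves on every pass, so the change
            // between successive values drops below 0.1 long before the
            // 100-iteration cap is reached.
            var criteria = new AbsoluteConvergence(iterations: 100, tolerance: 0.1, startValue: 1);

            double x = 1.0;
            do
            {
                x /= 2.0;                 // 0.5, 0.25, 0.125, 0.0625, ...
                criteria.NewValue = x;    // report the new estimate
            } while (!criteria.HasConverged);

            // |0.0625 - 0.125| = 0.0625 < 0.1, so the loop stops at x == 0.0625
            // after four updates, with the iteration cap never coming into play.
        }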
Example #2
        public void AbsoluteConvergenceConstructorTest4()
        {
            var criteria = new AbsoluteConvergence(iterations: 0, tolerance: 1e-5, startValue: 1);

            criteria.CurrentIteration = -2;
            do
            {
                criteria.NewValue /= 10.0;
            } while (!criteria.HasConverged);

            Assert.AreEqual(4, criteria.CurrentIteration);
            Assert.AreEqual(-5, Math.Log10(criteria.OldValue));
            Assert.AreEqual(-6, Math.Log10(criteria.NewValue));
        }
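The expected numbers follow directly from the setup: NewValue starts at 1 and is divided by 10 on each pass, and the criterion only trips once |NewValue - OldValue| = 9e-6 falls below the 1e-5 tolerance, i.e. on the sixth pass; six increments from the forced starting value of -2 leave CurrentIteration at 4. A small illustrative re-run (not part of the test) that prints each step:

        public void AbsoluteConvergenceTraceSketch()
        {
            // Uses only the members already exercised by the test above.
            var criteria = new AbsoluteConvergence(iterations: 0, tolerance: 1e-5, startValue: 1);
            criteria.CurrentIteration = -2;

            do
            {
                double previous = criteria.NewValue;
                criteria.NewValue /= 10.0;
                Console.WriteLine("old = {0}, new = {1}, |diff| = {2}",
                    previous, criteria.NewValue, Math.Abs(criteria.NewValue - previous));
            } while (!criteria.HasConverged);

            // Printed differences: 0.9, 0.09, 0.009, 0.0009, 9e-5, 9e-6.
            // Only the sixth drops below 1e-5, so the loop runs six times and the
            // iteration counter, having started at -2, ends at 4.
        }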
Example #3
        /// <summary>
        ///   Initializes a new instance of the <see cref="ResilientBackpropagation"/> class.
        /// </summary>
        ///
        /// <param name="parameters">The number of parameters in the function to be optimized.</param>
        ///
        public ResilientBackpropagation(int parameters)
        {
            convergence = new AbsoluteConvergence();

            solution         = new double[parameters];
            gradient         = new double[parameters];
            previousGradient = new double[parameters];
            weightsUpdates   = new double[parameters];

            // Initialize steps
            Reset(initialStep);

            for (int i = 0; i < solution.Length; i++)
            {
                solution[i] = Accord.Math.Tools.Random.NextDouble() * 2.0 - 1.0;
            }
        }
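The constructor above only allocates the gradient, previous-gradient and per-parameter step buffers and seeds a random starting point; the update rule itself is not shown here. As a rough, generic RProp-style sketch of what those buffers are typically used for (the 1.2/0.5 factors and step bounds are assumed textbook defaults, not necessarily what this library uses):

        // Generic resilient-propagation update sketch, not Accord.NET's actual code.
        // Each parameter keeps its own step size: it grows while the gradient keeps
        // its sign and shrinks when the sign flips; only the gradient's sign is used.
        private void UpdateSolutionSketch()
        {
            const double etaPlus = 1.2, etaMinus = 0.5;     // assumed adaptation factors
            const double stepMin = 1e-6, stepMax = 50.0;    // assumed step bounds

            for (int i = 0; i < solution.Length; i++)
            {
                double change = previousGradient[i] * gradient[i];

                if (change > 0)        // same sign: accelerate
                    weightsUpdates[i] = Math.Min(weightsUpdates[i] * etaPlus, stepMax);
                else if (change < 0)   // sign flip: back off
                    weightsUpdates[i] = Math.Max(weightsUpdates[i] * etaMinus, stepMin);

                // move against the gradient by the adapted, sign-only step
                solution[i] -= Math.Sign(gradient[i]) * weightsUpdates[i];
                previousGradient[i] = gradient[i];
            }
        }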
Example #4
 /// <summary>
 ///   Initializes a new instance of the <see cref="BaseBaumWelchLearning"/> class.
 /// </summary>
 ///
 protected BaseBaumWelchLearning(IHiddenMarkovModel model)
 {
     this.convergence = new AbsoluteConvergence();
     this.model       = model;
 }
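Here the criterion is only created; during Baum-Welch training it typically tracks the model's log-likelihood between expectation-maximization passes. A hedged sketch of that pattern (the three helper calls are hypothetical placeholders, not Accord.NET APIs):

     // Sketch of an EM-style fitting loop driven by the criterion created above.
     do
     {
         RunForwardBackwardPass();       // hypothetical: expectation step
         ReestimateModelParameters();    // hypothetical: maximization step

         // Feed the new log-likelihood to the criterion; training stops once it
         // changes by less than the tolerance (or the iteration cap is reached).
         convergence.NewValue = ComputeLogLikelihood();   // hypothetical helper
     } while (!convergence.HasConverged);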
Example #5
 /// <summary>
 ///   Creates a new <see cref="LowerBoundNewtonRaphson"/>.
 /// </summary>
 ///
 public LowerBoundNewtonRaphson()
 {
     convergence = new AbsoluteConvergence();
 }
Example #6
        /// <summary>
        ///   Initializes a new instance of the <see cref="ResilientBackpropagation"/> class.
        /// </summary>
        /// 
        /// <param name="parameters">The number of parameters in the function to be optimized.</param>
        /// 
        public ResilientBackpropagation(int parameters)
        {
            convergence = new AbsoluteConvergence();

            solution = new double[parameters];
            gradient = new double[parameters];
            previousGradient = new double[parameters];
            weightsUpdates = new double[parameters];

            // Initialize steps
            Reset(initialStep);

            for (int i = 0; i < solution.Length; i++)
                solution[i] = Accord.Math.Tools.Random.NextDouble() * 2.0 - 1.0;
        }
Example #7
        /// <summary>
        ///   Initializes a new instance of the <see cref="ResilientBackpropagation"/> class.
        /// </summary>
        /// 
        /// <param name="parameters">The number of parameters in the function to be optimized.</param>
        /// 
        public ResilientBackpropagation(int parameters)
        {
            convergence = new AbsoluteConvergence();

            solution = new double[parameters];
            gradient = new double[parameters];
            previousGradient = new double[parameters];
            weightsUpdates = new double[parameters];

            // Initialize steps
            Reset(initialStep);
        }
Example #8
 /// <summary>
 ///   Creates a new instance of the Viterbi learning algorithm.
 /// </summary>
 /// 
 public ViterbiLearning(HiddenMarkovModel model)
 {
     this.convergence = new AbsoluteConvergence();
     this.mle = new MaximumLikelihoodLearning(model);
 }