        /// <summary>
        ///   Runs the optimizer on the given training observations and
        ///   labels, returning the negative log-likelihood as the error.
        /// </summary>
        private double run(T[][] observations, int[] outputs)
        {
            calculator.Inputs  = observations;
            calculator.Outputs = outputs;

            Converged               = true;
            optimizer.Tolerance     = Tolerance;
            optimizer.MaxIterations = Iterations;
            optimizer.Token         = Token;

            try
            {
                optimizer.Minimize(Model.Function.Weights);
            }
            catch (LineSearchFailedException)
            {
                // TODO: Restructure CG to avoid exceptions.
                Converged = false;
            }

            Model.Function.Weights = optimizer.Solution;

            // Return negative log-likelihood as error function
            return -Model.LogLikelihood(observations, outputs);
        }
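The pattern above minimizes the negative log-likelihood so that a line-search optimizer, which drives its objective downward, effectively maximizes the likelihood. A minimal, self-contained illustration of why -log L behaves like an error function (a toy one-weight logistic model, not the library's CRF code):

        using System;

        class NegLogLikelihoodDemo
        {
            static double Sigmoid(double z) => 1.0 / (1.0 + Math.Exp(-z));

            // Negative log-likelihood of a one-weight logistic model:
            // smaller values mean a better fit, so it acts as an error.
            static double NegLogLikelihood(double w, double[] x, int[] y)
            {
                double sum = 0;
                for (int i = 0; i < x.Length; i++)
                {
                    double p = Sigmoid(w * x[i]);
                    sum += y[i] == 1 ? Math.Log(p) : Math.Log(1 - p);
                }
                return -sum;
            }

            static void Main()
            {
                double[] x = { -2, -1, 1, 2 };
                int[]    y = {  0,  0, 1, 1 };
                Console.WriteLine(NegLogLikelihood(0.1, x, y)); // ~2.48 (poor fit)
                Console.WriteLine(NegLogLikelihood(2.0, x, y)); // ~0.29 (better fit)
            }
        }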
        public void TestRosenbrock()
        {
            Rosenbrock        cf    = new Rosenbrock();
            EndCriteria       ec    = new EndCriteria();
            ConjugateGradient optim = new ConjugateGradient(cf, ec);
            // Alternative: new ConjugateGradient(cf, ec, new SecantLineSearch(cf, ec));

            DoubleVector x0 = new DoubleVector(new double[5] {
                1.3, 0.7, 0.8, 1.9, 1.2
            });

            optim.Minimize(x0);

            // Intermediate iterates can be inspected via optim.IterationVectors.

            Assert.AreEqual(optim.SolutionValue, 0.0, 0.1);
            Assert.AreEqual(optim.SolutionVector[0], 1.0, 0.1);
            Assert.AreEqual(optim.SolutionVector[1], 1.0, 0.1);
            Assert.AreEqual(optim.SolutionVector[2], 1.0, 0.1);
            Assert.AreEqual(optim.SolutionVector[3], 1.0, 0.2);
            Assert.AreEqual(optim.SolutionVector[4], 1.0, 0.4);
        }
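The Rosenbrock cost-function class is not shown here, but the assertions pin down what it must compute: the generalized Rosenbrock function, whose global minimum is 0 at (1, ..., 1). For reference, a sketch of that function:

        using System;
        using System.Linq;

        static class RosenbrockSketch
        {
            // f(x) = sum over i of 100*(x[i+1] - x[i]^2)^2 + (1 - x[i])^2
            static double F(double[] x) =>
                Enumerable.Range(0, x.Length - 1)
                          .Sum(i => 100 * Math.Pow(x[i + 1] - x[i] * x[i], 2)
                                  + Math.Pow(1 - x[i], 2));

            static void Main()
            {
                Console.WriteLine(F(new[] { 1.0, 1.0, 1.0, 1.0, 1.0 })); // 0 (the minimum)
                Console.WriteLine(F(new[] { 1.3, 0.7, 0.8, 1.9, 1.2 })); // the test's start point
            }
        }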
Example #3
        public void TrainBackPropagation(double[] features, int[] classes, int iterations)
        {
            training_features = Matrix.FromDoubleArray(features, input_layer);
            training_classes  = Matrix.Unroll(classes, output_layer);

            ConjugateGradient cg = new ConjugateGradient(
                ((input_layer + 1) * hidden_layer) + ((hidden_layer + 1) * output_layer),
                CostFunction, Gradient);

            cg.MaxIterations = iterations;
            cg.Progress     += ConjugateDescentProgress;
            cg.Minimize();
            double[] solution = cg.Solution;

            theta_1 = Matrix.FromDoubleArray(solution.Take((input_layer + 1) * hidden_layer).ToArray(), hidden_layer);
            theta_2 = Matrix.FromDoubleArray(solution.Skip((input_layer + 1) * hidden_layer).ToArray(), output_layer);
        }
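The constructor's first argument is the total number of network parameters being optimized: all weights of a one-hidden-layer network, with a +1 per layer for the bias unit. A quick arithmetic check of the weight layout used above (the layer sizes are hypothetical, chosen only for illustration; runnable as C# top-level statements):

        // theta_1 takes the first (input_layer + 1) * hidden_layer entries of the
        // CG solution, theta_2 the remaining (hidden_layer + 1) * output_layer.
        int input_layer = 400, hidden_layer = 25, output_layer = 10;
        int theta1Count = (input_layer + 1) * hidden_layer;   // 401 * 25 = 10025
        int theta2Count = (hidden_layer + 1) * output_layer;  //  26 * 10 =   260
        System.Console.WriteLine(theta1Count + theta2Count);  // 10285 variables for CG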
Example #4
        /// <summary>
        ///   Runs the learning algorithm with the specified input
        ///   training observations and corresponding output labels.
        /// </summary>
        ///
        /// <param name="observations">The training observations.</param>
        /// <param name="outputs">The observation's labels.</param>
        ///
        public double RunEpoch(T[][] observations, int[] outputs)
        {
            this.Inputs  = observations;
            this.Outputs = outputs;

            try
            {
                cg.Minimize(Model.Function.Weights);
            }
            catch (LineSearchFailedException)
            {
                // TODO: Restructure CG to avoid exceptions.
            }

            Model.Function.Weights = cg.Solution;

            // Note: unlike the other variants on this page, this one returns
            // the log-likelihood itself (higher is better), not its negation.
            return Model.LogLikelihood(observations, outputs);
        }
        public void MinimizeTest2()
        {
            Func<double[], double>   f = BroydenFletcherGoldfarbShannoTest.rosenbrockFunction;
            Func<double[], double[]> g = BroydenFletcherGoldfarbShannoTest.rosenbrockGradient;

            Assert.AreEqual(104, f(new[] { -1.0, 2.0 }));


            int n = 2; // number of variables

            double[] initial = { -1.2, 1 };

            ConjugateGradient cg = new ConjugateGradient(n, f, g);

            cg.Method = ConjugateGradientMethod.PolakRibiere;

            Assert.IsTrue(cg.Minimize(initial));
            double actual   = cg.Value;
            double expected = 0;

            Assert.AreEqual(expected, actual, 1e-6);

            double[] result = cg.Solution;

            Assert.AreEqual(125, cg.Evaluations);
            Assert.AreEqual(32, cg.Iterations);
            Assert.AreEqual(1.0, result[0], 1e-3);
            Assert.AreEqual(1.0, result[1], 1e-3);
            Assert.IsFalse(double.IsNaN(result[0]));
            Assert.IsFalse(double.IsNaN(result[1]));

            double y = f(result);

            double[] d = g(result);

            Assert.AreEqual(0.0, y, 1e-6);
            Assert.AreEqual(0.0, d[0], 1e-3);
            Assert.AreEqual(0.0, d[1], 1e-3);

            Assert.IsFalse(double.IsNaN(y));
            Assert.IsFalse(double.IsNaN(d[0]));
            Assert.IsFalse(double.IsNaN(d[1]));
        }
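MinimizeTest2 selects the Polak-Ribiere variant of nonlinear conjugate gradient. That method computes the deflection coefficient from successive gradients; a standalone sketch follows (the zero clamp is the common "PR+" safeguard, which may or may not match this library's internals):

        using System;

        static class PolakRibiereSketch
        {
            // beta = gNew . (gNew - gOld) / (gOld . gOld), clamped at zero.
            static double Beta(double[] gNew, double[] gOld)
            {
                double num = 0, den = 0;
                for (int i = 0; i < gNew.Length; i++)
                {
                    num += gNew[i] * (gNew[i] - gOld[i]);
                    den += gOld[i] * gOld[i];
                }
                return Math.Max(0, num / den);
            }
            // The next search direction is then d = -gNew + Beta(gNew, gOld) * dOld.
        }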
Example #6
        public void TestRosenbrock()
        {
            var cf    = new Rosenbrock();
            var ec    = new EndCriteria();
            var optim = new ConjugateGradient(cf, ec);
            // Alternative: new ConjugateGradient(cf, ec, new SecantLineSearch(cf, ec));

            var x0 = new DoubleVector(new double[5] {
                1.3, 0.7, 0.8, 1.9, 1.2
            });

            optim.Minimize(x0);

            Assert.AreEqual(optim.SolutionValue, 0.0, 0.1);
            Assert.AreEqual(optim.SolutionVector[0], 1.0, 0.1);
            Assert.AreEqual(optim.SolutionVector[1], 1.0, 0.1);
            Assert.AreEqual(optim.SolutionVector[2], 1.0, 0.1);
            Assert.AreEqual(optim.SolutionVector[3], 1.0, 0.2);
            Assert.AreEqual(optim.SolutionVector[4], 1.0, 0.4);
        }
Example #7
        /// <summary>
        ///   Runs the learning algorithm with the specified input
        ///   training observations and corresponding output labels.
        /// </summary>
        ///
        /// <param name="observations">The training observations.</param>
        /// <param name="outputs">The observation's labels.</param>
        ///
        public double RunEpoch(T[][] observations, int[] outputs)
        {
            this.Inputs  = observations;
            this.Outputs = outputs;

            Converged = true;

            try
            {
                cg.Minimize(Model.Function.Weights);
            }
            catch (LineSearchFailedException)
            {
                // TODO: Restructure CG to avoid exceptions.
                Converged = false;
            }

            Model.Function.Weights = cg.Solution;

            // Return negative log-likelihood as error function
            return -Model.LogLikelihood(observations, outputs);
        }
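A typical caller drives RunEpoch in a loop, using the Converged flag and the returned error to decide when to stop. A hypothetical driver sketch (the learning, observations, and outputs names are illustrative, not part of the API shown above):

        double previous = double.PositiveInfinity;
        for (int epoch = 0; epoch < 100; epoch++)
        {
            double error = learning.RunEpoch(observations, outputs);
            if (!learning.Converged)        // line search failed inside CG
                break;
            if (Math.Abs(previous - error) < 1e-5)
                break;                      // error has stabilized
            previous = error;
        }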