Example No. 1
        private void Learn(double[][] input, double[] output)
        {
            // Other linear SVM teachers that could be swapped in here:
            // LinearDualCoordinateDescent, ProbabilisticDualCoordinateDescent
            var teacher = new StochasticGradientDescent()
            {
                // Optional hyper-parameters, left at their defaults:
                //Loss = Loss.L1,
                //Complexity = 1000,
                //Tolerance = .1
            };

            _svm = teacher.Learn(input, output);
        }
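A minimal usage sketch for the snippet above, assuming the Accord.NET API (SupportVectorMachine, Decide) and that the teacher accepts -1/+1 labels passed as double[], as the method signature implies; the surrounding class and field names are illustrative:

        using Accord.MachineLearning.VectorMachines;
        using Accord.MachineLearning.VectorMachines.Learning;

        public class AndGateExample
        {
            private SupportVectorMachine _svm;

            private void Learn(double[][] input, double[] output)
            {
                var teacher = new StochasticGradientDescent();
                _svm = teacher.Learn(input, output);
            }

            public void Run()
            {
                double[][] input =
                {
                    new double[] { 0, 0 },
                    new double[] { 0, 1 },
                    new double[] { 1, 0 },
                    new double[] { 1, 1 }
                };
                double[] output = { -1, -1, -1, 1 }; // logical AND, coded as -1/+1

                Learn(input, output);

                // Decide returns one bool per sample; expected: false, false, false, true
                bool[] answers = _svm.Decide(input);
            }
        }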
Example No. 2
        public void LearnTest()
        {
            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] xor =
            {
                -1,
                1,
                1,
                -1
            };

            // A degree-2 polynomial kernel with constant term 0. Expanding the
            // inputs through its explicit feature map makes XOR linearly
            // separable, so a linear teacher can learn it.
            var kernel = new Polynomial(2, 0.0);

            double[][] augmented = new double[inputs.Length][];
            for (int i = 0; i < inputs.Length; i++)
            {
                augmented[i] = kernel.Transform(inputs[i]);
            }

            // Create the stochastic gradient descent teacher
            var learn = new StochasticGradientDescent()
            {
                LearningRate = 1e-3
            };

            // Run the learning algorithm
            var svm = learn.Learn(augmented, xor);

            bool[] predicted = svm.Decide(augmented);
            double error     = new ZeroOneLoss(xor).Loss(predicted);

            Assert.AreEqual(0, error);

            // Cross-check Decide against the sign of the raw decision values
            int[] output = augmented.Apply(p => Math.Sign(svm.Compute(p)));
            for (int i = 0; i < output.Length; i++)
            {
                Assert.AreEqual(Math.Sign(xor[i]), Math.Sign(output[i]));
            }
        }
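The trick in this test is the explicit feature map: a degree-2 polynomial kernel with constant 0 computes k(x, y) = (x · y)^2, which for 2-D inputs corresponds to the map (x1, x2) -> (x1^2, sqrt(2)·x1·x2, x2^2), and in that space XOR becomes linearly separable through the cross term. A standalone sketch of that map (the exact ordering of Accord's Polynomial.Transform output may differ):

        using System;

        static class QuadraticFeatureMap
        {
            // Explicit feature map of k(x, y) = (x . y)^2 for 2-D inputs:
            // phi(x) = (x1^2, sqrt(2)*x1*x2, x2^2), so phi(x) . phi(y) = (x . y)^2.
            public static double[] Transform(double[] x)
            {
                return new[]
                {
                    x[0] * x[0],
                    Math.Sqrt(2) * x[0] * x[1],
                    x[1] * x[1]
                };
            }

            static void Main()
            {
                double[][] inputs =
                {
                    new double[] { -1, -1 },
                    new double[] { -1,  1 },
                    new double[] {  1, -1 },
                    new double[] {  1,  1 }
                };

                // The middle coordinate alone separates XOR: it is +sqrt(2)
                // for the -1-labeled points and -sqrt(2) for the +1-labeled
                // ones, so a linear threshold on it suffices.
                foreach (double[] x in inputs)
                    Console.WriteLine(string.Join(", ", Transform(x)));
            }
        }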
Example No. 3
        public void ComputeGradient()
        {
            Random random = new Random();
            StochasticGradientDescent<char> trainer = new StochasticGradientDescent<char>(this.net)
            {
                LearningRate = 0.0001,
                Momentum     = 0.0,
                BatchSize    = 1,
                L2Decay      = 0.0
            };

            // We only check the gradient at the input volume, but if that is
            // correct it is reassuring, because it depends on the gradients
            // of every layer above it.

            Volume volume = new Volume(new float[] { (float)(random.NextDouble() * 2 - 1), (float)(random.NextDouble() * 2 - 1) });
            int    gti    = (int)Math.Floor(random.NextDouble() * 3);                     // ground-truth class index

            trainer.Learn(Enumerable.Repeat(Tuple.Create(volume, this.classes[gti]), 1)); // computes gradients at all layers, and at x

            Volume gradient = this.net.Layers[0].InputGradient;

            float delta = 0.000001f; // step size for the central difference

            for (int i = 0; i < volume.Length; i++)
            {
                float gradAnalytic = gradient[i];

                float xold = volume[i];
                volume[i] += delta;
                float c0 = this.net.CostLoss(this.net.Compute(volume, false), this.classes[gti]); // cost at x + delta
                volume[i] -= 2 * delta;
                float c1 = this.net.CostLoss(this.net.Compute(volume, false), this.classes[gti]); // cost at x - delta
                volume[i] = xold; // restore the original value

                float gradNumeric = (c0 - c1) / (2 * delta); // central-difference estimate
                float relError    = Math.Abs(gradAnalytic - gradNumeric) / Math.Abs(gradAnalytic + gradNumeric);

                Console.WriteLine("step: {0}, numeric: {1}, analytic: {2}, => relError: {3}", i, gradNumeric, gradAnalytic, relError);
                Assert.IsTrue(relError < 1e-2);
            }
        }
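The loop above is the standard central-difference gradient check: the numeric estimate (f(x + delta) - f(x - delta)) / (2 * delta) is compared to the analytic gradient via the symmetric relative error |ga - gn| / |ga + gn|. A self-contained sketch of the same check against a function whose gradient is known in closed form (all names here are illustrative):

        using System;

        static class GradientCheck
        {
            // Returns the worst symmetric relative error between the analytic
            // gradient and the central-difference estimate over all coordinates.
            public static double WorstRelativeError(
                Func<double[], double> f, double[] x, double[] analytic, double delta = 1e-6)
            {
                double worst = 0;
                for (int i = 0; i < x.Length; i++)
                {
                    double xold = x[i];
                    x[i] = xold + delta;
                    double c0 = f(x);
                    x[i] = xold - delta;
                    double c1 = f(x);
                    x[i] = xold; // restore the original value

                    double numeric = (c0 - c1) / (2 * delta);
                    double rel = Math.Abs(analytic[i] - numeric) / Math.Abs(analytic[i] + numeric);
                    worst = Math.Max(worst, rel);
                }
                return worst;
            }

            static void Main()
            {
                // f(x) = x0^2 + 3*x0*x1 has gradient (2*x0 + 3*x1, 3*x0).
                double[] x = { 0.5, -0.25 };
                double[] analytic = { 2 * x[0] + 3 * x[1], 3 * x[0] };
                double err = WorstRelativeError(v => v[0] * v[0] + 3 * v[0] * v[1], x, analytic);
                Console.WriteLine(err < 1e-2 ? "gradient check passed" : "gradient check FAILED");
            }
        }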