Code example #1
        public void TestBackPropagationWithoutRegularisation()
        {
            // Cost function wrapper: back-propagation over the packed thetas,
            // with regularisation parameter lambda = 0.
            NeuralNetwork.CostFunctionWithThetaParameter backProp =
                t => NeuralNetwork.BackPropagation(Xm, ym, t, new List<int> { 4 }, 4, 0);

            var bothThetas = NeuralNetwork.PackThetas(new List<Matrix<double>> { theta1NN, theta2NN });

            // The numerical gradient should match the stored reference values.
            var resultNumericalGradient  = NeuralNetwork.ComputeNumericalGradient(backProp, bothThetas);
            var pathNumericalGradients   = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\NumericalGradientForBackPropagationWithoutRegularisation.txt");
            var matrixNumericalGradients = DelimitedReader.Read<double>(pathNumericalGradients);

            Assert.IsTrue(MatricesEqual(resultNumericalGradient, matrixNumericalGradients));

            // The analytical gradient from back-propagation should match both the
            // stored reference values and the numerical gradient.
            var resultBackPropagation = NeuralNetwork.BackPropagation(Xm, ym, bothThetas, new List<int> { 4 }, 4, 0);
            var pathGradientForBackPropagation   = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\GradientForBackPropagationWithoutRegularisation.txt");
            var matrixGradientForBackPropagation = DelimitedReader.Read<double>(pathGradientForBackPropagation);

            Assert.IsTrue(Equalities.DoubleEquals(resultBackPropagation.Item1, 3.08744915815864));
            Assert.IsTrue(MatricesEqual(resultBackPropagation.Item2, matrixGradientForBackPropagation));
            Assert.IsTrue(MatricesEqual(resultBackPropagation.Item2, resultNumericalGradient, 0.000000001));
        }
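
This test (like example #4 below) validates the analytical gradient from back-propagation against a numerical one. ComputeNumericalGradient itself is not shown in these examples; a minimal sketch of a central-difference checker compatible with the call above might look as follows. The method name, step size, and exact signature here are assumptions, not the codebase's actual implementation.

        // Hypothetical sketch of central-difference gradient checking, assuming
        // the delegate returns a (cost, gradient) tuple. The real
        // ComputeNumericalGradient may differ in signature and step size.
        public static Matrix<double> NumericalGradientSketch(
            NeuralNetwork.CostFunctionWithThetaParameter func, Matrix<double> theta)
        {
            const double epsilon = 0.0001;
            Matrix<double> numericalGradient = new DenseMatrix(theta.RowCount, 1);

            for (int i = 0; i < theta.RowCount; i++)
            {
                // Perturb one parameter at a time and difference the cost.
                var thetaPlus  = theta.Clone();
                var thetaMinus = theta.Clone();
                thetaPlus[i, 0]  += epsilon;
                thetaMinus[i, 0] -= epsilon;

                numericalGradient[i, 0] =
                    (func(thetaPlus).Item1 - func(thetaMinus).Item1) / (2 * epsilon);
            }

            return numericalGradient;
        }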
Code example #2
        public static Tuple<Matrix<double>, Matrix<double>> GradientDescent(CostFunctionWithThetaParameter func,
                                                                            Matrix<double> theta, double alpha, int numberIterations)
        {
            Stopwatch stopWatch = new Stopwatch();
            stopWatch.Start();

            // Cost history, one entry per iteration (unused entries stay zero).
            Matrix<double> JHistory = new DenseMatrix(numberIterations, 1);

            for (int i = 0; i < numberIterations; i++)
            {
                var res = func(theta);

                var h    = res.Item1;  // cost at the current theta
                var grad = res.Item2;  // gradient at the current theta
                JHistory[i, 0] = h;

                // "Bold driver" heuristic: if the cost decreased, increase the
                // learning rate by 5%; if it increased, cut the learning rate by 50%.
                // For example, alpha = 1.0 becomes 1.05 after an improving step
                // and 0.5 after a worsening one.
                if (i > 0)
                {
                    if (JHistory[i, 0] < JHistory[i - 1, 0])
                    {
                        alpha += 0.05 * alpha;
                    }
                    else
                    {
                        alpha -= 0.5 * alpha;
                    }
                }

                theta = theta - grad * alpha;

                // Stop early once the cost is still decreasing but the improvement
                // has become negligible (the two values compare as equal).
                if (i > 0 && JHistory[i, 0] < JHistory[i - 1, 0] &&
                    Equalities.DoubleEquals(JHistory[i, 0], JHistory[i - 1, 0]))
                {
                    break;
                }
            }

            stopWatch.Stop();

            // Format and display the elapsed time.
            TimeSpan ts = stopWatch.Elapsed;
            string elapsedTime = String.Format("{0:00}:{1:00}:{2:00}.{3:00}",
                                               ts.Hours, ts.Minutes, ts.Seconds, ts.Milliseconds / 10);
            Console.WriteLine("RunTime " + elapsedTime);

            return Tuple.Create(theta, JHistory);
        }
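
The CostFunctionWithThetaParameter delegate referenced throughout these examples is not itself shown. Judging from its use above (it is invoked with a theta matrix and its result exposes a cost as Item1 and a gradient as Item2), a compatible declaration would be something like:

        // Inferred from usage; the actual declaration in the codebase may differ.
        public delegate Tuple<double, Matrix<double>> CostFunctionWithThetaParameter(Matrix<double> theta);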
Code example #3
        public void TestCostFunction()
        {
            var tupleJAndGrad = NeuralNetwork.CostFunction(theta, X, y);

            // Both the cost and the gradient should match stored reference values.
            var pathJ   = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\J.txt");
            var matrixJ = (DenseMatrix)DelimitedReader.Read<double>(pathJ);

            var pathGrad   = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\Grad.txt");
            var matrixGrad = (DenseMatrix)DelimitedReader.Read<double>(pathGrad);

            Assert.IsTrue(Equalities.DoubleEquals(tupleJAndGrad.Item1, matrixJ[0, 0]));
            Assert.IsTrue(MatricesEqual(tupleJAndGrad.Item2, matrixGrad));
        }
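
The test does not show what CostFunction computes. Assuming the usual unregularised logistic (cross-entropy) cost, J(theta) = (1/m) * sum(-y * log(h) - (1 - y) * log(1 - h)) with gradient (1/m) * X' * (h - y), a sketch consistent with the tuple the test consumes might be:

        // Hypothetical sketch of an unregularised logistic cost; the real
        // CostFunction in this codebase may differ.
        public static Tuple<double, Matrix<double>> CostFunctionSketch(
            Matrix<double> theta, Matrix<double> X, Matrix<double> y)
        {
            int m = X.RowCount;

            // Sigmoid hypothesis h = 1 / (1 + exp(-X * theta)).
            Matrix<double> h = (X * theta).Map(z => 1.0 / (1.0 + Math.Exp(-z)), Zeros.Include);

            // Cross-entropy cost averaged over the m training examples.
            double j = 0.0;
            for (int i = 0; i < m; i++)
            {
                j += -y[i, 0] * Math.Log(h[i, 0])
                     - (1 - y[i, 0]) * Math.Log(1 - h[i, 0]);
            }
            j /= m;

            // Gradient: (1/m) * X' * (h - y).
            Matrix<double> grad = X.Transpose() * (h - y) / m;

            return Tuple.Create(j, grad);
        }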
Code example #4
        public void TestBackPropagationWithRegularisation()
        {
            // Cost function wrapper: back-propagation over the packed thetas,
            // with regularisation parameter lambda = 1.
            NeuralNetwork.CostFunctionWithThetaParameter backProp =
                t => NeuralNetwork.BackPropagation(Xm, ym, t, new List<int> { 4 }, 4, 1);

            var bothThetas = NeuralNetwork.PackThetas(new List<Matrix<double>> { theta1NN, theta2NN });

            // The numerical gradient should match the stored reference values.
            var resultNumericalGradient  = NeuralNetwork.ComputeNumericalGradient(backProp, bothThetas);
            var pathNumericalGradients   = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\NumericalGradients.txt");
            var matrixNumericalGradients = DelimitedReader.Read<double>(pathNumericalGradients);

            Assert.IsTrue(MatricesEqual(resultNumericalGradient, matrixNumericalGradients));

            // The analytical gradient from back-propagation should match both the
            // stored reference values and the numerical gradient.
            var resultBackPropagation = NeuralNetwork.BackPropagation(Xm, ym, bothThetas, new List<int> { 4 }, 4, 1);
            var pathGradientForBackPropagation   = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\GradientForBackPropagation.txt");
            var matrixGradientForBackPropagation = DelimitedReader.Read<double>(pathGradientForBackPropagation);

            Assert.IsTrue(Equalities.DoubleEquals(resultBackPropagation.Item1, 3.46051055642594));
            Assert.IsTrue(MatricesEqual(resultBackPropagation.Item2, matrixGradientForBackPropagation));
            Assert.IsTrue(MatricesEqual(resultBackPropagation.Item2, resultNumericalGradient, 0.000000001));

            // Training with gradient descent should reproduce the stored theta
            // and final cost.
            var resultGradientDescent = NeuralNetwork.GradientDescent(backProp, bothThetas, 1, 3000);
            var pathResultTheta       = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\ThetaAfterGradientDescentForBackProp.txt");
            var matrixResultTheta     = DelimitedReader.Read<double>(pathResultTheta);

            Assert.IsTrue(MatricesEqual(resultGradientDescent.Item1, matrixResultTheta));

            // JHistory is preallocated, so trailing entries after an early break
            // are zero; the last non-zero entry is the final cost.
            var resultCost = resultGradientDescent.Item2.ToRowWiseArray().LastOrDefault(elem => elem != 0.0d);

            Assert.IsTrue(Equalities.DoubleEquals(resultCost, 2.2493405784756875));
        }
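
PackThetas appears throughout these tests but is not shown. Its role is to unroll the per-layer weight matrices into a single column vector so that ComputeNumericalGradient and GradientDescent can treat all parameters uniformly. Below is a sketch of one plausible packing; the actual element order in the codebase may differ.

        // Hypothetical sketch: concatenate all weight matrices, column by
        // column, into one parameter vector. The real PackThetas may use a
        // different ordering.
        public static Matrix<double> PackThetasSketch(List<Matrix<double>> thetas)
        {
            int total = thetas.Sum(t => t.RowCount * t.ColumnCount);
            Matrix<double> packed = new DenseMatrix(total, 1);

            int offset = 0;
            foreach (var theta in thetas)
            {
                foreach (var value in theta.ToColumnWiseArray())
                {
                    packed[offset++, 0] = value;
                }
            }

            return packed;
        }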
Code example #5
        public void TestGradientDescent()
        {
            Matrix<double> theta = new DenseMatrix(2, 1);
            theta[0, 0] = 1;
            theta[1, 0] = 1;

            double alpha         = 0.1;
            int    numIterations = 100;

            var result = NeuralNetwork.GradientDescent(Equation, theta, alpha, numIterations);

            // The minimiser should converge to the zero vector.
            var resultTheta = result.Item1;
            Assert.IsTrue(Equalities.DoubleEquals(resultTheta[0, 0], 0, 0.00000001));
            Assert.IsTrue(Equalities.DoubleEquals(resultTheta[1, 0], 0, 0.00000001));

            // The last non-zero entry of the cost history is the final cost.
            var resultCost = result.Item2.ToRowWiseArray().LastOrDefault(elem => elem != 0.0d);

            Assert.IsTrue(Equalities.DoubleEquals(resultCost, 0));
        }
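
The Equation delegate passed to GradientDescent here is not shown. Any convex cost with its minimum at the zero vector fits the assertions; a hypothetical example is f(theta) = theta' * theta, whose gradient is 2 * theta:

        // Hypothetical cost consistent with the assertions above: the sum of
        // squared components, minimised at the zero vector. Not necessarily
        // the Equation used by the real test fixture.
        private static Tuple<double, Matrix<double>> Equation(Matrix<double> theta)
        {
            double cost = Math.Pow(theta.FrobeniusNorm(), 2);  // theta' * theta
            Matrix<double> grad = 2.0 * theta;                 // gradient of theta' * theta
            return Tuple.Create(cost, grad);
        }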