Example #1
        public void TestBackPropagationWithoutRegularisation()
        {
            // Cost function wrapper: back propagation with one hidden layer of 4 units,
            // 4 output labels, and lambda = 0 (no regularisation).
            NeuralNetwork.CostFunctionWithThetaParameter backProp = t =>
            {
                return NeuralNetwork.BackPropagation(Xm, ym, t, new List<int> { 4 }, 4, 0);
            };

            var bothThetas = NeuralNetwork.PackThetas(new List<Matrix<double>> { theta1NN, theta2NN });

            // The numerical gradient must match the saved reference values.
            var resultNumericalGradient  = NeuralNetwork.ComputeNumericalGradient(backProp, bothThetas);
            var pathNumericalGradients   = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\NumericalGradientForBackPropagationWithoutRegularisation.txt");
            var matrixNumericalGradients = DelimitedReader.Read<double>(pathNumericalGradients);

            Assert.IsTrue(MatricesEqual(resultNumericalGradient, matrixNumericalGradients));

            var resultBackPropagation = NeuralNetwork.BackPropagation(Xm, ym, bothThetas, new List<int> { 4 }, 4, 0);
            var pathGradientForBackPropagation   = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\GradientForBackPropagationWithoutRegularisation.txt");
            var matrixGradientForBackPropagation = DelimitedReader.Read<double>(pathGradientForBackPropagation);

            // The cost and analytical gradient must match the saved reference values,
            // and the analytical gradient must agree with the numerical one.
            Assert.IsTrue(Equalities.DoubleEquals(resultBackPropagation.Item1, 3.08744915815864));
            Assert.IsTrue(MatricesEqual(resultBackPropagation.Item2, matrixGradientForBackPropagation));
            Assert.IsTrue(MatricesEqual(resultBackPropagation.Item2, resultNumericalGradient, 0.000000001));
        }
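The tests call a `MatricesEqual` helper that the snippets do not include. A minimal sketch of such an element-wise comparison, assuming MathNet.Numerics matrices (the name matches the calls above, but the body and the default tolerance are assumptions, not the repository's actual code):

        // Sketch of the undefined MatricesEqual helper used in the assertions above:
        // element-wise comparison of two matrices within a tolerance.
        // The default epsilon is an assumption, not the repository's actual value.
        private static bool MatricesEqual(Matrix<double> a, Matrix<double> b, double epsilon = 1e-6)
        {
            if (a.RowCount != b.RowCount || a.ColumnCount != b.ColumnCount)
            {
                return false;
            }

            for (var i = 0; i < a.RowCount; i++)
            {
                for (var j = 0; j < a.ColumnCount; j++)
                {
                    if (Math.Abs(a[i, j] - b[i, j]) > epsilon)
                    {
                        return false;
                    }
                }
            }

            return true;
        }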
Example #2
        static void Main(string[] args)
        {
            Utils.SetNumberDecimalSeparatorToDotInCultureSettings();

            var x_data = DelimitedReader.Read<double>(Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\x_data.txt"));
            var y_data = DelimitedReader.Read<double>(Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\y_data.txt"));

            // Cost function wrapper: back propagation with one hidden layer of 25 units,
            // 10 output labels, and lambda = 0.
            NeuralNetwork.CostFunctionWithThetaParameter backProp = tt =>
            {
                return NeuralNetwork.BackPropagation(x_data, y_data, tt, new List<int> { 25 }, 10, 0);
            };

            // Randomly initialise weights for 400 input features, one hidden layer
            // of 25 units and 10 output labels, then pack them into a single matrix.
            var thetas = NeuralNetwork.RandomInitialiseWeights(400, 10, new List<int> { 25 });
            var theta  = NeuralNetwork.PackThetas(thetas);

            // Train with gradient descent: learning rate 1, 300 iterations.
            var resultGradientDescent = NeuralNetwork.GradientDescent(backProp, theta, 1, 300);

            DelimitedWriter.Write("..\\..\\TestData\\ThetaGradient.txt", resultGradientDescent.Item1, "thetaGradient");
            DelimitedWriter.Write("..\\..\\TestData\\JHistory.txt", resultGradientDescent.Item2, "JHistory");

            var resultTheta = resultGradientDescent.Item1;

            //var resultComputeNumericalGradient = NeuralNetwork.ComputeNumericalGradient(backProp, resultTheta);
            //resultComputeNumericalGradient.Save("D:/test/numericalGradients.txt");

            var result5 = NeuralNetwork.BackPropagation(x_data, y_data, resultTheta, new List<int> { 25 }, 10, 0);
            var JJ      = result5.Item1; // final cost after training (not used further here)

            DelimitedWriter.Write("..\\..\\TestData\\GradientForBackPropagation.txt", result5.Item2, "gradBackPropagation");

            var resultThetaLoadedFromFile = (DenseMatrix)DelimitedReader.Read<double>("..\\..\\TestData\\ThetaGradient.txt");
            var thetasList = NeuralNetwork.UnpackThetas(resultThetaLoadedFromFile, 400, new List<int> { 25 }, 10);

            var result = NeuralNetwork.GetPredictions(x_data, y_data, thetasList);

            Console.WriteLine(result.Item1);

            DelimitedWriter.Write("..\\..\\TestData\\Predictions.txt", result.Item2, "");
        }
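The commented-out `ComputeNumericalGradient` call above performs a numerical gradient check. A minimal sketch of the standard central-difference technique it presumably uses (the method body, the epsilon value, and the delegate's `Tuple<double, Matrix<double>>` return type are assumptions inferred from how the tests consume the results):

        // Sketch: approximate dJ/dTheta[i,j] by perturbing each parameter by +/- epsilon
        // and taking the central difference of the cost. This needs two cost evaluations
        // per parameter, so it is only practical as a spot check on small networks.
        public static Matrix<double> ComputeNumericalGradientSketch(
            NeuralNetwork.CostFunctionWithThetaParameter costFunction,
            Matrix<double> theta,
            double epsilon = 1e-4)
        {
            var gradient = Matrix<double>.Build.Dense(theta.RowCount, theta.ColumnCount);

            for (var i = 0; i < theta.RowCount; i++)
            {
                for (var j = 0; j < theta.ColumnCount; j++)
                {
                    var thetaPlus  = theta.Clone();
                    var thetaMinus = theta.Clone();
                    thetaPlus[i, j]  += epsilon;
                    thetaMinus[i, j] -= epsilon;

                    // Item1 of the cost function's result is the cost J.
                    gradient[i, j] = (costFunction(thetaPlus).Item1
                                      - costFunction(thetaMinus).Item1) / (2 * epsilon);
                }
            }

            return gradient;
        }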
Example #3
        public void TestBackPropagationWithRegularisation()
        {
            // Cost function wrapper: back propagation with one hidden layer of 4 units,
            // 4 output labels, and lambda = 1 (with regularisation).
            NeuralNetwork.CostFunctionWithThetaParameter backProp = t =>
            {
                return NeuralNetwork.BackPropagation(Xm, ym, t, new List<int> { 4 }, 4, 1);
            };

            var bothThetas = NeuralNetwork.PackThetas(new List<Matrix<double>> { theta1NN, theta2NN });

            var resultNumericalGradient  = NeuralNetwork.ComputeNumericalGradient(backProp, bothThetas);
            var pathNumericalGradients   = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\NumericalGradients.txt");
            var matrixNumericalGradients = DelimitedReader.Read<double>(pathNumericalGradients);

            Assert.IsTrue(MatricesEqual(resultNumericalGradient, matrixNumericalGradients));

            var resultBackPropagation = NeuralNetwork.BackPropagation(Xm, ym, bothThetas, new List<int> { 4 }, 4, 1);
            var pathGradientForBackPropagation   = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\GradientForBackPropagation.txt");
            var matrixGradientForBackPropagation = DelimitedReader.Read<double>(pathGradientForBackPropagation);

            Assert.IsTrue(Equalities.DoubleEquals(resultBackPropagation.Item1, 3.46051055642594));
            Assert.IsTrue(MatricesEqual(resultBackPropagation.Item2, matrixGradientForBackPropagation));
            Assert.IsTrue(MatricesEqual(resultBackPropagation.Item2, resultNumericalGradient, 0.000000001));

            // Run gradient descent (learning rate 1, 3000 iterations) and compare the
            // resulting theta against saved reference values.
            var resultGradientDescent = NeuralNetwork.GradientDescent(backProp, bothThetas, 1, 3000);
            var pathResultTheta       = Path.Combine(Utils.GetAssemblyPath(), "..\\..\\TestData\\ThetaAfterGradientDescentForBackProp.txt");
            var matrixResultTheta     = DelimitedReader.Read<double>(pathResultTheta);

            Assert.IsTrue(MatricesEqual(resultGradientDescent.Item1, matrixResultTheta));

            // The final cost is the last non-zero entry of the cost history.
            var resultCost = resultGradientDescent.Item2.ToRowWiseArray().LastOrDefault(elem => elem != 0.0d);

            Assert.IsTrue(Equalities.DoubleEquals(resultCost, 2.2493405784756875));
        }
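All three examples wrap `NeuralNetwork.BackPropagation` in a `CostFunctionWithThetaParameter` delegate so that `ComputeNumericalGradient` and `GradientDescent` can evaluate the network at arbitrary theta values. Judging from how `Item1` (cost) and `Item2` (gradient) are consumed above, the delegate is presumably declared along these lines (a sketch, not the verified declaration):

        // Assumed shape: maps a packed (unrolled) theta matrix to a tuple of
        // (cost J, gradient matrix with the same shape as theta).
        public delegate Tuple<double, Matrix<double>> CostFunctionWithThetaParameter(Matrix<double> theta);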