Example #1
        public void IfInputNeuronsWereChangedThanBeforeCalculatingOutputInvalidateCache()
        {
            NNetwork n = XorNetwork();

            n.CacheEnabled = true;
            n.SetInput(new double[] { 0, 0 });
            var first = n.GetOutput();

            n.SetInput(new double[] { 1, 0 });
            var second = n.GetOutput();

            Assert.AreNotEqual(first, second);
        }
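
The invalidation mechanism itself is not shown in these examples. As a rough idea, the following is a minimal sketch assuming a hypothetical dirty-flag approach (not the NNetwork source): SetInput marks any cached activations stale, and GetOutput only reuses them while the flag is clean.

        // Hypothetical sketch, not NNetwork code: cache invalidation via a dirty flag.
        public class CachedForwardPassSketch
        {
            private double[] _input = new double[0];
            private double[] _cachedOutput;
            private bool _dirty = true;

            public bool CacheEnabled { get; set; }

            public void SetInput(double[] input)
            {
                _input = input;
                _dirty = true;                      // any new input invalidates the cache
            }

            public double[] GetOutput()
            {
                if (CacheEnabled && !_dirty && _cachedOutput != null)
                    return _cachedOutput;           // reuse the last forward pass
                _cachedOutput = Forward(_input);
                _dirty = false;
                return _cachedOutput;
            }

            private double[] Forward(double[] input)
            {
                // stand-in for the real forward pass
                return new double[] { input.Length > 0 ? input[0] : 0.0 };
            }
        }
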
Example #2
        public static void TestTanhLearningOnSinus()
        {
            NNetwork network = NNetwork.HyperbolicNetwork(new int[] { 1, 2, 1 });

            network.RandomizeWeights(1, 2);
            NetworkTrainer trainer = new NetworkTrainer(network);

            double[][] inputs  = SinusTrainSet()[0];
            double[][] outputs = SinusTrainSet()[1];
            double     error   = 1;
            double     delta   = 1;
            int        j       = 0;

            // keep iterating while the error is above 0.01 and still improving;
            // "|| j == 1" forces at least a second pass through the loop
            for (; error > 0.01 && !(delta <= 0.000001) || j == 1; j++)
            {
                trainer.TrainClassification(inputs, outputs);
                double new_cost = trainer.GetError();
                delta = error - new_cost;
                error = new_cost;
            }
            double[][] input_test  = SinusTrainSet(20)[0];
            double[][] output_test = SinusTrainSet(20)[1];
            trainer.IsLearning = false;
            trainer.TrainClassification(input_test, output_test);
            error = trainer.GetError();
            Console.Out.WriteLine(error);
            for (int i = 0; i < input_test.Length; i++)
            {
                network.SetInput(input_test[i]);
                Show(new [] { input_test[i][0], network.GetOutput()[0], Math.Sin(input_test[i][0]) });
            }
        }
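
SinusTrainSet() is not listed on this page. Judging from how it is called ([0] for inputs, [1] for targets, with an optional sample count), it probably returns two jagged arrays sampled from sin(x). A hypothetical stand-in, with the range and scaling guessed rather than taken from the project:

        // Hypothetical SinusTrainSet-style generator (assumption, not the real helper):
        // index 0 holds the inputs, index 1 the matching sin(x) targets.
        public static double[][][] SinusTrainSetSketch(int count = 10)
        {
            double[][] inputs  = new double[count][];
            double[][] targets = new double[count][];
            Random rnd = new Random();
            for (int i = 0; i < count; i++)
            {
                double x = rnd.NextDouble();                 // sample point in [0, 1)
                inputs[i]  = new double[] { x };
                targets[i] = new double[] { Math.Sin(x) };
            }
            return new double[][][] { inputs, targets };
        }
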
Example #3
        public void TestTanhDerivative()
        {
            // So-so test =( : it only checks that the finite-difference estimates
            // and the analytic derivatives agree up to a constant factor.
            NNetwork n = NNetwork.HyperbolicNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);
            Random random = new Random();
            double x;
            double y;
            double z;

            x = random.NextDouble();
            y = random.NextDouble();
            z = some_function(x, y);
            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();
            double[] ders  = n.Derivatives();
            double[] ests  = n.Estimation(0.0001);
            var      koeff = ests[0] / ders[0];

            for (int i = 0; i < ders.Length; i++)
            {
                MyAssert.CloseTo(ests[i] / ders[i], koeff, 0.00001);
            }
        }
Example #4
        public void TestInputEqualsOutput()
        {
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 1 });

            n.SetWeightMatrix(new double[][]
            {
                new double[] { 1, 1 }
            });
            n.SetInput(new double[] { 9 });
            Assert.AreEqual(n.GetOutput()[0], 9);
        }
Example #5
        public void TestBackPropWithKnownValues()
        {
            NNetwork n = NetworkTest.XorNetwork();

            n.SetInput(new double[] { 1, 1 });
            n.SetAnswers(new double[] { 0 });
            n.BackPropagate();
            double[] deltas = n.GetDeltasForLayer(2);
            Assert.AreNotEqual(deltas[0], 0);
            Assert.AreNotEqual(deltas[1], 0);
            MyAssert.CloseTo(deltas[0], 0, 0.001);
            MyAssert.CloseTo(deltas[1], 0, 0.001);
        }
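
A note on why the hidden-layer deltas are expected to be non-zero yet within 0.001 of zero: assuming XorNetwork() is built from large, saturating weights (the near-exact 0/1 outputs in the XNOR test below point that way), a saturated sigmoid has an almost flat derivative, so the backpropagated deltas at the hidden layer come out tiny. A quick check of the order of magnitude:

        // At a net input of 10 the sigmoid is saturated and its derivative s*(1-s)
        // is only about 4.5e-5, which is why the hidden deltas sit so close to zero.
        double s = 1.0 / (1.0 + Math.Exp(-10));
        Console.Out.WriteLine(s * (1 - s));                  // ~4.54e-05
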
Example #6
        public void ActivateCachingAcrossAllNetwork()
        {
            NNetwork n = XorNetwork();

            n.CacheEnabled = false;
            n.SetInput(new double[] { 0, 0 });
            long without_cache = MyAssert.MeasureMethod(() => n.GetOutput(), 400);

            n.CacheEnabled = true;
            long with_cache = MyAssert.MeasureMethod(() => n.GetOutput(), 400);

            // cast to double so the speed-up ratio isn't truncated by integer division
            Assert.Greater((double)without_cache / with_cache, 1.9);
        }
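
MyAssert.MeasureMethod is another helper that does not appear on this page. Given that it takes an action plus an iteration count and returns a long, a Stopwatch-based version along these lines would fit; treat it as a guess rather than the real implementation:

        // Hypothetical MeasureMethod-style helper: run `action` `iterations` times
        // and return the elapsed milliseconds (uses System.Diagnostics.Stopwatch).
        public static long MeasureMethodSketch(Action action, int iterations)
        {
            var watch = System.Diagnostics.Stopwatch.StartNew();
            for (int i = 0; i < iterations; i++)
            {
                action();
            }
            watch.Stop();
            return watch.ElapsedMilliseconds;
        }
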
Example #7
        public void CanApplyTrainingForWholeNetwork()
        {
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 1, 2, 2, 1 });

            n.SetInput(new double[] { 0.3 });
            n.SetAnswers(new double[] { 0.8 });
            n.BackPropagate();
            var output_before = n.GetOutput();

            n.ApplyTraining();
            var output_after = n.GetOutput();

            Assert.AreNotEqual(output_after, output_before);
        }
Example #8
        public void TestNetworkSetAnswerAndGetDelta()
        {
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 2, 3, 2 });

            n.SetInput(new double[] { 0, 0 });
            double[] outputs = n.GetOutput();
            double[] answers = new double[] { 0.1, 0.9 };
            n.SetAnswers(answers);
            n.BackPropagate();
            double[] deltas = n.GetDeltasForLayer(3);
            for (int i = 0; i < answers.Length; i++)
            {
                MyAssert.CloseTo(deltas[i], answers[i] - outputs[i]);
            }
        }
Example #9
        public void TestXNORAuto()
        {
            NNetwork n = XorNetwork();

            double[] input = new double[] { 0, 0 };
            n.SetInput(input);
            double[] output = n.GetOutput();
            MyAssert.CloseTo(output[0], 1);

            input = new double[] { 0, 1 };
            n.SetInput(input);
            output = n.GetOutput();
            MyAssert.CloseTo(output[0], 0);

            input = new double[] { 1, 0 };
            n.SetInput(input);
            output = n.GetOutput();
            MyAssert.CloseTo(output[0], 0);

            input = new double[] { 1, 1 };
            n.SetInput(input);
            output = n.GetOutput();
            MyAssert.CloseTo(output[0], 1);
        }
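
The XorNetwork() factory used throughout these tests is not shown here, but the truth table asserted above (1 for equal inputs, 0 for different ones) is XNOR, which a 2-2-1 sigmoid network can encode with the textbook hand-picked AND / NOR / OR weights. A standalone illustration of that arithmetic, independent of the NNetwork API and not necessarily the weights the real factory uses:

        // Standalone sketch: why a 2-2-1 sigmoid network can reproduce the XNOR
        // outputs asserted above. The weights are the classic AND/NOR/OR choice.
        public static double XnorSketch(double x1, double x2)
        {
            Func<double, double> sigmoid = v => 1.0 / (1.0 + Math.Exp(-v));

            double and = sigmoid(-30 + 20 * x1 + 20 * x2);   // ~1 only for (1,1)
            double nor = sigmoid( 10 - 20 * x1 - 20 * x2);   // ~1 only for (0,0)
            return sigmoid(-10 + 20 * and + 20 * nor);       // OR of the two => XNOR
        }
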
Example #10
        public void TestSimplestConnection()
        {
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 1, 1 });

            n.SetWeightMatrix(new double[][]
            {
                new double[] { 1, 1 },
                new double[] { 1, 1 }
            });
            n.SetInput(new double[] { 1 });
            var output  = n.GetOutput()[0];
            var desired = 1 / (1 + Math.Pow(Math.E, -2));

            MyAssert.CloseTo(output, desired);
        }
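
For reference, the desired value works out to roughly 0.8808, assuming the net input of 2 comes from the unit weight on the single input plus a unit bias:

        // Quick numeric check of the expected output used above.
        double desired = 1 / (1 + Math.Pow(Math.E, -2));
        Console.Out.WriteLine(desired);                      // 0.88079707...
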
Example #11
        public static void TestTanhDerivative()
        {
            NNetwork n = NNetwork.HyperbolicNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);
            Random random = new Random();
            double x;
            double y;
            double z;

            x = random.NextDouble();
            y = random.NextDouble();
            z = some_function(x, y);
            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();
            double[] ders = n.Derivatives();
            double[] ests = n.Estimation(0.0001);
            for (int i = 0; i < ders.Length; i++)
            {
                Show(new[] { ders[i], ests[i], ests[i] / ders[i] });
            }
        }
Example #12
//        public static void Main()
//        {
//            TrainPrediction();
////            Sinus();
////            TestTanhLearningOnSinus();
////            TestTanhDerivative();
//
//        }

        public static void TrainPrediction()
        {
            NNetwork network = NNetwork.SigmoidNetwork(new int[] { 5, 1 });

            network.RandomizeWeights(-1, 20);
            NetworkTrainer trainer = new NetworkTrainer(network);
            List <double>  tr      = new List <double>();

            for (double i = 0; i <= 1; i = i + 0.05)
            {
                tr.Add(i);
            }
            double[] train_set = tr.ToArray(); // new double[] { 0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1 };
            double   error     = 1;
            double   delta     = 1;
            int      j         = 0;

            // train until the error is low enough or stops improving;
            // "|| j == 1" forces at least a second pass
            for (; error > 0.01 && !(delta <= 0.00001) || j == 1; j++)
            {
                trainer.TrainPrediction(train_set, 0.0001, 0.2);
                double new_cost = trainer.GetError();
                delta = error - new_cost;
                error = new_cost;
            }
            Console.Out.WriteLine(j + ": " + error);
            for (double i = 0; i <= 0.5; i = i + 0.05)
            {
                network.SetInput(new double[] { i + 0.0, i + 0.1, i + 0.2, i + 0.3, i + 0.4 });
                Show(new double[]
                {
                    i + 0.5,
                    network.GetOutput()[0],
//                        network.GetOutput()[1]
                });
            }
        }
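
TrainPrediction's internals are not shown, but the verification loop above feeds five consecutive samples (i .. i + 0.4) and expects the next one (i + 0.5), which suggests a 5-in / 1-out sliding window over the flat series. A hypothetical sketch of that slicing, as a guess at the data shape rather than the trainer's actual code:

        // Hypothetical sliding-window split of the series: five inputs predict the
        // following value, mirroring the check above.
        public static void SlidingWindowsSketch(double[] series)
        {
            for (int start = 0; start + 5 < series.Length; start++)
            {
                double[] window = new double[5];
                Array.Copy(series, start, window, 0, 5);
                double target = series[start + 5];           // value to be predicted
                Console.Out.WriteLine(string.Join(", ", window) + " -> " + target);
            }
        }
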
Example #13
        public void TestDerivative()
        {
            // Fails with a squared-error cost function
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);
            Random random = new Random();
            double x;
            double y;
            double z;

            x = random.NextDouble();
            y = random.NextDouble();
            z = some_function(x, y);
            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();
            double[] ders = n.Derivatives();
            double[] ests = n.Estimation(0.0001);
            for (int i = 0; i < ders.Length; i++)
            {
                MyAssert.CloseTo(ders[i], ests[i], 0.0001);
            }
        }
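
Derivatives() against Estimation(0.0001) is a standard finite-difference gradient check. The general idea, shown here on a plain function rather than on the network, is to compare the analytic derivative with a central difference (f(w+eps) - f(w-eps)) / (2*eps):

        // Generic gradient-check sketch (not NNetwork code).
        public static void GradientCheckSketch()
        {
            Func<double, double> f  = w => Math.Sin(w) * w;                 // sample function
            Func<double, double> df = w => Math.Sin(w) + w * Math.Cos(w);   // its analytic derivative

            double eps = 0.0001;
            double w0  = 0.7;
            double estimate = (f(w0 + eps) - f(w0 - eps)) / (2 * eps);
            Console.Out.WriteLine(df(w0) + " vs " + estimate);              // should agree to ~1e-8
        }
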