public void TestTanhDerivative()
        {
            // SO-SO test =( — does not check the derivatives directly; it only
            // checks that the numeric estimate and the analytic derivative differ
            // by a factor that is CONSTANT across all weights.
            NNetwork network = NNetwork.HyperbolicNetwork(new int[] { 2, 2, 1 });
            network.RandomizeWeights(-1, 10);

            // Train on a single random sample of some_function.
            Random random = new Random();
            double x = random.NextDouble();
            double y = random.NextDouble();
            double z = some_function(x, y);

            network.SetInput(new double[] { x, y });
            network.SetAnswers(new double[] { z });
            network.BackPropagate();

            double[] analytic = network.Derivatives();
            double[] numeric = network.Estimation(0.0001);

            // Ratio taken from the first pair; every other pair must match it.
            double expectedRatio = numeric[0] / analytic[0];
            for (int i = 0; i < analytic.Length; i++)
            {
                MyAssert.CloseTo(numeric[i] / analytic[i], expectedRatio, 0.00001);
            }
        }
        public void TestBackPropWithKnownValues()
        {
            // Backpropagating the worst-case XOR sample (1,1 -> expected 0)
            // through a pre-built XOR network must produce hidden-layer deltas
            // that are non-zero (learning signal exists) yet small in magnitude.
            NNetwork n = NetworkTest.XorNetwork();

            n.SetInput(new double[] { 1, 1 });
            n.SetAnswers(new double[] { 0 });
            n.BackPropagate();

            double[] deltas = n.GetDeltasForLayer(2);
            // Assert.AreNotEqual takes (notExpected, actual): the value 0 is what
            // we do NOT expect, deltas[i] is the actual value under test.
            Assert.AreNotEqual(0, deltas[0]);
            Assert.AreNotEqual(0, deltas[1]);
            // ...but the deltas should still be close to zero (|delta| < 0.001).
            MyAssert.CloseTo(deltas[0], 0, 0.001);
            MyAssert.CloseTo(deltas[1], 0, 0.001);
        }
        public void CanApplyTrainingForWholeNetwork()
        {
            // After one backprop pass, applying the accumulated training must
            // change the network's output for the same (implicit) input.
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 1, 2, 2, 1 });

            n.SetInput(new double[] { 0.3 });
            n.SetAnswers(new double[] { 0.8 });
            n.BackPropagate();
            double[] outputBefore = n.GetOutput();

            n.ApplyTraining();
            double[] outputAfter = n.GetOutput();

            // NOTE(review): if GetOutput returns a fresh array each call and the
            // test framework compares arrays by reference (MSTest does), this
            // assert passes vacuously — confirm framework semantics or compare
            // element-wise.
            Assert.AreNotEqual(outputAfter, outputBefore);
        }
        public void TestNetworkSetAnswerAndGetDelta()
        {
            // For the output layer, each delta should equal (answer - output)
            // within the default tolerance of MyAssert.CloseTo.
            NNetwork network = NNetwork.SigmoidNetwork(new int[] { 2, 3, 2 });
            network.SetInput(new double[] { 0, 0 });

            double[] actualOutputs = network.GetOutput();
            double[] expectedAnswers = new double[] { 0.1, 0.9 };
            network.SetAnswers(expectedAnswers);
            network.BackPropagate();

            double[] outputDeltas = network.GetDeltasForLayer(3);
            for (int i = 0; i < expectedAnswers.Length; i++)
            {
                double expectedDelta = expectedAnswers[i] - actualOutputs[i];
                MyAssert.CloseTo(outputDeltas[i], expectedDelta);
            }
        }
        // Renamed from TestTanhDerivative: a method with the same name and an
        // identical (empty) parameter list already exists in this class, which
        // is a compile error (CS0111). This variant only prints diagnostics via
        // Show() and asserts nothing, so it is a debugging aid, not a test.
        public static void TestTanhDerivativeShow()
        {
            NNetwork n = NNetwork.HyperbolicNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);
            Random random = new Random();
            double x = random.NextDouble();
            double y = random.NextDouble();
            double z = some_function(x, y);

            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();

            double[] ders = n.Derivatives();
            double[] ests = n.Estimation(0.0001);
            for (int i = 0; i < ders.Length; i++)
            {
                // Print analytic derivative, numeric estimate, and their ratio.
                Show(new[] { ders[i], ests[i], ests[i] / ders[i] });
            }
        }
        public void TestDerivative()
        {
            // Fails with square error function — analytic derivatives are
            // checked directly against numeric estimates here, with no scale
            // factor allowed (contrast with TestTanhDerivative).
            NNetwork network = NNetwork.SigmoidNetwork(new int[] { 2, 2, 1 });
            network.RandomizeWeights(-1, 10);

            Random random = new Random();
            double x = random.NextDouble();
            double y = random.NextDouble();
            double z = some_function(x, y);

            network.SetInput(new double[] { x, y });
            network.SetAnswers(new double[] { z });
            network.BackPropagate();

            double[] analytic = network.Derivatives();
            double[] numeric = network.Estimation(0.0001);
            for (int i = 0; i < analytic.Length; i++)
            {
                // Each analytic derivative must match its numeric estimate.
                MyAssert.CloseTo(analytic[i], numeric[i], 0.0001);
            }
        }