Example #1
0
        public void ActivateNeuron()
        {
            // A neuron wired only to a bias must refuse to activate.
            BiasNeuron biasUnit   = new BiasNeuron();
            double     biasWeight = 0;

            neuron.Connect(biasUnit, biasWeight);

            Assert.Throws(typeof(NotConfiguredException),
                          () => neuron.Activation());

            // Wire three unit-valued inputs, each with weight 1.
            InputNeuron inputA = new InputNeuron();
            InputNeuron inputB = new InputNeuron();
            InputNeuron inputC = new InputNeuron();

            inputA.Input = 1;
            inputB.Input = 1;
            inputC.Input = 1;

            double weightA = 1;
            double weightB = 1;
            double weightC = 1;

            neuron.Connect(inputA, weightA);
            neuron.Connect(inputB, weightB);
            neuron.Connect(inputC, weightC);

            // Expected sigmoid activation: 1 / (1 + e^-t) over the weighted input sum.
            double weightedSum = inputA.Input * weightA
                               + inputB.Input * weightB
                               + inputC.Input * weightC;
            double expectedActivation = 1 / (1 + Math.Pow(Math.E, -weightedSum));

            MyAssert.CloseTo(neuron.Activation(), expectedActivation);
        }
Example #2
0
        public void TestParseLine()
        {
            // One CSV quote line (date,open,high,low,close,volume);
            // the parser is expected to extract the close price.
            const string quoteLine = "4-Oct-12,38.46,38.55,38.19,38.33,9654464";

            double parsed = parser.ParseLine(quoteLine);

            MyAssert.CloseTo(38.33, parsed);
        }
        public void TestTanhDerivative()
        {
            // Checks that analytic derivatives and numeric estimations agree
            // up to one common scale factor across all weights of a tanh network.
            // (Original author flagged this as a "so-so" test.)
            NNetwork n = NNetwork.HyperbolicNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);

            // Fixed seed keeps the test reproducible; the previous unseeded
            // Random made failures impossible to replay. NOTE(review):
            // RandomizeWeights may still use its own randomness — confirm.
            Random random = new Random(12345);
            double x = random.NextDouble();
            double y = random.NextDouble();
            double z = some_function(x, y);

            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();

            double[] ders  = n.Derivatives();
            double[] ests  = n.Estimation(0.0001);
            var      koeff = ests[0] / ders[0];

            // Every estimation/derivative ratio must match the first one.
            for (int i = 0; i < ders.Length; i++)
            {
                MyAssert.CloseTo(ests[i] / ders[i], koeff, 0.00001);
            }
        }
Example #4
0
        public void TanhActivation()
        {
            TanhNeuron tanhUnit   = new TanhNeuron();
            BiasNeuron biasUnit   = new BiasNeuron();
            double     biasWeight = 0;

            tanhUnit.Connect(biasUnit, biasWeight);

            // With only the bias connected the neuron is not fully configured.
            Assert.Throws(typeof(NotConfiguredException),
                          () => tanhUnit.Activation());

            // Wire three unit-valued inputs, each with weight 1.
            InputNeuron inputA = new InputNeuron();
            InputNeuron inputB = new InputNeuron();
            InputNeuron inputC = new InputNeuron();

            inputA.Input = 1;
            inputB.Input = 1;
            inputC.Input = 1;

            double weightA = 1;
            double weightB = 1;
            double weightC = 1;

            tanhUnit.Connect(inputA, weightA);
            tanhUnit.Connect(inputB, weightB);
            tanhUnit.Connect(inputC, weightC);

            // Expected tanh(z) = (e^z - e^-z) / (e^z + e^-z) over the weighted sum.
            double z = inputA.Input * weightA + inputB.Input * weightB + inputC.Input * weightC;
            double expectedActivation = (Math.Exp(z) - Math.Exp(-z)) / (Math.Exp(z) + Math.Exp(-z));

            MyAssert.CloseTo(tanhUnit.Activation(), expectedActivation);
        }
 public void HyperDenormalize()
 {
     // Denormalizing each normalized value must reproduce the original inputs.
     double[] normalized = hyper_normalizer.GetValues();
     int      index      = 0;

     foreach (double value in normalized)
     {
         MyAssert.CloseTo(initial_values[index], hyper_normalizer.Denormalize(value));
         index++;
     }
 }
        public void TestXNOR_Manualy()
        {
            // Hand-built 2-2-1 network computing XNOR with hard-coded weights.
            InputNeuron a1_1   = new InputNeuron();
            InputNeuron a1_2   = new InputNeuron();
            BiasNeuron  bias_1 = new BiasNeuron();
            BiasNeuron  bias_2 = new BiasNeuron();
            Neuron      a2_1   = new Neuron();
            Neuron      a2_2   = new Neuron();
            Neuron      a3_1   = new Neuron();

            // Connection order fixes weight indices: 0 = bias, then inputs.
            a3_1.Connect(bias_2);
            a3_1.Connect(a2_1);
            a3_1.Connect(a2_2);

            a2_1.Connect(bias_1);
            a2_1.Connect(a1_1);
            a2_1.Connect(a1_2);

            a2_2.Connect(bias_1);
            a2_2.Connect(a1_1);
            a2_2.Connect(a1_2);

            // Output layer: OR-like combination of the two hidden units.
            a3_1.SetWeight(0, -10);
            a3_1.SetWeight(1, 20);
            a3_1.SetWeight(2, 20);

            // Hidden unit: fires only when both inputs are 1 (AND-like).
            a2_1.SetWeight(0, -30);
            a2_1.SetWeight(1, 20);
            a2_1.SetWeight(2, 20);

            // Hidden unit: fires only when both inputs are 0 (NOR-like).
            a2_2.SetWeight(0, 10);
            a2_2.SetWeight(1, -20);
            a2_2.SetWeight(2, -20);

            // Full XNOR truth table.
            a1_1.Input = 0;
            a1_2.Input = 0;
            MyAssert.CloseTo(a3_1.Activation(), 1);

            a1_1.Input = 0;
            a1_2.Input = 1;
            MyAssert.CloseTo(a3_1.Activation(), 0);

            a1_1.Input = 1;
            a1_2.Input = 0;
            MyAssert.CloseTo(a3_1.Activation(), 0);

            a1_1.Input = 1;
            a1_2.Input = 1;
            MyAssert.CloseTo(a3_1.Activation(), 1);
        }
        public void TestBackPropWithKnownValues()
        {
            // After one back-prop pass with input (1,1) and answer 0,
            // the layer-2 deltas should be small but strictly non-zero.
            NNetwork network = NetworkTest.XorNetwork();

            network.SetInput(new double[] { 1, 1 });
            network.SetAnswers(new double[] { 0 });
            network.BackPropagate();

            double[] deltas = network.GetDeltasForLayer(2);

            Assert.AreNotEqual(deltas[0], 0);
            Assert.AreNotEqual(deltas[1], 0);
            MyAssert.CloseTo(deltas[0], 0, 0.001);
            MyAssert.CloseTo(deltas[1], 0, 0.001);
        }
        public void TestNetworkSetAnswerAndGetDelta()
        {
            // Output-layer deltas must equal (answer - output) per unit.
            NNetwork network = NNetwork.SigmoidNetwork(new int[] { 2, 3, 2 });

            network.SetInput(new double[] { 0, 0 });

            double[] outputs = network.GetOutput();
            double[] answers = new double[] { 0.1, 0.9 };

            network.SetAnswers(answers);
            network.BackPropagate();

            double[] deltas = network.GetDeltasForLayer(3);
            int      index  = 0;

            foreach (double answer in answers)
            {
                MyAssert.CloseTo(deltas[index], answer - outputs[index]);
                index++;
            }
        }
        public void TestSimplestConnection()
        {
            // 1-1 sigmoid network with all weights 1 and input 1:
            // net input = 1*1 + bias*1 = 2, so output = sigmoid(2).
            NNetwork network = NNetwork.SigmoidNetwork(new int[] { 1, 1 });

            network.SetWeightMatrix(new double[][]
            {
                new double[] { 1, 1 },
                new double[] { 1, 1 }
            });
            network.SetInput(new double[] { 1 });

            double actual   = network.GetOutput()[0];
            double expected = 1 / (1 + Math.Pow(Math.E, -2));

            MyAssert.CloseTo(actual, expected);
        }
        public void HyperNormalize()
        {
            // Every normalized value must lie inside [-1, 1], and the extremes
            // scaled by SAFE_KOEFF should sit close to the interval bounds.
            double[] values  = hyper_normalizer.GetValues();
            double   minimum = values[0];
            double   maximum = values[0];

            foreach (var value in values)
            {
                Assert.GreaterOrEqual(value, -1);
                Assert.LessOrEqual(value, 1);

                minimum = minimum > value ? value : minimum;
                maximum = maximum < value ? value : maximum;
            }

            MyAssert.CloseTo(minimum * DataNormalizer.SAFE_KOEFF, -1, 0.025);
            MyAssert.CloseTo(maximum * DataNormalizer.SAFE_KOEFF, 1, 0.025);
        }
        public void TestCanSetAnswer()
        {
            // A neuron whose answer equals its actual activation must end up
            // with a zero delta after back-propagation.
            Neuron unit = new Neuron();

            unit.Connect(new BiasNeuron(), 1);

            InputNeuron input = new InputNeuron();

            unit.Connect(input, 1);
            unit.SetWeight(0, 1);
            input.Input = 1;

            // Net input is bias*1 + input*1 = 2, so activation = sigmoid(2).
            var desired = 1 / (1 + Math.Pow(Math.E, -2));

            unit.SetAnswer(desired);
            unit.PropagateBackwards();
            MyAssert.CloseTo(unit.GetDelta(), 0);

            /*
             * Back-prop contract:
             * 1. If an answer was set, the neuron computes its delta like an
             *    output-layer unit; otherwise it requires theta and delta.
             * 2. During back-prop each layer computes its values and sets
             *    theta + delta for every connected neuron.
             */
        }
        public void TestDerivative()
        {
            // Analytic derivatives must match numeric estimations elementwise.
            // NOTE(review): original comment said this fails with a square
            // error function — behavior is cost-function dependent.
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);

            // Fixed seed keeps the test reproducible; the previous unseeded
            // Random made failures impossible to replay. NOTE(review):
            // RandomizeWeights may still use its own randomness — confirm.
            Random random = new Random(12345);
            double x = random.NextDouble();
            double y = random.NextDouble();
            double z = some_function(x, y);

            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();

            double[] ders = n.Derivatives();
            double[] ests = n.Estimation(0.0001);

            for (int i = 0; i < ders.Length; i++)
            {
                MyAssert.CloseTo(ders[i], ests[i], 0.0001);
            }
        }
        public void TestXNORAuto()
        {
            // Verify the prebuilt network against the full XNOR truth table.
            NNetwork network = XorNetwork();

            double[][] inputs = new double[][]
            {
                new double[] { 0, 0 },
                new double[] { 0, 1 },
                new double[] { 1, 0 },
                new double[] { 1, 1 }
            };
            double[] expected = new double[] { 1, 0, 0, 1 };

            for (int i = 0; i < inputs.Length; i++)
            {
                network.SetInput(inputs[i]);
                MyAssert.CloseTo(network.GetOutput()[0], expected[i]);
            }
        }
Example #14
0
 public void TestParseFile()
 {
     // The cola data file holds 1272 quotes; the final close price is 29.33.
     double[] parsed = parser.ParseFile(path_cola);

     Assert.AreEqual(1272, parsed.Length);
     MyAssert.CloseTo(29.33, parsed[parsed.Length - 1]);
 }