        public void TestAccumulatesWeightShift()
        {
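            // Minimal chain: input -> n21 -> n31, with a bias feeding each non-input neuron.
            // For n31, connection index 0 is the bias and index 1 is n21, so ws[1] is the
            // accumulated weight shift for the n21 connection: zero before back-propagation,
            // non-zero after it, and zero again once ApplyTraining consumes it.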
            Neuron n31 = new Neuron();

            BiasNeuron bias2 = new BiasNeuron();
            Neuron n21 = new Neuron();
            n31.Connect(bias2);
            n31.Connect(n21);

            InputNeuron input = new InputNeuron();
            BiasNeuron bias1 = new BiasNeuron();
            n21.Connect(bias1);
            n21.Connect(input);

            input.Input = 1;
            n31.SetAnswer(0.9);

            double[] ws = n31.GetWeightShifts();
            double acc = ws[1];
            Assert.AreEqual(0, acc);

            n31.PropagateBackwards();
            ws = n31.GetWeightShifts();
            acc = ws[1];
            Assert.AreNotEqual(0, acc);

            n31.ApplyTraining(0, 1);
            ws = n31.GetWeightShifts();
            acc = ws[1];
            Assert.AreEqual(0, acc);
        }
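        // A hedged sketch (not part of the original tests): one way the same API could
        // drive several training iterations. Every member used here (Input, SetAnswer,
        // PropagateBackwards, ApplyTraining) appears in the test above; the two
        // ApplyTraining arguments are copied from the ApplyTraining(0, 1) call and their
        // exact meaning is assumed, not confirmed by the source.
        private void TrainRepeatedly(Neuron output, InputNeuron input)
        {
            for (int i = 0; i < 100; i++)
            {
                input.Input = 1;              // present the training example
                output.SetAnswer(0.9);        // desired output for this example
                output.PropagateBackwards();  // accumulate weight shifts
                output.ApplyTraining(0, 1);   // consume the accumulator and update weights
            }
        }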
        public void ActivateNeuron()
        {
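            // Before any input neurons are connected, Activation() is expected to throw
            // NotConfiguredException; once three inputs of 1.0 with unit weights are
            // attached, it should return the logistic sigmoid of the weighted input sum.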
            BiasNeuron bias = new BiasNeuron();
            double w0 = 0;
            neuron.Connect(bias, w0);

            Assert.Throws<NotConfiguredException>(() => neuron.Activation());
            InputNeuron i1 = new InputNeuron();
            InputNeuron i2 = new InputNeuron();
            InputNeuron i3 = new InputNeuron();

            i1.Input = 1;
            i2.Input = 1;
            i3.Input = 1;

            double w1 = 1;
            double w2 = 1;
            double w3 = 1;

            neuron.Connect(i1, w1);
            neuron.Connect(i2, w2);
            neuron.Connect(i3, w3);
            double tx = i1.Input * w1 + i2.Input * w2 + i3.Input * w3;
            double expected_activation = 1.0 / (1.0 + Math.Exp(-tx));
            MyAssert.CloseTo(neuron.Activation(), expected_activation);
        }
        public void LastLayerGivesDeltasAndWeightsToTheOneBefore()
        {
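            // 1-2-2 network: one input feeds hidden neurons n21/n22, which feed output
            // neurons n31/n32. The hidden neuron's delta should equal the sum of each
            // outgoing weight times the delta of the output neuron it connects to.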
            Neuron n31 = new Neuron();
            Neuron n32 = new Neuron();

            BiasNeuron bias2 = new BiasNeuron();
            Neuron n21 = new Neuron();
            Neuron n22 = new Neuron();
            n31.Connect(bias2);
            n31.Connect(n21);
            n31.Connect(n22);
            n32.Connect(bias2);
            n32.Connect(n21);
            n32.Connect(n22);

            InputNeuron input = new InputNeuron();
            BiasNeuron bias1 = new BiasNeuron();
            n21.Connect(bias1);
            n21.Connect(input);
            n22.Connect(bias1);
            n22.Connect(input);

            input.Input = 1;
            n31.SetAnswer(0.9);
            n32.SetAnswer(0.1);
            n31.PropagateBackwards();
            n32.PropagateBackwards();
            double delta31 = n31.GetDelta();
            double delta32 = n32.GetDelta();
            double n21_n31 = n31.Weights[1];
            double n21_n32 = n32.Weights[1];
            n21.PropagateBackwards();
            double desired_delta_for_n21 = n21_n31*delta31 + n21_n32*delta32;
            Assert.AreEqual(desired_delta_for_n21, n21.GetDelta());
        }
        // Builds one layer: slot 0 is always a bias neuron, layer 0 holds input neurons,
        // the layer at index LayerCount - 2 uses CreateNeuron(), and the remaining hidden
        // layers use TanhNeuron or Neuron depending on is_combined.
        private void ConstructLayer(int[] neurons_in_layers_without_bias, int layer)
        {
            for (int i = 0; i < neurons_in_layers_without_bias[layer] + 1; i++)
            {
                INeuron nn;
                if (i == 0)
                {
                    nn = new BiasNeuron();
                }
                else
                {
                    if (layer == 0)
                    {
                        nn = new InputNeuron();
                    }
                    else if (layer == LayerCount - 2)
                    {
                        nn = CreateNeuron();
                    }
                    else
                    {
                        nn = is_combined ? new TanhNeuron() : new Neuron();
                    }
                }
                neurons[layer][i] = nn;
                ConnectNeuronToLayer(nn, layer);
            }
        }
        private void ConstructOutputLayer(int[] neurons_in_layers_without_bias)
        {
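            // The output layer is the last entry of neurons_in_layers_without_bias.
            // Slot 0 is still a bias neuron; the remaining slots are filled by
            // CreateNeuron() (or by InputNeuron in the degenerate single-layer case).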
            int layer = neurons_in_layers_without_bias.Length - 1;

            neurons[layer][0] = new BiasNeuron();
            for (int i = 1; i < neurons_in_layers_without_bias[layer] + 1; i++)
            {
                if (layer == 0)
                {
                    neurons[layer][i] = new InputNeuron();
                }
                else
                {
                    neurons[layer][i] = CreateNeuron();
                }
            }
        }
        public void ThrowsIfPropagateTwice()
        {
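            // A second consecutive PropagateBackwards, without ApplyTraining in between,
            // is expected to throw CannotPropagateWithEmptyAcc.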
            Neuron n31 = new Neuron();

            BiasNeuron bias2 = new BiasNeuron();
            Neuron n21 = new Neuron();
            n31.Connect(bias2);
            n31.Connect(n21);

            InputNeuron input = new InputNeuron();
            BiasNeuron bias1 = new BiasNeuron();
            n21.Connect(bias1);
            n21.Connect(input);

            input.Input = 1;
            n31.SetAnswer(1);
            n31.PropagateBackwards();
            Assert.Throws<CannotPropagateWithEmptyAcc>(() => n31.PropagateBackwards());
        }
        public void TestXNOR_Manually()
        {
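            // Hand-wired XNOR: with these weights a2_1 computes AND(x1, x2), a2_2 computes
            // (NOT x1) AND (NOT x2), and a3_1 ORs the two, so the output is close to 1
            // exactly when both inputs are equal.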
            Neuron a3_1 = new Neuron();
            BiasNeuron bias_2 = new BiasNeuron();
            a3_1.Connect(bias_2);
            Neuron a2_1 = new Neuron();
            a3_1.Connect(a2_1);
            Neuron a2_2 = new Neuron();
            a3_1.Connect(a2_2);
            BiasNeuron bias_1 = new BiasNeuron();
            a2_1.Connect(bias_1);
            a2_2.Connect(bias_1);
            InputNeuron a1_1 = new InputNeuron();
            a2_1.Connect(a1_1);
            a2_2.Connect(a1_1);
            InputNeuron a1_2 = new InputNeuron();
            a2_1.Connect(a1_2);
            a2_2.Connect(a1_2);

            a3_1.SetWeight(0, -10);
            a3_1.SetWeight(1, 20);
            a3_1.SetWeight(2, 20);

            a2_1.SetWeight(0, -30);
            a2_1.SetWeight(1, 20);
            a2_1.SetWeight(2, 20);

            a2_2.SetWeight(0, 10);
            a2_2.SetWeight(1, -20);
            a2_2.SetWeight(2, -20);

            a1_1.Input = 0;
            a1_2.Input = 0;
            MyAssert.CloseTo(a3_1.Activation(), 1);

            a1_1.Input = 0;
            a1_2.Input = 1;
            MyAssert.CloseTo(a3_1.Activation(), 0);

            a1_1.Input = 1;
            a1_2.Input = 0;
            MyAssert.CloseTo(a3_1.Activation(), 0);

            a1_1.Input = 1;
            a1_2.Input = 1;
            MyAssert.CloseTo(a3_1.Activation(), 1);
        }
        public void TanhActivation()
        {
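            // Same wiring as ActivateNeuron, but the expected value is tanh(z)
            // instead of the logistic sigmoid.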
            TanhNeuron tn = new TanhNeuron();
            BiasNeuron bias = new BiasNeuron();
            double w0 = 0;
            tn.Connect(bias, w0);

            Assert.Throws<NotConfiguredException>(() => tn.Activation());
            InputNeuron i1 = new InputNeuron();
            InputNeuron i2 = new InputNeuron();
            InputNeuron i3 = new InputNeuron();

            i1.Input = 1;
            i2.Input = 1;
            i3.Input = 1;

            double w1 = 1;
            double w2 = 1;
            double w3 = 1;

            tn.Connect(i1, w1);
            tn.Connect(i2, w2);
            tn.Connect(i3, w3);
            double z = i1.Input * w1 + i2.Input * w2 + i3.Input * w3;
            double expected_activation = (Math.Exp(z) - Math.Exp(-z)) / (Math.Exp(z) + Math.Exp(-z));
            MyAssert.CloseTo(tn.Activation(), expected_activation);
        }