コード例 #1
0
 /// <summary>
 /// Back-propagation step: computes and stores this neuron's gradient using
 /// the derivative of the sigmoid activation.
 /// </summary>
 /// <param name="target">
 /// Expected output when this is an output-layer neuron; null when this is a
 /// hidden-layer neuron (gradient is then pulled from downstream neurons).
 /// </param>
 /// <returns>The freshly assigned <c>Gradient</c> value.</returns>
 public double CalculateGradient(double? target = null)
 {
     // Hidden neuron: accumulate downstream gradients weighted by each
     // outgoing synapse. Output neuron: use the error against the target.
     double upstream = target == null
         ? OutputSynapses.Sum(s => s.OutputNeuron.Gradient * s.Weight)
         : CalculateError(target.Value);

     Gradient = upstream * Sigmoid.Derivative(Value);
     return Gradient;
 }
コード例 #2
0
 /// <summary>
 /// Computes and stores this neuron's error term, scaled by the sigmoid
 /// derivative of its output (back-propagation delta).
 /// </summary>
 /// <param name="target">
 /// Expected output for an output-layer neuron; null for hidden neurons,
 /// whose error is gathered from downstream neurons via the synapse weights.
 /// </param>
 /// <returns>The freshly assigned <c>Error</c> value.</returns>
 public double CalculateSigmoidError(double? target = null)
 {
     if (target == null)
     {
         // Hidden layer: weighted sum of downstream errors, then scale by slope.
         double weightedError = OutputSynapses.Sum(s => s.OutputNeuron.Error * s.Weight);
         Error = weightedError * Sigmoid.Derivative(Output);
     }
     else
     {
         // Output layer: slope times the error against the expected value.
         Error = Sigmoid.Derivative(Output) * CalculateError(target.Value);
     }

     return Error;
 }
コード例 #3
0
ファイル: Neuron.cs プロジェクト: Fer9897/AkinatorNeuralNet
        /// <summary>
        /// Back-propagation step: assigns and returns this neuron's gradient,
        /// i.e. its error signal scaled by the sigmoid derivative of its value.
        /// </summary>
        /// <param name="target">
        /// Expected output for an output-layer neuron; omit (null) for hidden
        /// neurons, whose gradient is collected from downstream neurons.
        /// </param>
        /// <returns>The newly assigned <c>Gradient</c>.</returns>
        public double CalculateGradient(double? target = null)
        {
            // Output-layer neuron: gradient derives from the error vs. the target.
            if (target.HasValue)
            {
                return Gradient = CalculateError(target.Value) * Sigmoid.Derivative(Value);
            }

            // Hidden neuron: propagate gradients back through outgoing synapses.
            return Gradient = OutputSynapses.Sum(syn => syn.OutputNeuron.Gradient * syn.Weight) * Sigmoid.Derivative(Value);
        }
コード例 #4
0
 /// <summary>
 /// Back-propagation step: computes and stores this neuron's gradient using
 /// the derivative of the sigmoid activation.
 /// </summary>
 /// <param name="target">
 /// Expected output when this is an output-layer neuron; null for hidden
 /// neurons, whose gradient is accumulated from downstream neurons.
 /// </param>
 /// <returns>The freshly assigned <c>Gradient</c>.</returns>
 public float CalculateGradient(float? target = null)
 {
     if (target == null)
     {
         // Hidden neuron: sum downstream gradients weighted by each outgoing
         // synapse. An explicit loop (rather than LINQ Sum) avoids the
         // per-call delegate/enumerator allocation in this hot path.
         float sum = 0f;
         foreach (var synapse in OutputSynapses)
         {
             sum += synapse.OutputNeuron.Gradient * synapse.Weight;
         }
         return Gradient = sum * Sigmoid.Derivative(Value);
     }

     // Output neuron: error against the target, scaled by the sigmoid slope.
     return Gradient = CalculateError(target.Value) * Sigmoid.Derivative(Value);
 }
コード例 #5
0
        /// <summary>
        /// Trains the network on a single input/output pair: runs a forward
        /// pass, computes per-neuron error terms from output to input, then
        /// adjusts every connection weight and neuron bias by gradient descent.
        /// </summary>
        /// <param name="pair">One training sample; <c>pair.input</c> feeds the
        /// forward pass and <c>pair.output</c> is the expected output vector.</param>
        public void BackPropagate(Pair pair)
        {
            var input  = pair.input.ToArray();
            var output = pair.output.ToArray();

            // Populate every neuron's outputA with the current activations.
            ForwardPass(input);

            // Output layer: raw error = expected - actual for each neuron.
            // NOTE(review): no sigmoid derivative is applied here; the
            // derivative is folded in later during the weight/bias updates.
            for (var i = 0; i < outputLayer.neurons.Count; i++)
            {
                var neuron         = outputLayer.neurons[i];
                var outputA        = neuron.outputA;
                var expectedOutput = output[i];
                neuron.error = expectedOutput - outputA;
            }

            // Hidden layers, walked backwards from the output layer: each
            // neuron's error is the weight-weighted sum of its downstream
            // neurons' errors.
            // NOTE(review): the downstream activation derivative is NOT
            // included in this accumulation, only at update time below —
            // confirm this matches the intended delta rule for deep layers.
            for (var layer = outputLayer.previous; layer != null; layer = layer.previous)
            {
                foreach (var neuron in layer.neurons)
                {
                    neuron.error = 0;
                    foreach (var connection in neuron.outputs)
                    {
                        var error  = connection.output.error;
                        var weight = connection.weight;
                        neuron.error += error * weight;
                    }
                }
            }

            // Weight update: delta = lr * downstream error * sigmoid'(downstream
            // activation) * upstream activation. Errors are (expected - actual),
            // so corrections are added rather than subtracted.
            foreach (var connection in connections)
            {
                var error           = connection.output.error;
                var gradient        = Sigmoid.Derivative(connection.output.outputA);
                var previousOutputA = connection.input.outputA;
                connection.weight += learning_rate * error * gradient * previousOutputA;
            }

            // Bias update for every neuron in every layer.
            // NOTE(review): this loop starts at inputLayer, so input-layer
            // biases are also trained — verify that is intentional.
            for (var layer = inputLayer; layer != null; layer = layer.next)
            {
                foreach (var neuron in layer.neurons)
                {
                    var error    = neuron.error;
                    var gradient = Sigmoid.Derivative(neuron.outputA);
                    neuron.bias += learning_rate * error * gradient;
                }
            }
        }
コード例 #6
0
        /// <summary>
        /// Trains the network on a mini-batch: accumulates batch-averaged
        /// error terms and weight corrections over every sample, then applies
        /// all weight and bias updates once at the end.
        /// </summary>
        /// <param name="batch">Collection of training pairs; each contributes
        /// 1/<c>batch.Count</c> of its gradient to the accumulated update.</param>
        public void BackPropagate(Batch batch)
        {
            var learning_rate = 0.01;

            // Reset the per-batch accumulators before summing over samples.
            foreach (var neuron in neurons)
            {
                neuron.error = 0;
            }

            foreach (var connection in connections)
            {
                connection.costCorrection = 0;
            }

            foreach (var pair in batch)
            {
                var input  = pair.input.ToArray();
                var output = pair.output.ToArray();

                // Refresh every neuron's outputA for this sample.
                ForwardPass(input);

                // Output layer: dC/dA of a squared-error cost, i.e.
                // 2 * (expected - actual), averaged over the batch.
                for (var i = 0; i < outputLayer.neurons.Count; i++)
                {
                    var neuron         = outputLayer.neurons[i];
                    var outputA        = neuron.outputA;
                    var expectedOutput = output[i];
                    var error          = expectedOutput - outputA;
                    var dCdA           = 2 * error;
                    neuron.error += dCdA / batch.Count;
                }

                // Hidden layers, walked backwards: chain-rule terms per
                // connection. dCdA = downstream accumulated error,
                // dAdZ = sigmoid'(downstream activation), dZdA = weight,
                // dZdW = upstream activation.
                // NOTE(review): neuron.error accumulates across samples
                // without being reset between pairs (by design — it is the
                // batch average), but it is also *read* via
                // connection.output.error while still mid-accumulation for
                // deeper layers — confirm the layer walk order makes each
                // downstream error complete before it is consumed.
                for (var layer = outputLayer.previous; layer != null; layer = layer.previous)
                {
                    foreach (var neuron in layer.neurons)
                    {
                        foreach (var connection in neuron.outputs)
                        {
                            var dCdA = connection.output.error;
                            var dAdZ = Sigmoid.Derivative(connection.output.outputA);
                            var dZdA = connection.weight;
                            var dZdW = connection.input.outputA;
                            neuron.error += dCdA * dAdZ * dZdA / batch.Count;
                            connection.costCorrection += dCdA * dAdZ * dZdW / batch.Count;
                        }
                    }
                }
            }

            // Apply the accumulated corrections. Errors are (expected - actual),
            // so the correction is added rather than subtracted.
            foreach (var connection in connections)
            {
                connection.weight += connection.costCorrection * learning_rate;
            }

            // Bias update: dC/dB = dC/dA * sigmoid'(A) * 1.
            // NOTE(review): starts at inputLayer, so input-layer biases are
            // trained too — verify that is intentional.
            for (var layer = inputLayer; layer != null; layer = layer.next)
            {
                foreach (var neuron in layer.neurons)
                {
                    var dCdA = neuron.error;
                    var dAdZ = Sigmoid.Derivative(neuron.outputA);
                    var dZdB = 1;
                    neuron.bias += dCdA * dAdZ * dZdB * learning_rate;
                }
            }
        }