Example #1
 public double CalculateGradient(double? target = null)                 // Used in backpropagation; relies on the derivative of the sigmoid function
 {
     if (target == null)
     {
         return Gradient = OutputSynapses.Sum(a => a.OutputNeuron.Gradient * a.Weight) * Sigmoid.Derivative(Value); // Error term for a hidden-layer neuron
     }
     return Gradient = CalculateError(target.Value) * Sigmoid.Derivative(Value);                                    // Error term for an output-layer neuron
 }
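All the variants on this page share one pattern: an output-layer neuron receives an explicit target, while a hidden-layer neuron sums the already-computed gradients of its downstream neurons. A minimal sketch of how such a method is typically driven during backpropagation follows; the Sigmoid helper and the BackPropagator driver are illustrative assumptions, not code from any of the projects listed here:

 using System;
 using System.Collections.Generic;

 // Hypothetical sigmoid helper matching the Sigmoid.Derivative(Value) calls above.
 // It assumes Value holds the sigmoid *output* s, so the derivative is s * (1 - s).
 public static class Sigmoid
 {
     public static double Output(double x) => 1.0 / (1.0 + Math.Exp(-x));

     public static double Derivative(double s) => s * (1.0 - s);
 }

 // Hypothetical driver: gradients are computed output layer first, so every
 // hidden neuron can read the already-computed gradients of the neurons it feeds.
 public class BackPropagator
 {
     public void BackPropagate(double[] targets, List<List<Neuron>> layers)
     {
         var outputLayer = layers[layers.Count - 1];
         for (int i = 0; i < outputLayer.Count; i++)
         {
             outputLayer[i].CalculateGradient(targets[i]); // target given: output layer
         }
         for (int l = layers.Count - 2; l > 0; l--)        // hidden layers, back to front; input layer (0) is skipped
         {
             foreach (var neuron in layers[l])
             {
                 neuron.CalculateGradient();               // no target: sum downstream gradients
             }
         }
     }
 }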
Example #2
 /// <summary>
 /// Calculates the gradient.
 /// </summary>
 /// <param name="target">The target output value; null for hidden-layer neurons.</param>
 /// <returns>The calculated gradient.</returns>
 public double CalculateGradient(double? target = null)
 {
     if (target == null)
     {
         return Gradient = OutputSynapses.Sum(a => a.OutputNeuron.Gradient * a.Weight) * Sigmoid.Derivative(Value);
     }
     return Gradient = CalculateError(target.Value) * Sigmoid.Derivative(Value);
 }
Example #3
        /*** Added by Benson ***/
        public void UpdateInput(double learnRate)
        {
            // gradient backpropagated from the downstream synapses
            double inputGradient = OutputSynapses.Sum(a => a.OutputNeuron.Gradient * a.Weight);

            // nudge the input value along the gradient
            Value += learnRate * inputGradient;
        }
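Example #3 is unusual in that it moves the neuron's stored input value along the gradient instead of touching the weights. The conventional companion step updates the synapse weights; a hedged sketch under the same naming assumptions (Gradient, InputSynapses, InputNeuron), where, with the error defined as target - output as in Example #6, the weights move in the positive gradient direction:

        // Hypothetical counterpart to Example #3: the usual backprop step updates
        // the incoming weights, scaling the neuron's gradient by each input value.
        public void UpdateWeights(double learnRate)
        {
            foreach (var synapse in InputSynapses)
            {
                synapse.Weight += learnRate * Gradient * synapse.InputNeuron.Value;
            }
        }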
Example #4
 public double CalculateSigmoidError(double? target = null)
 {
     if (target == null)
     {
         return Error = OutputSynapses.Sum(a => a.OutputNeuron.Error * a.Weight) * Sigmoid.Derivative(Output);
     }
     return Error = Sigmoid.Derivative(Output) * CalculateError(target.Value);
 }
Example #5
    public float CalculateGradient(float? target = null)
    {
        if (target == null)
        {
            return Gradient = OutputSynapses.Sum(x => x.OutputNeuron.Gradient * x.Weight) * Sigmoid.Derivative(Value);
        }

        return Gradient = CalculateError(target.Value) * Sigmoid.Derivative(Value);
    }
Example #6
        // Error value. If a target is supplied, this is an output-layer neuron.
        // With no target, this is a hidden-layer neuron: its error is propagated back from the layer after it,
        // computed as the sum, over every synapse leaving this node, of (synapse weight * error of the connected neuron).
        public double CalculateError(double? target = null)
        {
            if (target == null)
            {
                return Error = OutputSynapses.Sum(a => a.Weight * a.OutputNeuron.Error);
            }

            return Error = target.Value - OutputValue;
        }
Example #7
 /// <summary>
 /// Updates the weights of the InputSynapses to improve the network's accuracy
 /// </summary>
 /// <param name="_learnRate">learning rate of the Neural Network</param>
 public IEnumerator UpdateSynapses(float _learnRate)
 {
     LocalGradient = NeuralMath.SigmoidDerivative(Value) * (OutputSynapses.Sum(s => (s.OutPutNeuron.LocalGradient * s.Weight)));
     foreach (Synapse synapse in InputSynapses)
     {
         synapse.WeightDelta = synapse.Weight - (_learnRate * (synapse.InputNeuron.Value * LocalGradient)); // gradient-descent step; the updated weight is stored in WeightDelta
         yield return new WaitForEndOfFrame(); // spread the updates across rendered frames
     }
 }
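Example #7 is Unity code: UpdateSynapses returns an IEnumerator and yields WaitForEndOfFrame, so it has to run as a coroutine, handling one synapse per rendered frame. A hedged usage sketch; the TrainerBehaviour component and its neuron field are assumptions:

 using System.Collections;
 using UnityEngine;

 // Hypothetical host component that drives Example #7 as a Unity coroutine.
 public class TrainerBehaviour : MonoBehaviour
 {
     public Neuron neuron; // assumed reference to a neuron exposing UpdateSynapses

     void Start()
     {
         // One InputSynapse is updated per frame until the coroutine finishes.
         StartCoroutine(neuron.UpdateSynapses(0.1f)); // _learnRate = 0.1
     }
 }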
Example #8
        // Gradient calculation
        public double CalculateGradient(double? target = null)
        {
            // For hidden-layer neurons
            if (target == null)
            {
                return Gradient = OutputSynapses.Sum(a => a.OutputNeuron.Gradient * a.Weight) * ActivationFunction.Derivative(Value);
            }

            // For output-layer neurons
            return Gradient = CalculateError(target.Value) * ActivationFunction.Derivative(Value);
        }
Example #9
        public double CalculateGradient(double? target = null)
        {
            var derivative = ActivationFunction.Derivative(Value);

            if (target != null)
            {
                Gradient = derivative * CalculateError(target.Value);
            }
            else
            {
                Gradient = derivative * OutputSynapses.Sum(synapse => synapse.OutputNeuron.Gradient * synapse.Weight);
            }

            return Gradient;
        }
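Examples #8 and #9 replace the hard-coded Sigmoid with an ActivationFunction member, so the same neuron class can work with different activations. Neither project shows the shape of that abstraction; one plausible sketch, with the interface and both implementations being assumptions:

 using System;

 // Hypothetical interface behind the ActivationFunction member in Examples #8 and #9.
 // Derivative takes the stored activation *output*, mirroring the Derivative(Value) calls above.
 public interface IActivationFunction
 {
     double Output(double x);
     double Derivative(double activated);
 }

 public class SigmoidActivation : IActivationFunction
 {
     public double Output(double x) => 1.0 / (1.0 + Math.Exp(-x));
     public double Derivative(double s) => s * (1.0 - s); // s = sigmoid output
 }

 public class TanhActivation : IActivationFunction
 {
     public double Output(double x) => Math.Tanh(x);
     public double Derivative(double t) => 1.0 - t * t;   // t = tanh output
 }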
Example #10
File: Neuron.cs Project: dm-j/nn
 public double CalculateGradient() =>
     Gradient = OutputSynapses.Sum(synapse => synapse.OutputNeuron.Gradient * synapse.Weight) * Derivative(Value);
Example #11
 public double CalculateGradient()
 {
     return Gradient = OutputSynapses.Sum(a => a.OutputNeuron.Gradient * a.Weight) * CalculateDerivative();
 }
Example #12
 public double CalculateGradient()
 {
     return Gradient = OutputSynapses.Sum(a => a.OutputNeuron.Gradient * a.Weight) * _dact(Value);
 }
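Example #12 goes one step further and injects only the derivative, as a delegate (_dact). How that field gets wired is not shown; a minimal sketch, assuming a constructor that receives the derivative as a Func<double, double>:

 using System;

 // Hypothetical wiring for Example #12: the activation derivative is injected,
 // keeping the neuron agnostic of the concrete activation function.
 public partial class Neuron
 {
     private readonly Func<double, double> _dact;

     public Neuron(Func<double, double> activationDerivative)
     {
         _dact = activationDerivative;
     }
 }

 // Usage: pass the sigmoid derivative, expressed in terms of the sigmoid output s.
 // var neuron = new Neuron(s => s * (1 - s));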