Example #1
        internal void Apply(NeuralTrainLayer neuralTrainLayer, float learningRate)
        {
            // Nothing to apply if no training samples were accumulated.
            if (neuralTrainLayer.TrainingCount == 0)
            {
                return;
            }

            // Average the accumulated gradients over the batch and scale by the learning rate.
            float multiplier = learningRate / neuralTrainLayer.TrainingCount;

            float[] bShift = neuralTrainLayer.DCDB.Multiply(multiplier);
            float[] wShift = neuralTrainLayer.DCDW.Multiply(multiplier);

            // Gradient descent step: move biases and weights against the cost gradient.
            Bias   = Bias.Minus(bShift);
            Weight = Weight.Minus(wShift);
        }
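
The Multiply and Minus calls on float[] above are extension methods that are not shown in this example. A minimal sketch of what such element-wise helpers could look like, assuming they return new arrays rather than mutating in place (the class name below is illustrative, not part of the original project):

using System;

// Hypothetical element-wise float[] helpers matching the calls in Apply above.
internal static class FloatArrayExtensions
{
    // Returns a new array with every element scaled by `factor`.
    internal static float[] Multiply(this float[] source, float factor)
    {
        var result = new float[source.Length];
        for (int i = 0; i < source.Length; i++)
        {
            result[i] = source[i] * factor;
        }
        return result;
    }

    // Returns a new array with `other` subtracted element-wise.
    internal static float[] Minus(this float[] source, float[] other)
    {
        if (source.Length != other.Length)
        {
            throw new ArgumentException("Array lengths must match.", nameof(other));
        }
        var result = new float[source.Length];
        for (int i = 0; i < source.Length; i++)
        {
            result[i] = source[i] - other[i];
        }
        return result;
    }
}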
Example #2
        public void UpdateWeights(Matrix targets, double learningRate)
        {
            if (Next == null)
            {
                // Output layer: error is the difference between the target and the actual output.
                Errors = Matrix.Subtract(targets, Outputs);
            }
            else
            {
                // Hidden layer: propagate the error back through the next layer's weights.
                Errors = Matrix.Multiply(Matrix.Transpose(Next.Weights), Next.Errors);
            }

            // Gradient = activation derivative of the outputs, scaled element-wise
            // by the error and the learning rate.
            Matrix gradient = Matrix.Copy(Outputs);
            gradient.Map(x => ActivationFc_derivitiv(x));
            gradient.Multiply(Errors);
            gradient.Multiply(learningRate);

            // Weight delta = gradient * transpose(inputs).
            Matrix delta = Matrix.Multiply(gradient, Matrix.Transpose(Inputs));

            Weights.Add(delta);
            Bias.Add(gradient);

            // Recurse towards the input layer.
            if (Prev != null)
            {
                Prev.UpdateWeights(targets, learningRate);
            }
        }
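
In both branches the derivative is mapped over Outputs, i.e. over values that have already passed through the activation function, so ActivationFc_derivitiv must be written in terms of the activation output itself. A minimal sketch, assuming the layer uses a sigmoid activation (the actual function is not part of this example):

        // Sketch only: assumes a sigmoid activation. Because the derivative is mapped
        // over Outputs (already sigmoid-activated values), it is expressed in terms of
        // the output y rather than the pre-activation input:
        // sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)) = y * (1 - y).
        private static double ActivationFc_derivitiv(double y)
        {
            return y * (1.0 - y);
        }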
 public OutputNeuron(ITransferFunction tFunc) : base(tFunc)
 {
     Bias = new Bias();
 }

 public sealed override void CalculateOutput()
 {
     // Add the bias to the accumulated weighted input, then apply the transfer function.
     Output += Bias.Calculate();
     Output  = base.TransferFunction.Evaluate(Output);
 }