Example #1
        /// <summary>
        /// Weight update done by the ResilientPropagation learning rule.
        /// Executed at the end of an epoch (in batch mode). </summary>
        /// <param name="weight"> the weight to update </param>
        protected internal virtual void resillientWeightUpdate(Weight weight)
        {
            // get resilient training data for the current weight
            ResilientWeightTrainingtData weightData = (ResilientWeightTrainingtData)weight.TrainingData;

            // multiply the current and previous gradients and take the sign.
            // We want to see if the gradient has changed its sign.
            int gradientSignChange = sign(weightData.previousGradient * weightData.gradient);

            double weightChange = 0;  // weight change to apply (delta weight)
            double delta;             //  adaptation factor

            if (gradientSignChange > 0)
            {
                // if the gradient has retained its sign, then we increase delta (adaptation factor) so that it will converge faster
                delta = Math.Min(weightData.previousDelta * increaseFactor, maxDelta);
                //  weightChange = -sign(weightData.gradient) * delta; // if the error is increasing (positive gradient) subtract delta; if it is decreasing (negative gradient) add delta
                // note that our gradient already carries the opposite sign (it stores -dE/dw), so the minus is omitted here
                weightChange             = sign(weightData.gradient) * delta;
                weightData.previousDelta = delta;
            }
            else if (gradientSignChange < 0)
            {
                // if gradientSignChange<0, then the sign has changed, and the last weight change was too big
                delta = Math.Max(weightData.previousDelta * decreaseFactor, minDelta);
                // weightChange = -weightData.previousDelta; // 0; // -delta - weightData.previousDelta; // this is problematic, it should be weightChange
                weightChange = -weightData.previousWeightChange;                 // if the previous step skipped over a minimum, go back
                // avoid double punishment
                weightData.gradient         = 0;
                weightData.previousGradient = 0;

                // move current values into the past
                weightData.previousDelta = delta;
            }
            else if (gradientSignChange == 0)
            {
                // if gradientSignChange==0 then there is no change to the delta
                delta = weightData.previousDelta;
                //delta = weightData.previousGradient; // note that Encog does this
                weightChange = sign(weightData.gradient) * delta;
            }

            weight.value += weightChange;
            weightData.previousWeightChange = weightChange;
            weightData.previousGradient     = weightData.gradient;         // as in moveNowValuesToPreviousEpochValues
            weightData.gradient             = 0;
        }
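
The method above assumes a per-weight holder object (ResilientWeightTrainingtData) and an integer sign helper, neither of which is shown. The sketch below is illustrative only, not the actual library source: the field names come from the code above, while the initial delta of 0.1 and the usual rule constants (increaseFactor = 1.2, decreaseFactor = 0.5, maxDelta = 50, minDelta = 1e-6) follow the standard Rprop defaults from Riedmiller and Braun.

        // Illustrative sketch, not the actual library source: the per-weight
        // training data assumed by resillientWeightUpdate, plus the sign helper.
        public class ResilientWeightTrainingtData
        {
            public double gradient;              // gradient summed over the current epoch
            public double previousGradient;      // gradient from the previous epoch
            public double previousDelta = 0.1;   // adaptation factor; 0.1 is the usual Rprop initial step
            public double previousWeightChange;  // weight change applied in the previous epoch
        }

        // Returns -1, 0 or 1 according to the sign of x.
        private static int sign(double x)
        {
            return Math.Sign(x);
        }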
Example #2
        /// <summary>
        /// Calculates and sums the gradients for each of the neuron's weights; the actual weight update is done in batch mode. </summary>
        /// <seealso cref= ResilientPropagation#resillientWeightUpdate(org.neuroph.core.Weight)  </seealso>
        public override void updateNeuronWeights(Neuron neuron)
        {
            foreach (Connection connection in neuron.InputConnections)
            {
                double input = connection.Input;
                if (input == 0)
                {
                    // a zero input contributes nothing to the gradient, so skip it
                    continue;
                }

                // get the error for the specified neuron
                double neuronError = neuron.Error;
                // get the current connection's weight
                Weight weight = connection.Weight;
                // ... and the object that stores resilient training data for that weight
                ResilientWeightTrainingtData weightData = (ResilientWeightTrainingtData)weight.TrainingData;

                // calculate the weight gradient (and sum gradients since learning is done in batch mode)
                weightData.gradient += neuronError * input;
            }
        }
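
Putting the two examples together: during an epoch, updateNeuronWeights is called after each pattern's errors are backpropagated, so weightData.gradient accumulates across the whole epoch, and only at the end of the epoch is resillientWeightUpdate applied once per weight. Below is a rough sketch of that end-of-epoch step; the traversal mirrors the property names used above (Layers, Neurons, InputConnections), but the method itself and the NeuralNetwork type are assumptions, not the library's actual API.

        // Hypothetical end-of-epoch driver, for illustration only: gradients
        // were accumulated by updateNeuronWeights during the epoch; now apply
        // the Rprop update to every weight exactly once.
        protected virtual void doBatchWeightsUpdate(NeuralNetwork network)
        {
            foreach (Layer layer in network.Layers)
            {
                foreach (Neuron neuron in layer.Neurons)
                {
                    foreach (Connection connection in neuron.InputConnections)
                    {
                        // applies the accumulated gradient and resets it to zero
                        resillientWeightUpdate(connection.Weight);
                    }
                }
            }
        }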