/// <inheritdoc/>
protected override void LearnConnection(IFreeformConnection connection)
{
    // Standard back-propagation with momentum: the new weight change is
    // the learning-rate-scaled gradient plus a fraction of the previous
    // weight change (temp slot 0 = gradient, slot 1 = last delta).
    double grad = connection.GetTempTraining(0);
    double previousDelta = connection.GetTempTraining(1);
    double weightDelta = (_learningRate * grad) + (_momentum * previousDelta);

    // Remember this delta so it can feed the momentum term next iteration,
    // then apply it to the weight.
    connection.SetTempTraining(1, weightDelta);
    connection.Weight += weightDelta;
}
/// <inheritdoc />
protected override void LearnConnection(IFreeformConnection connection)
{
    // Compare the sign of the current gradient with the previous one:
    // a positive product means the direction is unchanged, a negative
    // product means it flipped, zero means no information.
    double currentGradient = connection.GetTempTraining(TempGradient);
    double previousGradient = connection.GetTempTraining(TempLastGradient);
    int signChange = EncogMath.Sign(currentGradient * previousGradient);

    double weightDelta = 0;

    if (signChange > 0)
    {
        // Direction kept: grow the update value so training converges
        // faster, capped at the maximum step size.
        double update = Math.Min(
            connection.GetTempTraining(TempUpdate) * RPROPConst.PositiveEta,
            _maxStep);
        weightDelta = EncogMath.Sign(currentGradient) * update;
        connection.SetTempTraining(TempUpdate, update);
        connection.SetTempTraining(TempLastGradient, currentGradient);
    }
    else if (signChange < 0)
    {
        // Direction flipped: the previous step overshot. Shrink the update
        // value (never below DeltaMin) and back out the last weight change.
        double update = Math.Max(
            connection.GetTempTraining(TempUpdate) * RPROPConst.NegativeEta,
            RPROPConst.DeltaMin);
        connection.SetTempTraining(TempUpdate, update);
        weightDelta = -connection.GetTempTraining(TempLastWeightDelta);
        // Zero the stored gradient so the next iteration makes no adjustment.
        connection.SetTempTraining(TempLastGradient, 0);
    }
    else
    {
        // No sign information (a gradient was zero): keep the update value
        // and step along the current gradient's sign.
        double update = connection.GetTempTraining(TempUpdate);
        weightDelta = EncogMath.Sign(currentGradient) * update;
        connection.SetTempTraining(TempLastGradient, currentGradient);
    }

    // Apply the weight change, if any, and record it for the next iteration.
    connection.Weight += weightDelta;
    connection.SetTempTraining(TempLastWeightDelta, weightDelta);
}
/// <inheritdoc />
protected override void LearnConnection(IFreeformConnection connection)
{
    double gradient = connection.GetTempTraining(TempGradient);
    double lastGradient = connection.GetTempTraining(TempLastGradient);

    // The sign of the product tells us whether the gradient kept its
    // direction (+1), reversed it (-1), or carried no information (0).
    int direction = EncogMath.Sign(gradient * lastGradient);

    double weightChange;
    if (direction < 0)
    {
        // Direction reversed: the last step was too large. Shrink the
        // update value (bounded below by DeltaMin) and revert the last
        // weight change.
        connection.SetTempTraining(
            TempUpdate,
            Math.Max(
                connection.GetTempTraining(TempUpdate) * RPROPConst.NegativeEta,
                RPROPConst.DeltaMin));
        weightChange = -connection.GetTempTraining(TempLastWeightDelta);
        // Forget the gradient so the next iteration skips the adjustment.
        connection.SetTempTraining(TempLastGradient, 0);
    }
    else
    {
        double update = connection.GetTempTraining(TempUpdate);
        if (direction > 0)
        {
            // Direction kept: accelerate, capped at the maximum step size.
            update = Math.Min(update * RPROPConst.PositiveEta, _maxStep);
            connection.SetTempTraining(TempUpdate, update);
        }
        // For both the kept-direction and zero cases, step along the
        // current gradient's sign and remember that gradient for the
        // next comparison.
        weightChange = EncogMath.Sign(gradient) * update;
        connection.SetTempTraining(TempLastGradient, gradient);
    }

    // Apply the weight change, if any, and record it.
    connection.Weight += weightChange;
    connection.SetTempTraining(TempLastWeightDelta, weightChange);
}