public override void Optimize()
{
    // Clip gradients before applying the RMSProp update.
    GradientClipper.Clip(this);

    foreach (var data in Data)
    {
        if (data.Variable.Type == VariableType.Parameter)
        {
            if (data.Variable.DataType == typeof(float))
            {
                var a = _weights[data.Variable].Cast<float>();
                var p = data.Tensor.Cast<float>();
                var g = data.Gradient.Cast<float>();

                // Accumulate a decaying average of squared gradients: a = rho * a + (1 - rho) * g^2
                Context.Assign(a, Rho.AsScalar<float>() * a + (1.0.AsScalar<float>() - Rho.AsScalar<float>()) * g * g);

                // Parameter update: p = p - lr * g / (sqrt(a) + epsilon)
                Context.Assign(p, p - LearningRate.AsScalar<float>() * g / (Sqrt(a) + Epsilon.AsScalar<float>()));
            }
            else if (data.Variable.DataType == typeof(double))
            {
                var a = _weights[data.Variable].Cast<double>();
                var p = data.Tensor.Cast<double>();
                var g = data.Gradient.Cast<double>();

                Context.Assign(a, Rho.AsScalar<double>() * a + (1.0.AsScalar<double>() - Rho.AsScalar<double>()) * g * g);

                Context.Assign(p, p - LearningRate.AsScalar<double>() * g / (Sqrt(a) + Epsilon.AsScalar<double>()));
            }
            else
            {
                throw new InvalidOperationException("Unsupported parameter data type: " + data.Variable.DataType + ".");
            }
        }
    }
}
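// Standalone sketch, separate from the library code above: the same RMSProp update
// expressed on plain float arrays, with no dependency on the tensor API used in
// Optimize(). The method name RmsPropStep and its parameters are hypothetical and
// exist only to illustrate the math: a = rho*a + (1-rho)*g*g, then
// p -= lr * g / (sqrt(a) + eps).
private static void RmsPropStep(
    float[] parameters, float[] gradients, float[] squaredGradAccum,
    float learningRate, float rho, float epsilon)
{
    for (int i = 0; i < parameters.Length; i++)
    {
        // Decaying average of squared gradients.
        squaredGradAccum[i] = rho * squaredGradAccum[i] + (1f - rho) * gradients[i] * gradients[i];

        // Scale the step per element by the root of the accumulator.
        parameters[i] -= learningRate * gradients[i] / ((float)Math.Sqrt(squaredGradAccum[i]) + epsilon);
    }
}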