/// <summary>
/// Initializes a new Optimizer using the default values.
/// </summary>
/// <param name="theta">Theta to optimize.</param>
/// <param name="maxIterations">Maximum number of iterations.</param>
/// <param name="learningRate">Learning Rate (alpha) (Optional).</param>
/// <param name="momentum">Momentum parameter for use in accelerated methods (Optional).</param>
/// <param name="optimizationMethod">Type of optimization method to use (Optional).</param>
/// <param name="optimizer">An external typed optimization method to use (Optional). Required when
/// <paramref name="optimizationMethod"/> is <see cref="OptimizationMethods.External"/>.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="optimizationMethod"/> is
/// <see cref="OptimizationMethods.External"/> but no <paramref name="optimizer"/> was supplied.</exception>
public Optimizer(Vector theta, int maxIterations, double learningRate = 1.0, double momentum = 0.9,
                 OptimizationMethods optimizationMethod = OptimizationMethods.StochasticGradientDescent,
                 OptimizationMethod optimizer = null)
{
    this.Completed = false;

    if (optimizationMethod != OptimizationMethods.External)
    {
        // A built-in method was requested: construct it here, overriding any
        // externally supplied optimizer (matches the documented precedence).
        switch (optimizationMethod)
        {
            case OptimizationMethods.FastGradientDescent:
                optimizer = new numl.Math.Optimization.Methods.GradientDescent.FastGradientDescent()
                {
                    Momentum = momentum
                };
                break;
            case OptimizationMethods.StochasticGradientDescent:
                optimizer = new numl.Math.Optimization.Methods.GradientDescent.StochasticGradientDescent();
                break;
            case OptimizationMethods.NAGDescent:
                optimizer = new numl.Math.Optimization.Methods.GradientDescent.NAGDescent()
                {
                    Momentum = momentum
                };
                break;
        }
    }
    else if (optimizer == null)
    {
        // Fail fast instead of deferring a NullReferenceException to the first
        // use of OpimizationMethod during the optimization run.
        throw new ArgumentNullException(nameof(optimizer),
            "An external optimizer must be supplied when OptimizationMethods.External is selected.");
    }

    // NOTE(review): "OpimizationMethod" is a pre-existing typo in the public
    // property name; it cannot be renamed here without breaking callers.
    this.OpimizationMethod = optimizer;

    this.Properties = new OptimizerProperties()
    {
        Iteration = 0,
        MaxIterations = maxIterations,
        Cost = double.MaxValue,                 // sentinel: any real cost improves on this
        Gradient = Vector.Zeros(theta.Length),
        Theta = theta,
        LearningRate = learningRate,
        Momentum = momentum
    };
}
/// <summary>
/// Initializes a new Optimizer using the default values.
/// </summary>
/// <param name="theta">Theta to optimize.</param>
/// <param name="maxIterations">Maximum number of iterations.</param>
/// <param name="learningRate">Learning Rate (alpha) (Optional).</param>
/// <param name="momentum">Momentum parameter for use in accelerated methods (Optional).</param>
/// <param name="optimizationMethod">Type of optimization method to use (Optional).</param>
/// <param name="optimizer">An external typed optimization method to use (Optional).</param>
public Optimizer(Vector theta, int maxIterations, double learningRate = 1.0, double momentum = 0.9,
                 OptimizationMethods optimizationMethod = OptimizationMethods.StochasticGradientDescent,
                 OptimizationMethod optimizer = null)
{
    this.Completed = false;

    // For any non-External selection, build the corresponding built-in method
    // (this takes precedence over an externally supplied optimizer).
    if (optimizationMethod != OptimizationMethods.External)
    {
        if (optimizationMethod == OptimizationMethods.FastGradientDescent)
        {
            var fast = new FastGradientDescent();
            fast.Momentum = momentum;
            optimizer = fast;
        }
        else if (optimizationMethod == OptimizationMethods.StochasticGradientDescent)
        {
            optimizer = new StochasticGradientDescent();
        }
        else if (optimizationMethod == OptimizationMethods.NAGDescent)
        {
            var nag = new NAGDescent();
            nag.Momentum = momentum;
            optimizer = nag;
        }
    }

    this.OpimizationMethod = optimizer;

    // Seed the run state: zero gradient, cost at the worst possible value so
    // the first evaluated cost always registers as an improvement.
    var properties = new OptimizerProperties();
    properties.Iteration = 0;
    properties.MaxIterations = maxIterations;
    properties.Cost = double.MaxValue;
    properties.Gradient = Vector.Zeros(theta.Length);
    properties.Theta = theta;
    properties.LearningRate = learningRate;
    properties.Momentum = momentum;
    this.Properties = properties;
}