/// <summary>
///   Initializes a new instance of the <see cref="HiddenGradientDescentLearning{T}"/> class.
/// </summary>
/// 
/// <param name="model">The model to be trained.</param>
/// 
public HiddenGradientDescentLearning(HiddenConditionalRandomField<T> model)
{
    Model = model;

    // One gradient accumulator slot per model parameter.
    gradient = new double[Model.Function.Weights.Length];

    // Gradient evaluations are delegated to the forward-backward algorithm.
    calculator = new ForwardBackwardGradient<T>(model);

    // Convergence is detected from the relative change between iterations.
    convergence = new RelativeConvergence();
}
/// <summary>
///   Constructs a new Conjugate Gradient learning algorithm.
/// </summary>
/// 
/// <param name="model">The model to be trained.</param>
/// 
/// <exception cref="ArgumentNullException">
///   Thrown when <paramref name="model"/> is null.</exception>
/// 
public HiddenConjugateGradientLearning(HiddenConditionalRandomField<T> model)
{
    // Fail fast instead of deferring a NullReferenceException
    // to the first property access below.
    if (model == null)
        throw new ArgumentNullException("model");

    Model = model;

    // Gradient evaluations are delegated to the forward-backward algorithm.
    calculator = new ForwardBackwardGradient<T>(model);

    // The optimizer searches over one dimension per model weight.
    optimizer = new ConjugateGradient(model.Function.Weights.Length);

    // Method-group conversion; no explicit delegate allocation needed.
    optimizer.Progress += progressChanged;

    optimizer.Function = calculator.Objective;
    optimizer.Gradient = calculator.Gradient;
}
/// <summary>
///   Constructs a new L-BFGS learning algorithm.
/// </summary>
/// 
/// <param name="model">The model to be trained.</param>
/// 
public HiddenQuasiNewtonLearning(HiddenConditionalRandomField<T> model)
{
    Model = model;

    // Objective and gradient evaluations come from the
    // forward-backward algorithm for this model.
    calculator = new ForwardBackwardGradient<T>(model);

    // One search dimension per model weight; stop once the
    // relative improvement falls below the tolerance.
    lbfgs = new BroydenFletcherGoldfarbShanno(model.Function.Weights.Length);
    lbfgs.Tolerance = 1e-3;
    lbfgs.Function = calculator.Objective;
    lbfgs.Gradient = calculator.Gradient;
}
/// <summary>
///   Releases unmanaged and - optionally - managed resources
/// </summary>
/// 
/// <param name="disposing"><c>true</c> to release both managed
/// and unmanaged resources; <c>false</c> to release only unmanaged
/// resources.</param>
/// 
protected virtual void Dispose(bool disposing)
{
    // No unmanaged resources are held directly by this class,
    // so there is nothing to do unless we are disposing.
    if (!disposing)
        return;

    // Free managed resources.
    var resource = calculator;
    if (resource != null)
    {
        resource.Dispose();
        calculator = null;
    }
}
/// <summary>
///   Initializes a new instance of the <see cref="HiddenResilientGradientLearning{T}"/> class.
/// </summary>
/// 
/// <param name="model">Model to teach.</param>
/// 
public HiddenResilientGradientLearning(HiddenConditionalRandomField<T> model)
{
    Model = model;

    // Gradient evaluations are delegated to the forward-backward algorithm.
    calculator = new ForwardBackwardGradient<T>(model);

    // Run for at most 100 iterations, relying on repeated convergence
    // checks (3 in a row) rather than on a tolerance threshold.
    convergence = new RelativeConvergence(iterations: 100, tolerance: 0, checks: 3);

    // Allocate one slot per model weight for the current gradient,
    // the previous gradient, and the per-weight update steps.
    int n = Model.Function.Weights.Length;
    gradient = new double[n];
    previousGradient = new double[n];
    weightsUpdates = new double[n];

    // Initialize all step sizes to the configured initial step.
    Reset(initialStep);
}
/// <summary>
///   Constructs a new L-BFGS learning algorithm.
/// </summary>
/// 
/// <param name="model">The model to be trained.</param>
/// 
public HiddenQuasiNewtonLearning(HiddenConditionalRandomField<T> model)
{
    Model = model;

    // Objective and gradient evaluations come from the
    // forward-backward algorithm for this model.
    calculator = new ForwardBackwardGradient<T>(model);

    // One search dimension per model weight.
    lbfgs = new BoundedBroydenFletcherGoldfarbShanno(model.Function.Weights.Length);
    lbfgs.FunctionTolerance = 1e-3;
    lbfgs.Function = calculator.Objective;
    lbfgs.Gradient = calculator.Gradient;

    // Keep the weights inside a very large box so the bounded solver
    // is effectively unconstrained.
    // NOTE(review): the bounds are asymmetric (+1e10 vs -1e100); this
    // looks like a possible typo — confirm whether symmetric ±1e10 or
    // ±1e100 was intended before changing it.
    for (int j = 0; j < lbfgs.UpperBounds.Length; j++)
    {
        lbfgs.UpperBounds[j] = 1e10;
        lbfgs.LowerBounds[j] = -1e100;
    }
}