/// <summary>
///   Learns a model that can map the given inputs to the given outputs.
/// </summary>
/// <param name="x">The model inputs.</param>
/// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
/// <param name="weights">The weight of importance for each input-output pair. Weighted learning
///   is not supported by this solver; passing a non-null value throws.</param>
/// <returns>
///   A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
/// </returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="weights"/> is not null.</exception>
/// <exception cref="InvalidOperationException">Thrown (DEBUG builds only) when the solver
///   diverges to a NaN or infinite error.</exception>
public NonlinearRegression Learn(double[][] x, double[] y, double[] weights = null)
{
    // Fail fast instead of silently ignoring the argument: this solver
    // cannot use sample weights (matches the behavior of the other overloads).
    if (weights != null)
        throw new ArgumentException("Weighted samples are not supported by this learning algorithm.", "weights");

    solver.Token = Token;

#if DEBUG
    double error = solver.Minimize(x, y);
    if (Double.IsNaN(error) || Double.IsInfinity(error))
        throw new InvalidOperationException("Minimization diverged: the sum of squared errors is NaN or infinite.");
#else
    solver.Minimize(x, y);
#endif

    if (computeStandardErrors)
    {
        // Copy the solver's estimated standard errors into the model.
        double[] errors = solver.StandardErrors;
        for (int i = 0; i < errors.Length; i++)
            regression.StandardErrors[i] = errors[i];
    }

    return regression;
}
/// <summary>
///   Learns a model that can map the given inputs to the given outputs.
/// </summary>
/// <param name="x">The model inputs.</param>
/// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
/// <param name="weights">The weight of importance for each input-output pair
///   (if supported by the learning algorithm). Not supported here; must be null.</param>
/// <returns>
///   A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
/// </returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="weights"/> is not null.</exception>
/// <exception cref="InvalidOperationException">Thrown (DEBUG builds only) when the solver
///   diverges to a NaN or infinite error.</exception>
public NonlinearRegression Learn(double[][] x, double[] y, double[] weights = null)
{
    if (weights != null)
        throw new ArgumentException(Accord.Properties.Resources.NotSupportedWeights, "weights");

    solver.Token = Token;

#if DEBUG
    double error = solver.Minimize(x, y);
    // A NaN/infinite objective means the optimization diverged; surface it
    // loudly in debug builds instead of throwing an unnamed bare Exception.
    if (Double.IsNaN(error) || Double.IsInfinity(error))
        throw new InvalidOperationException("Minimization diverged: the sum of squared errors is NaN or infinite.");
#else
    solver.Minimize(x, y);
#endif

    if (computeStandardErrors)
    {
        // Copy the solver's estimated standard errors into the model.
        double[] errors = solver.StandardErrors;
        for (int i = 0; i < errors.Length; i++)
            regression.StandardErrors[i] = errors[i];
    }

    return regression;
}
/// <summary>
///   Learns a model that can map the given inputs to the given outputs.
/// </summary>
/// <param name="x">The model inputs.</param>
/// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
/// <param name="weights">The weight of importance for each input-output pair
///   (if supported by the learning algorithm). Not supported here; must be null.</param>
/// <returns>
///   A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
/// </returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="weights"/> is not null.</exception>
/// <exception cref="InvalidOperationException">Thrown when neither the number of parameters,
///   starting values, nor an initial regression model has been set, or when the solver
///   diverges to a NaN or infinite error.</exception>
public NonlinearRegression Learn(double[][] x, double[] y, double[] weights = null)
{
    if (weights != null)
        throw new ArgumentException(Accord.Properties.Resources.NotSupportedWeights, "weights");

    // Infer the number of parameters from the starting values when neither
    // has been configured and no initial model exists.
    // NOTE(review): when a regression model already exists and NumberOfParameters
    // is 0, numberOfParameters stays 0 below — presumably the model setter keeps
    // it in sync; verify against the enclosing class.
    if (NumberOfParameters == 0)
    {
        if (regression == null)
        {
            if (StartValues == null)
            {
                throw new InvalidOperationException("Please set the number of parameters, "
                    + "starting values, or the initial regression model.");
            }

            NumberOfParameters = StartValues.Length;
        }
    }

    // Lazily create the regression model, seeding it with the starting values if given.
    if (regression == null)
    {
        this.regression = new NonlinearRegression(numberOfParameters, function, gradient);
        if (StartValues != null)
            this.regression.Coefficients.SetTo(StartValues);
    }

    // Default to Levenberg-Marquardt when no solver has been configured.
    if (this.solver == null)
        this.solver = new LevenbergMarquardt(numberOfParameters);

    // Wire the solver to optimize the model's coefficients in place.
    this.solver.NumberOfVariables = numberOfParameters;
    this.solver.Solution = regression.Coefficients;
    this.solver.Function = new LeastSquaresFunction(regression.Function);
    this.solver.Gradient = new LeastSquaresGradientFunction(regression.Gradient);
    this.solver.Token = Token;

    double error = solver.Minimize(x, y);

    // A NaN/infinite objective means the optimization diverged; report it with
    // a specific exception rather than an unnamed bare Exception.
    if (Double.IsNaN(error) || Double.IsInfinity(error))
        throw new InvalidOperationException("Minimization diverged: the sum of squared errors is NaN or infinite.");

    if (computeStandardErrors)
    {
        // Copy the solver's estimated standard errors into the model.
        double[] errors = solver.StandardErrors;
        for (int i = 0; i < errors.Length; i++)
            regression.StandardErrors[i] = errors[i];
    }

    return regression;
}
/// <summary>
///   Runs the fitting algorithm.
/// </summary>
///
/// <param name="inputs">The input training data.</param>
/// <param name="outputs">The output associated with each of the inputs.</param>
///
/// <returns>
///   The sum of squared errors after the learning.
/// </returns>
/// <exception cref="InvalidOperationException">Thrown (DEBUG builds only) when the solver
///   diverges to a NaN or infinite error.</exception>
///
public double Run(double[][] inputs, double[] outputs)
{
    double error = solver.Minimize(inputs, outputs);

#if DEBUG
    // Consistent with the other fitting entry points: surface divergence
    // loudly in debug builds.
    if (Double.IsNaN(error) || Double.IsInfinity(error))
        throw new InvalidOperationException("Minimization diverged: the sum of squared errors is NaN or infinite.");
#endif

    if (computeStandardErrors)
    {
        // Copy the solver's estimated standard errors into the model.
        double[] errors = solver.StandardErrors;
        for (int i = 0; i < errors.Length; i++)
            regression.StandardErrors[i] = errors[i];
    }

    return error;
}
/// <summary>
///   Runs the fitting algorithm.
/// </summary>
///
/// <param name="inputs">The input training data.</param>
/// <param name="outputs">The output associated with each of the inputs.</param>
///
/// <returns>
///   The sum of squared errors after the learning.
/// </returns>
/// <exception cref="InvalidOperationException">Thrown (DEBUG builds only) when the solver
///   diverges to a NaN or infinite error.</exception>
///
public double Run(double[][] inputs, double[] outputs)
{
    double error = solver.Minimize(inputs, outputs);

    if (computeStandardErrors)
    {
        // Copy the solver's estimated standard errors into the model.
        double[] errors = solver.StandardErrors;
        for (int i = 0; i < errors.Length; i++)
            regression.StandardErrors[i] = errors[i];
    }

#if DEBUG
    // A NaN/infinite objective means the optimization diverged; report it with
    // a specific exception rather than an unnamed bare Exception.
    if (Double.IsNaN(error) || Double.IsInfinity(error))
        throw new InvalidOperationException("Minimization diverged: the sum of squared errors is NaN or infinite.");
#endif

    return error;
}