public void CanSolveTameQuadratic2dFact()
{
    var output = NelderMead.MethodSolve(QuadraticFunction2d, new double[] { 0, 0 }, new double[] { 0.5, 0.5 }, 1e-8, 10000);
    var functionOutput = QuadraticFunction2d(output);
    Assert.Equal(0, functionOutput, 4);
}
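// QuadraticFunction2d above is defined elsewhere in the test fixture and is not shown in this excerpt.
// A minimal sketch of a compatible cost function (hypothetical; the real fixture may use different
// coefficients): a smooth convex bowl whose minimum value is 0, which is what the assertion checks for.
private static double QuadraticFunction2d(double[] x)
{
    // Minimum value 0 at (1, 2), away from the starting guess (0, 0).
    return (x[0] - 1.0) * (x[0] - 1.0) + (x[1] - 2.0) * (x[1] - 2.0);
}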
public void TestExpansion()
{
    Poly cf = new Poly();
    NelderMead optim = new NelderMead(cf);
    DoubleVector[] simplex = new DoubleVector[3];
    simplex[0] = new DoubleVector(new double[2] { 1, 1 });
    simplex[1] = new DoubleVector(new double[2] { 1, -1 });
    simplex[2] = new DoubleVector(new double[2] { 2, 0 });
    optim.InitializeMethod(simplex);
    optim.Rho = 1.5;
    optim.Chi = 1 / 1.5;
    optim.IterateMethod();

    DoubleVector xr = (1 + optim.Rho * optim.Chi) * (new DoubleVector(new double[2] { 1, 0 })) - optim.Rho * optim.Chi * simplex[2];
    Assert.IsTrue(optim.LastStep == NelderMead.Step.Expansion);
    Assert.AreEqual(optim.Simplex[0][0], xr[0]);
    Assert.AreEqual(optim.Simplex[0][1], xr[1]);
}
public void TestInsideContraction()
{
    Poly cf = new Poly();
    NelderMead optim = new NelderMead(cf);
    DoubleVector[] simplex = new DoubleVector[3];
    simplex[0] = new DoubleVector(new double[2] { 1, 1 });
    simplex[1] = new DoubleVector(new double[2] { 1, -1 });
    simplex[2] = new DoubleVector(new double[2] { 2, 0 });
    optim.Rho = 10;
    optim.Psi = 0.5;
    optim.InitializeMethod(simplex);
    optim.IterateMethod();

    DoubleVector xr = (1 - optim.Psi) * (new DoubleVector(new double[2] { 1, 0 })) + optim.Psi * simplex[2];
    Assert.IsTrue(optim.LastStep == NelderMead.Step.InsideContraction);
    Assert.AreEqual(optim.Simplex[2][0], xr[0]);
    Assert.AreEqual(optim.Simplex[2][1], xr[1]);
}
public void TestInitializeMethod()
{
    var cf = new Rosenbrock();
    var optim = new NelderMead(cf);
    var x0 = new DoubleVector(new double[4] { 0, 1, 2, 3 });
    optim.SimplexDelta = 0.1;
    optim.SimplexZeroDelta = 0.0001;
    optim.InitializeMethod(x0);

    Assert.AreEqual(optim.Simplex.Length, 5);
    for (int i = 0; i < optim.Simplex.Length; i++)
    {
        Assert.AreEqual(optim.Simplex[i][0], x0[0], optim.SimplexZeroDelta);
        Assert.AreEqual(optim.Simplex[i][1], x0[1], optim.SimplexDelta * x0[1] + 0.001);
        Assert.AreEqual(optim.Simplex[i][2], x0[2], optim.SimplexDelta * x0[2] + 0.001);
        Assert.AreEqual(optim.Simplex[i][3], x0[3], optim.SimplexDelta * x0[3] + 0.001);
    }
    for (int i = 1; i < optim.Simplex.Length; i++)
    {
        Assert.IsTrue(cf.Value(optim.Simplex[i - 1]) < cf.Value(optim.Simplex[i]));
    }
}
public void TestReflection()
{
    var cf = new Poly();
    var optim = new NelderMead(cf);
    var simplex = new DoubleVector[3];
    simplex[0] = new DoubleVector(new double[2] { 1, 1 });
    simplex[1] = new DoubleVector(new double[2] { 1, -1 });
    simplex[2] = new DoubleVector(new double[2] { 2, 0 });
    optim.Rho = 1.5;
    optim.InitializeMethod(simplex);
    optim.IterateMethod();

    DoubleVector xr = (1 + optim.Rho) * (new DoubleVector(new double[2] { 1, 0 })) - optim.Rho * simplex[2];
    Assert.IsTrue(optim.LastStep == NelderMead.Step.Reflection);
    Assert.AreEqual(optim.Simplex[0][0], xr[0]);
    Assert.AreEqual(optim.Simplex[0][1], xr[1]);
}
public void gh335()
{
    // https://github.com/accord-net/framework/issues/335
    Func<Double[], Double> eval = (val) =>
    {
        // WxMaxima command: plot3d(y^2+4*y+x^2-2*x,[x,-3,5], [y,-5,3],[grid,8,8]);
        Double x = val[0];
        Double y = val[1];
        Double ret = y * y + 4 * y + x * x - 2 * x;
        Debug.WriteLine("{2}; x={0}; y={1}", x, y, ret);
        return ret;
    };

    // These values are relevant for my RealWorld(TM) scenario
    Double[] init = new double[] { 0.5, 0 };

    NelderMead nm = new NelderMead(2, eval);
    nm.Minimize(init);

    // Solution
    Assert.AreEqual(1, nm.Solution[0], 1e-7);
    Assert.AreEqual(-2, nm.Solution[1], 1e-6);
}
public void Solve()
{
    startingDist = Vector3.Distance(raytracer.eyePerspective.transform.position, planePos);

#if UNITY_EDITOR
    UnityEditor.Undo.RecordObject(newScreen, "Move Screen Position");
    foreach (Testing.DenseOptimizer.TransformEntry trans in TransformsToOptimize)
    {
        UnityEditor.Undo.RecordObject(trans.TransformToOptimize, "Optimize Transform");
    }
#endif

    solver = new NelderMead(Testing.DenseOptimizer.calculateCoordsFromTransformEntries(ref TransformsToOptimize, rotationUnitRatio), costFunction, simplexSize);
    for (int i = 0; i < 100; i++)
    {
        solver.stepSolver();
    }
    Testing.DenseOptimizer.setTransformsFromCoord(solver.simplexVertices[0].coordinates, ref TransformsToOptimize, rotationUnitRatio);

    newScreen.position = Vector3.ProjectOnPlane(newScreen.position - planePos, planeNormal) + planePos;
    newScreen.rotation = Quaternion.FromToRotation(newScreen.forward, planeNormal) * newScreen.rotation;
    //Debug.Log("First: " + solver.simplexVertices[0].cost + ", Last: " + solver.simplexVertices[solver.simplexVertices.Count - 1].cost);
}
public double[] Run(double[] Xobdata, double[] obdatay, double[] para
                    //, double noise
                    )
{
    Parameters.Verbosity = VerbosityLevels.OnlyCritical;
    //Debug.Listeners.Add(new TextWriterTraceListener(Console.Out));

    var optMethod = new NelderMead();
    optMethod.Add(new GoldenSection(1e-8, 0.001));
    optMethod.Add(new DeltaFConvergence(1e-7));

    var opt = new OneDObjectiveFunc(Xobdata, obdatay, para);
    optMethod.Add(opt);

    double[] xStar;
    var xInit = para;
    try
    {
        var fstar_opt = optMethod.Run(out xStar, xInit);
        var output = new double[4] { xStar[0], xStar[1], xStar[2], fstar_opt };
        return output;
    }
    catch
    {
        var output = new double[4] { 100, 1000, 1000, 100000 };
        return output;
    }
    // return xStar;
}
/// <summary>
/// Initializes and configures the current solver algorithm
/// </summary>
private void InitializeSolver()
{
    // Update ActiveConstants
    var count = 0;
    for (var i = 0; i < Constants.ConstantVariables.Count; i++)
    {
        ActiveConstants[i] = (bool)VariableGrid.Rows[i].Cells[0].Value;
        if (ActiveConstants[i])
        {
            count++;
        }
    }

    // Initialize Solver
    Solver = new NelderMead(count, SolverFX)
    {
        Convergence = new GeneralConvergence(count)
        {
            Evaluations = 0,
            MaximumEvaluations = 200
        },
        Token = TokenSource.Token
    };

    // Set step size
    const double scale = 0.15;
    var data = ParseConstantsInput(true);
    for (var i = 0; i < Solver.StepSize.Length; i++)
    {
        Solver.StepSize[i] = data[i] * scale;
    }

    // Set lower and upper bounds
    count = 0;
    for (var i = 0; i < ActiveConstants.Length; i++)
    {
        if (!ActiveConstants[i])
        {
            continue;
        }
        if (float.TryParse(Convert.ToString(VariableGrid.Rows[i].Cells[5].Value), out var min))
        {
            Solver.LowerBounds[count] = min;
        }
        if (float.TryParse(Convert.ToString(VariableGrid.Rows[i].Cells[4].Value), out var max))
        {
            Solver.UpperBounds[count] = max;
        }
        count++;
    }
}
public double[] Run(double[,] Xobdata, double[] obdatay, double[] para)
{
    Parameters.Verbosity = VerbosityLevels.OnlyCritical;
    // this next line is to set the Debug statements from OOOT to the Console.
    //Debug.Listeners.Add(new TextWriterTraceListener(Console.Out));

    // var optMethod = new GradientBasedOptimization();
    // var Y = ThreeDinput.GetfVactor(obdatay);
    // var optMethod = new GradientBasedOptimization();
    // var optMethod = new GeneralizedReducedGradientActiveSet();
    // var optMethod = new HillClimbing();
    var optMethod = new NelderMead();
    // optMethod.Add(new StochasticNeighborGenerator);
    // var optMethod = new NelderMead();
    // optMethod.Add(new CyclicCoordinates());
    // optMethod.Add(new CyclicCoordinates());
    optMethod.Add(new GoldenSection(1e-7, 0.01));
    // optMethod.Add(new DeltaXConvergence(1e-10));
    optMethod.Add(new DeltaFConvergence(1e-6));
    // optMethod.Add(new MaxSpanInPopulationConvergence(1e-3));
    //optMethod.Add(new inequalityWithConstant())

    var opt = new OneDObjectiveFunc(Xobdata, obdatay, para);
    optMethod.Add(opt);
    // optMethod.Add(new Inequality(opt, Xobdata, obdatay));
    // optMethod.Add(new OptimizationToolbox.greaterThanConstant { constant = 0.0, index = 0 });
    //optMethod.Add(new OptimizationToolbox.greaterThanConstant { constant = 0.0, index = 1 });
    //optMethod.Add(new OptimizationToolbox.greaterThanConstant { constant = 0.0, index = 2 });
    //optMethod.Add(new OptimizationToolbox.lessThanConstant() { constant = 0.30, index = 2 });
    //optMethod.Add(new OptimizationToolbox.squaredExteriorPenalty(optMethod, 1.0));

    // var p = new double[2] { para[0], para[1] };
    double[] xStar;
    var xInit = para;
    try
    {
        var fstar_opt = optMethod.Run(out xStar, xInit);
        var output = new double[5] { xStar[0], xStar[1], xStar[2], xStar[3], fstar_opt };
        // var output = new double[4] { xStar[0], xStar[1], xStar[2], fstar_opt }; // 1d
        return output;
    }
    catch
    {
        // var output = new double[4] { 1000, 1000, 1000, 100000000 }; // 1d
        var output = new double[5] { 100, 1000, 1000, 1000, 100000000 };
        return output;
    }
    // return xStar;
}
void Start()
{
    solver = new NelderMead(calculateCoordsFromTransformEntries(ref TransformsToOptimize), distanceCost, 0.001f);

    filename = Directory.GetParent(Application.dataPath).FullName + "/" + gameObject.name + " DisplayCalibration.txt";
    if (File.Exists(filename))
    {
        string calibrationData = File.ReadAllText(filename);
        ListWrapper serializableList = JsonUtility.FromJson<ListWrapper>(calibrationData);
        PointCorrespondences = serializableList.data;
    }
}
protected void RunFullTPSquared()
{
    Random r = new Random();
    double[,] desiredPath =
    {
        { 1.87, 8 }, { 2.93, 8.46 }, { 2.80, 8.41 }, { 1.99, 8.06 }, { 0.96, 7.46 }, { 0, 6.71 },
        { -0.77, 5.93 }, { -1.3, 5.26 }, { -1.60, 4.81 }, { -1.65, 4.75 }, { -1.25, 5.33 }, { 0, 6.71 }
    };
    double startAngle = 0;
    double endAngle = 2 * Math.PI;
    double iOmega = 2;
    double iAlpha = 0;

    MechSimulation sim = new MechSimulation();
    BoundingBox bb = new BoundingBox(sim, 10, 10);
    GrashofCriteria cc = new GrashofCriteria(sim, 0);
    List<candidate> candidates = new List<candidate>();

    while (true) //notConverged())
    {
        // 1. Generate topologies - calling rulesets - this adds candidates to the candidates list.

        // 2. Evaluate & Param Tuning
        foreach (candidate c in candidates)
        {
            if (double.IsNaN(c.f0))
            {
                sim.Graph = c.graph;
                NelderMead NMOpt = new NelderMead();
                NMOpt.Add(sim);
                //gbu.Add(new GoldenSection(.001, 20));
                //gbu.Add(new BFGSDirection());
                NMOpt.Add(new MaxIterationsConvergence(100));

                double[] x0 = new double[8];
                for (int i = 0; i < x0.GetLength(0); i++) // since I am going to assign ground pivots as they are
                {
                    x0[i] = r.NextDouble();
                }
                double[] xStar;
                double fStar = NMOpt.Run(out xStar, x0);
                // double fStar = NMOpt.Run(out xStar, 8);
                c.f0 = fStar;
            }
        }

        // 3. Pruning
        // throw out topologies (candidates) that have bad/large values of f0.

        // 4. Guide?
    }
    SearchIO.output("***Completed!***");
}
public void Strip()
{
    // Set the dates for all the curves
    foreach (ICurveForStripping curve in curveDates.Keys)
    {
        curve.SetDates(curveDates[curve].ToArray());
    }

    // Check that all the products can be valued
    double[] values = new double[targetMetrics.Count];
    for (int i = 0; i < targetMetrics.Count; i++)
    {
        values[i] = targetMetrics[i]();
    }

    // Get the vector to be solved
    List<double> guessList = new List<double>();
    int totalCounter = 0;
    for (int i = 0; i < curveSet.Count; i++)
    {
        double[] rates = curveSet[i].GetRates();
        for (int j = 0; j < rates.Length; j++)
        {
            guessList.Add(rates[j]);
            curveAndIndexMap[totalCounter] = new Tuple<int, int>(i, j);
            totalCounter++;
        }
    }
    if (guessList.Count != targetMetrics.Count)
    {
        throw new ArgumentException(string.Format("There are {0} metrics as constraints but the curves have {1} free parameters.", targetMetrics.Count, guessList.Count));
    }
    double[] guess = guessList.ToArray();

    var optimizer = new NelderMead(numberOfVariables: guess.Length, function: ErrorFunction);
    //var optimizer = new Cobyla(numberOfVariables: guess.Length, function: ErrorFunction);
    //var optimizer = new BroydenFletcherGoldfarbShanno(numberOfVariables: guess.Length);
    //optimizer.Function = ErrorFunction;
    //optimizer.Gradient = ??
    //nm.Convergence.
    bool success = optimizer.Minimize(guess);
    double minValue = optimizer.Value;
    double[] solution = optimizer.Solution;

    var optimizer2 = new NelderMead(numberOfVariables: guess.Length, function: ErrorFunction);
    optimizer2.Minimize(solution);
}
public SviRawParameters SolveSviRaw(ATMStraddleConstraint atmConstraint, RRBFConstraint[] smileConstraints, DateTime buildDate, DateTime expiry, double fwd, bool vegaWeightedFit = true)
{
    _atmConstraint = atmConstraint;
    _smileConstraints = smileConstraints;
    _numberOfConstraints = smileConstraints.Length * 2 + 1;
    _vegaWeighted = vegaWeightedFit;
    _fwd = fwd;
    _buildDate = buildDate;
    _tExp = (expiry - buildDate).TotalDays / 365.0;

    var startingPoint = new[] { atmConstraint.MarketVol * atmConstraint.MarketVol * _tExp - Sqrt(atmConstraint.MarketVol), 1.0, smileConstraints.Average(x => x.RisykVol) >= 0 ? 0.1 : -0.1, 0, Sqrt(atmConstraint.MarketVol) };
    //var startingPoint = new[] { atmConstraint.MarketVol * atmConstraint.MarketVol * _tExp - Sqrt(atmConstraint.MarketVol), 0.5, smileConstraints.Average(x => x.RisykVol) >= 0 ? 0.25 : -0.25, 0, Sqrt(atmConstraint.MarketVol) };
    var initialStep = new[] { atmConstraint.MarketVol * atmConstraint.MarketVol, 0.5, 0.5, 0.002, Sqrt(atmConstraint.MarketVol) / 2 };
    //var startingPoint = new[] { atmConstraint.MarketVol, 1.0, 0.1, 0, 0.1 };
    //var initialStep = new[] { 0.1, 0.25, 0.25, 0.01, 0.1 };

    var currentError = new Func<double[], double>(x =>
    {
        var currentSVI = new SviRawParameters
        {
            A = x[0],
            B = x[1],
            Rho = x[2],
            M = x[3],
            Sigma = x[4],
        };
        var e = ComputeErrorsSviRaw(currentSVI);
        return Sqrt(e.Sum());
    });

    SetupConstraints();
    var optimal = NelderMead.MethodSolve(currentError, startingPoint, initialStep, 1e-10, 50000);

    return new SviRawParameters
    {
        A = optimal[0],
        B = optimal[1],
        Rho = optimal[2],
        M = optimal[3],
        Sigma = optimal[4],
    };
}
private void button1_Click(object sender, RoutedEventArgs e)
{
    Function.Vector start = new Function.Vector(2);
    start[0] = Double.Parse(vX1.Text);
    start[1] = Double.Parse(vX1.Text);

    HookeJeeves sol1 = new HookeJeeves(start);
    Function.Vector res = sol1.solve(new MyFunction(), Double.Parse(vPrecision.Text));
    DELETEMe = res[0] + "; " + res[1];

    NelderMead sol2 = new NelderMead(start);
    Function.Vector res2 = sol2.solve(new MyFunction(), Double.Parse(vPrecision.Text));
    DELETEMe += "\n" + res2[0] + "; " + res2[1];

    DataContext = this;
}
public NelderMeadTests()
{
    var hyps = NelderMeadHyperParameters.GetDefaultHyperParameters();
    hyps.UpdateHyperParameterValue(NelderMeadHyperParameters.Simplex_Creation_Step_Size, Step_Size);

    var fitnessCalc = new FitnessCalculatorSingleObjective(
        v => v.ElementAt(0),
        v => 1000.0);

    optimiser = new NelderMead(
        fitnessCalc,
        DecisionVector.CreateFromArray(
            DecisionSpace.CreateForUniformDoubleArray(Number_Of_Dimensions, double.MinValue, double.MaxValue),
            Enumerable.Repeat(0.0, Number_Of_Dimensions)),
        hyps);
}
public void TestRosenbrock()
{
    var cf = new Rosenbrock();
    var optim = new NelderMead(cf);
    var x0 = new DoubleVector(new double[5] { 1.3, 0.7, 0.8, 1.9, 1.2 });
    optim.Minimize(x0);

    Assert.AreEqual(optim.SolutionValue, 0.0, 0.0001);
    Assert.AreEqual(optim.SolutionVector[0], 1.0, 0.0001);
    Assert.AreEqual(optim.SolutionVector[1], 1.0, 0.0001);
    Assert.AreEqual(optim.SolutionVector[2], 1.0, 0.0001);
    Assert.AreEqual(optim.SolutionVector[3], 1.0, 0.0001);
    Assert.AreEqual(optim.SolutionVector[4], 1.0, 0.0001);
}
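// The Rosenbrock cost-function class used above belongs to the library's own cost-function type and is
// not shown here. Its assertions (minimum value 0 at x = (1, ..., 1)) match the standard n-dimensional
// Rosenbrock function; a plain-delegate sketch of that function, for reference only:
static double RosenbrockValue(double[] x)
{
    // f(x) = sum_i 100*(x[i+1] - x[i]^2)^2 + (1 - x[i])^2, global minimum 0 at all ones.
    double sum = 0.0;
    for (int i = 0; i < x.Length - 1; i++)
    {
        double a = x[i + 1] - x[i] * x[i];
        double b = 1.0 - x[i];
        sum += 100.0 * a * a + b * b;
    }
    return sum;
}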
public void ConstructorTest4()
{
    var function = new NonlinearObjectiveFunction(2, x =>
        Math.Pow(x[0] * x[0] - x[1], 2.0) + Math.Pow(1.0 + x[0], 2.0));

    NelderMead solver = new NelderMead(function);

    Assert.IsTrue(solver.Minimize());
    double minimum = solver.Value;
    double[] solution = solver.Solution;

    Assert.AreEqual(0, minimum, 1e-10);
    Assert.AreEqual(-1, solution[0], 1e-5);
    Assert.AreEqual(1, solution[1], 1e-4);

    double expectedMinimum = function.Function(solver.Solution);
    Assert.AreEqual(expectedMinimum, minimum);
}
/// <summary>
/// Fits a Nelson Siegel curve to data
/// </summary>
/// <param name="anchorDate"></param>
/// <param name="dates"></param>
/// <param name="rates"></param>
/// <returns></returns>
public static NelsonSiegel Fit(Date anchorDate, Date[] dates, double[] rates)
{
    var times = new double[dates.Length];
    for (var i = 0; i < dates.Length; i++)
    {
        times[i] = dates[i] - anchorDate;
    }

    Func<double[], double> f = x => ErrorFunction(x, times, rates);
    var nm = new NelderMead(4, f);
    var success = nm.Minimize(new[] { rates[0], rates[0], rates[0], times.Last() / 5.0 });
    var minValue = nm.Value;
    var solution = nm.Solution;
    var curve = new NelsonSiegel(anchorDate, solution[0], solution[1], solution[2], solution[3]);
    return curve;
}
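// ErrorFunction is defined elsewhere in the class. A minimal sketch, assuming it returns the sum of
// squared differences between Nelson-Siegel model rates (x = { beta0, beta1, beta2, tau }) and the
// observed rates; the library's actual implementation may differ:
private static double ErrorFunction(double[] x, double[] times, double[] rates)
{
    double error = 0.0;
    for (int i = 0; i < times.Length; i++)
    {
        double u = times[i] / x[3];
        // Limit of (1 - exp(-u)) / u as u -> 0 is 1, so guard the anchor-date point.
        double decay = u > 1e-12 ? (1.0 - Math.Exp(-u)) / u : 1.0;
        double modelRate = x[0] + x[1] * decay + x[2] * (decay - Math.Exp(-u));
        double diff = modelRate - rates[i];
        error += diff * diff;
    }
    return error;
}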
public void ConstructorTest1()
{
    Func<double[], double> function = // min f(x) = 10 * (x+1)^2 + y^2
        x => 10.0 * Math.Pow(x[0] + 1.0, 2.0) + Math.Pow(x[1], 2.0);

    NelderMead solver = new NelderMead(2, function);

    Assert.IsTrue(solver.Minimize());
    double minimum = solver.Value;
    double[] solution = solver.Solution;

    Assert.AreEqual(0, minimum, 1e-10);
    Assert.AreEqual(-1, solution[0], 1e-5);
    Assert.AreEqual(0, solution[1], 1e-5);

    double expectedMinimum = function(solver.Solution);
    Assert.AreEqual(expectedMinimum, minimum);
}
/// <summary>
/// Fits a Nelson Siegel curve to data
/// </summary>
/// <param name="anchorDate"></param>
/// <param name="dates"></param>
/// <param name="rates"></param>
/// <returns></returns>
public static NelsonSiegel Fit(Date anchorDate, Date[] dates, double[] rates)
{
    double[] times = new double[dates.Length];
    for (int i = 0; i < dates.Length; i++)
    {
        times[i] = dates[i] - anchorDate;
    }

    Func<double[], double> f = (x) => ErrorFunction(x, times, rates);
    var nm = new NelderMead(numberOfVariables: 4, function: f);
    bool success = nm.Minimize(new double[] { rates[0], rates[0], rates[0], times.Last() / 5.0 });
    double minValue = nm.Value;
    double[] solution = nm.Solution;
    NelsonSiegel curve = new NelsonSiegel(anchorDate, solution[0], solution[1], solution[2], solution[3]);
    return curve;
}
public static void doTestHarness()
{
    try
    {
        if (1 == 1)
        {
            QuadraticSearchFunction2D qsf2d = new QuadraticSearchFunction2D();
            NelderMead2D nm2d = new NelderMead2D();
            nm2d.MAX_ITERATIONS = 500;
            nm2d.initialiseSearch(qsf2d, new Point2D(0, 0), new Point2D(100, 100), new Point2D(-100, -50));

            string error_message;
            double optimal_score;
            Point2D result = nm2d.search(out error_message, out optimal_score);
            Console.WriteLine("Lowest at " + result + " with score " + optimal_score + " with error " + error_message);
        }

        if (1 == 1)
        {
            QuadraticSearchFunction qsf = new QuadraticSearchFunction();
            NelderMead nm = new NelderMead(3);
            nm.MAX_ITERATIONS = 500;

            double[][] initial_points = new double[4][];
            for (int i = 0; i < 4; ++i)
            {
                initial_points[i] = new double[3];
            }
            initial_points[0][0] = 0;    initial_points[0][1] = 0;
            initial_points[1][0] = 100;  initial_points[1][1] = 100;
            initial_points[2][0] = -100; initial_points[2][1] = -50;
            initial_points[3][0] = -200; initial_points[3][1] = -150;
            nm.initialiseSearch(qsf, initial_points);

            string error_message;
            double optimal_score;
            double[] result = nm.search(out error_message, out optimal_score);
            Console.WriteLine("Lowest at " + ArrayFormatter.listElements(result) + " with score " + optimal_score + " with error " + error_message);
        }
    }
    catch (GenericException e)
    {
        Console.WriteLine("There was an error: {0}", e.Message);
    }
}
public SABRParameters SolveSABR(ATMStraddleConstraint atmConstraint, RRBFConstraint[] smileConstraints, DateTime buildDate, DateTime expiry, double fwd, double beta = 1.0, bool vegaWeightedFit = true)
{
    _atmConstraint = atmConstraint;
    _smileConstraints = smileConstraints;
    _numberOfConstraints = smileConstraints.Length * 2 + 1;
    _vegaWeighted = vegaWeightedFit;
    _fwd = fwd;
    _buildDate = buildDate;
    _tExp = (expiry - buildDate).TotalDays / 365.0;

    var startingPoint = new[] { atmConstraint.MarketVol, Sqrt(atmConstraint.MarketVol), smileConstraints.Average(x => x.RisykVol) >= 0 ? 0.1 : -0.1 };
    var initialStep = new[] { 0.1, 0.25, 0.25 };

    var currentError = new Func<double[], double>(x =>
    {
        var currentSABR = new SABRParameters
        {
            Alpha = x[0],
            Beta = beta,
            Nu = x[1],
            Rho = x[2],
        };
        var e = ComputeErrorsSABR(currentSABR);
        return e.Sum(ee => ee * ee);
    });

    SetupConstraints();
    var optimal = NelderMead.MethodSolve(currentError, startingPoint, initialStep, 1e-8, 10000);

    return new SABRParameters
    {
        Alpha = optimal[0],
        Beta = beta,
        Nu = optimal[1],
        Rho = optimal[2],
    };
}
// Initialize the solver with a starting state and a cost function
void Start()
{
    debugPoints = new Vector4[dimensions + 1];
    float[] initialCoord = new float[dimensions];
    for (int i = 0; i < dimensions; i++)
    {
        initialCoord[i] = Random.value * 7f;
    }
    solver = new NelderMead(initialCoord, distanceCost);

    for (int i = 0; i < solver.simplexVertices.Count; i++)
    {
        int index = solver.simplexVertices[i].originalIndex;
        debugPoints[index] = new Vector4(
            solver.simplexVertices[i].coordinates[0],
            solver.simplexVertices[i].coordinates[1],
            solver.simplexVertices[i].coordinates[2],
            solver.simplexVertices[i].cost);
    }
}
void Start()
{
    float[] initialCoord = new float[12];
    for (int i = 0; i < 3; i++)
    {
        initialCoord[i] = reflector.localPosition[i];
    }
    for (int i = 0; i < 3; i++)
    {
        initialCoord[i + 3] = reflector.localRotation.eulerAngles[i] / rotationUnitRatio;
    }
    for (int i = 0; i < 3; i++)
    {
        initialCoord[i + 6] = screen.localPosition[i];
    }
    for (int i = 0; i < 3; i++)
    {
        initialCoord[i + 9] = screen.localRotation.eulerAngles[i] / rotationUnitRatio;
    }
    solver = new NelderMead(initialCoord, distanceCost, 0.001f);
}
public void SubspaceTest1()
{
    var function = new NonlinearObjectiveFunction(5, x =>
        10.0 * Math.Pow(x[0] * x[0] - x[1], 2.0) + Math.Pow(1.0 + x[0], 2.0));

    NelderMead solver = new NelderMead(function);
    solver.NumberOfVariables = 2;

    Assert.IsTrue(solver.Minimize());
    double minimum = solver.Value;
    double[] solution = solver.Solution;

    Assert.AreEqual(5, solution.Length);
    Assert.AreEqual(-0, minimum, 1e-6);
    Assert.AreEqual(-1, solution[0], 1e-3);
    Assert.AreEqual(+1, solution[1], 1e-3);

    double expectedMinimum = function.Function(solver.Solution);
    Assert.AreEqual(expectedMinimum, minimum);
}
public void TestShrink()
{
    Poly cf = new Poly();
    NelderMead optim = new NelderMead(cf);
    DoubleVector[] simplex = new DoubleVector[3];
    simplex[0] = new DoubleVector(new double[2] { 1, 1 });
    simplex[1] = new DoubleVector(new double[2] { 1, -1 });
    simplex[2] = new DoubleVector(new double[2] { 2, 0 });
    optim.Rho = 10;
    optim.Psi = 1.5;
    optim.InitializeMethod(simplex);
    optim.IterateMethod();
    Assert.IsTrue(optim.LastStep == NelderMead.Step.Shrink);
}
public void ConstructorTest1()
{
    #region doc_min
    // Let's say we would like to find the minimum
    // of the function "f(x) = 10 * (x+1)^2 + y^2".

    // In code, this means we would like to minimize:
    Func<double[], double> function = (double[] x) =>
        10.0 * Math.Pow(x[0] + 1.0, 2.0) + Math.Pow(x[1], 2.0);

    // We can do so using the NelderMead class:
    var solver = new NelderMead(numberOfVariables: 2)
    {
        Function = function // f(x) = 10 * (x+1)^2 + y^2
    };

    // Now, we can minimize it with:
    bool success = solver.Minimize();

    // And get the solution vector using
    double[] solution = solver.Solution; // should be (-1, 0)

    // The minimum at this location would be:
    double minimum = solver.Value; // should be 0

    // Which can be double-checked against Wolfram Alpha if there is need:
    // https://www.wolframalpha.com/input/?i=min+10+*+(x%2B1)%5E2+%2B+y%5E2
    #endregion

    Assert.IsTrue(success);
    Assert.AreEqual(0, minimum, 1e-10);
    Assert.AreEqual(-1, solution[0], 1e-5);
    Assert.AreEqual(0, solution[1], 1e-5);

    double expectedMinimum = function(solver.Solution);
    Assert.AreEqual(expectedMinimum, minimum);
}
protected override void Run(string[] args)
{
    if (file_format == RatingFileFormat.KDDCUP_2011)
    {
        user_mapping = new IdentityMapping();
        item_mapping = new IdentityMapping();
    }

    base.Run(args);

    bool do_eval = false;
    if (test_ratio > 0 || chronological_split != null)
    {
        do_eval = true;
    }
    if (test_file != null && !test_no_ratings)
    {
        do_eval = true;
    }

    Console.Error.WriteLine(
        string.Format(CultureInfo.InvariantCulture, "ratings range: [{0}, {1}]", recommender.MinRating, recommender.MaxRating));

    if (test_ratio > 0)
    {
        var split = new RatingsSimpleSplit(training_data, test_ratio);
        recommender.Ratings = training_data = split.Train[0];
        test_data = split.Test[0];
        Console.Error.WriteLine(string.Format(CultureInfo.InvariantCulture, "test ratio {0}", test_ratio));
    }
    if (chronological_split != null)
    {
        var split = chronological_split_ratio != -1
            ? new RatingsChronologicalSplit((ITimedRatings)training_data, chronological_split_ratio)
            : new RatingsChronologicalSplit((ITimedRatings)training_data, chronological_split_time);
        recommender.Ratings = training_data = split.Train[0];
        test_data = split.Test[0];
        if (test_ratio != -1)
        {
            Console.Error.WriteLine(string.Format(CultureInfo.InvariantCulture, "test ratio (chronological) {0}", chronological_split_ratio));
        }
        else
        {
            Console.Error.WriteLine(string.Format(CultureInfo.InvariantCulture, "split time {0}", chronological_split_time));
        }
    }

    Console.Write(training_data.Statistics(test_data, user_attributes, item_attributes));

    if (find_iter != 0)
    {
        if (!(recommender is IIterativeModel))
        {
            Abort("Only iterative recommenders (interface IIterativeModel) support --find-iter=N.");
        }

        var iterative_recommender = recommender as IIterativeModel;
        iterative_recommender.NumIter = num_iter;
        Console.WriteLine(recommender);

        if (cross_validation > 1)
        {
            recommender.DoIterativeCrossValidation(cross_validation, max_iter, find_iter);
        }
        else
        {
            var eval_stats = new List<double>();

            if (load_model_file == null)
            {
                recommender.Train();
            }

            if (compute_fit)
            {
                Console.WriteLine("fit {0} iteration {1}", Render(recommender.Evaluate(training_data)), iterative_recommender.NumIter);
            }

            Console.WriteLine("{0} iteration {1}", Render(Evaluate()), iterative_recommender.NumIter);

            for (int it = (int)iterative_recommender.NumIter + 1; it <= max_iter; it++)
            {
                TimeSpan time = Wrap.MeasureTime(delegate() { iterative_recommender.Iterate(); });
                training_time_stats.Add(time.TotalSeconds);

                if (it % find_iter == 0)
                {
                    if (compute_fit)
                    {
                        time = Wrap.MeasureTime(delegate() {
                            Console.WriteLine("fit {0} iteration {1}", recommender.Evaluate(training_data), it);
                        });
                        fit_time_stats.Add(time.TotalSeconds);
                    }

                    EvaluationResults results = null;
                    time = Wrap.MeasureTime(delegate() { results = Evaluate(); });
                    eval_time_stats.Add(time.TotalSeconds);
                    eval_stats.Add(results[eval_measures[0]]);
                    Console.WriteLine("{0} iteration {1}", Render(results), it);

                    Model.Save(recommender, save_model_file, it);
                    if (prediction_file != null)
                    {
                        recommender.WritePredictions(test_data, prediction_file + "-it-" + it, user_mapping, item_mapping, prediction_line, prediction_header);
                    }

                    if (epsilon > 0.0 && results[eval_measures[0]] - eval_stats.Min() > epsilon)
                    {
                        Console.Error.WriteLine(string.Format(CultureInfo.InvariantCulture, "{0} >> {1}", results[eval_measures[0]], eval_stats.Min()));
                        Console.Error.WriteLine("Reached convergence on training/validation data after {0} iterations.", it);
                        break;
                    }
                    if (results[eval_measures[0]] > cutoff)
                    {
                        Console.Error.WriteLine("Reached cutoff after {0} iterations.", it);
                        break;
                    }
                }
            } // for

            if (max_iter % find_iter != 0)
            {
                recommender.WritePredictions(test_data, prediction_file, user_mapping, item_mapping, prediction_line, prediction_header);
            }
        }
    }
    else
    {
        TimeSpan seconds;
        Console.Write(recommender + " ");

        if (load_model_file == null)
        {
            if (cross_validation > 1)
            {
                Console.WriteLine();
                var results = DoCrossValidation();
                Console.Write(Render(results));
                do_eval = false;
            }
            else
            {
                if (search_hp)
                {
                    double result = NelderMead.FindMinimum("RMSE", recommender);
                    Console.Error.WriteLine("estimated quality (on split) {0}", result.ToString(CultureInfo.InvariantCulture));
                }

                seconds = Wrap.MeasureTime(delegate() { recommender.Train(); });
                Console.Write(" training_time " + seconds + " ");
            }
        }

        if (do_eval)
        {
            if (online_eval)
            {
                seconds = Wrap.MeasureTime(delegate() { Console.Write(Render(recommender.EvaluateOnline(test_data))); });
            }
            else
            {
                seconds = Wrap.MeasureTime(delegate() { Console.Write(Render(Evaluate())); });
            }
            Console.Write(" testing_time " + seconds);

            if (compute_fit)
            {
                Console.Write("\nfit ");
                seconds = Wrap.MeasureTime(delegate() { Console.Write(Render(recommender.Evaluate(training_data))); });
                Console.Write(" fit_time " + seconds);
            }
        }

        if (prediction_file != null)
        {
            Console.WriteLine();
            seconds = Wrap.MeasureTime(delegate() {
                recommender.WritePredictions(test_data, prediction_file, user_mapping, item_mapping, prediction_line, prediction_header);
            });
            Console.Error.WriteLine("prediction_time " + seconds);
        }

        Console.WriteLine();
    }

    Model.Save(recommender, save_model_file);
    DisplayStats();
}
static void Main(string[] args)
{
    Assembly assembly = Assembly.GetExecutingAssembly();
    Assembly.LoadFile(Path.GetDirectoryName(assembly.Location) + Path.DirectorySeparatorChar + "MyMediaLiteExperimental.dll");

    AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(Handlers.UnhandledExceptionHandler);
    Console.CancelKeyPress += new ConsoleCancelEventHandler(AbortHandler);

    // recommender arguments
    string method = "BiasedMatrixFactorization";
    string recommender_options = string.Empty;

    // help/version
    bool show_help = false;
    bool show_version = false;

    // arguments for iteration search
    int find_iter = 0;
    int max_iter = 500;
    double epsilon = 0;
    double rmse_cutoff = double.MaxValue;
    double mae_cutoff = double.MaxValue;

    // data arguments
    string data_dir = string.Empty;
    string user_attributes_file = string.Empty;
    string item_attributes_file = string.Empty;
    string user_relations_file = string.Empty;
    string item_relations_file = string.Empty;

    // other arguments
    bool online_eval = false;
    bool search_hp = false;
    string save_model_file = string.Empty;
    string load_model_file = string.Empty;
    int random_seed = -1;
    string prediction_file = string.Empty;
    string prediction_line = "{0}\t{1}\t{2}";
    int cross_validation = 0;
    double split_ratio = 0;

    var p = new OptionSet()
    {
        // string-valued options
        { "training-file=",       v => training_file = v },
        { "test-file=",           v => test_file = v },
        { "recommender=",         v => method = v },
        { "recommender-options=", v => recommender_options += " " + v },
        { "data-dir=",            v => data_dir = v },
        { "user-attributes=",     v => user_attributes_file = v },
        { "item-attributes=",     v => item_attributes_file = v },
        { "user-relations=",      v => user_relations_file = v },
        { "item-relations=",      v => item_relations_file = v },
        { "save-model=",          v => save_model_file = v },
        { "load-model=",          v => load_model_file = v },
        { "prediction-file=",     v => prediction_file = v },
        { "prediction-line=",     v => prediction_line = v },
        // integer-valued options
        { "find-iter=",        (int v) => find_iter = v },
        { "max-iter=",         (int v) => max_iter = v },
        { "random-seed=",      (int v) => random_seed = v },
        { "cross-validation=", (int v) => cross_validation = v },
        // double-valued options
        { "epsilon=",     (double v) => epsilon = v },
        { "rmse-cutoff=", (double v) => rmse_cutoff = v },
        { "mae-cutoff=",  (double v) => mae_cutoff = v },
        { "split-ratio=", (double v) => split_ratio = v },
        // enum options
        { "rating-type=", (RatingType v) => rating_type = v },
        { "file-format=", (RatingFileFormat v) => file_format = v },
        // boolean options
        { "compute-fit",       v => compute_fit = v != null },
        { "online-evaluation", v => online_eval = v != null },
        { "search-hp",         v => search_hp = v != null },
        { "help",              v => show_help = v != null },
        { "version",           v => show_version = v != null },
    };
    IList<string> extra_args = p.Parse(args);

    // TODO make sure interaction of --find-iter and --cross-validation works properly
    bool no_eval = test_file == null;

    if (show_version)
    {
        ShowVersion();
    }
    if (show_help)
    {
        Usage(0);
    }
    if (extra_args.Count > 0)
    {
        Usage("Did not understand " + extra_args[0]);
    }
    if (training_file == null)
    {
        Usage("Parameter --training-file=FILE is missing.");
    }
    if (cross_validation != 0 && split_ratio != 0)
    {
        Usage("--cross-validation=K and --split-ratio=NUM are mutually exclusive.");
    }
    if (random_seed != -1)
    {
        MyMediaLite.Util.Random.InitInstance(random_seed);
    }

    recommender = Recommender.CreateRatingPredictor(method);
    if (recommender == null)
    {
        Usage(string.Format("Unknown method: '{0}'", method));
    }
    Recommender.Configure(recommender, recommender_options, Usage);

    // ID mapping objects
    if (file_format == RatingFileFormat.KDDCUP_2011)
    {
        user_mapping = new IdentityMapping();
        item_mapping = new IdentityMapping();
    }

    // load all the data
    LoadData(data_dir, user_attributes_file, item_attributes_file, user_relations_file, item_relations_file, !online_eval);

    Console.Error.WriteLine(string.Format(CultureInfo.InvariantCulture, "ratings range: [{0}, {1}]", recommender.MinRating, recommender.MaxRating));

    if (split_ratio > 0)
    {
        var split = new RatingsSimpleSplit(training_data, split_ratio);
        recommender.Ratings = split.Train[0];
        training_data = split.Train[0];
        test_data = split.Test[0];
    }

    Utils.DisplayDataStats(training_data, test_data, recommender);

    if (find_iter != 0)
    {
        if (!(recommender is IIterativeModel))
        {
            Usage("Only iterative recommenders support find_iter.");
        }
        var iterative_recommender = (IIterativeModel)recommender;
        Console.WriteLine(recommender.ToString() + " ");

        if (load_model_file == string.Empty)
        {
            recommender.Train();
        }
        else
        {
            Recommender.LoadModel(iterative_recommender, load_model_file);
        }

        if (compute_fit)
        {
            Console.Write(string.Format(CultureInfo.InvariantCulture, "fit {0,0:0.#####} ", iterative_recommender.ComputeFit()));
        }

        MyMediaLite.Eval.Ratings.DisplayResults(MyMediaLite.Eval.Ratings.Evaluate(recommender, test_data));
        Console.WriteLine(" iteration " + iterative_recommender.NumIter);

        for (int i = (int)iterative_recommender.NumIter + 1; i <= max_iter; i++)
        {
            TimeSpan time = Utils.MeasureTime(delegate() { iterative_recommender.Iterate(); });
            training_time_stats.Add(time.TotalSeconds);

            if (i % find_iter == 0)
            {
                if (compute_fit)
                {
                    double fit = 0;
                    time = Utils.MeasureTime(delegate() { fit = iterative_recommender.ComputeFit(); });
                    fit_time_stats.Add(time.TotalSeconds);
                    Console.Write(string.Format(CultureInfo.InvariantCulture, "fit {0,0:0.#####} ", fit));
                }

                Dictionary<string, double> results = null;
                time = Utils.MeasureTime(delegate() { results = MyMediaLite.Eval.Ratings.Evaluate(recommender, test_data); });
                eval_time_stats.Add(time.TotalSeconds);
                MyMediaLite.Eval.Ratings.DisplayResults(results);
                rmse_eval_stats.Add(results["RMSE"]);
                Console.WriteLine(" iteration " + i);

                Recommender.SaveModel(recommender, save_model_file, i);
                if (prediction_file != string.Empty)
                {
                    Prediction.WritePredictions(recommender, test_data, user_mapping, item_mapping, prediction_line, prediction_file + "-it-" + i);
                }

                if (epsilon > 0.0 && results["RMSE"] - rmse_eval_stats.Min() > epsilon)
                {
                    Console.Error.WriteLine(string.Format(CultureInfo.InvariantCulture, "{0} >> {1}", results["RMSE"], rmse_eval_stats.Min()));
                    Console.Error.WriteLine("Reached convergence on training/validation data after {0} iterations.", i);
                    break;
                }
                if (results["RMSE"] > rmse_cutoff || results["MAE"] > mae_cutoff)
                {
                    Console.Error.WriteLine("Reached cutoff after {0} iterations.", i);
                    break;
                }
            }
        } // for
        DisplayStats();
    }
    else
    {
        TimeSpan seconds;

        if (load_model_file == string.Empty)
        {
            if (cross_validation > 0)
            {
                Console.Write(recommender.ToString());
                Console.WriteLine();
                var split = new RatingCrossValidationSplit(training_data, cross_validation);
                var results = MyMediaLite.Eval.Ratings.EvaluateOnSplit(recommender, split); // TODO if (search_hp)
                MyMediaLite.Eval.Ratings.DisplayResults(results);
                no_eval = true;
                recommender.Ratings = training_data;
            }
            else
            {
                if (search_hp)
                {
                    // TODO --search-hp-criterion=RMSE
                    double result = NelderMead.FindMinimum("RMSE", recommender);
                    Console.Error.WriteLine("estimated quality (on split) {0}", result.ToString(CultureInfo.InvariantCulture));
                    // TODO give out hp search time
                }

                Console.Write(recommender.ToString());
                seconds = Utils.MeasureTime(delegate() { recommender.Train(); });
                Console.Write(" training_time " + seconds + " ");
            }
        }
        else
        {
            Recommender.LoadModel(recommender, load_model_file);
            Console.Write(recommender.ToString() + " ");
        }

        if (!no_eval)
        {
            if (online_eval) // TODO support also for prediction outputs (to allow external evaluation)
            {
                seconds = Utils.MeasureTime(delegate() {
                    MyMediaLite.Eval.Ratings.DisplayResults(MyMediaLite.Eval.Ratings.EvaluateOnline(recommender, test_data));
                });
            }
            else
            {
                seconds = Utils.MeasureTime(delegate() {
                    MyMediaLite.Eval.Ratings.DisplayResults(MyMediaLite.Eval.Ratings.Evaluate(recommender, test_data));
                });
            }
            Console.Write(" testing_time " + seconds);
        }

        if (compute_fit)
        {
            Console.Write("fit ");
            seconds = Utils.MeasureTime(delegate() {
                MyMediaLite.Eval.Ratings.DisplayResults(MyMediaLite.Eval.Ratings.Evaluate(recommender, training_data));
            });
            Console.Write(string.Format(CultureInfo.InvariantCulture, " fit_time {0,0:0.#####} ", seconds));
        }

        if (prediction_file != string.Empty)
        {
            seconds = Utils.MeasureTime(delegate() {
                Console.WriteLine();
                Prediction.WritePredictions(recommender, test_data, user_mapping, item_mapping, prediction_line, prediction_file);
            });
            Console.Error.Write("predicting_time " + seconds);
        }

        Console.WriteLine();
        Console.Error.WriteLine("memory {0}", Memory.Usage);
    }

    Recommender.SaveModel(recommender, save_model_file);
}