public Fraction SolveWithArtificialBasic()
{
    // Get the initial decomposition for the artificial basis
    var dec = _augmentedConstraintList.DecompositionForArtificialBasic();

    // Compute the objective function from the decomposition for the artificial basis
    var coeffs = new Fraction[dec.FreeVariables.Length + 1];
    for (var i = 0; i < coeffs.Length; i++)
    {
        Fraction sum = 0;
        for (var j = 0; j < dec.BasicVariables.Length; j++)
            sum += dec.Coefficients[j, i];
        coeffs[i] = -sum;
    }
    coeffs[coeffs.Length - 1] *= -1; // ! The sign is flipped later, because the simplex table stores the negated value
    var objFunc = new ObjectiveFunction(coeffs);

    // Bring the constructed simplex table into the required form
    var artBasic = new SimplexTable(dec, objFunc, _loggerForArtBasic, _userChoiceForArtBasic, _isDecimalFractions)
        .ToArtificialBasic();
    _objectiveFunction.Substitution(artBasic);

    return new SimplexTable(artBasic, _objectiveFunction, _logger, _userChoice, _isDecimalFractions)
        .Calculate();
}
public void ObjectiveFunction_Substitution_Successful(
    ObjectiveFunction objectiveFunction,
    Decomposition decomposition,
    ObjectiveFunction expected)
{
    objectiveFunction.Substitution(decomposition);

    CollectionAssert.AreEqual(expected.ToArray(), objectiveFunction.ToArray());
}
public void SimplexTable_Solve_Successful(
    Decomposition decomposition,
    ObjectiveFunction objectiveFunction,
    Fraction expected)
{
    var dut = new SimplexTable(decomposition, objectiveFunction, _logger).Calculate();

    Assert.AreEqual(expected, dut);
}
public void SimplexMethod_Solve_Successful(
    ObjectiveFunction objectiveFunction,
    Matrix matrix,
    int[] cornerPoint,
    Fraction expected)
{
    var simplexMethodSolver = new SimplexMethodSolver(objectiveFunction, matrix, cornerPoint, _logger);

    var actual = simplexMethodSolver.Solve();

    Assert.AreEqual(expected, actual);
}
/// <summary>
/// Find vector x that minimizes the function f(x) using the Broyden–Fletcher–Goldfarb–Shanno (BFGS) algorithm.
/// For more options and diagnostics consider using <see cref="BfgsMinimizer"/> directly.
/// An alternative routine using conjugate gradients (CG) is available in <see cref="ConjugateGradientMinimizer"/>.
/// </summary>
public static Vector<double> OfFunctionGradient(Func<Vector<double>, double> function, Func<Vector<double>, Vector<double>> gradient, Vector<double> initialGuess, double gradientTolerance = 1e-5, double parameterTolerance = 1e-5, double functionProgressTolerance = 1e-5, int maxIterations = 1000)
{
    var objective = ObjectiveFunction.Gradient(function, gradient);
    var algorithm = new BfgsMinimizer(gradientTolerance, parameterTolerance, functionProgressTolerance, maxIterations);
    var result = algorithm.FindMinimum(objective, initialGuess);
    return result.MinimizingPoint;
}
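// For context — a minimal, hypothetical usage sketch of the helper above, assuming it is exposed as the
// static FindMinimum.OfFunctionGradient entry point of MathNet.Numerics.Optimization; the quadratic
// objective, its gradient, and the starting point below are illustrative only.
using System;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.LinearAlgebra.Double;
using MathNet.Numerics.Optimization;

class OfFunctionGradientUsageSketch
{
    static void Main()
    {
        // f(x) = (x0 - 1)^2 + (x1 + 2)^2, minimized at (1, -2)
        Func<Vector<double>, double> f = x => Math.Pow(x[0] - 1.0, 2) + Math.Pow(x[1] + 2.0, 2);
        Func<Vector<double>, Vector<double>> g = x => DenseVector.OfArray(new[] { 2 * (x[0] - 1.0), 2 * (x[1] + 2.0) });

        Vector<double> xMin = FindMinimum.OfFunctionGradient(f, g, DenseVector.OfArray(new[] { 0.0, 0.0 }));
        Console.WriteLine(xMin); // expected to be close to (1, -2)
    }
}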
public PSO(ObjectiveFunction _objf, Response _res, Variable _var)
{
    this.objf = _objf;
    this.response = _res;
    this.variables = _var;
    Settings.aaa();
}
public override Harmony<StopTimeInfo> GenerateRandomHarmony()
{
    var randomArguments = GetRandomArguments();
    var objectiveValue = ObjectiveFunction.GetObjectiveValue(randomArguments);
    return new Harmony<StopTimeInfo>(objectiveValue, randomArguments);
}
/// <summary>
/// Find vector x that minimizes the function f(x), constrained within bounds, using the Broyden–Fletcher–Goldfarb–Shanno Bounded (BFGS-B) algorithm.
/// For more options and diagnostics consider using <see cref="BfgsBMinimizer"/> directly.
/// </summary>
public static Vector<double> OfFunctionGradientConstrained(Func<Vector<double>, Tuple<double, Vector<double>>> functionGradient, Vector<double> lowerBound, Vector<double> upperBound, Vector<double> initialGuess, double gradientTolerance = 1e-5, double parameterTolerance = 1e-5, double functionProgressTolerance = 1e-5, int maxIterations = 1000)
{
    var objective = ObjectiveFunction.Gradient(functionGradient);
    var algorithm = new BfgsBMinimizer(gradientTolerance, parameterTolerance, functionProgressTolerance, maxIterations);
    var result = algorithm.FindMinimum(objective, lowerBound, upperBound, initialGuess);
    return result.MinimizingPoint;
}
public void FindMinimum_Rosenbrock_Overton()
{
    var obj = ObjectiveFunction.Gradient(RosenbrockFunction.Value, RosenbrockFunction.Gradient);
    var solver = new LimitedMemoryBfgsMinimizer(1e-5, 1e-5, 1e-5, 5, 100);
    var result = solver.FindMinimum(obj, new DenseVector(new[] { -0.9, -0.5 }));

    Assert.That(Math.Abs(result.MinimizingPoint[0] - RosenbrockFunction.Minimum[0]), Is.LessThan(1e-3));
    Assert.That(Math.Abs(result.MinimizingPoint[1] - RosenbrockFunction.Minimum[1]), Is.LessThan(1e-3));
}
public void FindMinimum_BigRosenbrock_Hard()
{
    var obj = ObjectiveFunction.Gradient(BigRosenbrockFunction.Value, BigRosenbrockFunction.Gradient);
    var solver = new LimitedMemoryBfgsMinimizer(1e-5, 1e-5, 1e-5, 5, 1000);
    var result = solver.FindMinimum(obj, new DenseVector(new[] { -1.2 * 100.0, 1.0 * 100.0 }));

    Assert.That(Math.Abs(result.MinimizingPoint[0] - BigRosenbrockFunction.Minimum[0]), Is.LessThan(1e-3));
    Assert.That(Math.Abs(result.MinimizingPoint[1] - BigRosenbrockFunction.Minimum[1]), Is.LessThan(1e-3));
}
public void Test_ExpansionWorks()
{
    var algorithm = new GoldenSectionMinimizer(1e-5, 1000);
    var f1 = new Func<double, double>(x => (x - 3) * (x - 3));
    var obj = ObjectiveFunction.ScalarValue(f1);
    var r1 = algorithm.FindMinimum(obj, -5, 5);

    Assert.That(Math.Abs(r1.MinimizingPoint - 3.0), Is.LessThan(1e-4));
}
public void FindMinimum_Rosenbrock_Hard()
{
    var obj = ObjectiveFunction.GradientHessian(
        point => Tuple.Create(RosenbrockFunction.Value(point), RosenbrockFunction.Gradient(point), RosenbrockFunction.Hessian(point)));
    var solver = new NewtonMinimizer(1e-5, 1000);
    var result = solver.FindMinimum(obj, new DenseVector(new[] { -1.2, 1.0 }));

    Assert.That(Math.Abs(result.MinimizingPoint[0] - 1.0), Is.LessThan(1e-3));
    Assert.That(Math.Abs(result.MinimizingPoint[1] - 1.0), Is.LessThan(1e-3));
}
public SimplexTable(Decomposition decomposition, ObjectiveFunction objectiveFunction, ILogger logger = null,
    UserChoice userChoice = null, bool isDecimalFractions = false)
{
    _decomposition = decomposition;
    _logger = logger;
    _userChoice = userChoice;
    _isDecimalFractions = isDecimalFractions;
    _shortObjectiveFunction = ConvertToShortObjectiveFunction(decomposition, objectiveFunction);
}
public void FindMinimum_Rosenbrock_Hard()
{
    var obj = ObjectiveFunction.Gradient(RosenbrockFunction.Value, RosenbrockFunction.Gradient);
    var solver = new ConjugateGradientMinimizer(1e-5, 1000);
    var result = solver.FindMinimum(obj, new DenseVector(new[] { -1.2, 1.0 }));

    Assert.That(Math.Abs(result.MinimizingPoint[0] - 1.0), Is.LessThan(1e-3));
    Assert.That(Math.Abs(result.MinimizingPoint[1] - 1.0), Is.LessThan(1e-3));
}
public void FindMinimum_Rosenbrock_Easy()
{
    var obj = ObjectiveFunction.GradientHessian(RosenbrockFunction.Value, RosenbrockFunction.Gradient, RosenbrockFunction.Hessian);
    var solver = new NewtonMinimizer(1e-5, 1000);
    var result = solver.FindMinimum(obj, new DenseVector(new[] { 1.2, 1.2 }));

    Assert.That(Math.Abs(result.MinimizingPoint[0] - 1.0), Is.LessThan(1e-3));
    Assert.That(Math.Abs(result.MinimizingPoint[1] - 1.0), Is.LessThan(1e-3));
}
/// <summary>
/// Find vector x that minimizes the function f(x), constrained within bounds, using the Broyden–Fletcher–Goldfarb–Shanno Bounded (BFGS-B) algorithm.
/// The missing gradient is evaluated numerically (forward difference).
/// For more options and diagnostics consider using <see cref="BfgsBMinimizer"/> directly.
/// </summary>
public static Vector<double> OfFunctionConstrained(Func<Vector<double>, double> function, Vector<double> lowerBound, Vector<double> upperBound, Vector<double> initialGuess, double gradientTolerance = 1e-5, double parameterTolerance = 1e-5, double functionProgressTolerance = 1e-5, int maxIterations = 1000)
{
    var objective = ObjectiveFunction.Value(function);
    var objectiveWithGradient = new Optimization.ObjectiveFunctions.ForwardDifferenceGradientObjectiveFunction(objective, lowerBound, upperBound);
    var algorithm = new BfgsBMinimizer(gradientTolerance, parameterTolerance, functionProgressTolerance, maxIterations);
    var result = algorithm.FindMinimum(objectiveWithGradient, lowerBound, upperBound, initialGuess);
    return result.MinimizingPoint;
}
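// A minimal, hypothetical usage sketch of the bounded, derivative-free variant above, again assuming the
// MathNet.Numerics FindMinimum.OfFunctionConstrained entry point; the objective, bounds and start point
// are illustrative only.
using System;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.LinearAlgebra.Double;
using MathNet.Numerics.Optimization;

class OfFunctionConstrainedUsageSketch
{
    static void Main()
    {
        // f(x) = (x0 - 5)^2: the unconstrained minimum at x0 = 5 lies outside the box [0, 2],
        // so the bounded solver is expected to stop at the upper bound.
        Func<Vector<double>, double> f = x => Math.Pow(x[0] - 5.0, 2);

        Vector<double> lower = DenseVector.OfArray(new[] { 0.0 });
        Vector<double> upper = DenseVector.OfArray(new[] { 2.0 });
        Vector<double> start = DenseVector.OfArray(new[] { 1.0 });

        Vector<double> xMin = FindMinimum.OfFunctionConstrained(f, lower, upper, start);
        Console.WriteLine(xMin); // expected to be close to 2
    }
}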
/*
 * Input:
 *   Data file name
 * Output:
 *   none
 *
 * Parses input data and loops over scans, performing scan matching.
 */
public static void RunScanMatch(String inpF)
{
    // Call Init
    Init(inpF);

    // Initialise optimisation routine
    var obj = ObjectiveFunction.GradientHessian(Cost);
    var solver = new ConjugateGradientMinimizer(1e0, 30); //(1e-5, 100, false);

    Vector<double> x_init = Vector<double>.Build.DenseOfArray(new double[] { 0.0, 0.0, 0.0 });
    Vector<double> Xopt = Vector<double>.Build.DenseOfArray(new double[] { 0.0, 0.0, 0.0 });

    // Initialise reference point cloud, expressed in map coordinates: rPNn
    var rBNn = Vector<double>.Build.Dense(2);
    rBNn[0] = Pose[0][0];
    rBNn[1] = Pose[0][1];
    var Rnb = SO2.EulerRotation(Pose[0][2]);
    rEBb = GetPointCloudFromRange(Range[0]);

    var rPNn_new = Matrix<double>.Build.DenseOfMatrix(rEBb);
    for (int j = 0; j < rPNn_new.ColumnCount; j++)
    {
        rPNn_new.SetColumn(j, rBNn.Add(Rnb.Multiply(rEBb.Column(j))));
    }
    rPNn = rPNn_new;

    // Loop through data, setting up and running the optimisation routine each time.
    for (int i = 1; i < Range.Count(); i++)
    {
        // Initialise independent point cloud, expressed in body coordinates: rPBb
        rEBb = GetPointCloudFromRange(Range[i]);

        // Set up initial conditions
        x_init.SetValues(Pose[i]);

        // Solve
        var result = solver.FindMinimum(obj, x_init);
        Xopt = result.MinimizingPoint;

        rBNn = Vector<double>.Build.Dense(2);
        rBNn[0] = Xopt[0];
        rBNn[1] = Xopt[1];
        Rnb = SO2.EulerRotation(Xopt[2]);

        // Append to point cloud
        rPNn_new = Matrix<double>.Build.DenseOfMatrix(rEBb);
        for (int j = 0; j < rPNn_new.ColumnCount; j++)
        {
            rPNn_new.SetColumn(j, rBNn.Add(Rnb.Multiply(rEBb.Column(j))));
        }
        rPNn = rPNn.Append(rPNn_new);
    }
}
public void StandarizeConstraintAndFOTest()
{
    PrimalSimplexService simplexService = new PrimalSimplexService(new VectorOperations(new VectorHelper()), new VectorHelper());

    Constraint c1 = new Constraint(new Dictionary<string, double> { { "x1", 2 }, { "x2", 2 }, { "x3", 3 } }, Simplex.Entities.EComparator.GreaterEqualThan, 15);
    c1.Name = "r1";
    Constraint c2 = new Constraint(new Dictionary<string, double> { { "x1", 2 }, { "x2", 3 }, { "x3", 1 } }, Simplex.Entities.EComparator.LessEqualThan, 12);
    c2.Name = "r2";

    ObjectiveFunction fo = new ObjectiveFunction(new Dictionary<string, double> { { "x1", 3 }, { "x2", 2 }, { "x3", 4 } }, false);
    fo.Name = "FO";

    List<Constraint> constraints = new List<Constraint> { c1, c2 };

    List<Constraint> result = simplexService.StandarizeConstraint(constraints, out List<string> header).ToList();
    fo = simplexService.StandarizeObjectiveFunction(fo, header);

    Assert.Equal(0, result.Where(c => c.Name.Equals("r1")).FirstOrDefault().VectorBody["S1"]);
    Assert.Equal(-1, result.Where(c => c.Name.Equals("r1")).FirstOrDefault().VectorBody["e1"]);
    Assert.Equal(1, result.Where(c => c.Name.Equals("r1")).FirstOrDefault().VectorBody["A1"]);
    Assert.Equal(1, result.Where(c => c.Name.Equals("r2")).FirstOrDefault().VectorBody["S1"]);
    Assert.Equal(0, result.Where(c => c.Name.Equals("r2")).FirstOrDefault().VectorBody["e1"]);
    Assert.Equal(0, result.Where(c => c.Name.Equals("r2")).FirstOrDefault().VectorBody["A1"]);

    Assert.Equal(-3, fo.VectorBody["x1"]);
    Assert.Equal(-2, fo.VectorBody["x2"]);
    Assert.Equal(-4, fo.VectorBody["x3"]);
    Assert.Equal(0, fo.VectorBody["S1"]);
    Assert.Equal(0, fo.VectorBody["e1"]);
    Assert.Equal(1, fo.VectorBody["A1"]);

    Assert.True(header.Contains("S1") && header.Contains("e1") && header.Contains("A1"));
}
// Added a ListBox return value so results can be retrieved at runtime at different stages (before/after optimisation)
private ListBox ShowEvaluations()
{
    ObjectiveFunction obj = new ObjectiveFunction();
    List<Individual> inds = _ga.EvaluatePopulation(obj.Evaluate, _dbh);

    listBox3.Items.Clear();
    listBox4.Items.Clear();
    dataGridView2.Rows.Clear();
    dataGridView2.ColumnCount = SysConfig.chromeLength;

    for (var i = 0; i < inds.Count; i++)
    {
        listBox3.Items.Add(inds[i].ObjectiveValue.ToString());
        listBox4.Items.Add("Tour: " + inds[i].TourViolation.ToString() + " Continent: " + inds[i].ContinentViolation.ToString());

        var row = new DataGridViewRow();
        for (int j = 0; j < inds[i].Cities.Length; j++)
        {
            row.Cells.Add(new DataGridViewTextBoxCell() { Value = Convert.ToInt16(inds[i].Cities[j]) });
        }
        dataGridView2.Rows.Add(row);
    }

    var best = _ga.GetFittestIndividual();
    listBox5.Items.Clear();

    string cities = "";
    foreach (var c in best.TravelOrder)
    {
        cities += c + ", ";
    }

    listBox5.Items.Add(cities);
    listBox5.Items.Add("Fitness: " + best.ObjectiveValue);
    listBox5.Items.Add("Tour Violation: " + best.TourViolation);
    listBox5.Items.Add("Continent Violation: " + best.ContinentViolation);
    listBox5.Items.Add("Num cities: " + best.CountriesVisited);

    bool[] visited = new bool[SysConfig.chromeLength];
    foreach (var i in inds)
    {
        for (var c = 0; c < i.Cities.Length; c++)
        {
            if (i.Cities[c])
            {
                visited[c] = true;
            }
        }
    }

    int citiesMapped = visited.Where(x => x == true).Count();
    listBox8.Items.Clear();
    listBox8.Items.Add(citiesMapped);

    return listBox5;
}
/// <summary>
/// Default constructor
/// </summary>
/// <param name="po">Objective function</param>
/// <param name="restrictions">List of restrictions</param>
public Simplex(ObjectiveFunction po, List<Restriction> restrictions)
{
    objectiveFunction = po;
    restrictionsList = restrictions;
    table = new Tuple<double, double>[restrictions.Count() + 1, po.Z.Count() + 1];
    columnPositions = new string[po.Z.Count() + 1];
    linePositions = new string[restrictions.Count() + 1];
}
public BAforCOP(int numberOfPreys, double[] upBound, double[] lowBound, OptimizationType type, ObjectiveFunction objFun)
{
    this.numberOfPreys = numberOfPreys;
    lowerBound = lowBound;
    upperBound = upBound;
    optimizationTpe = type; // just let the constructor decide this
    objFunction = objFun;
    soFarTheBestSolution = new double[numberOfPreys];
}
public PSOforCOP(int numberOfVariables, double[] upBound, double[] lowBound, ObjectiveFunction objFun)
{
    this.numberOfVariables = numberOfVariables;
    LowerBound = lowBound;
    UpperBound = upBound;
    //this.optimizationTpe = type; // just let the constructor decide this
    objFunction = objFun;
    SoFarTheBestSolution = new double[numberOfVariables];
}
// Try this for multiparameter optimization: https://numerics.mathdotnet.com/api/MathNet.Numerics.Optimization.TrustRegion/index.htm
// Golden Section Minimizer
public static Value Argmin(Value function, Value lowerBound, Value upperBound, Value tolerance, Netlist netlist, Style style, int s)
{
    if (!(lowerBound is NumberValue) || !(upperBound is NumberValue)) { throw new Error("argmin: expecting numbers for lower and upper bounds"); }
    double lower = (lowerBound as NumberValue).value;
    double upper = (upperBound as NumberValue).value;
    if (lower > upper) { throw new Error("argmin: lower bound greater than upper bound"); }
    if (!(function is FunctionValue)) { throw new Error("argmin: expecting a function as first argument"); }
    FunctionValue closure = function as FunctionValue;
    if (closure.parameters.parameters.Count != 1) { throw new Error("argmin: initial values and function parameters have different lengths"); }

    IScalarObjectiveFunction objectiveFunction = ObjectiveFunction.ScalarValue(
        (double parameter) =>
        {
            List<Value> arguments = new List<Value>();
            arguments.Add(new NumberValue(parameter));
            bool autoContinue = netlist.autoContinue;
            netlist.autoContinue = true;
            Value result = closure.ApplyReject(arguments, netlist, style, s);
            if (result == null) { throw new Error("Objective function returned null"); }
            netlist.autoContinue = autoContinue;
            if (!(result is NumberValue)) { throw new Error("Objective function must return a number, not: " + result.Format(style)); }
            KGui.gui.GuiOutputAppendText("argmin: parameter=" + Style.FormatSequence(arguments, ", ", x => x.Format(style))
                + " => cost=" + result.Format(style) + Environment.NewLine);
            return (result as NumberValue).value;
        });

    try
    {
        ScalarMinimizationResult result = GoldenSectionMinimizer.Minimum(objectiveFunction, lower, upper);
        if (result.ReasonForExit == ExitCondition.Converged || result.ReasonForExit == ExitCondition.BoundTolerance)
        {
            KGui.gui.GuiOutputAppendText("argmin: converged with parameter=" + result.MinimizingPoint
                + " and reason '" + result.ReasonForExit + "'" + Environment.NewLine);
            return new NumberValue(result.MinimizingPoint);
        }
        else
        {
            throw new Error("reason '" + result.ReasonForExit.ToString() + "'");
        }
    }
    catch (Exception e)
    {
        throw new Error("argmin ended: " + ((e.InnerException == null) ? e.Message : e.InnerException.Message));
    }
    // somehow we need to recatch the inner exception coming from CostAndGradient
}
public void PollutionWithWeights()
{
    var obj = ObjectiveFunction.NonlinearModel(PollutionModel, PollutionX, PollutionY, PollutionW, accuracyOrder: 6);
    var solver = new LevenbergMarquardtMinimizer();
    var result = solver.FindMinimum(obj, PollutionStart);

    for (int i = 0; i < result.MinimizingPoint.Count; i++)
    {
        AssertHelpers.AlmostEqualRelative(PollutionBest[i], result.MinimizingPoint[i], 4);
    }
}
public MinimizationResult Train()
{
    Vector<double> theta = Vector<double>.Build.Dense(X.ColumnCount);
    LinearRegression lr = new LinearRegression(this.X, this.y, this.Lambda);

    var obj = ObjectiveFunction.Gradient(lr.Cost, lr.Gradient);
    var solver = new BfgsMinimizer(1e-5, 1e-5, 1e-5, 200);
    MinimizationResult result = solver.FindMinimum(obj, theta);

    return result;
}
public void Thurber_LBfgs_Dif()
{
    var obj = ObjectiveFunction.NonlinearFunction(ThurberModel, ThurberX, ThurberY, accuracyOrder: 6);
    var solver = new LimitedMemoryBfgsMinimizer(1e-10, 1e-10, 1e-10, 1000);
    var result = solver.FindMinimum(obj, ThurberStart);

    for (int i = 0; i < result.MinimizingPoint.Count; i++)
    {
        AssertHelpers.AlmostEqualRelative(ThurberPbest[i], result.MinimizingPoint[i], 6);
    }
}
public void BoxBod_Newton_Der()
{
    var obj = ObjectiveFunction.NonlinearFunction(BoxBodModel, BoxBodPrime, BoxBodX, BoxBodY);
    var solver = new NewtonMinimizer(1e-10, 100);
    var result = solver.FindMinimum(obj, BoxBodStart2);

    for (int i = 0; i < result.MinimizingPoint.Count; i++)
    {
        AssertHelpers.AlmostEqualRelative(BoxBodPbest[i], result.MinimizingPoint[i], 6);
    }
}
public void Rat43_LBfgs_Dif()
{
    var obj = ObjectiveFunction.NonlinearFunction(Rat43Model, Rat43X, Rat43Y, accuracyOrder: 6);
    var solver = new LimitedMemoryBfgsMinimizer(1e-10, 1e-10, 1e-10, 1000);
    var result = solver.FindMinimum(obj, Rat43Start2);

    for (int i = 0; i < result.MinimizingPoint.Count; i++)
    {
        AssertHelpers.AlmostEqualRelative(Rat43Pbest[i], result.MinimizingPoint[i], 2);
    }
}
public void Rosenbrock_Bfgs_Dif()
{
    var obj = ObjectiveFunction.NonlinearFunction(RosenbrockModel, RosenbrockX, RosenbrockY, accuracyOrder: 6);
    var solver = new BfgsMinimizer(1e-8, 1e-8, 1e-8, 1000);
    var result = solver.FindMinimum(obj, RosenbrockStart1);

    for (int i = 0; i < result.MinimizingPoint.Count; i++)
    {
        AssertHelpers.AlmostEqualRelative(RosenbrockPbest[i], result.MinimizingPoint[i], 2);
    }
}
public bool ComprobarSiFinalizaSimplex(ObjectiveFunction fo)
{
    bool siFinaliza = false;
    if (fo != null)
    {
        siFinaliza = !fo.CuerpoNum.Any(n => n < 0) && fo.TerminoIndependiente > 0;
    }
    return siFinaliza;
}
public void NMS_FindMinimum_Rosenbrock_Easy()
{
    var obj = ObjectiveFunction.Value(RosenbrockFunction.Value);
    var solver = new NelderMeadSimplex(Tolerance * 0.1, maximumIterations: 1000);
    var initialGuess = new DenseVector(new[] { 1.2, 1.2 });

    var result = solver.FindMinimum(obj, initialGuess);

    Assert.That(Math.Abs(result.MinimizingPoint[0] - 1.0), Is.LessThan(Tolerance));
    Assert.That(Math.Abs(result.MinimizingPoint[1] - 1.0), Is.LessThan(Tolerance));
}
public void Thurber_LM_Dif()
{
    var obj = ObjectiveFunction.NonlinearModel(ThurberModel, ThurberX, ThurberY, accuracyOrder: 6);
    var solver = new LevenbergMarquardtMinimizer();
    var result = solver.FindMinimum(obj, ThurberStart);

    for (int i = 0; i < result.MinimizingPoint.Count; i++)
    {
        AssertHelpers.AlmostEqualRelative(ThurberPbest[i], result.MinimizingPoint[i], 6);
        AssertHelpers.AlmostEqualRelative(ThurberPstd[i], result.StandardErrors[i], 6);
    }
}
public void BoxBod_TRNCG_Dif()
{
    var obj = ObjectiveFunction.NonlinearModel(BoxBodModel, BoxBodX, BoxBodY, accuracyOrder: 6);
    var solver = new TrustRegionNewtonCGMinimizer();
    var result = solver.FindMinimum(obj, BoxBodStart2);

    for (int i = 0; i < result.MinimizingPoint.Count; i++)
    {
        AssertHelpers.AlmostEqualRelative(BoxBodPbest[i], result.MinimizingPoint[i], 3);
        AssertHelpers.AlmostEqualRelative(BoxBodPstd[i], result.StandardErrors[i], 3);
    }
}
public void Rat43_TRDL_Dif()
{
    var obj = ObjectiveFunction.NonlinearModel(Rat43Model, Rat43X, Rat43Y, accuracyOrder: 6);
    var solver = new TrustRegionDogLegMinimizer();
    var result = solver.FindMinimum(obj, Rat43Start2);

    for (int i = 0; i < result.MinimizingPoint.Count; i++)
    {
        AssertHelpers.AlmostEqualRelative(Rat43Pbest[i], result.MinimizingPoint[i], 2);
        AssertHelpers.AlmostEqualRelative(Rat43Pstd[i], result.StandardErrors[i], 2);
    }
}
public void Thurber_TRNCG_Dif()
{
    var obj = ObjectiveFunction.NonlinearModel(ThurberModel, ThurberX, ThurberY, accuracyOrder: 6);
    var solver = new TrustRegionNewtonCGMinimizer();
    var result = solver.FindMinimum(obj, ThurberStart, scales: ThurberScales);

    for (int i = 0; i < result.MinimizingPoint.Count; i++)
    {
        AssertHelpers.AlmostEqualRelative(ThurberPbest[i], result.MinimizingPoint[i], 3);
        AssertHelpers.AlmostEqualRelative(ThurberPstd[i], result.StandardErrors[i], 3);
    }
}
public void Test1()
{
    var dec = new Decomposition
    {
        BasicVariables = new[] { 5, 6, 7 },
        FreeVariables = new[] { 0, 1, 2, 3, 4 },
        Coefficients = new Fraction[,]
        {
            { -2, 4, 1, -1, 0, 3 },
            { 4, -3, -1, 1, 1, 6 },
            { 1, 4, 1, 0, 1, 15 }
        }
    };
    var objFunc = new ObjectiveFunction(new Fraction[] { -3, -5, -1, 0, -2, 24 });
    var dut = new SimplexTable(dec, objFunc, new Logger());

    var artBasic = dut.ToArtificialBasic();
}
public SimplexMethodSolver(ObjectiveFunction objectiveFunction, Matrix augmentedConstraintList,
    IEnumerable<int> cornerPoint = null, ILogger logger = null, ILogger loggerForArtBasic = null,
    UserChoice userChoice = null, UserChoice userChoiceForArtBasic = null, bool isDecimalFractions = false)
{
    _objectiveFunction = objectiveFunction;
    _augmentedConstraintList = augmentedConstraintList;
    _logger = logger;
    _loggerForArtBasic = loggerForArtBasic;
    _userChoice = userChoice;
    _userChoiceForArtBasic = userChoiceForArtBasic;
    _isDecimalFractions = isDecimalFractions;
    _cornerPoint = cornerPoint as IReadOnlyList<int>;
}
/// <summary>
/// Extracts the required coefficients from the full objective function according to the decomposition
/// </summary>
private static ObjectiveFunction ConvertToShortObjectiveFunction(Decomposition decomposition, ObjectiveFunction objectiveFunction)
{
    var shortObjectiveFunction = new List<Fraction>();
    for (var i = 0; i < objectiveFunction.Count(); i++)
    {
        if (decomposition.FreeVariables.Contains(i))
            shortObjectiveFunction.Add(objectiveFunction[i]);
    }
    shortObjectiveFunction.Add(objectiveFunction.Last());
    return new ObjectiveFunction(shortObjectiveFunction);
}
public void SimplexMethod_Solve_Exception(
    ObjectiveFunction objectiveFunction,
    Matrix matrix,
    int[] cornerPoint)
{
    var simplexMethodSolver = new SimplexMethodSolver(objectiveFunction, matrix, cornerPoint, _logger);

    Assert.Throws(typeof(Exception), () => simplexMethodSolver.Solve());
}
//===========================================================================//
//                          GFunctionWithShifts                              //
//===========================================================================//
public GFunctionWithShifts(CmsCoupon coupon, Handle<Quote> meanReversion)
{
    meanReversion_ = meanReversion;
    calibratedShift_ = 0.03;
    tmpRs_ = 10000000.0;
    accuracy_ = 1.0e-14;

    SwapIndex swapIndex = coupon.swapIndex();
    VanillaSwap swap = swapIndex.underlyingSwap(coupon.fixingDate());

    swapRateValue_ = swap.fairRate();
    objectiveFunction_ = new ObjectiveFunction(this, swapRateValue_);

    Schedule schedule = swap.fixedSchedule();
    Handle<YieldTermStructure> rateCurve = swapIndex.forwardingTermStructure();
    DayCounter dc = swapIndex.dayCounter();

    swapStartTime_ = dc.yearFraction(rateCurve.link.referenceDate(), schedule.startDate());
    discountAtStart_ = rateCurve.link.discount(schedule.startDate());

    double paymentTime = dc.yearFraction(rateCurve.link.referenceDate(), coupon.date());
    shapedPaymentTime_ = shapeOfShift(paymentTime);

    List<CashFlow> fixedLeg = new List<CashFlow>(swap.fixedLeg());
    int n = fixedLeg.Count;

    shapedSwapPaymentTimes_ = new List<double>();
    swapPaymentDiscounts_ = new List<double>();
    accruals_ = new List<double>();

    for (int i = 0; i < n; ++i)
    {
        Coupon coupon1 = fixedLeg[i] as Coupon;
        accruals_.Add(coupon1.accrualPeriod());
        Date paymentDate = new Date(coupon1.date().serialNumber());
        double swapPaymentTime = dc.yearFraction(rateCurve.link.referenceDate(), paymentDate);
        shapedSwapPaymentTimes_.Add(shapeOfShift(swapPaymentTime));
        swapPaymentDiscounts_.Add(rateCurve.link.discount(paymentDate));
    }

    discountRatio_ = swapPaymentDiscounts_.Last() / discountAtStart_;
}
public CMAESData(string distribution, string dimension, ObjectiveFunction objFun, bool dependentModel, DirectoryInfo data)
    : base(distribution, dimension, DataSet.train, false, objFun == ObjectiveFunction.MinimumRho, data)
{
    AlreadySavedPID = Generation;

    FileInfo = new FileInfo(String.Format(@"{0}\CMAES\weights\full.{1}.{2}.{3}.weights.{4}.csv",
        data.FullName, Distribution, Dimension, objFun, dependentModel ? "timedependent" : "timeindependent"));
    FileInfoResults = new FileInfo(String.Format(@"{0}\CMAES\results\output.{1}.{2}.{3}.weights.{4}.csv",
        data.FullName, Distribution, Dimension, objFun, dependentModel ? "timedependent" : "timeindependent"));

    N = NUM_FEATURES;
    if (dependentModel)
        N *= NumDimension;
    StopEval = 50000; // 1e3*N^2;

    if (FileInfoResults.Exists & !FileInfo.Exists)
    {
        ReadFileInfoResults();
        AlreadySavedPID = Generation = _output.Count > 0 ? _output[_output.Count - 1].Generation : 0;
        CountEval = StopEval; // use last results as finished run
        if (OptimistationComplete)
            Write();
    }

    if (FileInfo.Exists)
    {
        //throw new WarningException(String.Format("Optimistation already completed, see results in {0}", FileInfo.Name));
        CountEval = StopEval;
        return;
    }

    // Get the method information using the method info class
    switch (objFun)
    {
        case ObjectiveFunction.MinimumMakespan:
            _objFun = MinimumMakespan;
            break;
        case ObjectiveFunction.MinimumRho:
            _optMakespans = OptimumArray();
            _objFun = MinimumRho;
            break;
    }

    #region -------------------- Initialization --------------------------------

    xmean = LinearAlgebra.RandomValues(N); // objective variables initial point
    sigma = 0.5;
    _stopFitness = 1e-10;

    #region Strategy parameter setting: Selection

    lambda = 4 + (int)Math.Floor(3 * Math.Log(N));
    // ReSharper disable once LocalVariableHidesMember
    double mu = lambda / 2.0;
    this.mu = (int)Math.Floor(mu);
    _population = new Offspring[lambda];

    weights = new double[this.mu];
    for (int i = 0; i < this.mu; i++)
        weights[i] = Math.Log(mu + 0.5) - Math.Log(i + 1);

    // normalize recombination weights array
    double tmpSum = weights.Sum();
    for (int i = 0; i < weights.Length; i++)
        weights[i] /= tmpSum;

    mueff = Math.Pow(weights.Sum(), 2) / weights.Sum(w => Math.Pow(w, 2));

    #endregion

    #region Strategy parameter setting: Adaptation

    cc = (4 + mueff / N) / (N + 4 + 2 * mueff / N);
    cs = (mueff + 2) / (N + mueff + 5);
    c1 = 2 / (Math.Pow(N + 1.3, 2) + mueff);
    cmu = Math.Min(1 - c1, 2 * (mueff - 2 + 1 / mueff) / (Math.Pow(N + 2, 2) + mueff));
    damps = 1 + 2 * Math.Max(0, Math.Sqrt((mueff - 1) / (N + 1)) - 1) + cs;

    #endregion

    #region Initialize dynamic (internal) strategy parameters and constants

    pc = LinearAlgebra.Zeros(N);
    ps = LinearAlgebra.Zeros(N);
    B = LinearAlgebra.Eye(N);
    D = LinearAlgebra.Ones(N);
    // C = B * diag(D.^2) * B';
    C = LinearAlgebra.Multiply(B, LinearAlgebra.Diag(LinearAlgebra.Power(D, 2)), B, true);
    invsqrtC = LinearAlgebra.InvertSqrtMatrix(B, D);
    chiN = Math.Sqrt(N) * (1 - 1 / (4.0 * N) + 1 / (21 * Math.Pow(N, 2)));

    #endregion

    #endregion
}
public void SimplexTable_Solve_Unsolvable(Decomposition decomposition, ObjectiveFunction objectiveFunction)
{
    Assert.Throws(typeof(Exception), () => new SimplexTable(decomposition, objectiveFunction, _logger).Calculate());
}