/// <summary>
/// <para>Unscented transform parameters optimization procedure.</para>
/// <para>The OptimizationMethod param determines the optimization method:</para>
/// <para>- OptimizationMethod.RandomShoot - parameters are randomly sampled and the best sample is chosen as optimal;</para>
/// <para>- OptimizationMethod.NelderMeed - parameters are optimized with the derivative-free Nelder-Mead method.</para>
/// <para>The UTDefinitionType type param determines the relation between the optimized variable and the unscented transform params (see UTParams and its constructors for details).</para>
/// <para>- If type is UTDefinitionType.ImplicitAlpha, then the optimized variable is a scalar [alpha0];</para>
/// <para>- If type is UTDefinitionType.ImplicitAlphaBetaKappa, then the optimized variable is a vector [alpha, beta, kappa];</para>
/// <para>- If type is UTDefinitionType.Explicit, then the optimized variable is a vector [lambda, wm0, wc0, wi].
/// TODO: it is not correct to define the parameters of the unscented transform arbitrarily; they have to be interdependent, so that the mean and covariance are transformed correctly.</para>
/// </summary>
/// <param name="method">Unscented transform parameters optimization method</param>
/// <param name="type">Unscented transform parameters definition type</param>
/// <param name="Phi1">State transformation: a nonlinear function which determines the dynamics: x_{t+1} = Phi_1(x_t) + Phi_2(x_t) W_t</param>
/// <param name="Phi2">Noise multiplier in the dynamics equation: x_{t+1} = Phi_1(x_t) + Phi_2(x_t) W_t</param>
/// <param name="Psi1">Observation transformation: a nonlinear function which determines the relation between the state and the observations: y_t = Psi_1(x_t) + Psi_2(x_t) Nu_t</param>
/// <param name="Psi2">Noise multiplier in the observation equation: y_t = Psi_1(x_t) + Psi_2(x_t) Nu_t</param>
/// <param name="Mw">Mean of the noise in the dynamics equation</param>
/// <param name="Rw">Covariance matrix of the state disturbances</param>
/// <param name="Mnu">Mean of the noise in the observation equation</param>
/// <param name="Rnu">Covariance matrix of the observation noise</param>
/// <param name="Crit">Criterion: a function which determines the quality of the unscented Kalman filter. Depends on the sample covariance of the estimation error on the last step: val = Crit(Cov(X_T - Xhat_T, X_T - Xhat_T))</param>
/// <param name="T">The upper bound of the observation interval</param>
/// <param name="models">Discrete vector model samples</param>
/// <param name="xhat0">Initial condition</param>
/// <param name="DX0Hat">Initial condition covariance</param>
/// <param name="outputFolder">The results are saved to this folder in file "UT_optimization_{type}.txt"</param>
static (double, UTParams, UTParams) UTParmsOptimize(OptimizationMethod method, UTDefinitionType type,
    Func<int, Vector<double>, Vector<double>> Phi1,
    Func<int, Vector<double>, Matrix<double>> Phi2,
    Func<int, Vector<double>, Vector<double>> Psi1,
    Func<int, Vector<double>, Matrix<double>> Psi2,
    Vector<double> Mw, Matrix<double> Rw, Vector<double> Mnu, Matrix<double> Rnu,
    Func<Matrix<double>, double> Crit, int T, DiscreteVectorModel[] models,
    Vector<double> xhat0, Matrix<double> DX0Hat, string outputFolder)
{
    (int n, Vector<double> lowerBound, Vector<double> upperBound, Vector<double> initialGuess, string filename) =
        DefineOptimizationParameters(type, xhat0, string.IsNullOrWhiteSpace(outputFolder) ? null : Path.Combine(outputFolder, $"UT_optimization_{type}.txt"));

    double min = double.MaxValue;
    Vector<double> argmin = Exts.Stack(initialGuess, initialGuess);

    switch (method)
    {
        case OptimizationMethod.RandomShoot:
            var OptimumRandom = RandomOptimizer.Minimize(
                (x) => CalculateSampleCriterion(Phi1, Phi2, Psi1, Psi2, Mw, Rw, Mnu, Rnu, Crit, x, T, models, xhat0, DX0Hat),
                Exts.Stack(lowerBound, lowerBound), Exts.Stack(upperBound, upperBound), 100, 100, filename);
            min = OptimumRandom.min;
            argmin = OptimumRandom.argmin;
            break;
        case OptimizationMethod.NelderMeed:
            NelderMeadSimplex optimizer = new NelderMeadSimplex(1e-3, 100);
            var objective = ObjectiveFunction.Value(
                (x) => CalculateSampleCriterion(Phi1, Phi2, Psi1, Psi2, Mw, Rw, Mnu, Rnu, Crit, x, T, models, xhat0, DX0Hat));
            try
            {
                var optimumNM = optimizer.FindMinimum(objective, Exts.Stack(initialGuess, initialGuess));
                min = optimumNM.FunctionInfoAtMinimum.Value;
                argmin = optimumNM.MinimizingPoint;
            }
            catch (Exception e)
            {
                Console.WriteLine($"Optimizer failed, using the initial guess ({e.Message})");
                argmin = Exts.Stack(initialGuess, initialGuess);
            }
            break;
        default:
            // no optimization by default
            break;
    }
    return (min, new UTParams(xhat0.Count, argmin.Take(n).ToArray()), new UTParams(xhat0.Count, argmin.Skip(n).Take(n).ToArray()));
}
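// Hypothetical usage sketch (not part of the original code): run the integral optimization once on a
// bundle of pre-generated training trajectories and reuse the single (forecast, correction) parameter
// pair at every step of the unscented Kalman filter. The criterion (trace of the final-step error
// covariance) and the UTDefinitionType member name are assumptions taken from the summary above;
// Phi1..Psi2, the noise moments, models, xhat0, DX0Hat and outputFolder are assumed to be defined by the caller.
//Func<Matrix<double>, double> Crit = P => P.Trace();
//(double crit, UTParams pForecast, UTParams pCorrect) = UTParmsOptimize(
//    OptimizationMethod.NelderMeed, UTDefinitionType.ImplicitAlphaBetaKappa,
//    Phi1, Phi2, Psi1, Psi2, Mw, Rw, Mnu, Rnu, Crit, T, models, xhat0, DX0Hat, outputFolder);
//Console.WriteLine($"Integral UT optimization done, criterion value {crit}, forecast params {pForecast}, correction params {pCorrect}");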
static void Run(Options o, string[] args)
{
    if (string.IsNullOrWhiteSpace(o.ScriptsFolder))
    {
        o.ScriptsFolder = "..\\..\\..\\OutputScripts\\";
    }
    Directory.CreateDirectory(o.OutputFolder);
    using (System.IO.StreamWriter outputfile = new System.IO.StreamWriter(Path.Combine(o.OutputFolder, "parameters.txt"), true))
    {
        outputfile.WriteLine($"{DateTime.Now}\t{string.Join(" ", args)}");
        outputfile.Close();
    }

    // original continuous model
    // x_t = x_0 + \int_0^t Phi_1(x_s) ds + \int_0^t Phi_2(x_s) dW_s
    // discrete model:
    // x_{t+1} = Phi_1(x_t) + Phi_2(x_t) W_t
    // observations
    // y_t = Psi_1(x_t) + Psi_2(x_t) Nu_t

    #region 3d model Ienkaran Arasaratnam, Simon Haykin, and Tom R. Hurd
    // model params
    //double sigma1 = Math.Sqrt(0.2);
    //double sigma2 = 7.0 * 1e-3;
    //double sigma_r = 50;
    //double sigma_th = 0.1;
    //double sigma_ph = 0.1;

    // starting point
    //Vector<double> mEta = Exts.Vector(1000, 0, 2650, 150, 200, 0, 1.0);
    //Matrix<double> dEta = Exts.Diag(1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0);
    //RandomVector<Normal> NormalEta = new RandomVector<Normal>(mEta, dEta);
    //Func<Vector<double>> X0;
    ////X0 = () => NormalEta.Sample();
    //X0 = () => mEta;

    // dynamics
    //Func<Vector<double>, Vector<double>> Phi1 = (x) => Exts.Vector(x[1], -x[6] * x[3], x[3], x[6] * x[1], x[5], 0, 0);
    //Func<Matrix<double>> Phi2 = () => Exts.Diag(1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0);
    //Vector<double> mW = Exts.Vector(0, 0, 0, 0, 0, 0, 0);
    //Matrix<double> dW = Exts.Diag(1e1, Math.Pow(sigma1, 2), 1e1, Math.Pow(sigma1, 2), 1e1, Math.Pow(sigma1, 2), Math.Pow(sigma2, 2));
    //RandomVector<Normal> NormalW = new RandomVector<Normal>(mW, dW);
    //Func<Vector<double>> W;
    //W = () => NormalW.Sample();

    // observations
    //Func<Vector<double>, Vector<double>> Psi1 = (x) => Utils.cart2sphere(Exts.Vector(x[0], x[2], x[4]));
    //Func<Matrix<double>> Psi2 = () => Exts.Diag(1.0, 1.0, 1.0);
    //Vector<double> mNu = Exts.Vector(0, 0, 0);
    //Matrix<double> dNu = Exts.Diag(Math.Pow(sigma_r, 2), Math.Pow(sigma_th, 2), Math.Pow(sigma_ph, 2));
    ////Vector<double> X_R2 = Exts.Vector(0, 0);
    ////Vector<double> mNu = Exts.Vector(0, 0, 0, 0);
    ////Matrix<double> dNu = Exts.Diag(Math.Pow(0.1 * Math.PI / 180, 2), Math.Pow(50, 2), Math.Pow(0.1 * Math.PI / 180, 2), Math.Pow(50, 2));
    ////Func<double, Vector<double>, Vector<double>> Psi1 = (s, x) => Exts.Stack(Utils.cart2pol(Exts.Vector(x[0], x[1]) - X_R1), Utils.cart2pol(Exts.Vector(x[0], x[1]) - X_R2));
    ////Func<double, Vector<double>, Matrix<double>> Psi2 = (s, x) => Exts.Diag(1.0, 1.0, 1.0, 1.0);
    //RandomVector<Normal> NormalNu = new RandomVector<Normal>(mNu, dNu);
    //Func<Vector<double>> Nu;
    //Nu = () => NormalNu.Sample();

    //double h_state = 0.01;
    //double h_obs = 0.01;
    //double T = 1.0 + h_state / 2;
    #endregion

    #region 2d model with acceleration
    // model params
    double Alpha_n = o.alpha;   // 0.05;
    double Beta_n = o.beta;     // 1.0;
    double Gamma_n = o.gamma;   // 0.5;

    // starting point
    Vector<double> mEta = Exts.Vector(0, 25000, 400, 10 * Math.PI / 180, Gamma_n / Alpha_n);
    Matrix<double> dEta = Exts.Diag(Math.Pow(o.DX0, 2), Math.Pow(o.DX0, 2), Math.Pow(115, 2), Math.Pow(15 * Math.PI / 180, 2), Math.Pow(Beta_n, 2) / 2 / Alpha_n);
    RandomVector<Normal> NormalEta = new RandomVector<Normal>(mEta, dEta);
    ContinuousUniform UniformEtaV = new ContinuousUniform(200, 600);
    Func<Vector<double>> X0;
    X0 = () => { var x = NormalEta.Sample(); x[2] = UniformEtaV.Sample(); return x; };

    // dynamics
    Func<Vector<double>, Vector<double>> Phi1 = (x) => Exts.Vector(x[2] * Math.Cos(x[3]), x[2] * Math.Sin(x[3]), 0, x[4] / x[2], -Alpha_n * x[4] + Gamma_n);
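    // Reading the dynamics off Phi1 above, the state vector of the 2d model appears to be
    //   x[0], x[1] - Cartesian position,
    //   x[2]       - speed (its initial value is redrawn uniformly on [200, 600] by X0),
    //   x[3]       - heading angle,
    //   x[4]       - maneuver component (normal acceleration, since the heading rate is x[4] / x[2])
    //                with dynamics dx[4] = (-Alpha_n x[4] + Gamma_n) dt + Beta_n dW_t,
    // i.e. an Ornstein-Uhlenbeck-type process whose stationary mean Gamma_n / Alpha_n and stationary
    // variance Math.Pow(Beta_n, 2) / 2 / Alpha_n match the last components of mEta and dEta above.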
    Func<Matrix<double>> Phi2 = () => Exts.Diag(0, 0, 0, 0, 1.0);
    Vector<double> mW = Exts.Vector(0, 0, 0, 0, 0);
    Matrix<double> dW = Exts.Diag(0, 0, 0, 0, Math.Pow(Beta_n, 2));
    Normal NormalW = new Normal(mW[4], Math.Sqrt(dW[4, 4]));
    Func<Vector<double>> W;
    W = () => Exts.Vector(0, 0, 0, 0, NormalW.Sample());

    // observations
    Vector<double> X_R1 = Exts.Vector(-10000, 10000); // first radar location
    Vector<double> X_R2 = Exts.Vector(0, 0);          // second radar location
    Func<Vector<double>, Vector<double>> Psi1 = (x) => Exts.Stack(Utils.cart2pol(Exts.Vector(x[0], x[1]) - X_R1), Utils.cart2pol(Exts.Vector(x[0], x[1]) - X_R2));
    Func<Matrix<double>> Psi2 = () => Exts.Diag(1.0, 1.0, 1.0, 1.0);
    Vector<double> mNu = Exts.Vector(0, 0, 0, 0);
    Matrix<double> dNu = Exts.Diag(Math.Pow(0.1 * Math.PI / 180, 2), Math.Pow(50, 2), Math.Pow(0.1 * Math.PI / 180, 2), Math.Pow(50, 2));
    RandomVector<Normal> NormalNu = new RandomVector<Normal>(mNu, dNu);
    Func<Vector<double>> Nu;
    Nu = () => NormalNu.Sample();

    // discretization
    double h_state = o.h_state; // 0.01;
    double h_obs = o.h_obs;     // 1.0;
    double T = o.T + h_state / 2;
    Func<int, Vector<double>, Vector<double>> Phi1_discr = (i, x) =>
    {
        Vector<double> xx = x;
        for (double s = h_state; s < h_obs + h_state / 2; s += h_state)
        {
            xx += h_state * Phi1(xx);
        }
        return xx;
    };
    Func<int, Vector<double>, Matrix<double>> Phi2_discr = (i, x) => Math.Sqrt(h_obs) * Phi2();
    Func<int, Vector<double>, Vector<double>> Psi1_discr = (i, x) => Psi1(x);
    Func<int, Vector<double>, Matrix<double>> Psi2_discr = (i, x) => Psi2();

    // derivatives for the Extended Kalman Filter
    Func<Vector<double>, Matrix<double>> dPhi = (x) => Matrix<double>.Build.Dense(5, 5, new double[5 * 5]
    {
        0, 0, 0, 0, 0,
        0, 0, 0, 0, 0,
        Math.Cos(x[3]), Math.Sin(x[3]), 0, -x[4] / Math.Pow(x[2], 2), 0,
        -x[2] * Math.Sin(x[3]), x[2] * Math.Cos(x[3]), 0, 0, 0,
        0, 0, 0, 1.0 / x[2], -Alpha_n
    }); // Column-major order
    Func<Vector<double>, Matrix<double>> dPsi = (x) =>
    {
        var x1 = x - X_R1.Stack(Exts.Vector(0, 0, 0));
        var r1r1 = x1[0] * x1[0] + x1[1] * x1[1];
        var r1 = Math.Sqrt(r1r1);
        var x2 = x - X_R2.Stack(Exts.Vector(0, 0, 0));
        var r2r2 = x2[0] * x2[0] + x2[1] * x2[1];
        var r2 = Math.Sqrt(r2r2);
        return Matrix<double>.Build.Dense(4, 5, new double[5 * 4]
        {
            -x1[1] / r1r1, x1[0] / r1, -x2[1] / r2r2, x2[0] / r2,
            x1[0] / r1r1, x1[1] / r1, x2[0] / r2r2, x2[1] / r2,
            0, 0, 0, 0,
            0, 0, 0, 0,
            0, 0, 0, 0
        }); // Column-major order
    };
    Func<int, Vector<double>, Matrix<double>, (Vector<double>, Matrix<double>)> Ricatti = (i, x, P) =>
    {
        Vector<double> xx = x;
        Matrix<double> PP = P;
        for (double s = h_state; s < h_obs + h_state / 2; s += h_state)
        {
            PP += h_state * (dPhi(xx) * PP + PP * dPhi(xx).Transpose() + Phi2() * dW * Phi2().Transpose());
            xx += h_state * (Phi1(xx) + Phi2() * mW);
        }
        return (xx, PP);
    };

    // trivial estimate
    Func<int, Vector<double>, Vector<double>, Matrix<double>, (Vector<double>, Matrix<double>)> DummyEstimate = (i, y, x, P) =>
    {
        Vector<double> xx = x;
        Matrix<double> PP = P;
        for (double s = h_state; s < h_obs + h_state / 2; s += h_state)
        {
            PP += h_state * (dPhi(xx) * PP + PP * dPhi(xx).Transpose() + Phi2() * dW * Phi2().Transpose());
            xx += h_state * (Phi1(xx) + Phi2() * mW);
        }
        return ((0.5 * (Utils.pol2cart(Exts.Vector(y[0], y[1])) + X_R1 + Utils.pol2cart(Exts.Vector(y[2], y[3])) + X_R2)).Stack(Exts.Vector(xx[2], xx[3], xx[4])), PP);
    };
    #endregion

    int N = (int)(T / h_obs);

    Func<DiscreteVectorModel> ModelGenerator = () =>
    {
        DiscreteVectorModel model = null;
        int n = 0;
        double h_tolerance = h_state / 2.0;
        double t_nextobservation = h_obs;
        Vector<double> State = X0();
        Vector<double> Obs;
        model = new DiscreteVectorModel(Phi1_discr, Phi2_discr, Psi1_discr, Psi2_discr, (i) => W(), (i) => Nu(), X0(), true);
        //for (double s = 0; s < T; s += h_obs)
        //{
        //    model.Step();
        //}
        for (double s = h_state; s < T; s += h_state)
        {
            if (s > 0)
            {
                State = State + h_state * Phi1(State) + Math.Sqrt(h_state) * Phi2() * W();
            }
            if (Math.Abs(s - t_nextobservation) < h_tolerance)
            {
                Obs = Psi1(State) + Psi2() * Nu();
                t_nextobservation += h_obs;
                n++;
                model.Trajectory.Add(n, new Vector<double>[] { State, Obs });
            }
        }
        return model;
    };

    // filter params file names
    string CMNFFileName = Path.Combine(o.OutputFolder, "cmnf.params");
    if (!string.IsNullOrWhiteSpace(o.CMNFFileName)) { CMNFFileName = o.CMNFFileName; }
    string BCMNFFileName = Path.Combine(o.OutputFolder, "bcmnf.params");
    if (!string.IsNullOrWhiteSpace(o.BCMNFFileName)) { BCMNFFileName = o.BCMNFFileName; }
    string UKFFileName = Path.Combine(o.OutputFolder, "ukf.params");
    if (!string.IsNullOrWhiteSpace(o.UKFFileName)) { UKFFileName = o.UKFFileName; }
    string UKFOptStepwiseNMFileName = Path.Combine(o.OutputFolder, "ukfoptstepwiseNM.params");
    if (!string.IsNullOrWhiteSpace(o.UKFStepwiseNelderMeadFileName)) { UKFOptStepwiseNMFileName = o.UKFStepwiseNelderMeadFileName; }
    string UKFOptIntegralNMFileName = Path.Combine(o.OutputFolder, "ukfoptintegralNM.params");
    if (!string.IsNullOrWhiteSpace(o.UKFIntegralNelderMeadFileName)) { UKFOptIntegralNMFileName = o.UKFIntegralNelderMeadFileName; }
    string UKFOptStepwiseRandFileName = Path.Combine(o.OutputFolder, "ukfoptstepwiserand.params");
    if (!string.IsNullOrWhiteSpace(o.UKFStepwiseRandomShootFileName)) { UKFOptStepwiseRandFileName = o.UKFStepwiseRandomShootFileName; }
    string UKFOptIntegralRandFileName = Path.Combine(o.OutputFolder, "ukfoptintegralrand.params");
    if (!string.IsNullOrWhiteSpace(o.UKFIntegralRandomShootFileName)) { UKFOptIntegralRandFileName = o.UKFIntegralRandomShootFileName; }

    // filters
    List<(FilterType, string)> filters = new List<(FilterType, string)>();
    if (o.CMNF) { filters.Add((FilterType.CMNF, CMNFFileName)); }
    if (o.BCMNF) { filters.Add((FilterType.BCMNF, BCMNFFileName)); }
    if (o.MCMNF) { filters.Add((FilterType.MCMNF, string.Empty)); }
    if (o.UKF) { filters.Add((FilterType.UKFNoOptimization, UKFFileName)); }
    if (o.UKFStepwiseNelderMead) { filters.Add((FilterType.UKFStepwise, UKFOptStepwiseNMFileName)); }
    if (o.UKFIntegralNelderMead) { filters.Add((FilterType.UKFIntegral, UKFOptIntegralNMFileName)); }
    if (o.UKFStepwiseRandomShoot) { filters.Add((FilterType.UKFStepwiseRandomShoot, UKFOptStepwiseRandFileName)); }
    if (o.UKFIntegralRandomShoot) { filters.Add((FilterType.UKFIntegralRandomShoot, UKFOptIntegralRandFileName)); }
    if (o.EKF) { filters.Add((FilterType.EKF, string.Empty)); }
    if (o.Dummy) { filters.Add((FilterType.Dummy, string.Empty)); }

    // test environment
    TestEnvironmentVector testEnv = new TestEnvironmentVector()
    {
        TestName = "Target tracking",
        TestFileName = "TargetTracking",
        Phi1 = Phi1_discr,
        Phi2 = Phi2_discr,
        Psi1 = Psi1_discr,
        Psi2 = Psi2_discr,
        dPhi = (i, x) => dPhi(x),
        dPsi = (i, x) => dPsi(x),
        Xi = (i, x) => Phi1_discr(i, x) + Phi2_discr(i, x) * mW,
        //Zeta = (i, x, y, k) => (y - Psi1_discr(i, x) - Psi2_discr(i, x) * mNu).Stack(Utils.pol2cart(Exts.Vector(y[0], y[1]))+X_R1).Stack(Utils.pol2cart(Exts.Vector(y[2], y[3]))+X_R2),
        Zeta = (i, x, y, k) => (y - Psi1_discr(i, x) - Psi2_discr(i, x) * mNu),
        Alpha = (i, x) => Phi1_discr(i, x) + Phi2_discr(i, x) * mW,
        Gamma = (i, x, y) => (y).Stack(Utils.pol2cart(Exts.Vector(y[0], y[1])) + X_R1).Stack(Utils.pol2cart(Exts.Vector(y[2], y[3])) + X_R2),
        //Gamma = (i, x, y) => y - Psi1_discr(i, x) - Psi2_discr(i, x) * mNu,
        nMCMNF = o.MCMNFTrainCount,
        W = (i) => W(),
        Nu = (i) => Nu(),
        DW = dW,
        DNu = dNu,
        X0 = () => X0(),
        X0Hat = mEta,
        DX0Hat = dEta,
        Predict = Ricatti,
        DummyEstimate = DummyEstimate,
        ModelGenerator = ModelGenerator
    };

    if (o.Bulk)
    {
        testEnv.GenerateBundleSamples(o.T, o.TrainCount, o.OutputFolder);
    }
    else
    {
        testEnv.Initialize(o.T, o.TrainCount, o.OutputFolder, filters, o.Save, o.Load);
        if (o.Sift)
        {
            testEnv.Sifter = (x) => Math.Sqrt(x[0] * x[0] + x[1] * x[1]) > o.SiftBound;
        }
        if (o.Aggregate)
        {
            testEnv.Aggregate(o.OutputFolder, o.OutputFolder, !o.NoBin, !o.NoText);
        }
        if (!o.Skip)
        {
            if (o.SamplesCount == 0)
            {
                testEnv.GenerateBundles(o.BundleCount, o.TestCount, o.OutputFolder, o.Parallel, o.ParallelismDegree, !o.NoBin, !o.NoText);
                if (!o.NoPython)
                {
                    testEnv.RunScript(Path.Combine(o.ScriptsFolder, "estimate_statistics.py"), o.OutputFolder);
                }
            }
            else
            {
                if (o.SamplesCount == 1)
                {
                    testEnv.GenerateOne(o.OutputFolder);
                    if (!o.NoPython)
                    {
                        testEnv.RunScript(Path.Combine(o.ScriptsFolder, "estimate_sample.py"), o.OutputFolder);
                        testEnv.RunScript(Path.Combine(o.ScriptsFolder, "trajectory.py"), o.OutputFolder);
                    }
                }
                else
                {
                    for (int i = 0; i < o.SamplesCount; i++)
                    {
                        testEnv.GenerateOne(o.OutputFolder, i);
                    }
                }
            }
        }
    }
}
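// Note on the discretization used in Run above: the continuous dynamics are reduced to the discrete
// model x_{t+1} = Phi_1(x_t) + Phi_2(x_t) W_t by an Euler-Maruyama-type scheme. The drift is integrated
// over one observation interval h_obs with sub-steps h_state (Phi1_discr), while the diffusion
// accumulated over that interval is approximated by a single Gaussian increment with gain
// Math.Sqrt(h_obs) * Phi2() (Phi2_discr). The simulated trajectories in ModelGenerator, by contrast,
// inject noise at every sub-step with gain Math.Sqrt(h_state). A minimal sketch of one aggregated
// filter-model step under these assumptions (the helper name is hypothetical):
//Vector<double> OneDiscreteStep(Vector<double> x, double hState, double hObs,
//    Func<Vector<double>, Vector<double>> phi1, Func<Matrix<double>> phi2, Func<Vector<double>> w)
//{
//    Vector<double> xx = x;
//    for (double s = hState; s < hObs + hState / 2; s += hState)
//    {
//        xx += hState * phi1(xx);                // drift, sub-stepped as in Phi1_discr
//    }
//    return xx + Math.Sqrt(hObs) * phi2() * w(); // aggregated diffusion increment, as in Phi2_discr
//}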
/// <summary>
/// <para>Unscented transform parameters stepwise optimization procedure: the parameters are optimized anew for each observation time step.</para>
/// <para>The OptimizationMethod param determines the optimization method:</para>
/// <para>- OptimizationMethod.RandomShoot - parameters are randomly sampled and the best sample is chosen as optimal;</para>
/// <para>- OptimizationMethod.NelderMeed - parameters are optimized with the derivative-free Nelder-Mead method.</para>
/// <para>The UTDefinitionType type param determines the relation between the optimized variable and the unscented transform params (see UTParams and its constructors for details).</para>
/// <para>- If type is UTDefinitionType.ImplicitAlpha, then the optimized variable is a scalar [alpha0];</para>
/// <para>- If type is UTDefinitionType.ImplicitAlphaBetaKappa, then the optimized variable is a vector [alpha, beta, kappa];</para>
/// <para>- If type is UTDefinitionType.Explicit, then the optimized variable is a vector [lambda, wm0, wc0, wi].
/// TODO: it is not correct to define the parameters of the unscented transform arbitrarily; they have to be interdependent, so that the mean and covariance are transformed correctly.</para>
/// </summary>
/// <param name="method">Unscented transform parameters optimization method</param>
/// <param name="type">Unscented transform parameters definition type</param>
/// <param name="Phi1">State transformation: a nonlinear function which determines the dynamics: x_{t+1} = Phi_1(x_t) + Phi_2(x_t) W_t</param>
/// <param name="Phi2">Noise multiplier in the dynamics equation: x_{t+1} = Phi_1(x_t) + Phi_2(x_t) W_t</param>
/// <param name="Psi1">Observation transformation: a nonlinear function which determines the relation between the state and the observations: y_t = Psi_1(x_t) + Psi_2(x_t) Nu_t</param>
/// <param name="Psi2">Noise multiplier in the observation equation: y_t = Psi_1(x_t) + Psi_2(x_t) Nu_t</param>
/// <param name="Mw">Mean of the noise in the dynamics equation</param>
/// <param name="Rw">Covariance matrix of the state disturbances</param>
/// <param name="Mnu">Mean of the noise in the observation equation</param>
/// <param name="Rnu">Covariance matrix of the observation noise</param>
/// <param name="Crit">Criterion: a function which determines the quality of the unscented Kalman filter. Depends on the sample covariance of the estimation error on the last step: val = Crit(Cov(X_T - Xhat_T, X_T - Xhat_T))</param>
/// <param name="T">The upper bound of the observation interval</param>
/// <param name="models">Discrete vector model samples</param>
/// <param name="xhat0">Initial condition</param>
/// <param name="DX0Hat">Initial condition covariance</param>
/// <param name="outputFolder">The results are saved to this folder in file "UT_stepwise_optimization_{type}.txt"</param>
static (double, UTParams[], UTParams[]) UTParmsOptimizeStepwise(OptimizationMethod method, UTDefinitionType type,
    Func<int, Vector<double>, Vector<double>> Phi1,
    Func<int, Vector<double>, Matrix<double>> Phi2,
    Func<int, Vector<double>, Vector<double>> Psi1,
    Func<int, Vector<double>, Matrix<double>> Psi2,
    Vector<double> Mw, Matrix<double> Rw, Vector<double> Mnu, Matrix<double> Rnu,
    Func<Matrix<double>, double> Crit, int T, DiscreteVectorModel[] models,
    Vector<double> xhat0, Matrix<double> DX0Hat, string outputFolder)
{
    UTParams[] pForecast = new UTParams[T];
    UTParams[] pCorrect = new UTParams[T];

    (int n, Vector<double> lowerBound, Vector<double> upperBound, Vector<double> initialGuess, string filename) =
        DefineOptimizationParameters(type, xhat0, string.IsNullOrWhiteSpace(outputFolder) ? null : Path.Combine(outputFolder, $"UT_stepwise_optimization_{type}.txt"));

    Vector<double>[] xHatU = models.Select(x => xhat0).ToArray();
    Matrix<double>[] PHatU = models.Select(x => DX0Hat).ToArray();

    double min = double.MaxValue;
    Console.WriteLine($"UKF estimate parameters start");
    DateTime start = DateTime.Now;
    for (int t = 1; t < T; t++)
    //Parallel.For(0, T, new ParallelOptions() { MaxDegreeOfParallelism = System.Environment.ProcessorCount }, t =>
    {
        DateTime startiteration = DateTime.Now;
        min = double.MaxValue;
        Vector<double> argmin = initialGuess;
        switch (method)
        {
            case OptimizationMethod.RandomShoot:
                var OptimumRandom = RandomOptimizer.Minimize(
                    (x) => CalculateSampleStepwiseCriterion(Phi1, Phi2, Psi1, Psi2, Mw, Rw, Mnu, Rnu, Crit, x, t, models, xHatU, PHatU),
                    Exts.Stack(lowerBound, lowerBound), Exts.Stack(upperBound, upperBound), 100, 100, filename);
                min = OptimumRandom.min;
                argmin = OptimumRandom.argmin;
                break;
            case OptimizationMethod.NelderMeed:
                NelderMeadSimplex optimizer = new NelderMeadSimplex(1e-3, 100);
                var objective = ObjectiveFunction.Value(
                    (x) => CalculateSampleStepwiseCriterion(Phi1, Phi2, Psi1, Psi2, Mw, Rw, Mnu, Rnu, Crit, x, t, models, xHatU, PHatU));
                try
                {
                    var optimumNM = optimizer.FindMinimum(objective, Exts.Stack(initialGuess, initialGuess));
                    min = optimumNM.FunctionInfoAtMinimum.Value;
                    argmin = optimumNM.MinimizingPoint;
                }
                catch (Exception e)
                {
                    Console.WriteLine($"Optimizer failed, using the initial guess ({e.Message})");
                    argmin = Exts.Stack(initialGuess, initialGuess);
                }
                break;
        }
        pForecast[t] = new UTParams(xhat0.Count, argmin.Take(n).ToArray());
        pCorrect[t] = new UTParams(xhat0.Count, argmin.Skip(n).Take(n).ToArray());
        for (int i = 0; i < models.Count(); i++)
        {
            (xHatU[i], PHatU[i]) = Step(Phi1, Phi2, Psi1, Psi2, Mw, Rw, Mnu, Rnu, pForecast[t], pCorrect[t], t, models[i].Trajectory[t][1], xHatU[i], PHatU[i]);
        }
        Console.WriteLine($"UKF estimate parameters for t={t}, done in {(DateTime.Now - startiteration).ToString(@"hh\:mm\:ss\.fff")}");
    }
    // });
    DateTime finish = DateTime.Now;
    Console.WriteLine($"UKF estimate parameters finished in {(finish - start).ToString(@"hh\:mm\:ss\.fff")}");
    return (min, pForecast, pCorrect);
}
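// Hypothetical usage sketch (not part of the original code) contrasting the two optimization modes:
// UTParmsOptimize returns one (forecast, correction) pair applied at every step, whereas
// UTParmsOptimizeStepwise returns a pair per time instant, consumed by the same Step(...) routine.
// The UTDefinitionType member name and the trace criterion are assumptions; models, xhat0, DX0Hat
// and outputFolder are assumed to be defined by the caller.
//(double critStepwise, UTParams[] pForecast, UTParams[] pCorrect) = UTParmsOptimizeStepwise(
//    OptimizationMethod.RandomShoot, UTDefinitionType.ImplicitAlphaBetaKappa,
//    Phi1, Phi2, Psi1, Psi2, Mw, Rw, Mnu, Rnu, P => P.Trace(), T, models, xhat0, DX0Hat, outputFolder);
//Vector<double> xHat = xhat0;
//Matrix<double> PHat = DX0Hat;
//for (int t = 1; t < T; t++)
//{
//    // models[0].Trajectory[t][1] is the observation at time t, exactly as used inside the optimizer above
//    (xHat, PHat) = Step(Phi1, Phi2, Psi1, Psi2, Mw, Rw, Mnu, Rnu,
//        pForecast[t], pCorrect[t], t, models[0].Trajectory[t][1], xHat, PHat);
//}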
static void Run(Options o, string[] args) { if (string.IsNullOrWhiteSpace(o.OutputFolder)) { o.OutputFolder = Settings.Default.OutputFolder; } if (string.IsNullOrWhiteSpace(o.PlotsFolder)) { o.PlotsFolder = Settings.Default.LatexFolder; } if (string.IsNullOrWhiteSpace(o.ScriptsFolder)) { o.ScriptsFolder = Settings.Default.ScriptsFolder; } if (string.IsNullOrWhiteSpace(o.TemplatesFolder)) { o.TemplatesFolder = Settings.Default.LatexFolder; } if (new[] { "sphere", "polar", "polartwo" }.Contains(o.Model)) { #region sphere if (o.Model == "sphere") { int N = o.N; Vector <double> mX = Exts.Vector(30, 40, 100); Matrix <double> KX = Exts.Diag(30 * 30, 30 * 30, 30 * 30); Vector <double> mNu = Exts.Vector(0, 0, 0); Matrix <double> KNu = Exts.Diag(30 * 30, Math.Pow(5 * Math.PI / 180.0, 2.0), Math.Pow(5 * Math.PI / 180.0, 2.0)); Normal[] NormalX = new Normal[3] { new Normal(mX[0], Math.Sqrt(KX[0, 0])), new Normal(mX[1], Math.Sqrt(KX[1, 1])), new Normal(mX[2], Math.Sqrt(KX[2, 2])) }; Normal[] NormalNu = new Normal[3] { new Normal(mNu[0], Math.Sqrt(KNu[0, 0])), new Normal(mNu[1], Math.Sqrt(KNu[1, 1])), new Normal(mNu[2], Math.Sqrt(KNu[2, 2])) };; TestEnvironmentStatic testSphere = new TestEnvironmentStatic { Phi = x => Utils.cart2sphere(x), InvPhi = y => Utils.sphere2cart(y), W = () => Exts.Vector(NormalX[0].Sample(), NormalX[1].Sample(), NormalX[2].Sample()), Nu = () => Exts.Vector(NormalNu[0].Sample(), NormalNu[1].Sample(), NormalNu[2].Sample()), MX = mX, KX = KX, KNu = KNu }; testSphere.Initialize(N, o.OutputFolder); Vector <double> mErr; Matrix <double> KErr; Matrix <double> KErrTh; Vector <double> mErr_inv; Matrix <double> KErr_inv; Matrix <double> KErrTh_inv; Vector <double> mErr_lin; Matrix <double> KErr_lin; Matrix <double> KErrTh_lin; Vector <double> mErr_UT; Matrix <double> KErr_UT; Matrix <double> KErrTh_UT; string fileName_alldata = Path.Combine(o.OutputFolder, "test_sphere_alldata.txt"); testSphere.GenerateBundle(N, out mErr, out KErr, out KErrTh, out mErr_inv, out KErr_inv, out KErrTh_inv, out mErr_lin, out KErr_lin, out KErrTh_lin, out mErr_UT, out KErr_UT, out KErrTh_UT, fileName_alldata); string fileName = Path.Combine(o.OutputFolder, "test_sphere.txt"); using (System.IO.StreamWriter outputfile = new System.IO.StreamWriter(fileName)) { //outputfile.WriteLine($"P = {P}"); outputfile.WriteLine($"mErr = {mErr}"); outputfile.WriteLine($"KErr = {KErr}"); outputfile.WriteLine($"KErrTh = {KErrTh}"); //outputfile.WriteLine($"P_inv = {P_inv}"); outputfile.WriteLine($"mErr_inv = {mErr_inv}"); outputfile.WriteLine($"KErr_inv = {KErr_inv}"); outputfile.WriteLine($"KErrTh_inv = {KErrTh_inv}"); //outputfile.WriteLine($"P_lin = {P_lin}"); outputfile.WriteLine($"mErr_lin = {mErr_lin}"); outputfile.WriteLine($"KErr_lin = {KErr_lin}"); outputfile.WriteLine($"KErrTh_lin = {KErrTh_lin}"); //outputfile.WriteLine($"P_UT = {P_UT}"); outputfile.WriteLine($"mErr_UT = {mErr_UT}"); outputfile.WriteLine($"KErr_UT = {KErr_UT}"); outputfile.WriteLine($"KErrTh_UT = {KErrTh_UT}"); outputfile.Close(); } } #endregion #region polar if (o.Model == "polar") { int N = o.N; Vector <double> mX = Exts.Vector(300, 400); Matrix <double> KX = Exts.Diag(30 * 30, 30 * 30); //Vector<double> mX = Exts.Vector(30000, 40000); Matrix<double> KX = Exts.Diag(100 * 100, 100 * 100); //Vector<double> mX = Exts.Vector(30000, 40000); Matrix<double> KX = Exts.Diag(4500 * 4500, 4500 * 4500); Vector <double> mNu = Exts.Vector(0, 0); Matrix <double> KNu = Exts.Diag(Math.Pow(5 * Math.PI / 180.0, 2.0), 30 * 30); Normal[] NormalX = new Normal[2] { new 
Normal(mX[0], Math.Sqrt(KX[0, 0])), new Normal(mX[1], Math.Sqrt(KX[1, 1])) }; Normal[] NormalNu = new Normal[2] { new Normal(mNu[0], Math.Sqrt(KNu[0, 0])), new Normal(mNu[1], Math.Sqrt(KNu[1, 1])) };; //Console.WriteLine(mX.ToLine()); TestEnvironmentStatic testPolar = new TestEnvironmentStatic { Phi = x => Utils.cart2pol(x), InvPhi = y => Utils.pol2cart(y), W = () => Exts.Vector(NormalX[0].Sample(), NormalX[1].Sample()), Nu = () => Exts.Vector(NormalNu[0].Sample(), NormalNu[1].Sample()), MX = mX, KX = KX, KNu = KNu }; testPolar.Initialize(N, o.OutputFolder); Vector <double> mErr; Matrix <double> KErr; Matrix <double> KErrTh; Vector <double> mErr_inv; Matrix <double> KErr_inv; Matrix <double> KErrTh_inv; Vector <double> mErr_lin; Matrix <double> KErr_lin; Matrix <double> KErrTh_lin; Vector <double> mErr_UT; Matrix <double> KErr_UT; Matrix <double> KErrTh_UT; string fileName_alldata = Path.Combine(o.OutputFolder, "test_polar_alldata.txt"); testPolar.GenerateBundle(N, out mErr, out KErr, out KErrTh, out mErr_inv, out KErr_inv, out KErrTh_inv, out mErr_lin, out KErr_lin, out KErrTh_lin, out mErr_UT, out KErr_UT, out KErrTh_UT, fileName_alldata); string fileName = Path.Combine(o.OutputFolder, "test_polar.txt"); using (System.IO.StreamWriter outputfile = new System.IO.StreamWriter(fileName)) { //outputfile.WriteLine($"P = {P}"); outputfile.WriteLine($"mErr = {mErr}"); outputfile.WriteLine($"KErr = {KErr}"); outputfile.WriteLine($"KErrTh = {KErrTh}"); //outputfile.WriteLine($"P_inv = {P_inv}"); outputfile.WriteLine($"mErr_inv = {mErr_inv}"); outputfile.WriteLine($"KErr_inv = {KErr_inv}"); outputfile.WriteLine($"KErrTh_inv = {KErrTh_inv}"); //outputfile.WriteLine($"P_lin = {P_lin}"); outputfile.WriteLine($"mErr_lin = {mErr_lin}"); outputfile.WriteLine($"KErr_lin = {KErr_lin}"); outputfile.WriteLine($"KErrTh_lin = {KErrTh_lin}"); //outputfile.WriteLine($"P_UT = {P_UT}"); outputfile.WriteLine($"mErr_UT = {mErr_UT}"); outputfile.WriteLine($"KErr_UT = {KErr_UT}"); outputfile.WriteLine($"KErrTh_UT = {KErrTh_UT}"); outputfile.Close(); } } #endregion #region polartwo if (o.Model == "polartwo") { int N = o.N; Vector <double> secondpoint = Exts.Vector(-10000, 10000); Vector <double> mX = Exts.Vector(30000, 40000); Matrix <double> KX = Exts.Diag(2000 * 2000, 2000 * 2000); Vector <double> mNu = Exts.Vector(0, 0, 0, 0); Matrix <double> KNu = Exts.Diag(Math.Pow(0.1 * Math.PI / 180.0, 2.0), 50 * 50, Math.Pow(0.1 * Math.PI / 180.0, 2.0), 50 * 50); Normal[] NormalX = new Normal[2] { new Normal(mX[0], Math.Sqrt(KX[0, 0])), new Normal(mX[1], Math.Sqrt(KX[1, 1])) }; Normal[] NormalNu = new Normal[4] { new Normal(mNu[0], Math.Sqrt(KNu[0, 0])), new Normal(mNu[1], Math.Sqrt(KNu[1, 1])), new Normal(mNu[2], Math.Sqrt(KNu[2, 2])), new Normal(mNu[3], Math.Sqrt(KNu[3, 3])) }; //Console.WriteLine(mX.ToLine()); TestEnvironmentStatic testPolar = new TestEnvironmentStatic { Phi = x => Exts.Stack(Utils.cart2pol(x), Utils.cart2pol(x - secondpoint)), InvPhi = y => Exts.Stack(Utils.pol2cart(Exts.Vector(y[0], y[1])), Utils.pol2cart(Exts.Vector(y[2], y[3])) + secondpoint), W = () => Exts.Vector(NormalX[0].Sample(), NormalX[1].Sample()), Nu = () => Exts.Vector(NormalNu[0].Sample(), NormalNu[1].Sample(), NormalNu[2].Sample(), NormalNu[3].Sample()), //Nu = () => Exts.Vector(0,0,0,0), MX = mX, KX = KX, KNu = KNu }; testPolar.Initialize(N, o.OutputFolder); Vector <double> mErr; Matrix <double> KErr; Matrix <double> KErrTh; Vector <double> mErr_inv; Matrix <double> KErr_inv; Matrix <double> KErrTh_inv; Vector <double> mErr_lin; Matrix 
<double> KErr_lin; Matrix <double> KErrTh_lin; Vector <double> mErr_UT; Matrix <double> KErr_UT; Matrix <double> KErrTh_UT; string fileName_alldata = Path.Combine(o.OutputFolder, "test_polartwo_alldata.txt"); testPolar.GenerateBundle(N, out mErr, out KErr, out KErrTh, out mErr_inv, out KErr_inv, out KErrTh_inv, out mErr_lin, out KErr_lin, out KErrTh_lin, out mErr_UT, out KErr_UT, out KErrTh_UT, fileName_alldata); string fileName = Path.Combine(o.OutputFolder, "test_polartwo.txt"); using (System.IO.StreamWriter outputfile = new System.IO.StreamWriter(fileName)) { //outputfile.WriteLine($"P = {P}"); outputfile.WriteLine($"mErr = {mErr}"); outputfile.WriteLine($"KErr = {KErr}"); outputfile.WriteLine($"KErrTh = {KErrTh}"); //outputfile.WriteLine($"P_inv = {P_inv}"); outputfile.WriteLine($"mErr_inv = {mErr_inv}"); outputfile.WriteLine($"KErr_inv = {KErr_inv}"); outputfile.WriteLine($"KErrTh_inv = {KErrTh_inv}"); //outputfile.WriteLine($"P_lin = {P_lin}"); outputfile.WriteLine($"mErr_lin = {mErr_lin}"); outputfile.WriteLine($"KErr_lin = {KErr_lin}"); outputfile.WriteLine($"KErrTh_lin = {KErrTh_lin}"); //outputfile.WriteLine($"P_UT = {P_UT}"); outputfile.WriteLine($"mErr_UT = {mErr_UT}"); outputfile.WriteLine($"KErr_UT = {KErr_UT}"); outputfile.WriteLine($"KErrTh_UT = {KErrTh_UT}"); outputfile.Close(); } } #endregion } else { TestEnvironmentVector testEnv = new TestEnvironmentVector(); if (o.Model == "cubic") { testEnv = new TestCubicSensorScalar(o.DW, o.DNu); } if (o.Model == "invprop-good") { testEnv = new TestInverseProportionGoodScalar(o.Bound, o.DW, o.DNu); } if (o.Model == "invprop-bad") { testEnv = new TestInverseProportionBadScalar(o.Bound, o.DW, o.DNu); } if (o.Model == "logreg-simple") { testEnv = new TestLogisticModelScalar(o.Bound, o.DW, o.DNu); } if (o.Model == "logreg-zero") { testEnv = new TestLogisticModelZeroScalar(o.Bound, o.DW, o.DNu); } if (o.Model == "logreg-uniform") { testEnv = new TestLogisticModelUniformNoiseScalar(); } if (o.Model == "samplereg") { testEnv = new TestSampledRegression(o.DNu); } if (o.Model == "switchingobs") { testEnv = new TestSwitchingObservations(o.DNu); } if (o.Model == "switchingobsident") { switch (o.IdentNumber) { case 1: testEnv = new AnotherTestSwitchingObservationsIdentification(o.DNu); break; case 2: testEnv = new YetAnotherTestSwitchingObservationsIdentification(o.DNu); break; case 3: testEnv = new HopefullyTheLastTestSwitchingObservationsIdentification(o.DNu); break; default: testEnv = new TestSwitchingObservationsIdentification(o.DNu); break; } } if (o.Model == "simpleident") { testEnv = new TestSimpleIdentification(); } string CMNFFileName = Path.Combine(o.OutputFolder, "cmnf.params"); if (!string.IsNullOrWhiteSpace(o.CMNFFileName)) { CMNFFileName = o.CMNFFileName; } string BCMNFFileName = Path.Combine(o.OutputFolder, "bcmnf.params"); if (!string.IsNullOrWhiteSpace(o.BCMNFFileName)) { BCMNFFileName = o.BCMNFFileName; } string UKFFileName = Path.Combine(o.OutputFolder, "ukf.params"); if (!string.IsNullOrWhiteSpace(o.UKFFileName)) { UKFFileName = o.UKFFileName; } string UKFOptStepwiseNMFileName = Path.Combine(o.OutputFolder, "ukfoptstepwiseNM.params"); if (!string.IsNullOrWhiteSpace(o.UKFStepwiseNelderMeadFileName)) { UKFOptStepwiseNMFileName = o.UKFStepwiseNelderMeadFileName; } string UKFOptIntegralNMFileName = Path.Combine(o.OutputFolder, "ukfoptintegralNM.params"); if (!string.IsNullOrWhiteSpace(o.UKFIntegralNelderMeadFileName)) { UKFOptIntegralNMFileName = o.UKFIntegralNelderMeadFileName; } string UKFOptStepwiseRandFileName = 
Path.Combine(o.OutputFolder, "ukfoptstepwiserand.params"); if (!string.IsNullOrWhiteSpace(o.UKFStepwiseRandomShootFileName)) { UKFOptStepwiseRandFileName = o.UKFStepwiseRandomShootFileName; } string UKFOptIntegralRandFileName = Path.Combine(o.OutputFolder, "ukfoptintegralrand.params"); if (!string.IsNullOrWhiteSpace(o.UKFIntegralRandomShootFileName)) { UKFOptIntegralRandFileName = o.UKFIntegralRandomShootFileName; } List <(FilterType, string)> filters = new List <(FilterType, string)>(); if (o.CMNF) { filters.Add((FilterType.CMNF, CMNFFileName)); } if (o.BCMNF) { testEnv.Alpha = testEnv.Xi; testEnv.Gamma = (t, x, y) => y; filters.Add((FilterType.BCMNF, BCMNFFileName)); } if (o.MCMNF) { testEnv.nMCMNF = o.MCMNFTrainCount; filters.Add((FilterType.MCMNF, string.Empty)); } if (o.UKF) { filters.Add((FilterType.UKFNoOptimization, UKFFileName)); } if (o.UKFStepwiseNelderMead) { filters.Add((FilterType.UKFStepwise, UKFOptStepwiseNMFileName)); } if (o.UKFIntegralNelderMead) { filters.Add((FilterType.UKFIntegral, UKFOptIntegralNMFileName)); } if (o.UKFStepwiseRandomShoot) { filters.Add((FilterType.UKFStepwiseRandomShoot, UKFOptStepwiseRandFileName)); } if (o.UKFIntegralRandomShoot) { filters.Add((FilterType.UKFIntegralRandomShoot, UKFOptIntegralRandFileName)); } if (o.EKF) { filters.Add((FilterType.EKF, string.Empty)); } if (o.Dummy) { filters.Add((FilterType.Dummy, string.Empty)); } using (System.IO.StreamWriter outputfile = new System.IO.StreamWriter(Path.Combine(o.OutputFolder, "parameters.txt"), true)) { outputfile.WriteLine($"{DateTime.Now}\t{string.Join(" ", args)}"); outputfile.Close(); } if (o.Bulk) { testEnv.GenerateBundleSamples(o.T, o.TrainCount, o.OutputFolder); } else { testEnv.Initialize(o.T, o.TrainCount, o.OutputFolder, filters, o.Save, o.Load); if (o.Aggregate) { testEnv.Aggregate(o.OutputFolder, o.OutputFolder, !o.NoBin, !o.NoText); } if (!o.Skip) { testEnv.GenerateBundles(o.BundleCount, o.TestCount, o.OutputFolder, o.Parallel, o.ParallelismDegree, !o.NoBin, !o.NoText); //if (o.BundleCount > 1) // testEnv.GenerateBundles(o.BundleCount, o.TestCount, o.OutputFolder, o.Parallel, o.ParallelismDegree); //else // testEnv.GenerateBundle(o.TestCount, o.OutputFolder); if (o.SamplesCount == 1) { testEnv.GenerateOne(o.OutputFolder); } else { for (int i = 0; i < o.SamplesCount; i++) { testEnv.GenerateOne(o.OutputFolder, i); } } //testEnv.GenerateReport(o.TemplatesFolder, o.PlotsFolder); } if (!o.NoPython) { testEnv.ProcessResults(o.OutputFolder, o.ScriptsFolder, o.PlotsFolder); } } } }