public override bool ApplyOn()
{
    using (var csvFile = new CSVFiler(CsvPath))
    {
        // Simple numerical integration using the classical 4th-order Runge-Kutta scheme.
        // Note: LinearSpaced uses spacing (Xn - X0) / (nsteps - 1), which can differ
        // slightly from H when H does not divide the interval exactly.
        int nsteps = Convert.ToInt32(Math.Ceiling((Xn - X0) / H));
        double[] xi = Generate.LinearSpaced(nsteps, X0, Xn);
        double[] dx = new double[nsteps];
        dx[0] = ExecuteComponent(xi[0]);
        double[] yi = new double[nsteps];
        yi[0] = Y0;
        for (int i = 1; i < nsteps; i++)
        {
            // ExecuteComponent is evaluated as a function of x only, so k2 == k3 here
            double k1 = ExecuteComponent(xi[i - 1]);
            double k2 = ExecuteComponent(xi[i - 1] + (H / 2));
            double k3 = ExecuteComponent(xi[i - 1] + (H / 2));
            double k4 = ExecuteComponent(xi[i - 1] + H);
            dx[i] = k1;
            yi[i] = yi[i - 1] + (H / 6) * (k1 + 2 * k2 + 2 * k3 + k4);
        }
        // Per-step rows (WriteLinetoCSV) were replaced by a single array row:
        WriteLinetoCSVArray(csvFile, xi, dx, yi);
    }
    return true;
}
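// Because the integrand above ignores y, the loop degenerates to Simpson-rule
// quadrature of f(x). For comparison, a minimal sketch of full RK4 for
// dy/dx = f(x, y); this is illustrative only, and f, x0, y0, h, n are placeholders
// rather than names from this codebase:
using System;

static double[] RungeKutta4(Func<double, double, double> f, double x0, double y0, double h, int n)
{
    double[] y = new double[n + 1];
    y[0] = y0;
    for (int i = 0; i < n; i++)
    {
        double x = x0 + i * h;
        double k1 = f(x, y[i]);
        double k2 = f(x + h / 2, y[i] + h / 2 * k1);   // midpoint using k1 slope
        double k3 = f(x + h / 2, y[i] + h / 2 * k2);   // midpoint using k2 slope
        double k4 = f(x + h, y[i] + h * k3);           // endpoint using k3 slope
        y[i + 1] = y[i] + (h / 6) * (k1 + 2 * k2 + 2 * k3 + k4);
    }
    return y;
}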
private void WriteLinetoCSVArray(CSVFiler csvFile, double[] x, double[] e, double[] y)
{
    csvFile.NewRow();
    csvFile.AddToRow(0); // Placeholder index; the whole run is written as a single row
    csvFile.AddToRow(DoubleVectorData.ValueToString(x));
    csvFile.AddToRow(DoubleVectorData.ValueToString(e));
    csvFile.AddToRow(DoubleVectorData.ValueToString(y));
    csvFile.WriteRow();
}
private void WriteLinetoCSV(CSVFiler csvFile, int i, double x, double e, double y)
{
    csvFile.NewRow();
    csvFile.AddToRow(i);
    csvFile.AddToRow(x, 4); // 4 decimal places
    csvFile.AddToRow(e, 4);
    csvFile.AddToRow(y, 4);
    csvFile.WriteRow();
}
private void WriteCsvRow(CSVFiler csv)
{
    csv.NewRow();
    IEnumerable<Data> allData = parameters.Select(p => p.Data)
        .Concat(designVariables.Select(dv => dv.Data))
        .Concat(objectives.Select(o => o.Data));
    foreach (Data data in allData)
    {
        csv.AddToRow(data);
    }
    csv.WriteRow();
}
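// For reference, a minimal sketch of the CSVFiler surface these helpers rely on
// (constructor taking a path, NewRow/AddToRow/WriteRow/Dispose). This is an
// illustrative assumption, not the actual implementation; the real class also
// exposes AddToRow overloads for Data objects and for doubles with a
// decimal-places argument.
using System;
using System.Collections.Generic;
using System.IO;

public sealed class CsvFilerSketch : IDisposable
{
    private readonly StreamWriter writer;
    private readonly List<string> row = new List<string>();

    public CsvFilerSketch(string path) { writer = new StreamWriter(path); }

    public void NewRow() => row.Clear();
    public void AddToRow(object value) => row.Add(value?.ToString() ?? "");
    public void AddToRow(double value, int decimals) => row.Add(value.ToString("F" + decimals));
    public void WriteRow() => writer.WriteLine(string.Join(",", row));
    public void Dispose() => writer.Dispose();
}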
public override bool ApplyOn()
{
    using (var csvFile = new CSVFiler(CsvPath))
    {
        Vector<double> x0 = SetInitialConditionsAndparameters();
        WriteCsvRow(csvFile);
        // A gradient-based variant (bfgsMinimizer with objectiveFunctionWithGradient)
        // was sketched here but is disabled in favour of the gradient-free minimizer.
        MinimizationResult result = minimizer.FindMinimum(ObjectiveFunction, x0);
        SetOptimum(result.MinimizingPoint);
        WriteCsvRow(csvFile);
        return result.ReasonForExit == ExitCondition.Converged;
    }
}
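// The minimizer field is not shown in this listing. Assuming it is one of
// MathNet.Numerics.Optimization's unconstrained minimizers (the gradient-free
// NelderMeadSimplex fits the gradient-less ObjectiveFunction used above), a
// self-contained usage sketch; the quadratic test function and starting point
// are made up for illustration:
using System;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.Optimization;

var objective = ObjectiveFunction.Value(v => Math.Pow(v[0] - 1, 2) + Math.Pow(v[1] + 2, 2));
var solver = new NelderMeadSimplex(convergenceTolerance: 1e-8, maximumIterations: 1000);
MinimizationResult result = solver.FindMinimum(objective, Vector<double>.Build.Dense(new[] { 0.0, 0.0 }));
Console.WriteLine(result.MinimizingPoint);                       // approximately (1, -2)
Console.WriteLine(result.ReasonForExit == ExitCondition.Converged);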
public override bool ApplyOn()
{
    int ndes = designVariables.Count;
    int mobj = objectives.Count;
    // Sanity check: Gauss-Newton requires at least as many residuals (objectives)
    // as design variables. The original expression ((mobj + ndes) < ndes) could
    // never be true; mobj >= ndes is assumed to be the intent.
    bool status = mobj >= ndes;

    Vector<double> inputs = Vector<double>.Build.Dense(ndes);
    Vector<double> outputs = Vector<double>.Build.Dense(mobj);

    // Obtain initial values and parameters
    SetInitialConditionsAndparameters(inputs);

    using (var csvFile = new CSVFiler(CsvPath))
    {
        // Initial evaluation
        ExecuteComponent(inputs, outputs);
        double newNorm = outputs.L2Norm();
        WriteCsvRow(csvFile);

        int iterationsWithoutImprovement = 0;
        int iterations = 0;
        for (; iterations < MaxIterations; iterations++)
        {
            Matrix<double> J = JacobianCalculator(inputs, outputs, PercentageOfDifference);
            // Solve the normal equations (J^T J) step = J^T r via Cholesky
            Vector<double> step = J.TransposeThisAndMultiply(J).Cholesky().Solve(J.TransposeThisAndMultiply(outputs));

            // A bounds check on the next input point was sketched here but never enabled.

            // Take the step
            inputs -= step;

            // Function evaluation
            ExecuteComponent(inputs, outputs);
            WriteCsvRow(csvFile);

            // Check whether the objective function has improved
            double oldNorm = newNorm;
            newNorm = outputs.L2Norm();
            if (newNorm < oldNorm)
            {
                iterationsWithoutImprovement = 0;
            }
            else
            {
                iterationsWithoutImprovement++;
                if (iterationsWithoutImprovement >= 10)
                {
                    Console.WriteLine("10 consecutive iterations without improving the objective function");
                    break;
                }
            }
        }
        if (iterations >= MaxIterations - 1)
        {
            Console.WriteLine($"Reached {MaxIterations} iterations in Gauss-Newton Method");
        }
    }
    return status;
}
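// The Gauss-Newton update solves the normal equations (J^T J) dx = J^T r, i.e.
// dx = (J^T J)^-1 J^T r, after which x <- x - dx. A toy illustration of the same
// MathNet call chain used above, with a hypothetical 3-residual / 2-variable
// system (the numbers are made up):
using MathNet.Numerics.LinearAlgebra;

static Vector<double> GaussNewtonStepDemo()
{
    Matrix<double> J = Matrix<double>.Build.DenseOfArray(new double[,] { { 1, 0 }, { 0, 1 }, { 1, 1 } });
    Vector<double> r = Vector<double>.Build.Dense(new[] { 0.5, -0.25, 0.1 });
    // Solve (J^T J) dx = J^T r; the caller would then update x <- x - dx
    return J.TransposeThisAndMultiply(J).Cholesky().Solve(J.TransposeThisAndMultiply(r));
}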
public override bool ApplyOn()
{
    csvCreation = true;

    #region Version 1.0
    var tradestudy = new ArrayList();
    for (int i = 0; i < factors.Count; i++)
    {
        var options = new TS_Input_set("Data", factors[i].Name,
            "min", (double)startingValues[i],
            "max", (double)(startingValues[i] * noOfLevels[i]),
            "Increment", (double)noOfLevels[i]);
        tradestudy.Add(options);
    }
    var treatmentTSInput = new Treatment_InOut_TS_Input(tradestudy);
    #endregion Version 1.0

    string directory = Path.GetDirectoryName(databaseFileName);
    string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(databaseFileName);
    string connectionString = $"DataSource=\"{databaseFileName}\"";
    var connection = new SqlCeConnection(connectionString);
    SqlCeCommand insertCmd = null;
    string sql = "";
    var filer = new CSVFiler(CsvPath);
    try
    {
        #region Results Database Preparation
        for (int i = 0; i < factors.Count; i++)
        {
            Result.MinValues.Add((double)startingValues[i]);
            if (stepSizes == null)
            {
                Result.MaxValues.Add((double)arr[i][arr[i].Count() - 1]);
            }
            else
            {
                Result.MaxValues.Add((double)(startingValues[i] + (noOfLevels[i] - 1) * stepSizes[i]));
            }
        }
        foreach (Data data in responses)
        {
            // Minimum and maximum values for the result are added after execution of the workflow
            responsesMinValues.Add(double.PositiveInfinity);
            responsesMaxValues.Add(double.NegativeInfinity);
        }
        #endregion Results Database Preparation

        #region Permutations Generation
        var permutations = new List<List<decimal>>();
        foreach (decimal init in arr[0])
        {
            permutations.Add(new List<decimal> { init });
        }
        for (int i = 1; i < arr.Length; ++i)
        {
            permutations = Permutation(permutations, arr[i]);
        }
        #endregion Permutations Generation

        #region SDF File
        if (sdfCreation)
        {
            #region Create tables
            if (connection.State == ConnectionState.Closed)
            {
                connection.Open();
            }
            string createTableSQL = "create table " + fileNameWithoutExtension + " (ID int, ";
            for (int i = 0; i < factors.Count(); i++)
            {
                createTableSQL += factors[i].Name + " ";
                if (factors[i] is IntegerData)
                {
                    createTableSQL += "int, ";
                }
                else if (factors[i] is DoubleData)
                {
                    createTableSQL += "float, ";
                }
            }
            for (int i = 0; i < responses.Count(); i++)
            {
                createTableSQL += responses[i].Name + " ";
                if (responses[i] is IntegerData)
                {
                    createTableSQL += "int, ";
                }
                else if (responses[i] is DoubleData)
                {
                    createTableSQL += "float, ";
                }
                else if (responses[i] is DoubleVectorData)
                {
                    createTableSQL += "nvarchar(2000), ";
                }
                else if (responses[i] is DoubleMatrixData)
                {
                    createTableSQL += "nvarchar(4000), ";
                }
            }
            if (factors.Count() + responses.Count() > 0)
            {
                createTableSQL = createTableSQL.Remove(createTableSQL.Length - 2);
            }
            createTableSQL += ")";

            // Create table command for SQL Server Compact Edition
            var createTableSQLCmd = new SqlCeCommand(createTableSQL, connection);
            createTableSQLCmd.ExecuteNonQuery();
            #endregion Create tables

            #region Insert SQL Command
            sql = "insert into " + fileNameWithoutExtension + " (ID, ";
            string valuesString = "values (@ID, ";
            for (int i = 0; i < factors.Count; i++)
            {
                sql += factors[i].Name + ", ";
                valuesString += "@" + factors[i].Name + ", ";
            }
            for (int i = 0; i < responses.Count; i++)
            {
                sql += responses[i].Name + ", ";
                valuesString += "@" + responses[i].Name + ", ";
            }
            if (factors.Count + responses.Count > 0)
            {
                sql = sql.Remove(sql.Length - 2);
                valuesString = valuesString.Remove(valuesString.Length - 2);
            }
            sql += ")";
            valuesString += ")";
            sql += " " + valuesString;
            #endregion Insert SQL Command
        }
        #endregion SDF File

        int tableID = 0;
        long tot = permutations.Count;
        long updatePeriod = Math.Max(tot / 100, 1);
        foreach (List<decimal> list in permutations)
        {
            tableID++;

            #region Parameter Value Assignment
            for (int i = 0; i < list.Count; i++)
            {
                Data workflowInput = Component.ModelDataInputs.Find(d => d.Name == factors[i].Name);
                if (workflowInput is IntegerData)
                {
                    workflowInput.Value = (int)list[i];
                }
                if (workflowInput is DoubleData)
                {
                    workflowInput.Value = (double)list[i];
                }
            }
            #endregion Parameter Value Assignment

            #region SDF Creation
            if (sdfCreation)
            {
                insertCmd = new SqlCeCommand(sql, connection);
                insertCmd.Parameters.AddWithValue("@ID", tableID);
                for (int i = 0; i < list.Count; i++)
                {
                    insertCmd.Parameters.AddWithValue("@" + factors[i].Name, list[i]);
                }
            }
            #endregion SDF Creation

            // Execute workflow
            bool statusToCheck = Component.Execute();

            for (int i = 0; i < responses.Count; i++)
            {
                // Store workflow data outputs as responses; inputs are searched first,
                // then outputs
                Data workflowData = Component.ModelDataInputs.Find(d => d.Name == responses[i].Name)
                                    ?? Component.ModelDataOutputs.Find(d => d.Name == responses[i].Name);
                if (workflowData != null)
                {
                    #region SDF Creation
                    // Note: responses[i] is only refreshed inside this sdfCreation
                    // branch, so the CSV rows below reuse stale values when only
                    // csvCreation is enabled.
                    if (sdfCreation)
                    {
                        if (workflowData is DoubleData)
                        {
                            double value = Convert.ToDouble(workflowData.Value);
                            responses[i].Value = value;
                            if (value < responsesMinValues[i]) { responsesMinValues[i] = value; }
                            if (value > responsesMaxValues[i]) { responsesMaxValues[i] = value; }
                            // Update database insert command
                            insertCmd.Parameters.AddWithValue("@" + responses[i].Name, value);
                        }
                        else if (workflowData is DoubleVectorData)
                        {
                            responses[i].Value = workflowData.Value;
                            // Vectors are stored as comma-separated text
                            string val = string.Join(",", (double[])responses[i].Value);
                            insertCmd.Parameters.AddWithValue("@" + responses[i].Name, val);
                        }
                        else if (workflowData is DoubleMatrixData)
                        {
                            responses[i].Value = workflowData.Value;
                            // Matrices are stored as semicolon-separated rows of
                            // comma-separated values
                            double[,] data = (double[,])responses[i].Value;
                            string val = "";
                            for (int r = 0; r < data.GetLength(0); r++)
                            {
                                for (int c = 0; c < data.GetLength(1); c++)
                                {
                                    val += data[r, c] + ",";
                                }
                                val = val.TrimEnd(',');
                                val += ";";
                            }
                            val = val.TrimEnd(';');
                            insertCmd.Parameters.AddWithValue("@" + responses[i].Name, val);
                        }
                        else if (workflowData is IntegerData)
                        {
                            int value = (int)workflowData.Value;
                            responses[i].Value = value;
                            if (value < responsesMinValues[i]) { responsesMinValues[i] = value; }
                            if (value > responsesMaxValues[i]) { responsesMaxValues[i] = value; }
                            insertCmd.Parameters.AddWithValue("@" + responses[i].Name, value);
                        }
                        else if (workflowData is IntegerVectorData)
                        {
                            responses[i].Value = workflowData.Value;
                            string val = string.Join(",", (int[])responses[i].Value);
                            insertCmd.Parameters.AddWithValue("@" + responses[i].Name, val);
                        }
                        // Other data types are not persisted
                    }
                    #endregion SDF Creation
                }
            }

            // Execute database insert command
            if (statusToCheck)
            {
                #region SDF Creation
                if (sdfCreation)
                {
                    insertCmd.ExecuteNonQuery();
                }
                #endregion SDF Creation

                if (csvCreation)
                {
                    filer.NewRow();
                    filer.AddToRow(tableID);
                    for (int i = 0; i < list.Count; i++)
                    {
                        filer.AddToRow(list[i]);
                    }
                    for (int i = 0; i < responses.Count; i++)
                    {
                        filer.AddToRow(responses[i]);
                    }
                    filer.WriteRow();
                }
            }

            if (tableID % updatePeriod == 0)
            {
                ProgressReposter.ReportProgress(Convert.ToInt32(tableID * 100.0 / tot));
            }
        }
    }
    catch (SqlCeException sqlexception)
    {
        Console.WriteLine(sqlexception.Message);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
    finally
    {
        connection.Close();
        filer.Dispose();
        ProgressReposter.ReportProgress(100);
    }

    // Results Min and Max values
    for (int i = 0; i < responses.Count; i++)
    {
        Result.MinValues.Add(responsesMinValues[i]);
        Result.MaxValues.Add(responsesMaxValues[i]);
    }
    return true;
}
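// Permutation(...) is not shown in this listing. A plausible sketch, assuming it
// expands the running list of combinations with every level of the next factor,
// i.e. one Cartesian-product step of the full-factorial design:
using System.Collections.Generic;

private static List<List<decimal>> Permutation(List<List<decimal>> current, IEnumerable<decimal> nextLevels)
{
    var expanded = new List<List<decimal>>();
    foreach (List<decimal> combination in current)
    {
        foreach (decimal level in nextLevels)
        {
            // Copy the existing combination and append one level of the new factor
            var copy = new List<decimal>(combination) { level };
            expanded.Add(copy);
        }
    }
    return expanded;
}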
protected void ExecuteSegment(WorkflowComponent component, MissionSegment segment)
{
    times = segment.Times;
    int NSamples = segment.Samples;

    var (variablesInputsInModel, constantInputsInModel, variablesOutputsInModel, constantOutputsInModel, missionParameters) =
        segment.GetComponentCalssificationAndIndices(component);

    int NInVariables = variablesInputsInModel.Length;
    int NInConstants = constantInputsInModel.Length;
    int NOutVariables = variablesOutputsInModel.Length;
    int NOutConstants = constantOutputsInModel.Length;

    // Constants
    for (int i = 0; i < NInConstants; i++)
    {
        var parameter = missionParameters[i] as ConstantMissionParameter;
        component.ModelDataInputs[constantInputsInModel[i]].Value = parameter.Value;
    }

    // Per-component, per-segment file name. The original passed CsvPath to the
    // CSVFiler below, which left this value unused; csvPath is assumed to be the
    // intended argument.
    string csvPath = $"{Path.GetFileNameWithoutExtension(CsvPath)}.{component.Name}.{segment}.csv";
    using (var csvFile = new CSVFiler(csvPath))
    {
        for (int i = 0; i < NSamples; i++)
        {
            // If the user requests to cancel the iterations, this method throws
            EndIteratoinIfCancelled();

            // Variables
            for (int j = 0; j < NInVariables; j++)
            {
                var parameter = missionParameters[NInConstants + j] as VariableMissionParameter;
                component.ModelDataInputs[variablesInputsInModel[j]].Value = parameter.Values[i];
            }

            bool statusToCheck = component.Execute();

            // Write csv file
            if (statusToCheck)
            {
                // Update outputs in segment
                for (int j = 0; j < NOutVariables; j++)
                {
                    var parameter = missionParameters[NInConstants + NInVariables + j] as VariableMissionParameter;
                    parameter.Update(component.ModelDataOutputs[variablesOutputsInModel[j]].Value, i);
                }

                // Output constants. The original loop bound was NInConstants;
                // NOutConstants matches the indices being accessed and is assumed
                // to be the intent.
                for (int j = 0; j < NOutConstants; j++)
                {
                    var parameter = missionParameters[NInConstants + NInVariables + NOutVariables + j] as ConstantMissionParameter;
                    parameter.Update(component.ModelDataOutputs[constantOutputsInModel[j]].Value);
                }

                csvFile.NewRow();
                csvFile.AddToRow(i);
                csvFile.AddToRow(times[i], 2);
                for (int d = 0; d < NInVariables; d++)
                {
                    csvFile.AddToRow(component.ModelDataInputs[variablesInputsInModel[d]]);
                }
                for (int d = 0; d < NInConstants; d++)
                {
                    csvFile.AddToRow(component.ModelDataInputs[constantInputsInModel[d]]);
                }
                for (int d = 0; d < NOutVariables; d++)
                {
                    csvFile.AddToRow(component.ModelDataOutputs[variablesOutputsInModel[d]]);
                }
                for (int d = 0; d < NOutConstants; d++)
                {
                    csvFile.AddToRow(component.ModelDataOutputs[constantOutputsInModel[d]]);
                }
                csvFile.WriteRow();
            }
        }
    }
}
private void WriteResults()
{
    // An earlier draft aggregated parameter values across segments via a lookup on
    // parameter name; the per-segment version below replaced it.
    using (var csvFile = new CSVFiler(CsvPath))
    {
        csvFile.NewRow();
        csvFile.AddToRow(1);
        foreach (var segment in Mission.Segments)
        {
            double[] times = segment.Times;
            csvFile.AddToRow(DoubleVectorData.ValueToString(times));
            foreach (var parameter in segment.Parameters)
            {
                var values = new double[0];
                if (parameter is ConstantMissionParameter constantParameter)
                {
                    // Constants are expanded to one value per time sample
                    if (constantParameter.Data.Value is double d)
                    {
                        values = MathNet.Numerics.Generate.Repeat(times.Length, d);
                    }
                    else if (constantParameter.Data.Value is int i)
                    {
                        values = MathNet.Numerics.Generate.Repeat(times.Length, (double)i);
                    }
                }
                else if (parameter is VariableMissionParameter variableParameter)
                {
                    values = variableParameter.Values.Cast<double>().ToArray();
                }
                csvFile.AddToRow(DoubleVectorData.ValueToString(values));
            }
        }
        csvFile.WriteRow();
    }
}
protected void Execute_(ExecutableComponent oModSub, long NFactors, long NSamples, double[,] inputsTable, string[] factorNames, string[] responseNames)
{
    int[] indices = new int[NFactors];
    for (int i = 0; i < NFactors; i++)
    {
        indices[i] = oModSub.ModelDataInputs.IndexOf(oModSub.ModelDataInputs.Find(d => d.Name == factorNames[i]));
    }

    List<Data> allData = (oModSub as WorkflowComponent).GetAllData();
    long NResponses = allData.Count - NFactors;
    int[] indices2 = new int[NResponses];
    for (int i = 0; i < NResponses; i++)
    {
        indices2[i] = allData.IndexOf(allData.Find(d => d.Name == responseNames[i]));
    }

    IterationSize = NSamples;
    using (var csvFile = new CSVFiler(CsvPath))
    {
        for (int i = 0; i < NSamples; i++)
        {
            // If the user requests to cancel the iterations, this method throws
            EndIteratoinIfCancelled();

            for (int j = 0; j < NFactors; j++)
            {
                oModSub.ModelDataInputs[indices[j]].Value = inputsTable[i, j];
            }

            bool statusToCheck = false;
            try
            {
                statusToCheck = oModSub.Execute();
            }
            catch (Exception e)
            {
                // A failed sample is recorded as zeros below rather than aborting
                // the sweep (the original swallowed the exception silently)
                Console.WriteLine(e.Message);
            }

            // Report that i iterations have been completed
            ReportProgress(i);

            if (statusToCheck)
            {
                csvFile.NewRow();
                csvFile.AddToRow(i);
                for (int d = 0; d < NFactors; d++)
                {
                    csvFile.AddToRow(oModSub.ModelDataInputs[indices[d]]);
                }
                for (int d = 0; d < NResponses; d++)
                {
                    csvFile.AddToRow(allData[indices2[d]]);
                }
                csvFile.WriteRow();
            }
            else
            {
                csvFile.NewRow();
                csvFile.AddToRow(i);
                for (int d = 0; d < NFactors; d++)
                {
                    csvFile.AddToRow(oModSub.ModelDataInputs[indices[d]]);
                }
                for (int d = 0; d < NResponses; d++)
                {
                    csvFile.AddToRow(0.0);
                }
                csvFile.WriteRow();
            }
        }
    }
}
public void Analyse(List<IProbabilityDistribution> inputDistributions, List<IProbabilityDistribution> outputDistributions, WorkflowComponent innerWorkflow)
{
    using (filer = new CSVFiler(path))
    {
        int NVariables = inputDistributions.Count;
        int NTargets = outputDistributions.Count;

        var sampler = new FASTSampler(NVariables);
        int NSamples = sampler.Ns;
        int Ns2 = NSamples / 2;

        propagator.Propagate(inputDistributions, outputDistributions, innerWorkflow);
        Matrix<double> samples = propagator.Samples;

        double[] ResultMeans = new double[NTargets];
        double[] ResultVariances = new double[NTargets];
        double[] ResultStandardDeviations = new double[NTargets];

        for (int r = 0; r < NTargets; r++) // for the r-th result (output)
        {
            // Intermediate sums
            double sum = 0;
            double sumSquared = 0;
            int r2 = NVariables + r;
            for (int s = 0; s < Ns2; s++)
            {
                double sample = samples[s, r2];
                sum += sample;
                sumSquared += sample * sample;
            }
            ResultMeans[r] = sum / Ns2;
            // Sobol's original approach (Saltelli's approach to be added)
            ResultVariances[r] = sumSquared / Ns2 - ResultMeans[r] * ResultMeans[r];
            ResultStandardDeviations[r] = Sqrt(ResultVariances[r]);
        }

        var A = new List<double>();
        var B = new List<double>();
        var Lambda = new List<double>();
        double[] samplesFAST = sampler.Samples;
        Matrix<double> Sensitivities = Matrix<double>.Build.Dense(NTargets, NVariables);

        for (int t = 0; t < NTargets; t++)
        {
            ResultVariances[t] = 0;
            Lambda.Clear();
            // "A Quantitative Model-Independent Method for Global Sensitivity
            // Analysis of Model Output" sums j over -(NSamples - 1) / 2 to
            // (NSamples - 1) / 2; the negative half appears to be unused here.
            for (int s = 0; s < Ns2; s++)
            {
                double tempFA = 0;
                double tempFB = 0;
                for (int k = 0; k < NSamples; k++)
                {
                    tempFA += samples[k, NVariables + t] * Cos(s * samplesFAST[k]);
                    tempFB += samples[k, NVariables + t] * Sin(s * samplesFAST[k]);
                }
                tempFA /= NSamples;
                tempFB /= NSamples;
                A.Add(tempFA);
                B.Add(tempFB);
                double lambda = Pow(tempFA, 2) + Pow(tempFB, 2);
                Lambda.Add(lambda);
                ResultVariances[t] += lambda;
            }
            ResultVariances[t] *= 2;
            ResultStandardDeviations[t] = Sqrt(ResultVariances[t]);
            ResultMeans[t] = samples.SubMatrix(0, Ns2, NVariables + t, 1).Column(0).Sum() / Ns2;

            // Get sensitivities: gather the spectrum at the harmonics of each
            // input's characteristic frequency Omega[v]
            for (int v = 0; v < NVariables; v++)
            {
                Sensitivities[t, v] = 0;
                for (int p = 1; p <= sampler.M; p++)
                {
                    int temp_counter = p * sampler.Omega[v] - 1;
                    Sensitivities[t, v] += Lambda[temp_counter];
                }
                Sensitivities[t, v] *= 2 / ResultVariances[t];
            }

            // Write .csv file
            filer.NewRow();
            filer.AddToRow(t);
            filer.AddToRow(outputDistributions[t].Name);
            foreach (var s in Sensitivities.Row(t))
            {
                filer.AddToRow(s);
            }
            filer.WriteRow();
        }

        for (int t = 0; t < NTargets; t++)
        {
            filer.NewRow();
            filer.AddToRow(t);
            filer.AddToRow(outputDistributions[t].Name);
            for (int v = 0; v < NVariables; v++)
            {
                filer.AddToRow(Sensitivities[t, v]);
            }
            // Remainder not explained by first-order effects
            filer.AddToRow(1 - Sensitivities.Row(t).Sum());
            filer.WriteRow(); // Missing in the original; these rows were never flushed
        }
    }
}
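// In FAST, the model response along the search curve is Fourier-analysed. With
// coefficients A_s, B_s, the spectrum is Lambda_s = A_s^2 + B_s^2, the total
// variance is V ~= 2 * sum_s Lambda_s, and the first-order index of input v sums
// the spectrum at the harmonics of its characteristic frequency omega_v:
//
//     S_v = 2 * (Lambda_{omega_v} + Lambda_{2 omega_v} + ... + Lambda_{M omega_v}) / V
//
// which is what the p-loop over sampler.Omega[v] above computes (the "- 1"
// converts the 1-based harmonic to a 0-based list index). The final column,
// 1 - sum_v S_v, is the variance share left to interactions.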
public void Propagate(List<IProbabilityDistribution> inputDistributions, List<IProbabilityDistribution> outputDistributions, WorkflowComponent innerWorkflow)
{
    filer = createFile ? new CSVFiler(path) : null;
    try
    {
        int NInputDistributions = inputDistributions.Count;
        int Nout = outputDistributions.Count;

        foreach (IProbabilityDistribution dist in inputDistributions)
        {
            dist.Data.Value = dist.Mean;
        }

        // Defining the "deltas" for the computation of the propagation stencils:
        double[] h_plus = new double[NInputDistributions];
        double[] h_minus = new double[NInputDistributions];
        for (int j = 0; j < NInputDistributions; j++)
        {
            IProbabilityDistribution dist = inputDistributions[j];
            h_plus[j] = dist.Skewness / 2 + Math.Sqrt(dist.Kurtosis - (3.0 / 4) * Math.Pow(dist.Skewness, 2));
            h_minus[j] = dist.Skewness / 2 - Math.Sqrt(dist.Kurtosis - (3.0 / 4) * Math.Pow(dist.Skewness, 2));
        }

        // Setup of the URQ weights:
        double W0 = 1;
        double[] Wp = new double[NInputDistributions];
        double[] Wp_plus = new double[NInputDistributions];
        double[] Wp_minus = new double[NInputDistributions];
        double[] Wp_plusminus = new double[NInputDistributions];
        for (int i = 0; i < NInputDistributions; i++)
        {
            W0 += 1.0 / (h_plus[i] * h_minus[i]);
            double delta = h_plus[i] - h_minus[i];
            Wp[i] = 1.0 / delta;
            Wp_plus[i] = (Math.Pow(h_plus[i], 2) - h_plus[i] * h_minus[i] - 1) / Math.Pow(delta, 2);
            Wp_minus[i] = (Math.Pow(h_minus[i], 2) - h_plus[i] * h_minus[i] - 1) / Math.Pow(delta, 2);
            Wp_plusminus[i] = 2 / Math.Pow(delta, 2);
        }

        // Center point evaluation
        ExecutePoint(innerWorkflow, Nout, out double[] output0);
        double[] means = new double[Nout];
        double[] variances = new double[Nout];
        for (int i = 0; i < Nout; i++)
        {
            means[i] = W0 * output0[i];
        }

        // Stencil evaluation:
        for (int p = 0; p < NInputDistributions; p++)
        {
            IProbabilityDistribution dist = inputDistributions[p];

            // Dimension p, forward stencil point evaluation
            dist.Data.Value = dist.Mean + h_plus[p] * Math.Sqrt(dist.Variance);
            ExecutePoint(innerWorkflow, Nout, out double[] output_plus);

            // Dimension p, backward stencil point evaluation
            dist.Data.Value = dist.Mean + h_minus[p] * Math.Sqrt(dist.Variance);
            ExecutePoint(innerWorkflow, Nout, out double[] output_minus);

            // Estimation of the mean and variance for all the model outputs:
            for (int j = 0; j < Nout; j++)
            {
                means[j] += Wp[p] * ((output_plus[j] / h_plus[p]) - (output_minus[j] / h_minus[p]));
                double deltap = (output_plus[j] - output0[j]) / h_plus[p];
                double deltam = (output_minus[j] - output0[j]) / h_minus[p];
                variances[j] += Wp_plus[p] * deltap * deltap
                              + Wp_minus[p] * deltam * deltam
                              + Wp_plusminus[p] * deltap * deltam;
            }

            // Recover original value
            dist.Data.Value = dist.Mean;
        }

        for (int i = 0; i < outputDistributions.Count; i++)
        {
            // Skewness 0 and kurtosis 3 (Gaussian values) are assumed for the outputs
            outputDistributions[i].Update(new double[] { means[i], variances[i], 0, 3 });
        }
    }
    finally
    {
        filer?.Dispose();
    }
}
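// The stencil offsets above follow the univariate reduced quadrature (URQ) rule:
// for each input, h_plus/minus = gamma/2 +/- sqrt(kappa - 3*gamma^2/4), where
// gamma is the skewness and kappa the kurtosis of that input. For a Gaussian
// input (gamma = 0, kappa = 3) this gives h = +/- sqrt(3), i.e. the classical
// 3-point Gauss-Hermite abscissae at mu +/- sqrt(3)*sigma, so the whole scheme
// costs only 2n + 1 model evaluations for n inputs.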
public override bool ApplyOn(ExecutableComponent oModSub)
{
    string[] output1 = new string[count];
    using (var csvFile = new CSVFiler(CsvPath))
    {
        for (int i = 0; i < count; i++)
        {
            double xi = this.xi[i];
            double xn = this.xn[i];
            double stepSize = this.stepSize[i];
            Data input = this.input[i];
            Data output = this.output[i];
            string option = this.option[i];

            // Select the initial step size as 1/1000th of the interval
            double h1 = (xn - xi) / 1000;
            bool zeroSensitivity = false;
            while (true)
            {
                double f1 = EvalModel(oModSub, input, output, xi);
                double f2 = EvalModel(oModSub, input, output, xi + h1);
                // If the output appears to be insensitive to the input
                if (Math.Abs(f1 - f2) < 1e-16)
                {
                    // Double the step
                    h1 *= 2;
                    // If the step exceeds the studied interval,
                    // declare the output insensitive
                    if (h1 > (xn - xi))
                    {
                        zeroSensitivity = true;
                        break;
                    }
                }
                else
                {
                    zeroSensitivity = false;
                    break;
                }
            }

            double h2 = h1 / 10;
            double h = 0;
            double termc = 0.01;
            if (zeroSensitivity)
            {
                Console.WriteLine("The output has zero sensitivity with respect to the selected input");
            }
            else
            {
                double x;
                double f1;
                double f2;
                while (true)
                {
                    x = xi;
                    f1 = EvalModel(oModSub, input, output, x);
                    x = xi + 2 * h1;
                    f2 = EvalModel(oModSub, input, output, x);
                    double s1 = (f2 - f1) / (2 * h1);

                    x = xi;
                    f1 = EvalModel(oModSub, input, output, x);
                    x = xi + 2 * h2;
                    f2 = EvalModel(oModSub, input, output, x);
                    double s2 = (f2 - f1) / (2 * h2);

                    double diff = (s1 - s2) / s1;
                    // Decide the step size for the partial derivative:
                    // if the sensitivities are similar, keep the step
                    if (diff <= termc)
                    {
                        break;
                    }
                    // Otherwise reduce the step by a factor of 10
                    h1 = h2;
                    h2 /= 10;
                }
                h = h1;

                double[] xinp = Generate.LinearRange(xi, stepSize, xn);
                int nSteps = xinp.Length;
                double[] sens = new double[nSteps];
                double[] f0s = new double[nSteps];
                xinp[0] = xi;
                f0s[0] = EvalModel(oModSub, input, output, xi);
                output1[i] = output1[i] + input.Name + " Sen(abs)" + "\r\n";
                for (int j = 0; j < nSteps; j++)
                {
                    x = xinp[j] - h;
                    f1 = EvalModel(oModSub, input, output, x);
                    x = xinp[j] + h;
                    f2 = EvalModel(oModSub, input, output, x);
                    if (option == "Absolute Sensitivity")
                    {
                        sens[j] = EvalSenAb(f1, f2, h);
                    }
                    else
                    {
                        sens[j] = EvalSenRel(f1, f2, f0s[j], xinp[j], h);
                        // Guard against running past the last sample
                        // (the original indexed f0s[j + 1] unconditionally)
                        if (j + 1 < nSteps)
                        {
                            f0s[j + 1] = EvalModel(oModSub, input, output, xinp[j + 1]);
                        }
                    }
                    // One row per sample; NewRow was outside the loop in the
                    // original, which would have merged all samples into one row
                    csvFile.NewRow();
                    csvFile.AddToRow(xinp[j]);
                    csvFile.AddToRow(sens[j]);
                    csvFile.WriteRow();
                }
            }
        }
    }
    return true;
}
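// EvalSenAb and EvalSenRel are not shown in this listing. From the call sites
// (f1 and f2 evaluated at x - h and x + h, plus the local point f0 at x for the
// relative case), plausible central-difference definitions would be the
// following sketch; these are assumptions, not the confirmed implementations:
private static double EvalSenAbSketch(double f1, double f2, double h)
    => (f2 - f1) / (2 * h);                    // absolute sensitivity: df/dx

private static double EvalSenRelSketch(double f1, double f2, double f0, double x, double h)
    => ((f2 - f1) / (2 * h)) * (x / f0);       // relative sensitivity: (df/dx)*(x/f)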
public void Propagate(List<IMCSDistribution> inputDistributions, List<ProbabilityDistributionFromSamples> outputDistributions, WorkflowComponent innerWorkflow)
{
    filer = createFile ? new CSVFiler(path) : null;
    try
    {
        int Ninputs = inputDistributions.Count;
        int Noutputs = outputDistributions.Count;

        Matrix<double> samples = Matrix<double>.Build.Dense(NSamples, Ninputs + Noutputs);
        for (int i = 0; i < inputDistributions.Count; i++)
        {
            samples.SetColumn(i, inputDistributions[i].GetSamples(NSamples));
        }

        for (int s = 0; s < NSamples; s++)
        {
            int v = 0;
            foreach (IProbabilityDistribution input in inputDistributions)
            {
                input.Data.Value = samples[s, v];
                v++;
            }

            // Execute workflow
            bool statusToCheck = innerWorkflow.Execute();

            foreach (IProbabilityDistribution output in outputDistributions)
            {
                samples[s, v] = Convert.ToDouble(output.Data.Value);
                v++;
            }

            if (createFile && statusToCheck)
            {
                // Execute database insert command
                filer.NewRow();
                foreach (Data input in innerWorkflow.ModelDataInputs)
                {
                    filer.AddToRow(input);
                }
                // The original iterated ModelDataInputs again here;
                // ModelDataOutputs is assumed to be the intended collection
                foreach (Data output in innerWorkflow.ModelDataOutputs)
                {
                    filer.AddToRow(output);
                }
                filer.WriteRow();
            }
        }

        int o = Ninputs;
        foreach (IProbabilityDistribution output in outputDistributions)
        {
            output.Update(samples.Column(o).AsArray());
            o++;
        }

        Samples = samples;
    }
    finally
    {
        filer?.Dispose();
    }
}
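// Layout of the samples matrix built above: one row per Monte Carlo realisation,
// with the first Ninputs columns holding the sampled inputs and the remaining
// Noutputs columns the corresponding workflow outputs. For 2 inputs and 1 output:
//
//     [ x1(0)  x2(0)  y(0) ]
//     [ x1(1)  x2(1)  y(1) ]
//     [  ...    ...   ...  ]
//
// output.Update(...) then receives each output column as an empirical sample set
// from which the ProbabilityDistributionFromSamples is fitted.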