public void Propagate(List<IProbabilityDistribution> inputDistributions, List<IProbabilityDistribution> outputDistributions, WorkflowComponent innerWorkflow)
{
    filer = (createFile) ? new CSVFiler(path) : null;
    try
    {
        int NInputDistributions = inputDistributions.Count;
        int Nout = outputDistributions.Count;

        // Start every input at its mean value:
        foreach (IProbabilityDistribution dist in inputDistributions)
        {
            dist.Data.Value = dist.Mean;
        }

        // Defining the "deltas" for the computation of the propagation stencils:
        double[] h_plus = new double[NInputDistributions];
        double[] h_minus = new double[NInputDistributions];
        for (int j = 0; j < NInputDistributions; j++)
        {
            IProbabilityDistribution dist = inputDistributions[j];
            h_plus[j] = dist.Skewness / 2 + Math.Sqrt(dist.Kurtosis - (3.0 / 4) * Math.Pow(dist.Skewness, 2));
            h_minus[j] = dist.Skewness / 2 - Math.Sqrt(dist.Kurtosis - (3.0 / 4) * Math.Pow(dist.Skewness, 2));
        }

        // Setup of the URQ weights:
        double W0 = 1;
        double[] Wp = new double[NInputDistributions];
        double[] Wp_plus = new double[NInputDistributions];
        double[] Wp_minus = new double[NInputDistributions];
        double[] Wp_plusminus = new double[NInputDistributions];
        for (int i = 0; i < NInputDistributions; i++)
        {
            W0 += 1.0 / (h_plus[i] * h_minus[i]);
            double delta = h_plus[i] - h_minus[i];
            Wp[i] = 1.0 / delta;
            Wp_plus[i] = (Math.Pow(h_plus[i], 2) - h_plus[i] * h_minus[i] - 1) / Math.Pow(delta, 2);
            Wp_minus[i] = (Math.Pow(h_minus[i], 2) - h_plus[i] * h_minus[i] - 1) / Math.Pow(delta, 2);
            Wp_plusminus[i] = 2 / Math.Pow(delta, 2);
        }

        // Center point evaluation
        ExecutePoint(innerWorkflow, Nout, out double[] output0);
        double[] means = new double[Nout];
        double[] variances = new double[Nout];
        for (int i = 0; i < Nout; i++)
        {
            means[i] = W0 * output0[i];
        }

        // Stencil evaluation:
        for (int p = 0; p < NInputDistributions; p++)
        {
            IProbabilityDistribution dist = inputDistributions[p];

            // Dimension p, forward stencil point evaluation
            dist.Data.Value = dist.Mean + h_plus[p] * Math.Sqrt(dist.Variance);
            ExecutePoint(innerWorkflow, Nout, out double[] output_plus);

            // Dimension p, backward stencil point evaluation
            dist.Data.Value = dist.Mean + h_minus[p] * Math.Sqrt(dist.Variance);
            ExecutePoint(innerWorkflow, Nout, out double[] output_minus);

            // Estimation of the mean and variance for all the model outputs:
            for (int j = 0; j < Nout; j++)
            {
                means[j] += Wp[p] * ((output_plus[j] / h_plus[p]) - (output_minus[j] / h_minus[p]));
                double deltap = (output_plus[j] - output0[j]) / h_plus[p];
                double deltam = (output_minus[j] - output0[j]) / h_minus[p];
                variances[j] += Wp_plus[p] * deltap * deltap
                              + Wp_minus[p] * deltam * deltam
                              + Wp_plusminus[p] * deltap * deltam;
            }

            // Recover original value
            dist.Data.Value = dist.Mean;
        }

        // Update the outputs with the estimated mean and variance
        // (skewness 0 and kurtosis 3, i.e. a normal approximation):
        for (int i = 0; i < outputDistributions.Count; i++)
        {
            outputDistributions[i].Update(new double[] { means[i], variances[i], 0, 3 });
        }
    }
    finally
    {
        filer?.Dispose();
    }
}
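// A minimal sketch of how the URQ Propagate above might be driven. The
// NormalDistribution type and this helper method are assumptions for
// illustration; only Propagate itself comes from the code above. For a
// standard normal input (skewness 0, kurtosis 3) the stencil offsets
// reduce to h_plus = +sqrt(3) and h_minus = -sqrt(3).
private void PropagateUrqExample(WorkflowComponent innerWorkflow) // hypothetical helper
{
    var inputs = new List<IProbabilityDistribution>
    {
        new NormalDistribution(10.0, 4.0), // hypothetical concrete type: mean, variance
        new NormalDistribution(0.5, 0.01)
    };
    var outputs = new List<IProbabilityDistribution>
    {
        new NormalDistribution(0.0, 1.0)   // placeholder; overwritten by Update(...)
    };

    // Estimates each output's mean and variance with 2*N + 1 workflow runs
    // (one center point plus a forward and backward point per input):
    Propagate(inputs, outputs, innerWorkflow);
}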
public override bool ApplyOn()
{
    csvCreation = true;

    #region Version 1.0
    var tradestudy = new ArrayList();
    for (int i = 0; i < factors.Count; i++)
    {
        var options = new TS_Input_set("Data", factors[i].Name,
                                       "min", (double)startingValues[i],
                                       "max", (double)(startingValues[i] * noOfLevels[i]),
                                       "Increment", (double)noOfLevels[i]);
        tradestudy.Add(options);
    }
    var treatmentTSInput = new Treatment_InOut_TS_Input(tradestudy);
    #endregion Version 1.0

    string directory = Path.GetDirectoryName(databaseFileName);
    string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(databaseFileName);
    string connectionString = $"DataSource=\"{databaseFileName}\"";
    var connection = new SqlCeConnection(connectionString);
    SqlCeCommand insertCmd = null;
    string sql = "";
    var filer = new CSVFiler(CsvPath);

    try
    {
        #region Results Database Preparation
        for (int i = 0; i < factors.Count; i++)
        {
            Result.MinValues.Add((double)startingValues[i]);
            if (stepSizes == null)
            {
                Result.MaxValues.Add((double)arr[i][arr[i].Count() - 1]);
            }
            else
            {
                Result.MaxValues.Add((double)(startingValues[i] + (noOfLevels[i] - 1) * stepSizes[i]));
            }
        }

        foreach (Data data in responses)
        {
            // Minimum and maximum values of the responses are added after the workflow executions
            responsesMinValues.Add(Double.PositiveInfinity);
            responsesMaxValues.Add(Double.NegativeInfinity);
        }
        #endregion Results Database Preparation

        #region Permutations Generation
        // Build the full-factorial set of factor-level combinations
        // (a sketch of the Permutation helper is given after this method):
        var permutations = new List<List<decimal>>();
        foreach (decimal init in arr[0])
        {
            var temp = new List<decimal> { init };
            permutations.Add(temp);
        }
        for (int i = 1; i < arr.Length; ++i)
        {
            permutations = Permutation(permutations, arr[i]);
        }
        #endregion Permutations Generation

        #region SDF File
        if (sdfCreation)
        {
            #region Create tables
            if (connection.State == ConnectionState.Closed)
            {
                connection.Open();
            }

            string createTableSQL = "create table " + fileNameWithoutExtension + " (ID int, ";
            for (int i = 0; i < factors.Count(); i++)
            {
                string columnHeader = factors[i].Name;
                createTableSQL += columnHeader + " ";
                if (factors[i] is IntegerData)
                {
                    createTableSQL += "int, ";
                }
                else if (factors[i] is DoubleData)
                {
                    createTableSQL += "float, ";
                }
            }
            for (int i = 0; i < responses.Count(); i++)
            {
                string columnHeader = responses[i].Name;
                createTableSQL += columnHeader + " ";
                if (responses[i] is IntegerData)
                {
                    createTableSQL += "int, ";
                }
                else if (responses[i] is DoubleData)
                {
                    createTableSQL += "float, ";
                }
                else if (responses[i] is DoubleVectorData)
                {
                    createTableSQL += "nvarchar(2000), ";
                }
                else if (responses[i] is DoubleMatrixData)
                {
                    createTableSQL += "nvarchar(4000), ";
                }
            }
            if (factors.Count() + responses.Count() > 0)
            {
                // Remove the trailing ", "
                createTableSQL = createTableSQL.Remove(createTableSQL.Length - 2);
            }
            createTableSQL += ")";

            // Create SQL "create table" command for SQL Server Compact Edition
            var createTableSQLCmd = new SqlCeCommand(createTableSQL, connection);
            createTableSQLCmd.ExecuteNonQuery();
            #endregion Create tables

            #region Insert SQL Command
            sql = "insert into " + fileNameWithoutExtension + " (ID, ";
            string valuesString = "values (@ID, ";
            for (int i = 0; i < factors.Count; i++)
            {
                sql += factors[i].Name + ", ";
                valuesString += "@" + factors[i].Name + ", ";
            }
            for (int i = 0; i < responses.Count; i++)
            {
                sql += responses[i].Name + ", ";
                valuesString += "@" + responses[i].Name + ", ";
            }
            if (factors.Count + responses.Count > 0)
            {
                sql = sql.Remove(sql.Length - 2);
                valuesString = valuesString.Remove(valuesString.Length - 2);
            }
            sql += ")";
            valuesString += ")";
            sql += " " + valuesString;
            #endregion Insert SQL Command
        }
        #endregion SDF File
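        // For illustration, with a database file "doe.sdf", a DoubleData factor
        // "Span", an IntegerData factor "NRibs" and a DoubleData response "Mass"
        // (all assumed names), the two statements built above would read:
        //
        //     create table doe (ID int, Span float, NRibs int, Mass float)
        //     insert into doe (ID, Span, NRibs, Mass) values (@ID, @Span, @NRibs, @Mass)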
        int tableID = 0;
        int sz = factors.Count;
        //int tot = (int)inf[1];
        long tot = permutations.Count;
        double[,] indices = new double[tot, sz];
        long updatePeriod = Math.Max(tot / 100, 1);

        foreach (List<decimal> list in permutations)
        {
            tableID++;

            #region Parameter Value Assignment
            for (int i = 0; i < list.Count; i++)
            {
                Data workflowInput = Component.ModelDataInputs.Find(delegate (Data d) { return d.Name == factors[i].Name; });
                if (workflowInput is IntegerData)
                {
                    workflowInput.Value = (int)list[i];
                }
                if (workflowInput is DoubleData)
                {
                    workflowInput.Value = (double)list[i];
                }
            }
            #endregion Parameter Value Assignment

            #region SDF Creation
            if (sdfCreation)
            {
                insertCmd = new SqlCeCommand(sql, connection);
                insertCmd.Parameters.AddWithValue("@ID", tableID);
                for (int i = 0; i < list.Count; i++)
                {
                    insertCmd.Parameters.AddWithValue("@" + factors[i].Name, list[i]);
                }
            }
            #endregion SDF Creation

            // Execute workflow
            bool statusToCheck = Component.Execute();

            for (int i = 0; i < responses.Count; i++)
            {
                // Store workflow data outputs as responses
                Data workflowData = Component.ModelDataInputs.Find(delegate (Data d) { return d.Name == responses[i].Name; });
                if (workflowData == null)
                {
                    workflowData = Component.ModelDataOutputs.Find(delegate (Data d) { return d.Name == responses[i].Name; });
                }
                if (workflowData != null)
                {
                    #region SDF Creation
                    if (sdfCreation)
                    {
                        if (workflowData is DoubleData)
                        {
                            responses[i].Value = Convert.ToDouble(workflowData.Value); // atif and xin 29042016
                            if ((double)workflowData.Value < responsesMinValues[i])
                            {
                                responsesMinValues[i] = Convert.ToDouble(workflowData.Value);
                            }
                            if ((double)workflowData.Value > responsesMaxValues[i])
                            {
                                responsesMaxValues[i] = Convert.ToDouble(workflowData.Value);
                            }
                            // Update database insert command
                            insertCmd.Parameters.AddWithValue("@" + responses[i].Name, (double)responses[i].Value);
                        }
                        else if (workflowData is DoubleVectorData)
                        {
                            responses[i].Value = workflowData.Value;
                            // Update database insert command: vector stored as a comma-separated string
                            string val = "";
                            foreach (double d in (double[])responses[i].Value)
                            {
                                val += d + ",";
                            }
                            val = val.TrimEnd(',');
                            insertCmd.Parameters.AddWithValue("@" + responses[i].Name, val);
                        }
                        else if (workflowData is DoubleMatrixData)
                        {
                            responses[i].Value = workflowData.Value;
                            // Update database insert command: matrix stored as comma-separated rows joined by semicolons
                            double[,] data = (double[,])responses[i].Value;
                            string val = "";
                            for (int r = 0; r < data.GetLength(0); r++)
                            {
                                for (int c = 0; c < data.GetLength(1); c++)
                                {
                                    val += data[r, c] + ",";
                                }
                                val = val.TrimEnd(',');
                                val += ";";
                            }
                            val = val.TrimEnd(';');
                            insertCmd.Parameters.AddWithValue("@" + responses[i].Name, val);
                        }
                        else if (workflowData is IntegerData)
                        {
                            responses[i].Value = (int)workflowData.Value;
                            if ((int)workflowData.Value < responsesMinValues[i])
                            {
                                responsesMinValues[i] = (int)workflowData.Value;
                            }
                            if ((int)workflowData.Value > responsesMaxValues[i])
                            {
                                responsesMaxValues[i] = (int)workflowData.Value;
                            }
                            // Update database insert command
                            insertCmd.Parameters.AddWithValue("@" + responses[i].Name, (int)responses[i].Value);
                        }
                        else if (workflowData is IntegerVectorData)
                        {
                            responses[i].Value = workflowData.Value;
                            // Update database insert command: vector stored as a comma-separated string
                            string val = "";
                            foreach (int d in (int[])responses[i].Value)
                            {
                                val += d + ",";
                            }
                            val = val.TrimEnd(',');
                            insertCmd.Parameters.AddWithValue("@" + responses[i].Name, val);
                        }
                        else
                        {
                            // Other data types are not persisted
                        }
                    }
                    #endregion SDF Creation
                }
            }

            // Execute database insert command
            if (statusToCheck)
            {
                #region SDF Creation
                if (sdfCreation)
                {
                    insertCmd.ExecuteNonQuery();
                }
                #endregion SDF Creation

                if (csvCreation)
                {
                    filer.NewRow();
                    filer.AddToRow(tableID);
                    for (int i = 0; i < list.Count; i++)
                    {
                        filer.AddToRow(list[i]);
                    }
                    for (int i = 0; i < responses.Count; i++)
                    {
                        filer.AddToRow(responses[i]);
                    }
                    filer.WriteRow();
                }
            }

            if (tableID % updatePeriod == 0)
            {
                ProgressReposter.ReportProgress(Convert.ToInt32(tableID * 100.0 / tot));
            }
        }
    }
    catch (SqlCeException sqlexception)
    {
        Console.WriteLine("Oh Crap. " + sqlexception.Message);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Oh Crap. " + ex.Message);
    }
    finally
    {
        connection.Close();
        filer.Dispose();
        ProgressReposter.ReportProgress(100);
    }

    // Results Min and Max values
    for (int i = 0; i < responses.Count; i++)
    {
        Result.MinValues.Add(responsesMinValues[i]);
        Result.MaxValues.Add(responsesMaxValues[i]);
    }
    return true;
}
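// The Permutation helper called from the "Permutations Generation" region of
// ApplyOn is not shown in this file. A minimal sketch, assuming it forms the
// cross product of the combinations built so far with the next factor's levels:
private static List<List<decimal>> Permutation(List<List<decimal>> prefixes, IEnumerable<decimal> levels)
{
    var result = new List<List<decimal>>();
    foreach (List<decimal> prefix in prefixes)
    {
        foreach (decimal level in levels)
        {
            // Copy the existing combination and append one level of the next
            // factor, so every combination of factor levels appears exactly once.
            var extended = new List<decimal>(prefix) { level };
            result.Add(extended);
        }
    }
    return result;
}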
public void Propagate(List<IMCSDistribution> inputDistributions, List<ProbabilityDistributionFromSamples> outputDistributions, WorkflowComponent innerWorkflow)
{
    filer = (createFile) ? new CSVFiler(path) : null;
    try
    {
        int Ninputs = inputDistributions.Count;
        int Noutputs = outputDistributions.Count;

        // One row per sample: input columns first, then output columns
        Matrix<double> samples = Matrix<double>.Build.Dense(NSamples, Ninputs + Noutputs);
        for (int i = 0; i < inputDistributions.Count; i++)
        {
            samples.SetColumn(i, inputDistributions[i].GetSamples(NSamples));
        }

        for (int s = 0; s < NSamples; s++)
        {
            int v = 0;
            foreach (IProbabilityDistribution input in inputDistributions)
            {
                input.Data.Value = samples[s, v];
                v++;
            }

            // Execute workflow
            bool statusToCheck = innerWorkflow.Execute();

            foreach (IProbabilityDistribution output in outputDistributions)
            {
                samples[s, v] = Convert.ToDouble(output.Data.Value);
                v++;
            }

            if (createFile && statusToCheck)
            {
                // Write the sample to the CSV file
                filer.NewRow();
                //filer.AddToRow(i);
                foreach (Data input in innerWorkflow.ModelDataInputs)
                {
                    filer.AddToRow(input);
                }
                foreach (Data output in innerWorkflow.ModelDataOutputs)
                {
                    filer.AddToRow(output);
                }
                filer.WriteRow();
            }
        }

        // Fit each output distribution to its column of samples
        int o = Ninputs;
        foreach (IProbabilityDistribution output in outputDistributions)
        {
            output.Update(samples.Column(o).AsArray());
            o++;
        }
        Samples = samples;
    }
    finally
    {
        filer?.Dispose();
    }
}
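// A minimal sketch of how the Monte Carlo Propagate above might be driven.
// The UniformMCSDistribution type, its constructor, and this helper method are
// assumptions for illustration; IMCSDistribution, ProbabilityDistributionFromSamples,
// Propagate, and the Samples matrix layout (one row per sample, input columns
// first, then outputs) come from the code above.
private void PropagateMonteCarloExample(WorkflowComponent innerWorkflow) // hypothetical helper
{
    var inputs = new List<IMCSDistribution>
    {
        new UniformMCSDistribution(0.0, 1.0)         // hypothetical concrete type: lower, upper
    };
    var outputs = new List<ProbabilityDistributionFromSamples>
    {
        new ProbabilityDistributionFromSamples()     // hypothetical constructor; populated by Update(...)
    };

    // Runs the workflow NSamples times and fits each output distribution to its samples:
    Propagate(inputs, outputs, innerWorkflow);

    Matrix<double> s = Samples;                      // NSamples x (Ninputs + Noutputs)
}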