/// <summary>
/// Construct a loaded row.
/// </summary>
/// <param name="csv">The CSV file to use.</param>
/// <param name="extra">The number of extra columns to add.</param>
public LoadedRow(ReadCSV csv, int extra)
{
    int count = csv.GetCount();
    _data = new String[count + extra];
    for (int i = 0; i < count; i++)
    {
        _data[i] = csv.Get(i);
    }
}
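// Illustrative usage sketch, not part of the original source: read an entire CSV
// file into LoadedRow objects, reserving one extra column per row (for example,
// to hold a computed output later). The file name "data.csv" and the choice of
// CSVFormat.EgFormat are assumptions of this sketch.
private static IList<LoadedRow> LoadAllRows()
{
    var rows = new List<LoadedRow>();
    var csv = new ReadCSV("data.csv", false, CSVFormat.EgFormat);
    while (csv.Next())
    {
        rows.Add(new LoadedRow(csv, 1)); // one extra slot per row
    }
    csv.Close();
    return rows;
}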
/// <summary>
/// Private constructor.
/// </summary>
private PropertyConstraints()
{
    _data = new Dictionary<String, List<PropertyEntry>>();
    try
    {
        Stream mask0 = ResourceLoader.CreateStream("Encog.Resources.analyst.csv");
        var csv = new ReadCSV(mask0, false, CSVFormat.EgFormat);

        while (csv.Next())
        {
            String sectionStr = csv.Get(0);
            String nameStr = csv.Get(1);
            String typeStr = csv.Get(2);

            // determine type
            PropertyType t;
            if ("boolean".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeBoolean;
            }
            else if ("real".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeDouble;
            }
            else if ("format".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeFormat;
            }
            else if ("int".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeInteger;
            }
            else if ("list-string".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeListString;
            }
            else if ("string".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeString;
            }
            else
            {
                throw new AnalystError("Unknown type constraint: " + typeStr);
            }

            var entry = new PropertyEntry(t, nameStr, sectionStr);

            List<PropertyEntry> list;
            if (_data.ContainsKey(sectionStr))
            {
                list = _data[sectionStr];
            }
            else
            {
                list = new List<PropertyEntry>();
                _data[sectionStr] = list;
            }
            list.Add(entry);
        }

        csv.Close();
        mask0.Close();
    }
    catch (IOException e)
    {
        throw new EncogError(e);
    }
}
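// Hypothetical lookup sketch, not from the original source: the constructor above
// builds a section -> entries map from the embedded analyst.csv resource. A lookup
// over that map could look like this. The method name "LookupEntry" is invented for
// this sketch, and it assumes PropertyEntry exposes a Name property matching the
// nameStr passed to its constructor.
public PropertyEntry LookupEntry(String section, String name)
{
    if (!_data.ContainsKey(section))
    {
        return null;
    }
    foreach (PropertyEntry entry in _data[section])
    {
        if (entry.Name.Equals(name)) // Name property is an assumption
        {
            return entry;
        }
    }
    return null;
}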
/// <summary>
/// Program entry point.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    // Download the data that we will attempt to model.
    string filename = DownloadData(app.Args);

    // Define the format of the data file.
    // This area will change, depending on the columns and
    // format of the file that you are trying to model.
    var format = new CSVFormat('.', ' '); // decimal point and space separated
    IVersatileDataSource source = new CSVDataSource(filename, false, format);

    var data = new VersatileMLDataSet(source);
    data.NormHelper.Format = format;

    ColumnDefinition columnMPG = data.DefineSourceColumn("mpg", 0, ColumnType.Continuous);

    ColumnDefinition columnCylinders = data.DefineSourceColumn("cylinders", 1, ColumnType.Ordinal);
    // It is very important to predefine ordinals, so that the order is known.
    columnCylinders.DefineClass(new[] {"3", "4", "5", "6", "8"});

    data.DefineSourceColumn("displacement", 2, ColumnType.Continuous);
    ColumnDefinition columnHorsePower = data.DefineSourceColumn("horsepower", 3, ColumnType.Continuous);
    data.DefineSourceColumn("weight", 4, ColumnType.Continuous);
    data.DefineSourceColumn("acceleration", 5, ColumnType.Continuous);

    ColumnDefinition columnModelYear = data.DefineSourceColumn("model_year", 6, ColumnType.Ordinal);
    columnModelYear.DefineClass(new[] {"70", "71", "72", "73", "74", "75", "76", "77", "78", "79", "80", "81", "82"});

    data.DefineSourceColumn("origin", 7, ColumnType.Nominal);

    // Define how missing values are represented.
    data.NormHelper.DefineUnknownValue("?");
    data.NormHelper.DefineMissingHandler(columnHorsePower, new MeanMissingHandler());

    // Analyze the data, determine the min/max/mean/sd of every column.
    data.Analyze();

    // Map the prediction column to the output of the model, and all
    // other columns to the input.
    data.DefineSingleOutputOthersInput(columnMPG);

    // Create a feedforward neural network as the model type:
    // MLMethodFactory.TypeFeedforward.
    // You could also use other model types, such as:
    // MLMethodFactory.TypeSVM: Support Vector Machine (SVM)
    // MLMethodFactory.TypeRBFNetwork: RBF Neural Network
    // MLMethodFactory.TypeNEAT: NEAT Neural Network
    // MLMethodFactory.TypePNN: Probabilistic Neural Network
    var model = new EncogModel(data);
    model.SelectMethod(data, MLMethodFactory.TypeFeedforward);

    // Send any output to the console.
    model.Report = new ConsoleStatusReportable();

    // Now normalize the data. Encog will automatically determine the correct
    // normalization type based on the model you chose in the last step.
    data.Normalize();

    // Hold back some data for a final validation.
    // Shuffle the data into a random ordering.
    // Use a seed of 1001 so that we always use the same holdback and will get
    // more consistent results.
    model.HoldBackValidation(0.3, true, 1001);

    // Choose whatever is the default training type for this model.
    model.SelectTrainingType(data);

    // Use a 5-fold cross-validated train. Return the best method found.
    var bestMethod = (IMLRegression) model.Crossvalidate(5, true);

    // Display the training and validation errors.
    Console.WriteLine(@"Training error: " + model.CalculateError(bestMethod, model.TrainingDataset));
    Console.WriteLine(@"Validation error: " + model.CalculateError(bestMethod, model.ValidationDataset));

    // Display our normalization parameters.
    NormalizationHelper helper = data.NormHelper;
    Console.WriteLine(helper.ToString());

    // Display the final model.
    Console.WriteLine(@"Final model: " + bestMethod);

    // Loop over the entire, original, dataset and feed it through the model.
    // This also shows how you would process new data that was not part of your
    // training set. You do not need to retrain; simply use the NormalizationHelper
    // class. After you train, you can save the NormalizationHelper to later
    // normalize and denormalize your data.
    source.Close();
    var csv = new ReadCSV(filename, false, format);
    var line = new String[7];

    IMLData input = helper.AllocateInputVector();

    while (csv.Next())
    {
        var result = new StringBuilder();
        line[0] = csv.Get(1);
        line[1] = csv.Get(2);
        line[2] = csv.Get(3);
        line[3] = csv.Get(4);
        line[4] = csv.Get(5);
        line[5] = csv.Get(6);
        line[6] = csv.Get(7);
        String correct = csv.Get(0);
        helper.NormalizeInputVector(line, ((BasicMLData) input).Data, false);
        IMLData output = bestMethod.Compute(input);
        String predictedMPG = helper.DenormalizeOutputVectorToString(output)[0];

        // Join the raw fields; appending the array itself would only print
        // the type name "System.String[]".
        result.Append(String.Join(",", line));
        result.Append(" -> predicted: ");
        result.Append(predictedMPG);
        result.Append(" (correct: ");
        result.Append(correct);
        result.Append(")");
        Console.WriteLine(result.ToString());
    }
    csv.Close();

    // Delete data file and shut down.
    File.Delete(filename);
    EncogFramework.Instance.Shutdown();
}
/// <summary>
/// Program entry point.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    ErrorCalculation.Mode = ErrorCalculationMode.RMS;

    // Download the data that we will attempt to model.
    string filename = DownloadData(app.Args);

    // Define the format of the data file.
    // This area will change, depending on the columns and
    // format of the file that you are trying to model.
    var format = new CSVFormat('.', ' '); // decimal point and space separated
    IVersatileDataSource source = new CSVDataSource(filename, true, format);

    var data = new VersatileMLDataSet(source);
    data.NormHelper.Format = format;

    ColumnDefinition columnSSN = data.DefineSourceColumn("SSN", ColumnType.Continuous);
    ColumnDefinition columnDEV = data.DefineSourceColumn("DEV", ColumnType.Continuous);

    // Analyze the data, determine the min/max/mean/sd of every column.
    data.Analyze();

    // Use SSN & DEV to predict SSN. For time series it is okay to have SSN
    // both as an input and an output.
    data.DefineInput(columnSSN);
    data.DefineInput(columnDEV);
    data.DefineOutput(columnSSN);

    // Create a feedforward neural network as the model type:
    // MLMethodFactory.TypeFeedforward.
    // You could also use other model types, such as:
    // MLMethodFactory.TypeSVM: Support Vector Machine (SVM)
    // MLMethodFactory.TypeRBFNetwork: RBF Neural Network
    // MLMethodFactory.TypeNEAT: NEAT Neural Network
    // MLMethodFactory.TypePNN: Probabilistic Neural Network
    var model = new EncogModel(data);
    model.SelectMethod(data, MLMethodFactory.TypeFeedforward);

    // Send any output to the console.
    model.Report = new ConsoleStatusReportable();

    // Now normalize the data. Encog will automatically determine the correct
    // normalization type based on the model you chose in the last step.
    data.Normalize();

    // Set time series.
    data.LeadWindowSize = 1;
    data.LagWindowSize = WindowSize;

    // Hold back some data for a final validation.
    // Do not shuffle the data into a random ordering. (Never shuffle time series.)
    // Use a seed of 1001 so that we always use the same holdback and will get
    // more consistent results.
    model.HoldBackValidation(0.3, false, 1001);

    // Choose whatever is the default training type for this model.
    model.SelectTrainingType(data);

    // Use a 5-fold cross-validated train. Return the best method found.
    // (Never shuffle time series.)
    var bestMethod = (IMLRegression) model.Crossvalidate(5, false);

    // Display the training and validation errors.
    Console.WriteLine(@"Training error: " + model.CalculateError(bestMethod, model.TrainingDataset));
    Console.WriteLine(@"Validation error: " + model.CalculateError(bestMethod, model.ValidationDataset));

    // Display our normalization parameters.
    NormalizationHelper helper = data.NormHelper;
    Console.WriteLine(helper.ToString());

    // Display the final model.
    Console.WriteLine(@"Final model: " + bestMethod);

    // Loop over the entire, original, dataset and feed it through the model.
    // This also shows how you would process new data that was not part of your
    // training set. You do not need to retrain; simply use the NormalizationHelper
    // class. After you train, you can save the NormalizationHelper to later
    // normalize and denormalize your data.
    source.Close();
    var csv = new ReadCSV(filename, true, format);
    var line = new String[2];

    // Create a vector to hold each time slice as we build them.
    // These will be grouped together into windows.
    var slice = new double[2];
    var window = new VectorWindow(WindowSize + 1);

    IMLData input = helper.AllocateInputVector(WindowSize + 1);

    // Only display the first 100 rows.
    int stopAfter = 100;

    while (csv.Next() && stopAfter > 0)
    {
        var result = new StringBuilder();

        line[0] = csv.Get(2); // ssn
        line[1] = csv.Get(3); // dev
        helper.NormalizeInputVector(line, slice, false);

        // Enough data to build a full window?
        if (window.IsReady())
        {
            window.CopyWindow(((BasicMLData) input).Data, 0);
            String correct = csv.Get(2); // trying to predict SSN
            IMLData output = bestMethod.Compute(input);
            String predicted = helper.DenormalizeOutputVectorToString(output)[0];

            // Join the raw fields; appending the array itself would only print
            // the type name "System.String[]".
            result.Append(String.Join(",", line));
            result.Append(" -> predicted: ");
            result.Append(predicted);
            result.Append(" (correct: ");
            result.Append(correct);
            result.Append(")");
            Console.WriteLine(result.ToString());
        }

        // Add the normalized slice to the window. We do this just after checking
        // to see if the window is ready, so that the window is always one behind
        // the current row. This is because we are trying to predict the next row.
        window.Add(slice);

        stopAfter--;
    }
    csv.Close();

    // Delete data file and shut down.
    File.Delete(filename);
    EncogFramework.Instance.Shutdown();
}
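// Small sketch of the VectorWindow mechanics used above (illustrative only, with
// an assumed window size of 3): the window is not "ready" until it has collected
// enough slices, after which CopyWindow flattens them into a single input vector.
private static void WindowDemo()
{
    var window = new VectorWindow(3); // 3 slices per window (assumed size)
    var flat = new double[6];         // 3 slices x 2 values per slice
    window.Add(new[] {0.1, 0.2});
    window.Add(new[] {0.3, 0.4});
    Console.WriteLine(window.IsReady()); // False: only 2 of 3 slices so far
    window.Add(new[] {0.5, 0.6});
    Console.WriteLine(window.IsReady()); // True
    window.CopyWindow(flat, 0);          // flat now holds all six values
}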
/// <summary>
/// Read the CSV file.
/// </summary>
private void ReadFile()
{
    ReadCSV csv = null;

    try
    {
        csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);

        ResetStatus();
        int row = 0;
        while (csv.Next() && !ShouldStop())
        {
            UpdateStatus("Reading data");
            foreach (BaseCachedColumn column in Columns)
            {
                if (column is FileData)
                {
                    if (column.Input)
                    {
                        var fd = (FileData) column;
                        String str = csv.Get(fd.Index);
                        double d = Format.Parse(str);
                        fd.Data[row] = d;
                    }
                }
            }
            row++;
        }
    }
    finally
    {
        ReportDone("Reading data");
        if (csv != null)
        {
            csv.Close();
        }
    }
}
/// <summary>
/// Extract fields from a file into a numeric array for machine learning.
/// </summary>
/// <param name="analyst">The analyst to use.</param>
/// <param name="headers">The headers for the input data.</param>
/// <param name="csv">The CSV that holds the input data.</param>
/// <param name="outputLength">The length of the returned array.</param>
/// <param name="skipOutput">True if the output should be skipped.</param>
/// <returns>The encoded data.</returns>
public static double[] ExtractFields(EncogAnalyst analyst,
    CSVHeaders headers, ReadCSV csv, int outputLength, bool skipOutput)
{
    var output = new double[outputLength];
    int outputIndex = 0;

    foreach (AnalystField stat in analyst.Script.Normalize.NormalizedFields)
    {
        if (stat.Action == NormalizationAction.Ignore)
        {
            continue;
        }

        if (stat.Output && skipOutput)
        {
            continue;
        }

        int index = headers.Find(stat.Name);
        String str = csv.Get(index);

        // is this an unknown value?
        if (str.Equals("?") || str.Length == 0)
        {
            IHandleMissingValues handler = analyst.Script.Normalize.MissingValues;
            double[] d = handler.HandleMissing(analyst, stat);

            // should we skip the entire row
            if (d == null)
            {
                return null;
            }

            // copy the returned values in place of the missing values
            for (int i = 0; i < d.Length; i++)
            {
                output[outputIndex++] = d[i];
            }
        }
        else
        {
            // known value
            if (stat.Action == NormalizationAction.Normalize)
            {
                double d = csv.Format.Parse(str.Trim());
                d = stat.Normalize(d);
                output[outputIndex++] = d;
            }
            else
            {
                double[] d = stat.Encode(str.Trim());
                foreach (double element in d)
                {
                    output[outputIndex++] = element;
                }
            }
        }
    }

    return output;
}
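// Illustrative wrapper, not from the original source: stream every row of a CSV
// through ExtractFields and collect the encoded vectors, skipping rows that the
// missing-value handler rejects (ExtractFields returns null for those).
public static IList<double[]> EncodeAllRows(EncogAnalyst analyst,
    CSVHeaders headers, ReadCSV csv, int vectorLength)
{
    var rows = new List<double[]>();
    while (csv.Next())
    {
        double[] encoded = ExtractFields(analyst, headers, csv, vectorLength, false);
        if (encoded != null)
        {
            rows.Add(encoded);
        }
    }
    return rows;
}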
/// <summary>
/// Perform the analysis.
/// </summary>
/// <param name="target">The Encog analyst object to analyze.</param>
public void Process(EncogAnalyst target)
{
    int count = 0;
    CSVFormat csvFormat = ConvertStringConst.ConvertToCSVFormat(_format);
    var csv = new ReadCSV(_filename, _headers, csvFormat);

    // pass one, calculate the min/max
    while (csv.Next())
    {
        if (_fields == null)
        {
            GenerateFields(csv);
        }

        for (int i = 0; i < csv.ColumnCount; i++)
        {
            if (_fields != null)
            {
                _fields[i].Analyze1(csv.Get(i));
            }
        }
        count++;
    }

    if (count == 0)
    {
        throw new AnalystError("Can't analyze file, it is empty.");
    }

    if (_fields != null)
    {
        foreach (AnalyzedField field in _fields)
        {
            field.CompletePass1();
        }
    }

    csv.Close();

    // pass two, standard deviation
    csv = new ReadCSV(_filename, _headers, csvFormat);
    while (csv.Next())
    {
        for (int i = 0; i < csv.ColumnCount; i++)
        {
            if (_fields != null)
            {
                _fields[i].Analyze2(csv.Get(i));
            }
        }
    }

    if (_fields != null)
    {
        foreach (AnalyzedField field in _fields)
        {
            field.CompletePass2();
        }
    }

    csv.Close();

    String str = _script.Properties.GetPropertyString(
        ScriptProperties.SetupConfigAllowedClasses) ?? "";

    bool allowInt = str.Contains("int");
    bool allowReal = str.Contains("real") || str.Contains("double");
    bool allowString = str.Contains("string");

    // remove any classes that did not qualify
    foreach (AnalyzedField field in _fields)
    {
        if (field.Class)
        {
            if (!allowInt && field.Integer)
            {
                field.Class = false;
            }

            if (!allowString && (!field.Integer && !field.Real))
            {
                field.Class = false;
            }

            if (!allowReal && field.Real && !field.Integer)
            {
                field.Class = false;
            }
        }
    }

    // merge with existing
    if ((target.Script.Fields != null)
        && (_fields.Length == target.Script.Fields.Length))
    {
        for (int i = 0; i < _fields.Length; i++)
        {
            // copy the old field name
            _fields[i].Name = target.Script.Fields[i].Name;

            if (_fields[i].Class)
            {
                IList<AnalystClassItem> t = _fields[i].AnalyzedClassMembers;
                IList<AnalystClassItem> s = target.Script.Fields[i].ClassMembers;

                if (s.Count == t.Count)
                {
                    for (int j = 0; j < s.Count; j++)
                    {
                        if (t[j].Code.Equals(s[j].Code))
                        {
                            t[j].Name = s[j].Name;
                        }
                    }
                }
            }
        }
    }

    // now copy the fields
    var df = new DataField[_fields.Length];
    for (int i = 0; i < df.Length; i++)
    {
        df[i] = _fields[i].FinalizeField();
    }
    target.Script.Fields = df;
}
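// Hypothetical driver sketch: how a caller might run the two-pass analysis above.
// The constructor signature (script, filename, headers, file format) is an
// assumption inferred from the fields this method reads; verify it against your
// Encog version before use.
// var analysis = new PerformAnalysis(script, "data.csv", true, AnalystFileFormat.DecpntComma);
// analysis.Process(analyst);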
/// <summary>
/// Analyze the input file.
/// </summary>
/// <param name="input">The input file.</param>
/// <param name="headers">True, if there are headers.</param>
/// <param name="format">The format of the CSV data.</param>
public virtual void Analyze(FileInfo input, bool headers, CSVFormat format)
{
    ResetStatus();
    InputFilename = input;
    ExpectInputHeaders = headers;
    Format = format;

    _columnMapping.Clear();
    _columns.Clear();

    // first count the rows
    TextReader reader = null;
    try
    {
        int recordCount = 0;
        reader = new StreamReader(InputFilename.OpenRead());
        while (reader.ReadLine() != null)
        {
            UpdateStatus(true);
            recordCount++;
        }

        if (headers)
        {
            recordCount--;
        }
        RecordCount = recordCount;
    }
    catch (IOException ex)
    {
        throw new QuantError(ex);
    }
    finally
    {
        ReportDone(true);
        if (reader != null)
        {
            try
            {
                reader.Close();
            }
            catch (IOException e)
            {
                throw new QuantError(e);
            }
        }
        InputFilename = input;
        ExpectInputHeaders = headers;
        Format = format;
    }

    // now analyze columns
    ReadCSV csv = null;
    try
    {
        csv = new ReadCSV(input.ToString(), headers, format);
        if (!csv.Next())
        {
            throw new QuantError("File is empty");
        }

        for (int i = 0; i < csv.ColumnCount; i++)
        {
            String name;

            if (headers)
            {
                name = AttemptResolveName(csv.ColumnNames[i]);
            }
            else
            {
                name = "Column-" + (i + 1);
            }

            // determine if it should be an input/output field
            String str = csv.Get(i);

            bool io = false;

            try
            {
                Format.Parse(str);
                io = true;
            }
            catch (FormatException ex)
            {
                EncogLogging.Log(ex);
            }

            AddColumn(new FileData(name, i, io, io));
        }
    }
    finally
    {
        if (csv != null)
        {
            csv.Close();
        }
        Analyzed = true;
    }
}
/// <summary>
/// Get the data for a specific column.
/// </summary>
/// <param name="name">The column to read.</param>
/// <param name="csv">The CSV file to read from.</param>
/// <returns>The column data.</returns>
public String GetColumnData(String name, ReadCSV csv)
{
    if (!_columnMapping.ContainsKey(name))
    {
        return null;
    }

    BaseCachedColumn column = _columnMapping[name];

    if (!(column is FileData))
    {
        return null;
    }

    var fd = (FileData) column;
    return csv.Get(fd.Index);
}
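// Illustrative sketch, not part of the original source: after Analyze() has
// populated the column mapping, a named column can be pulled row by row while
// re-reading the same file. The method name "PrintColumn" is invented for this
// sketch.
public void PrintColumn(String name)
{
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
    while (csv.Next())
    {
        Console.WriteLine(GetColumnData(name, csv));
    }
    csv.Close();
}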