/// <summary>
/// Write a single row to the output stream, inserting the configured
/// column separator between values.
/// </summary>
/// <param name="tw">The output stream.</param>
/// <param name="row">The row to write out.</param>
public void WriteRow(StreamWriter tw, LoadedRow row)
{
    var builder = new StringBuilder();
    for (int col = 0; col < row.Data.Length; col++)
    {
        // AppendSeparator handles the no-separator-before-first-column case.
        AppendSeparator(builder, _format);
        builder.Append(row.Data[col]);
    }
    tw.WriteLine(builder.ToString());
}
/// <summary>
/// Read the entire input file into memory, recording the column count and,
/// when headers are expected, the column headings.
/// </summary>
private void ReadInputFile()
{
    ResetStatus();
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, InputFormat);
    while (csv.Next() && !ShouldStop())
    {
        UpdateStatus("Reading input file");
        _data.Add(new LoadedRow(csv));
    }
    Count = csv.ColumnCount;
    if (ExpectInputHeaders)
    {
        // Capture the headings so they can be echoed to the output file later.
        InputHeadings = new String[csv.ColumnCount];
        for (int col = 0; col < csv.ColumnCount; col++)
        {
            InputHeadings[col] = csv.ColumnNames[col];
        }
    }
    csv.Close();
}
/// <summary>
/// Process the input file, copying to the output file only the rows
/// accepted by the filter predicate, and counting how many rows survived.
/// De-obfuscated from decompiled goto-based control flow; the statement
/// order and behavior are unchanged.
/// </summary>
/// <param name="outputFile">The output file to write accepted rows to.</param>
public void Process(FileInfo outputFile)
{
    ReadCSV csv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
    StreamWriter writer = base.PrepareOutputFile(outputFile);
    this._xa893fbcbca51543c = 0; // rows written (filtered count)
    base.ResetStatus();
    while (csv.Next() && !base.ShouldStop())
    {
        base.UpdateStatus(false);
        LoadedRow row = new LoadedRow(csv);
        // x023aea3c4dad7033 is the row filter (see its definition below).
        if (this.x023aea3c4dad7033(row))
        {
            base.WriteRow(writer, row);
            this._xa893fbcbca51543c++;
        }
    }
    base.ReportDone(false);
    writer.Close();
    csv.Close();
}
/// <summary>
/// Process the file: for each input row, feed the input columns to the
/// regression method and write the computed outputs back into the row
/// (after the ideal columns) before writing it out.
/// </summary>
/// <param name="outputFile">The output file.</param>
/// <param name="method">The method to use.</param>
/// <exception cref="AnalystError">
/// Thrown when the method expects more inputs than the data provides.
/// </exception>
public void Process(FileInfo outputFile, IMLRegression method)
{
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);

    if (method.InputCount > _inputCount)
    {
        throw new AnalystError("This machine learning method has "
                               + method.InputCount
                               + " inputs, however, the data has " + _inputCount
                               + " inputs.");
    }

    // NOTE(review): input is sized by method.InputCount but the load loop
    // below runs to _inputCount; if method.InputCount < _inputCount this
    // presumably relies on BasicMLData tolerating the extra writes — confirm.
    var input = new BasicMLData(method.InputCount);

    StreamWriter tw = AnalystPrepareOutputFile(outputFile);

    ResetStatus();
    while (csv.Next())
    {
        UpdateStatus(false);
        // Row is created with room for _idealCount extra columns.
        var row = new LoadedRow(csv, _idealCount);

        int dataIndex = 0;
        // load the input data
        for (int i = 0; i < _inputCount; i++)
        {
            String str = row.Data[i];
            double d = Format.Parse(str);
            input[i] = d;
            dataIndex++;
        }

        // do we need to skip the ideal values?
        dataIndex += _idealCount;

        // compute the result
        IMLData output = method.Compute(input);

        // display the computed result
        for (int i = 0; i < _outputCount; i++)
        {
            double d = output[i];
            row.Data[dataIndex++] = Format.Format(d, Precision);
        }

        WriteRow(tw, row);
    }
    ReportDone(false);
    tw.Close();
    csv.Close();
}
/// <summary>
/// Copy rows from the input file to the output file, keeping only rows
/// that pass the ShouldProcess filter; _filteredCount records how many
/// rows were written.
/// </summary>
/// <param name="outputFile">The output file to write to.</param>
public void Process(FileInfo outputFile)
{
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, InputFormat);
    StreamWriter tw = PrepareOutputFile(outputFile);
    _filteredCount = 0;
    ResetStatus();
    while (csv.Next() && !ShouldStop())
    {
        UpdateStatus(false);
        var currentRow = new LoadedRow(csv);
        if (!ShouldProcess(currentRow))
        {
            continue;
        }
        WriteRow(tw, currentRow);
        _filteredCount++;
    }
    ReportDone(false);
    tw.Close();
    csv.Close();
}
/// <summary>
/// Process the file: for each input row, extract and normalize the input
/// fields, run them through the supplied method (classification or
/// regression), then de-normalize the outputs back into the row's extra
/// columns before writing it out.
/// </summary>
/// <param name="outputFile">The output file.</param>
/// <param name="method">THe method to use.</param>
public void Process(FileInfo outputFile, IMLMethod method)
{
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);

    IMLData output;

    int outputLength = _analyst.DetermineTotalInputFieldCount();

    StreamWriter tw = PrepareOutputFile(outputFile);

    ResetStatus();
    while (csv.Next())
    {
        UpdateStatus(false);
        // Row carries _outputColumns extra slots for the computed values.
        var row = new LoadedRow(csv, _outputColumns);

        double[] inputArray = AnalystNormalizeCSV.ExtractFields(_analyst, _analystHeaders, csv, outputLength, true);
        // A time-series window may need several rows before it can emit data.
        if (_series.TotalDepth > 1)
        {
            inputArray = _series.Process(inputArray);
        }

        // inputArray is null while the series window is still filling;
        // in that case the row is written through unchanged.
        if (inputArray != null)
        {
            IMLData input = new BasicMLData(inputArray);

            // evaluation data
            if ((method is IMLClassification) && !(method is IMLRegression))
            {
                // classification only?
                output = new BasicMLData(1);
                output[0] = ((IMLClassification) method).Classify(input);
            }
            else
            {
                // regression
                output = ((IMLRegression) method).Compute(input);
            }

            // skip file data
            int index = _fileColumns;
            int outputIndex = 0;

            // display output
            foreach (AnalystField field in _analyst.Script.Normalize.NormalizedFields)
            {
                // Only fields actually present in the file are emitted.
                if (_analystHeaders.Find(field.Name) != -1)
                {
                    if (field.Output)
                    {
                        if (field.Classify)
                        {
                            // classification: map the output vector back to a class name
                            ClassItem cls = field.DetermineClass(
                                outputIndex, output.Data);
                            outputIndex += field.ColumnsNeeded;
                            if (cls == null)
                            {
                                row.Data[index++] = "?Unknown?";
                            }
                            else
                            {
                                row.Data[index++] = cls.Name;
                            }
                        }
                        else
                        {
                            // regression: de-normalize the single output value
                            double n = output[outputIndex++];
                            n = field.DeNormalize(n);
                            row.Data[index++] = Format
                                .Format(n, Precision);
                        }
                    }
                }
            }
        }

        WriteRow(tw, row);
    }
    ReportDone(false);
    tw.Close();
    csv.Close();
}
/// <summary>
/// Refill the row buffer from the CSV reader. Stale entries are cleared
/// first; _remaining records how many rows were actually loaded.
/// </summary>
/// <param name="csv">The CSV file to load from.</param>
private void LoadBuffer(ReadCSV csv)
{
    for (int slot = 0; slot < _buffer.Length; slot++)
    {
        _buffer[slot] = null;
    }
    int loaded = 0;
    // Next() must be evaluated first to keep the read position identical
    // to the original implementation.
    while (csv.Next() && (loaded < _bufferSize) && !ShouldStop())
    {
        _buffer[loaded] = new LoadedRow(csv);
        loaded++;
    }
    _remaining = loaded;
}
/// <summary>
/// Construct the cluster row.
/// </summary>
/// <param name="input">The input data; wrapped in a BasicMLData for the base class.</param>
/// <param name="theRow">The source CSV row, retained so it can be written out later.</param>
public ClusterRow(double[] input, LoadedRow theRow) : base(new BasicMLData(input))
{
    _row = theRow;
}
/// <summary>
/// Read the entire input file into memory, recording the column count and,
/// when headers are expected, the column headings. De-obfuscated from
/// decompiled goto-based control flow; behavior is unchanged.
/// </summary>
private void xcc7d420ca2a80044()
{
    base.ResetStatus();
    ReadCSV csv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
    while (csv.Next() && !base.ShouldStop())
    {
        base.UpdateStatus("Reading input file");
        this._x4a3f0a05c02f235f.Add(new LoadedRow(csv));
    }
    base.Count = csv.ColumnCount;
    if (base.ExpectInputHeaders)
    {
        base.InputHeadings = new string[csv.ColumnCount];
        for (int i = 0; i < csv.ColumnCount; i++)
        {
            base.InputHeadings[i] = csv.ColumnNames[i];
        }
    }
    csv.Close();
}
/// <summary>
/// Process the file: extract and normalize each row's input fields, run
/// them through the supplied method (classification when the method is
/// IMLClassification only, otherwise regression), then de-normalize the
/// outputs back into the row before writing it out. De-obfuscated from
/// decompiled goto-based control flow; behavior is unchanged.
/// </summary>
/// <param name="outputFile">The output file.</param>
/// <param name="method">The method to use.</param>
public void Process(FileInfo outputFile, IMLMethod method)
{
    ReadCSV csv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
    int outputLength = this._x554f16462d8d4675.DetermineTotalInputFieldCount();
    StreamWriter tw = this.xf911a8958011bd6d(outputFile);
    base.ResetStatus();
    while (csv.Next())
    {
        base.UpdateStatus(false);
        LoadedRow row = new LoadedRow(csv, this._x1402a42b31a31090);
        double[] inputArray = AnalystNormalizeCSV.ExtractFields(this._x554f16462d8d4675, this._xc5416b6511261016, csv, outputLength, true);
        // A time-series window may need several rows before it can emit data.
        if (this._x7acb8518c8ed6133.TotalDepth > 1)
        {
            inputArray = this._x7acb8518c8ed6133.Process(inputArray);
        }
        // inputArray is null while the series window is still filling;
        // the row is then written through unchanged.
        if (inputArray != null)
        {
            IMLData input = new BasicMLData(inputArray);
            IMLData output;
            if ((method is IMLClassification) && !(method is IMLRegression))
            {
                // classification only
                output = new BasicMLData(1);
                output[0] = ((IMLClassification) method).Classify(input);
            }
            else
            {
                // regression
                output = ((IMLRegression) method).Compute(input);
            }
            // skip past the raw file columns
            int index = this._x146688677da5adf5;
            int outputIndex = 0;
            foreach (AnalystField field in this._x554f16462d8d4675.Script.Normalize.NormalizedFields)
            {
                // only fields present in the file, and only output fields
                if ((this._xc5416b6511261016.Find(field.Name) != -1) && field.Output)
                {
                    if (field.Classify)
                    {
                        // classification: map output vector back to a class name
                        ClassItem cls = field.DetermineClass(outputIndex, output.Data);
                        outputIndex += field.ColumnsNeeded;
                        row.Data[index++] = (cls == null) ? "?Unknown?" : cls.Name;
                    }
                    else
                    {
                        // regression: de-normalize the single output value
                        double n = output[outputIndex++];
                        n = field.DeNormalize(n);
                        row.Data[index++] = base.InputFormat.Format(n, base.Precision);
                    }
                }
            }
        }
        base.WriteRow(tw, row);
    }
    base.ReportDone(false);
    tw.Close();
    csv.Close();
}
/// <summary>
/// Balance the data set by class: copy rows to the output file until each
/// distinct value of the target field has been written at most countPer
/// times. De-obfuscated from decompiled goto-based control flow; behavior
/// is unchanged.
/// </summary>
/// <param name="outputFile">The output file to write data to.</param>
/// <param name="targetField">The column whose values define the classes.</param>
/// <param name="countPer">The desired count per class.</param>
public void Process(FileInfo outputFile, int targetField, int countPer)
{
    base.ValidateAnalyzed();
    StreamWriter tw = base.PrepareOutputFile(outputFile);
    this._x4de68924842740c8 = new Dictionary<string, int>();
    ReadCSV dcsv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
    base.ResetStatus();
    while (dcsv.Next() && !base.ShouldStop())
    {
        LoadedRow row = new LoadedRow(dcsv);
        base.UpdateStatus(false);
        string key = row.Data[targetField];
        int count;
        if (this._x4de68924842740c8.ContainsKey(key))
        {
            count = this._x4de68924842740c8[key];
        }
        else
        {
            count = 0;
        }
        // Only write the row while this class is still under its quota.
        if (count < countPer)
        {
            base.WriteRow(tw, row);
            count++;
        }
        this._x4de68924842740c8[key] = count;
    }
    base.ReportDone(false);
    dcsv.Close();
    tw.Close();
}
/// <summary>
/// Construct the cluster row.
/// </summary>
/// <param name="input">The input data; wrapped in a BasicMLData for the base class.</param>
/// <param name="theRow">The source CSV row, retained so it can be written out later.</param>
public ClusterRow(double[] input, LoadedRow theRow) : base(new BasicMLData(input))
{
    this._xa806b754814b9ae0 = theRow;
}
/// <summary>
/// Process the file: feed each row's input columns to the regression
/// method and write the computed outputs back into the row (after the
/// ideal columns) before writing it out. De-obfuscated from decompiled
/// goto-based control flow; behavior is unchanged. Note this variant
/// requires the method's input count to match the data exactly.
/// </summary>
/// <param name="outputFile">The output file.</param>
/// <param name="method">The regression method to use.</param>
/// <exception cref="AnalystError">
/// Thrown when the method's input count differs from the data's input count.
/// </exception>
public void Process(FileInfo outputFile, IMLRegression method)
{
    ReadCSV csv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
    if (method.InputCount != this._x43f451310e815b76)
    {
        throw new AnalystError("This machine learning method has "
                               + method.InputCount
                               + " inputs, however, the data has "
                               + this._x43f451310e815b76
                               + " inputs.");
    }
    IMLData input = new BasicMLData(method.InputCount);
    StreamWriter writer = this.x972236628de6c041(outputFile);
    base.ResetStatus();
    while (csv.Next())
    {
        base.UpdateStatus(false);
        LoadedRow row = new LoadedRow(csv, this._xb52d4a98fad404da);
        int dataIndex = 0;
        // load the input columns
        for (int i = 0; i < this._x43f451310e815b76; i++)
        {
            input[i] = base.InputFormat.Parse(row.Data[i]);
            dataIndex++;
        }
        // skip the ideal columns
        dataIndex += this._xb52d4a98fad404da;
        IMLData output = method.Compute(input);
        // write the computed outputs into the row
        for (int i = 0; i < this._x98cf41c6b0eaf6ab; i++)
        {
            row.Data[dataIndex++] = base.InputFormat.Format(output[i], base.Precision);
        }
        base.WriteRow(writer, row);
    }
    base.ReportDone(false);
    writer.Close();
    csv.Close();
}
/// <summary>
/// Process the file: feed each row's input columns to the regression
/// method and write the computed outputs back into the row (after the
/// ideal columns) before writing it out. De-obfuscated from decompiled
/// goto-based control flow; behavior is unchanged. This variant only
/// rejects methods that expect MORE inputs than the data provides.
/// </summary>
/// <param name="outputFile">The output file.</param>
/// <param name="method">The regression method to use.</param>
/// <exception cref="AnalystError">
/// Thrown when the method expects more inputs than the data provides.
/// </exception>
public void Process(FileInfo outputFile, IMLRegression method)
{
    ReadCSV csv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
    if (method.InputCount > this._x43f451310e815b76)
    {
        throw new AnalystError("This machine learning method has "
                               + method.InputCount
                               + " inputs, however, the data has "
                               + this._x43f451310e815b76
                               + " inputs.");
    }
    IMLData input = new BasicMLData(method.InputCount);
    StreamWriter writer = this.x972236628de6c041(outputFile);
    base.ResetStatus();
    while (csv.Next())
    {
        base.UpdateStatus(false);
        LoadedRow row = new LoadedRow(csv, this._xb52d4a98fad404da);
        int dataIndex = 0;
        // load the input columns
        for (int i = 0; i < this._x43f451310e815b76; i++)
        {
            input[i] = base.InputFormat.Parse(row.Data[i]);
            dataIndex++;
        }
        // skip the ideal columns
        dataIndex += this._xb52d4a98fad404da;
        IMLData output = method.Compute(input);
        // write the computed outputs into the row
        for (int i = 0; i < this._x98cf41c6b0eaf6ab; i++)
        {
            row.Data[dataIndex++] = base.InputFormat.Format(output[i], base.Precision);
        }
        base.WriteRow(writer, row);
    }
    base.ReportDone(false);
    writer.Close();
    csv.Close();
}
/// <summary>
/// Push a row onto the front of the sliding window, dropping the oldest
/// row when the window grows past totalWindowSize entries.
/// </summary>
/// <param name="row">The row to add.</param>
public void LoadRow(LoadedRow row)
{
    data.Insert(0, row);
    bool overflow = data.Count > totalWindowSize;
    if (overflow)
    {
        // evict the oldest entry (the last element)
        data.RemoveAt(data.Count - 1);
    }
}
/// <summary>
/// Write a single row to the output stream, inserting the configured
/// column separator between values. De-obfuscated from decompiled
/// goto-based control flow; behavior is unchanged.
/// </summary>
/// <param name="tw">The output stream.</param>
/// <param name="row">The row to write out.</param>
public void WriteRow(StreamWriter tw, LoadedRow row)
{
    StringBuilder line = new StringBuilder();
    foreach (string str in row.Data)
    {
        AppendSeparator(line, this._x3bd332f47a4845e2);
        line.Append(str);
    }
    tw.WriteLine(line.ToString());
}
/// <summary>
/// Refill the internal row buffer from the CSV reader: clear the buffer,
/// then read until the buffer limit is reached, the file ends, or a stop
/// is requested, recording how many rows were loaded. De-obfuscated from
/// decompiled goto-based control flow; behavior is unchanged.
/// </summary>
/// <param name="xe4aa442e12986e06">The CSV file to load from.</param>
private void xc4041c33ab048f27(ReadCSV xe4aa442e12986e06)
{
    for (int index = 0; index < this._x5cafa8d49ea71ea1.Length; index++)
    {
        this._x5cafa8d49ea71ea1[index] = null;
    }
    int loaded = 0;
    // Next() is evaluated first, matching the original read order.
    while (xe4aa442e12986e06.Next() && (loaded < this._xb85b7645153fc718) && !base.ShouldStop())
    {
        this._x5cafa8d49ea71ea1[loaded++] = new LoadedRow(xe4aa442e12986e06);
    }
    this._x77dede646085d71e = loaded;
}
/// <summary>
/// Process and balance the data: copy rows to the output file until each
/// distinct value of the target field has been written at most countPer times.
/// </summary>
/// <param name="outputFile">The output file to write data to.</param>
/// <param name="targetField">The column whose values define the classes.</param>
/// <param name="countPer">The desired count per class.</param>
public void Process(FileInfo outputFile, int targetField, int countPer)
{
    ValidateAnalyzed();
    StreamWriter tw = PrepareOutputFile(outputFile);
    _counts = new Dictionary<String, Int32>();
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
    ResetStatus();
    while (csv.Next() && !ShouldStop())
    {
        var row = new LoadedRow(csv);
        UpdateStatus(false);
        String key = row.Data[targetField];
        int seen;
        if (!_counts.TryGetValue(key, out seen))
        {
            seen = 0;
        }
        // only write the row while this class is still under its quota
        if (seen < countPer)
        {
            WriteRow(tw, row);
            seen++;
        }
        _counts[key] = seen;
    }
    ReportDone(false);
    csv.Close();
    tw.Close();
}
/// <summary>
/// Analyze the input file: count its records and load every row, paired
/// with its extracted input fields, into the data set ready for
/// clustering. Also defaults the output format to the input format when
/// unset. De-obfuscated from decompiled goto-based control flow; behavior
/// is unchanged.
/// </summary>
/// <param name="theAnalyst">The analyst to use.</param>
/// <param name="inputFile">The input file to analyze.</param>
/// <param name="headers">True, if the input file has headers.</param>
/// <param name="format">The format of the input file.</param>
public void Analyze(EncogAnalyst theAnalyst, FileInfo inputFile, bool headers, CSVFormat format)
{
    base.InputFilename = inputFile;
    base.ExpectInputHeaders = headers;
    base.InputFormat = format;
    base.Analyzed = true;
    this._x554f16462d8d4675 = theAnalyst;
    if (base.OutputFormat == null)
    {
        base.OutputFormat = base.InputFormat;
    }
    this._x4a3f0a05c02f235f = new BasicMLDataSet();
    base.ResetStatus();
    int recordCount = 0;
    int totalColumns = this._x554f16462d8d4675.DetermineTotalColumns();
    ReadCSV dcsv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
    base.ReadHeaders(dcsv);
    this._xc5416b6511261016 = new CSVHeaders(base.InputHeadings);
    while (dcsv.Next() && !base.ShouldStop())
    {
        base.UpdateStatus(true);
        // each row carries one extra column (for the cluster id, added later)
        LoadedRow theRow = new LoadedRow(dcsv, 1);
        double[] input = AnalystNormalizeCSV.ExtractFields(this._x554f16462d8d4675, this._xc5416b6511261016, dcsv, totalColumns, true);
        this._x4a3f0a05c02f235f.Add(new ClusterRow(input, theRow));
        recordCount++;
    }
    base.RecordCount = recordCount;
    base.Count = dcsv.ColumnCount;
    base.ReadHeaders(dcsv);
    dcsv.Close();
    base.ReportDone(true);
}
/// <summary>
/// Process the input file and segregate its rows into the configured
/// output files, each target receiving its requested number of rows.
/// De-obfuscated from decompiled goto-based control flow; behavior is
/// unchanged.
/// </summary>
public void Process()
{
    this.x461c3bf969128260();
    ReadCSV dcsv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
    base.ResetStatus();
    foreach (SegregateTargetPercent percent in this._x2ea7a1eff81ae7c0)
    {
        StreamWriter writer = base.PrepareOutputFile(percent.Filename);
        // quota is checked before reading so a full target consumes no rows
        while ((percent.NumberRemaining > 0) && dcsv.Next() && !base.ShouldStop())
        {
            base.UpdateStatus(false);
            LoadedRow row = new LoadedRow(dcsv);
            base.WriteRow(writer, row);
            percent.NumberRemaining--;
        }
        writer.Close();
    }
    base.ReportDone(false);
    dcsv.Close();
}
/// <summary>
/// Analyze the data. This counts the records and prepares the data to be
/// processed.
/// </summary>
/// <param name="theAnalyst">The analyst to use.</param>
/// <param name="inputFile">The input file to analyze.</param>
/// <param name="headers">True, if the input file has headers.</param>
/// <param name="format">The format of the input file.</param>
public void Analyze(EncogAnalyst theAnalyst, FileInfo inputFile, bool headers, CSVFormat format)
{
    InputFilename = inputFile;
    ExpectInputHeaders = headers;
    Format = format;
    Analyzed = true;
    _analyst = theAnalyst;

    _data = new BasicMLDataSet();
    ResetStatus();
    int rowsSeen = 0;
    int totalColumns = _analyst.DetermineTotalColumns();
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
    ReadHeaders(csv);
    _analystHeaders = new CSVHeaders(InputHeadings);
    while (csv.Next() && !ShouldStop())
    {
        UpdateStatus(true);
        // each row carries one extra column (for the cluster id, added later)
        var row = new LoadedRow(csv, 1);
        double[] fields = AnalystNormalizeCSV.ExtractFields(
            _analyst, _analystHeaders, csv, totalColumns, true);
        _data.Add(new ClusterRow(fields, row));
        rowsSeen++;
    }
    RecordCount = rowsSeen;
    Count = csv.ColumnCount;
    ReadHeaders(csv);
    csv.Close();
    ReportDone(true);
}
/// <summary>
/// Determine if the specified row should be processed, or not. A row is
/// rejected as soon as any excluded field matches its configured value.
/// </summary>
/// <param name="row">The row.</param>
/// <returns>True, if the row should be processed.</returns>
private bool ShouldProcess(LoadedRow row)
{
    foreach (var field in _excludedFields)
    {
        if (row.Data[field.FieldNumber].Trim().Equals(field.FieldValue.Trim()))
        {
            return false;
        }
    }
    return true;
}
/// <summary>
/// Process the file and cluster.
/// </summary>
/// <param name="outputFile">The output file.</param>
/// <param name="clusters">The number of clusters.</param>
/// <param name="theAnalyst">The analyst to use.</param>
/// <param name="iterations">The number of iterations to use.</param>
public void Process(FileInfo outputFile, int clusters, EncogAnalyst theAnalyst, int iterations)
{
    StreamWriter tw = PrepareOutputFile(outputFile);

    ResetStatus();

    var kmeans = new KMeansClustering(clusters, _data);
    kmeans.Iteration(iterations);

    int clusterNum = 0;
    foreach (IMLCluster cl in kmeans.Clusters)
    {
        foreach (IMLData item in cl.Data)
        {
            // the extra trailing column holds the cluster id
            var outRow = new LoadedRow(Format, item, 1);
            outRow.Data[item.Count] = "" + clusterNum;
            WriteRow(tw, outRow);
        }
        clusterNum++;
    }
    ReportDone(false);
    tw.Close();
}
/// <summary>
/// Process the input file and segregate into the output files.
/// </summary>
public void Process()
{
    Validate();

    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, InputFormat);
    ResetStatus();
    foreach (SegregateTargetPercent target in _targets)
    {
        StreamWriter tw = PrepareOutputFile(target.Filename);
        // quota is checked before reading so a full target consumes no rows
        while ((target.NumberRemaining > 0) && csv.Next() && !ShouldStop())
        {
            UpdateStatus(false);
            WriteRow(tw, new LoadedRow(csv));
            target.NumberRemaining--;
        }
        tw.Close();
    }
    ReportDone(false);
    csv.Close();
}
private bool x023aea3c4dad7033(LoadedRow xa806b754814b9ae0) {