/// <summary>
/// Construct the time-series utility.
/// </summary>
/// <param name="theAnalyst">The analyst to use.</param>
/// <param name="includeOutput">Should output fields be included.</param>
/// <param name="headings">The column headings.</param>
public TimeSeriesUtil(EncogAnalyst theAnalyst, bool includeOutput,
                      IEnumerable<string> headings)
{
    _buffer = new List<double[]>();
    _headingMap = new Dictionary<String, Int32>();
    _analyst = theAnalyst;

    // Window geometry: rows of lag (past) + rows of lead (future) + the current row.
    _lagDepth = _analyst.LagDepth;
    _leadDepth = _analyst.LeadDepth;
    _totalDepth = _lagDepth + _leadDepth + 1;

    // Input width depends on whether output fields take part in the window.
    if (includeOutput)
    {
        _inputSize = _analyst.DetermineTotalColumns();
    }
    else
    {
        _inputSize = _analyst.DetermineTotalInputFieldCount();
    }

    _outputSize = _analyst.DetermineInputCount()
                  + _analyst.DetermineOutputCount();

    // Map each heading (upper-cased) to its zero-based column position.
    int position = 0;
    foreach (String heading in headings)
    {
        _headingMap[heading.ToUpper()] = position++;
    }
}
/// <summary>
/// Process the file: read each input row, normalize its fields, apply the
/// time-series window when one is configured, evaluate the supplied method
/// on the resulting input, and write the original row plus the de-normalized
/// model output to the output file.
/// </summary>
/// <param name="outputFile">The output file.</param>
/// <param name="method">The method to use. A method that implements
/// <c>IMLClassification</c> but not <c>IMLRegression</c> is evaluated by
/// classification; otherwise by regression.</param>
public void Process(FileInfo outputFile, IMLMethod method)
{
    var csv = new ReadCSV(InputFilename.ToString(),
                          ExpectInputHeaders, Format);

    // FIX: the reader and writer were previously closed only on the happy
    // path; an exception mid-processing leaked both handles. try/finally
    // guarantees cleanup while preserving the original close order
    // (ReportDone -> tw.Close -> csv.Close) when no exception occurs.
    try
    {
        IMLData output;

        // Re-initialize every normalized field before processing.
        foreach (AnalystField field in _analyst.Script.Normalize.NormalizedFields)
        {
            field.Init();
        }

        int outputLength = _analyst.DetermineTotalInputFieldCount();

        StreamWriter tw = PrepareOutputFile(outputFile);
        try
        {
            ResetStatus();
            while (csv.Next())
            {
                UpdateStatus(false);
                var row = new LoadedRow(csv, _outputColumns);

                // Normalize the raw CSV fields into the model's input layout.
                double[] inputArray = AnalystNormalizeCSV.ExtractFields(
                    _analyst, _analystHeaders, csv, outputLength, true);

                // Apply the time-series window when lag/lead depth is
                // configured; presumably returns null until enough rows are
                // buffered to fill the window — TODO confirm against
                // TimeSeriesUtil.Process.
                if (_series.TotalDepth > 1)
                {
                    inputArray = _series.Process(inputArray);
                }

                if (inputArray != null)
                {
                    IMLData input = new BasicMLData(inputArray);

                    // evaluation data
                    if ((method is IMLClassification)
                        && !(method is IMLRegression))
                    {
                        // classification only?
                        var tmp = new BasicMLData(1);
                        tmp[0] = ((IMLClassification) method).Classify(input);
                        output = tmp;
                    }
                    else
                    {
                        // regression
                        output = ((IMLRegression) method).Compute(input);
                    }

                    // skip file data: model output columns start after the
                    // columns copied straight from the input file.
                    int index = _fileColumns;
                    int outputIndex = 0;

                    // display output
                    foreach (AnalystField field
                        in _analyst.Script.Normalize.NormalizedFields)
                    {
                        if (_analystHeaders.Find(field.Name) != -1)
                        {
                            if (field.Output)
                            {
                                if (field.Classify)
                                {
                                    // classification: map the output columns
                                    // back to a class label.
                                    ClassItem cls = field.DetermineClass(
                                        outputIndex, output);
                                    outputIndex += field.ColumnsNeeded;
                                    if (cls == null)
                                    {
                                        row.Data[index++] = "?Unknown?";
                                    }
                                    else
                                    {
                                        row.Data[index++] = cls.Name;
                                    }
                                }
                                else
                                {
                                    // regression: de-normalize the single
                                    // output value for display.
                                    double n = output[outputIndex++];
                                    n = field.DeNormalize(n);
                                    row.Data[index++] = Format
                                        .Format(n, Precision);
                                }
                            }
                        }
                    }
                }

                // NOTE: the row is written even when the window has not yet
                // produced an input (inputArray == null) — original behavior,
                // preserved.
                WriteRow(tw, row);
            }
            ReportDone(false);
        }
        finally
        {
            tw.Close();
        }
    }
    finally
    {
        csv.Close();
    }
}
/// <summary>
/// Construct the time-series utility.
/// </summary>
/// <param name="theAnalyst">The analyst to use.</param>
/// <param name="includeOutput">Should output fields be included.</param>
/// <param name="headings">The column headings.</param>
public TimeSeriesUtil(EncogAnalyst theAnalyst, bool includeOutput,
                      IEnumerable<string> headings)
{
    _buffer = new List<double[]>();
    _headingMap = new Dictionary<String, Int32>();
    _analyst = theAnalyst;

    // Window geometry: rows of lag (past) + rows of lead (future) + the current row.
    _lagDepth = _analyst.LagDepth;
    _leadDepth = _analyst.LeadDepth;
    _totalDepth = _lagDepth + _leadDepth + 1;

    // Input width depends on whether output fields take part in the window.
    _inputSize = includeOutput
                     ? _analyst.DetermineTotalColumns()
                     : _analyst.DetermineTotalInputFieldCount();
    _outputSize = _analyst.DetermineInputCount()
                  + _analyst.DetermineOutputCount();

    // Map each heading (upper-cased) to its zero-based column position.
    // NOTE(review): ToUpper() is culture-sensitive (e.g. Turkish 'i');
    // presumably lookups elsewhere upper-case the same way — confirm before
    // switching to ToUpperInvariant().
    int headingIndex = 0;
    foreach (String column in headings)
    {
        _headingMap[column.ToUpper()] = headingIndex++;
    }
}