protected override void LoadTestData(string testFile)
{
    ReadCSV test_csv = new ReadCSV(testFile, true, CSVFormat.DecimalPoint);
    List<double[]> test_input = new List<double[]>();
    test_input_orig = new List<double[]>();

    while (test_csv.Next())
    {
        double x = test_csv.GetDouble(0);
        test_input.Add(new[] { x });
        test_input_orig.Add(new[] { x });
    }
    test_csv.Close();

    //Analyze(ref test_input);
    Normalize(ref test_input, ref vmin, ref vmax);

    testData = new List<IMLData>();
    foreach (var d in test_input)
    {
        testData.Add(new BasicMLData(d));
    }
}
/// <summary>
/// Load a CSV file into a memory dataset.
/// </summary>
/// <param name="format">The CSV format to use.</param>
/// <param name="filename">The filename to load.</param>
/// <param name="headers">True if there is a header line.</param>
/// <param name="inputSize">The input size. Input always comes first in a file.</param>
/// <param name="idealSize">The ideal size, 0 for unsupervised.</param>
/// <returns>A NeuralDataSet that holds the contents of the CSV file.</returns>
public static IMLDataSet LoadCSVTOMemory(CSVFormat format, String filename, bool headers,
    int inputSize, int idealSize)
{
    var result = new BasicMLDataSet();
    var csv = new ReadCSV(filename, headers, format);

    while (csv.Next())
    {
        BasicMLData ideal = null;
        int index = 0;

        var input = new BasicMLData(inputSize);
        for (int i = 0; i < inputSize; i++)
        {
            double d = csv.GetDouble(index++);
            input[i] = d;
        }

        if (idealSize > 0)
        {
            ideal = new BasicMLData(idealSize);
            for (int i = 0; i < idealSize; i++)
            {
                double d = csv.GetDouble(index++);
                ideal[i] = d;
            }
        }

        IMLDataPair pair = new BasicMLDataPair(input, ideal);
        result.Add(pair);
    }

    return result;
}
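A minimal usage sketch for the loader above; the file name and column counts are illustrative assumptions, not part of the original code:

// Hypothetical example: load a supervised data set with two input columns and
// one ideal column from a headerless, comma-separated file.
IMLDataSet trainingSet = LoadCSVTOMemory(CSVFormat.English, "xor.csv", false, 2, 1);

foreach (IMLDataPair pair in trainingSet)
{
    Console.WriteLine(pair.Input[0] + "," + pair.Input[1] + " -> " + pair.Ideal[0]);
}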
/// <summary>
/// Process the file and output to the target file.
/// </summary>
/// <param name="target">The target file to write to.</param>
public void Process(string target)
{
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
    TextWriter tw = new StreamWriter(target);

    ResetStatus();
    while (csv.Next())
    {
        var line = new StringBuilder();
        UpdateStatus(false);
        line.Append(GetColumnData(FileData.Date, csv));
        line.Append(" ");
        line.Append(GetColumnData(FileData.Time, csv));
        line.Append(";");
        line.Append(Format.Format(double.Parse(GetColumnData(FileData.Open, csv)), Precision));
        line.Append(";");
        line.Append(Format.Format(double.Parse(GetColumnData(FileData.High, csv)), Precision));
        line.Append(";");
        line.Append(Format.Format(double.Parse(GetColumnData(FileData.Low, csv)), Precision));
        line.Append(";");
        line.Append(Format.Format(double.Parse(GetColumnData(FileData.Close, csv)), Precision));
        line.Append(";");
        line.Append(Format.Format(double.Parse(GetColumnData(FileData.Volume, csv)), Precision));

        tw.WriteLine(line.ToString());
    }

    ReportDone(false);
    csv.Close();
    tw.Close();
}
/// <summary>
/// Reads the CSV and calls the loader.
/// Used internally to load the CSV and place the data in the market data set.
/// </summary>
/// <param name="symbol">The ticker symbol.</param>
/// <param name="neededTypes">The needed market data types.</param>
/// <param name="from">The starting date.</param>
/// <param name="to">The ending date.</param>
/// <param name="File">The file to load.</param>
/// <returns>The loaded market data.</returns>
ICollection<LoadedMarketData> ReadAndCallLoader(TickerSymbol symbol, IEnumerable<MarketDataType> neededTypes,
    DateTime from, DateTime to, string File)
{
    // We got a file, let's load it.
    ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
    ReadCSV csv = new ReadCSV(File, true, CSVFormat.English);

    // If a different date format was set via SetDateFormat, DateFormat will not be null;
    // otherwise the ?? operator falls back to the default format.
    csv.DateFormat = DateFormat ?? "yyyy-MM-dd HH:mm:ss";
    csv.TimeFormat = "HH:mm:ss";

    DateTime ParsedDate = from;
    bool writeonce = true;

    while (csv.Next())
    {
        DateTime date = csv.GetDate(0);
        ParsedDate = date;

        if (writeonce)
        {
            Console.WriteLine(@"First parsed date in csv:" + ParsedDate.ToShortDateString());
            Console.WriteLine(@"Stopping at date:" + to.ToShortDateString());
            Console.WriteLine(@"Current DateTime:" + ParsedDate.ToShortDateString() + @" Time:"
                              + ParsedDate.ToShortTimeString() + @" Asked Start date was " + from.ToShortDateString());
            writeonce = false;
        }

        if (ParsedDate >= from && ParsedDate <= to)
        {
            DateTime datex = csv.GetDate(0);
            double open = csv.GetDouble(1);
            double close = csv.GetDouble(2);
            double high = csv.GetDouble(3);
            double low = csv.GetDouble(4);
            double volume = csv.GetDouble(5);
            double range = Math.Abs(open - close);
            double HighLowRange = Math.Abs(high - low);
            double DirectionalRange = close - open;

            LoadedMarketData data = new LoadedMarketData(datex, symbol);
            data.SetData(MarketDataType.Open, open);
            data.SetData(MarketDataType.High, high);
            data.SetData(MarketDataType.Low, low);
            data.SetData(MarketDataType.Close, close);
            data.SetData(MarketDataType.Volume, volume);
            data.SetData(MarketDataType.RangeHighLow, Math.Round(HighLowRange, 6));
            data.SetData(MarketDataType.RangeOpenClose, Math.Round(range, 6));
            data.SetData(MarketDataType.RangeOpenCloseNonAbsolute, Math.Round(DirectionalRange, 6));
            result.Add(data);
        }
    }

    csv.Close();
    return result;
}
public ICollection<LoadedMarketData> ReadAndCallLoader(TickerSymbol symbol, IList<MarketDataType> neededTypes,
    DateTime from, DateTime to, string File)
{
    try
    {
        // We got a file, let's load it.
        ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
        ReadCSV csv = new ReadCSV(File, true, CSVFormat.English);
        csv.DateFormat = "yyyy.MM.dd HH:mm:ss";
        DateTime ParsedDate = from;

        // Time,Bid,Ask,BidVolume,AskVolume
        while (csv.Next() && ParsedDate >= from && ParsedDate <= to)
        {
            DateTime date = csv.GetDate("Time");
            double Bid = csv.GetDouble("Bid");
            double Ask = csv.GetDouble("Ask");
            double AskVolume = csv.GetDouble("AskVolume");
            double BidVolume = csv.GetDouble("BidVolume");
            double _trade = (Bid + Ask) / 2;
            double _tradeSize = (AskVolume + BidVolume) / 2;

            LoadedMarketData data = new LoadedMarketData(date, symbol);
            data.SetData(MarketDataType.Trade, _trade);
            data.SetData(MarketDataType.Volume, _tradeSize);
            result.Add(data);

            Console.WriteLine("Current DateTime:" + ParsedDate.ToShortDateString() + " Time:"
                              + ParsedDate.ToShortTimeString() + " Start date was " + from.ToShortDateString());
            Console.WriteLine("Stopping at date:" + to.ToShortDateString());
            ParsedDate = date;

            //double open = csv.GetDouble("Open");
            //double close = csv.GetDouble("High");
            //double high = csv.GetDouble("Low");
            //double low = csv.GetDouble("Close");
            //double volume = csv.GetDouble("Volume");
            //LoadedMarketData data = new LoadedMarketData(date, symbol);
            //data.SetData(MarketDataType.Open, open);
            //data.SetData(MarketDataType.High, high);
            //data.SetData(MarketDataType.Low, low);
            //data.SetData(MarketDataType.Close, close);
            //data.SetData(MarketDataType.Volume, volume);
        }

        csv.Close();
        return result;
    }
    catch (Exception ex)
    {
        Console.WriteLine("Something went wrong reading the csv: " + ex.Message);
    }

    return null;
}
/// <summary>
/// Construct a loaded row.
/// </summary>
/// <param name="csv">The CSV file to use.</param>
/// <param name="extra">The number of extra columns to add.</param>
public LoadedRow(ReadCSV csv, int extra)
{
    int count = csv.GetCount();
    _data = new String[count + extra];
    for (int i = 0; i < count; i++)
    {
        _data[i] = csv.Get(i);
    }
}
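A short sketch of how a LoadedRow with extra columns is typically used; the surrounding loop and the placeholder value are hypothetical:

// Hypothetical example: copy each CSV row and reserve one extra column,
// which can later be filled with a computed value before the row is written out.
while (csv.Next())
{
    var row = new LoadedRow(csv, 1);
    row.Data[csv.GetCount()] = "0"; // placeholder value for the extra column
    // ... pass the row to WriteRow or further processing
}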
/// <summary>
/// Parses one column of a CSV file and returns it as a list of doubles.
/// Only a single column can be returned per call.
/// </summary>
/// <param name="file">The file.</param>
/// <param name="formatused">The CSV format to use.</param>
/// <param name="Name">The name of the column to parse.</param>
/// <returns>The parsed column values.</returns>
public static List<double> QuickParseCSV(string file, CSVFormat formatused, string Name)
{
    List<double> returnedArrays = new List<double>();
    ReadCSV csv = new ReadCSV(file, true, formatused);
    while (csv.Next())
    {
        returnedArrays.Add(csv.GetDouble(Name));
    }
    csv.Close();
    return returnedArrays;
}
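A usage sketch for the helper above; the file name and column name are assumptions for illustration:

// Hypothetical example: read the "Close" column of a comma-separated price file.
List<double> closes = QuickParseCSV("prices.csv", CSVFormat.English, "Close");
Console.WriteLine("Read " + closes.Count + " close values");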
public void LoadTrainingData(string trainingDataPath, ProblemType problem, ActivationType activation)
{
    TrainingDataPath = trainingDataPath;
    var csvReader = new ReadCSV(trainingDataPath, true, CSVFormat.DecimalPoint);
    var values = new List<double[]>();
    var answers = new List<double[]>();

    while (csvReader.Next())
    {
        if (ProblemType.Classification == problem)
        {
            values.Add(new[] { csvReader.GetDouble(0), csvReader.GetDouble(1) });
            answers.Add(new[] { csvReader.GetDouble(2) });
        }
        else
        {
            values.Add(new[] { csvReader.GetDouble(0) });
            answers.Add(new[] { csvReader.GetDouble(1) });
            _originalRegressionValues.Add(values.Last()[0]);
            _originalRegressionAnswers.Add(answers.Last()[0]);
        }
    }
    csvReader.Close();

    if (problem == ProblemType.Classification)
    {
        answers = SpreadClassificationAnswers(answers, activation);
        FirstLayerSize = 2;
    }
    else
    {
        LastLayerSize = FirstLayerSize = 1;
    }

    AnalizeValues(problem, values);
    Normalize(values, _valuesMins, _valuesMaxes, activation);

    if (problem == ProblemType.Regression)
    {
        AnalizeAnswers(answers);
        Normalize(answers, _answersMins, _answersMaxes, activation);
    }

    values.StableShuffle();
    answers.StableShuffle();
    ListExtensions.ResetStableShuffle();

    // Hold out the last 15% of the shuffled rows for validation.
    var trainingSetSize = (int)(values.Count * 0.85);
    TrainingDataSet = new BasicMLDataSet(values.Take(trainingSetSize).ToArray(),
                                         answers.Take(trainingSetSize).ToArray());
    ValidationDataSet = new BasicMLDataSet(values.Skip(trainingSetSize).ToArray(),
                                           answers.Skip(trainingSetSize).ToArray());
}
/// <summary>
/// Generate the header fields.
/// </summary>
/// <param name="csv">The CSV file to use.</param>
private void GenerateFields(ReadCSV csv)
{
    if (_headers)
    {
        GenerateFieldsFromHeaders(csv);
    }
    else
    {
        GenerateFieldsFromCount(csv);
    }
}
/// <summary>
/// Parses one column of a CSV file and returns it as a list of doubles.
/// Only a single column can be returned per call.
/// CSVFormat.English is assumed in this overload.
/// The size parameter limits the number of lines read.
/// </summary>
/// <param name="file">The file.</param>
/// <param name="Name">The name of the column to parse.</param>
/// <param name="size">The maximum number of lines to read.</param>
/// <returns>The parsed column values.</returns>
public static List<double> QuickParseCSV(string file, string Name, int size)
{
    List<double> returnedArrays = new List<double>();
    ReadCSV csv = new ReadCSV(file, true, CSVFormat.English);
    int currentRead = 0;
    while (csv.Next() && currentRead < size)
    {
        returnedArrays.Add(csv.GetDouble(Name));
        currentRead++;
    }
    csv.Close();
    return returnedArrays;
}
public LoadedRow(ReadCSV csv, int extra)
{
    int count = csv.GetCount();
    this._data = new string[count + extra];
    for (int i = 0; i < count; i++)
    {
        this._data[i] = csv.Get(i);
    }
}
public ICollection<LoadedMarketData> ReadAndCallLoader(TickerSymbol symbol, IList<MarketDataType> neededTypes,
    DateTime from, DateTime to, string File)
{
    try
    {
        // We got a file, let's load it.
        ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
        ReadCSV csv = new ReadCSV(File, true, LoadedFormat);
        csv.DateFormat = DateTimeFormat.Normalize();

        // Time,Open,High,Low,Close,Volume
        while (csv.Next())
        {
            DateTime date = csv.GetDate("Time");
            double open = csv.GetDouble("Open");
            double high = csv.GetDouble("High");
            double low = csv.GetDouble("Low");
            double close = csv.GetDouble("Close");
            double volume = csv.GetDouble("Volume");

            LoadedMarketData data = new LoadedMarketData(date, symbol);
            data.SetData(MarketDataType.Open, open);
            data.SetData(MarketDataType.High, high);
            data.SetData(MarketDataType.Low, low);
            data.SetData(MarketDataType.Close, close);
            data.SetData(MarketDataType.Volume, volume);
            result.Add(data);
        }

        csv.Close();
        return result;
    }
    catch (Exception ex)
    {
        Console.WriteLine("Something went wrong reading the csv: " + ex.Message);
    }

    return null;
}
public void Process(FileInfo outputFile)
{
    ValidateAnalyzed();

    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, InputFormat);
    StreamWriter tw = PrepareOutputFile(outputFile);

    ResetStatus();
    LoadedRow row;
    while ((row = GetNextRow(csv)) != null)
    {
        WriteRow(tw, row);
        UpdateStatus(false);
    }

    ReportDone(false);
    tw.Close();
    csv.Close();
}
/// <summary>
/// Load financial data from a CSV file.
/// </summary>
/// <param name="ticker">The ticker being loaded, ignored for a CSV load.</param>
/// <param name="dataNeeded">The data needed.</param>
/// <param name="from">The starting date.</param>
/// <param name="to">The ending date.</param>
/// <returns>The loaded market data.</returns>
public ICollection<LoadedMarketData> Load(TickerSymbol ticker, IList<MarketDataType> dataNeeded,
    DateTime from, DateTime to)
{
    try
    {
        if (File.Exists(TheFile))
        {
            // We got a file, let's load it.
            ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
            var csv = new ReadCSV(TheFile, true, CSVFormat.English);

            // Time,Open,High,Low,Close,Volume
            while (csv.Next())
            {
                DateTime date = csv.GetDate("Time");
                double open = csv.GetDouble("Open");
                double high = csv.GetDouble("High");
                double low = csv.GetDouble("Low");
                double close = csv.GetDouble("Close");
                double volume = csv.GetDouble("Volume");

                var data = new LoadedMarketData(date, ticker);
                data.SetData(MarketDataType.Open, open);
                data.SetData(MarketDataType.High, high);
                data.SetData(MarketDataType.Low, low);
                data.SetData(MarketDataType.Close, close);
                data.SetData(MarketDataType.Volume, volume);
                result.Add(data);
            }

            csv.Close();
            return result;
        }
    }
    catch (Exception ex)
    {
        throw new LoaderError(ex);
    }

    throw new LoaderError(@"Something went wrong reading the csv");
}
/// <summary>
/// Analyze the data. This counts the records and prepares the data to be
/// processed.
/// </summary>
/// <param name="theAnalyst">The analyst to use.</param>
/// <param name="inputFile">The input file to analyze.</param>
/// <param name="headers">True, if the input file has headers.</param>
/// <param name="format">The format of the input file.</param>
public void Analyze(EncogAnalyst theAnalyst, FileInfo inputFile, bool headers, CSVFormat format)
{
    InputFilename = inputFile;
    ExpectInputHeaders = headers;
    Format = format;
    Analyzed = true;
    _analyst = theAnalyst;
    _data = new BasicMLDataSet();

    ResetStatus();
    int recordCount = 0;

    int outputLength = _analyst.DetermineTotalColumns();
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
    ReadHeaders(csv);

    _analystHeaders = new CSVHeaders(InputHeadings);

    while (csv.Next() && !ShouldStop())
    {
        UpdateStatus(true);

        double[] inputArray = AnalystNormalizeCSV.ExtractFields(
            _analyst, _analystHeaders, csv, outputLength, true);
        IMLData input = new BasicMLData(inputArray);
        _data.Add(new BasicMLDataPair(input));

        recordCount++;
    }
    RecordCount = recordCount;
    Count = csv.ColumnCount;

    ReadHeaders(csv);
    csv.Close();
    ReportDone(true);
}
/// <summary>
/// Load financial data from Google.
/// </summary>
/// <param name="ticker">The ticker to load from.</param>
/// <param name="dataNeeded">The data needed.</param>
/// <param name="from">The starting time.</param>
/// <param name="to">The ending time.</param>
/// <returns>The loaded data.</returns>
public ICollection<LoadedMarketData> Load(TickerSymbol ticker, IList<MarketDataType> dataNeeded,
    DateTime from, DateTime to)
{
    ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
    Uri url = BuildUrl(ticker, from, to);
    WebRequest http = WebRequest.Create(url);
    var response = (HttpWebResponse)http.GetResponse();

    if (response != null)
    {
        using (Stream istream = response.GetResponseStream())
        {
            var csv = new ReadCSV(istream, true, CSVFormat.DecimalPoint);

            while (csv.Next())
            {
                DateTime date = csv.GetDate("date");
                double open = csv.GetDouble("open");
                double close = csv.GetDouble("close");
                double high = csv.GetDouble("high");
                double low = csv.GetDouble("low");
                double volume = csv.GetDouble("volume");

                var data = new LoadedMarketData(date, ticker);
                data.SetData(MarketDataType.Open, open);
                data.SetData(MarketDataType.Close, close);
                data.SetData(MarketDataType.High, high);
                data.SetData(MarketDataType.Low, low);
                data.SetData(MarketDataType.Volume, volume);
                result.Add(data);
            }

            csv.Close();
            if (istream != null)
            {
                istream.Close();
            }
        }
    }

    return result;
}
/// <summary>
/// Construct the object.
/// </summary>
/// <param name="filename">The filename.</param>
/// <param name="headers">True if the file has headers.</param>
/// <param name="format">The CSV format.</param>
public CSVHeaders(FileInfo filename, bool headers, CSVFormat format)
{
    _headerList = new List<String>();
    _columnMapping = new Dictionary<String, Int32>();

    ReadCSV csv = null;
    try
    {
        csv = new ReadCSV(filename.ToString(), headers, format);
        if (csv.Next())
        {
            if (headers)
            {
                foreach (String str in csv.ColumnNames)
                {
                    _headerList.Add(str);
                }
            }
            else
            {
                for (int i = 0; i < csv.ColumnCount; i++)
                {
                    _headerList.Add("field:" + (i + 1));
                }
            }
        }

        Init();
    }
    finally
    {
        if (csv != null)
        {
            csv.Close();
        }
    }
}
private LoadedRow GetNextRow(ReadCSV csv)
{
    if (_remaining == 0)
    {
        LoadBuffer(csv);
    }

    while (_remaining > 0)
    {
        int index = RangeRandomizer.RandomInt(0, _bufferSize - 1);
        if (_buffer[index] != null)
        {
            LoadedRow row = _buffer[index];
            _buffer[index] = null;
            _remaining--;
            return row;
        }
    }

    return null;
}
/// <summary>
/// Process the input file and segregate into the output files.
/// </summary>
public void Process()
{
    Validate();

    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
    ResetStatus();

    foreach (SegregateTargetPercent target in _targets)
    {
        StreamWriter tw = PrepareOutputFile(target.Filename);

        while ((target.NumberRemaining > 0) && csv.Next() && !ShouldStop())
        {
            UpdateStatus(false);
            var row = new LoadedRow(csv);
            WriteRow(tw, row);
            target.NumberRemaining = target.NumberRemaining - 1;
        }

        tw.Close();
    }

    ReportDone(false);
    csv.Close();
}
public void Process()
{
    Validate();

    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, InputFormat);
    ResetStatus();

    foreach (SegregateTargetPercent target in _targets)
    {
        StreamWriter tw = PrepareOutputFile(target.Filename);

        while (target.NumberRemaining > 0 && csv.Next() && !ShouldStop())
        {
            UpdateStatus(false);
            var row = new LoadedRow(csv);
            WriteRow(tw, row);
            target.NumberRemaining--;
        }

        tw.Close();
    }

    ReportDone(false);
    csv.Close();
}
/// <summary>
/// Private constructor.
/// </summary>
private PropertyConstraints()
{
    _data = new Dictionary<String, List<PropertyEntry>>();
    try
    {
        Stream mask0 = ResourceLoader.CreateStream("Encog.Resources.analyst.csv");
        var csv = new ReadCSV(mask0, false, CSVFormat.EgFormat);

        while (csv.Next())
        {
            String sectionStr = csv.Get(0);
            String nameStr = csv.Get(1);
            String typeStr = csv.Get(2);

            // determine type
            PropertyType t;
            if ("boolean".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeBoolean;
            }
            else if ("real".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeDouble;
            }
            else if ("format".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeFormat;
            }
            else if ("int".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeInteger;
            }
            else if ("list-string".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeListString;
            }
            else if ("string".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
            {
                t = PropertyType.TypeString;
            }
            else
            {
                throw new AnalystError("Unknown type constraint: " + typeStr);
            }

            var entry = new PropertyEntry(t, nameStr, sectionStr);
            List<PropertyEntry> list;

            if (_data.ContainsKey(sectionStr))
            {
                list = _data[sectionStr];
            }
            else
            {
                list = new List<PropertyEntry>();
                _data[sectionStr] = list;
            }

            list.Add(entry);
        }
        csv.Close();
        mask0.Close();
    }
    catch (IOException e)
    {
        throw new EncogError(e);
    }
}
/// <summary>
/// Called internally to open the CSV files.
/// </summary>
/// <param name="headers">True if the files have header lines.</param>
private void OpenCSV(bool headers)
{
    // clear out any CSV files already there
    _csvMap.Clear();
    _readCSV.Clear();

    // only add each CSV once
    IDictionary<String, ReadCSV> uniqueFiles = new Dictionary<String, ReadCSV>();

    // find the unique files
    foreach (IInputField field in _inputFields)
    {
        if (field is InputFieldCSV)
        {
            var csvField = (InputFieldCSV)field;
            String file = csvField.File;
            if (!uniqueFiles.ContainsKey(file))
            {
                var csv = new ReadCSV(file, headers, _csvFormat);
                uniqueFiles[file] = csv;
                _readCSV.Add(csv);
            }
            _csvMap[csvField] = uniqueFiles[file];
        }
    }
}
/// <summary> /// Process the file. /// </summary> /// /// <param name="outputFile">The output file.</param> /// <param name="method">THe method to use.</param> public void Process(FileInfo outputFile, IMLMethod method) { var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format); IMLData output; int outputLength = _analyst.DetermineTotalInputFieldCount(); StreamWriter tw = PrepareOutputFile(outputFile); ResetStatus(); while (csv.Next()) { UpdateStatus(false); var row = new LoadedRow(csv, _outputColumns); double[] inputArray = AnalystNormalizeCSV.ExtractFields(_analyst, _analystHeaders, csv, outputLength, true); if (_series.TotalDepth > 1) { inputArray = _series.Process(inputArray); } if (inputArray != null) { IMLData input = new BasicMLData(inputArray); // evaluation data if ((method is IMLClassification) && !(method is IMLRegression)) { // classification only? output = new BasicMLData(1); output[0] = ((IMLClassification) method).Classify(input); } else { // regression output = ((IMLRegression) method).Compute(input); } // skip file data int index = _fileColumns; int outputIndex = 0; // display output foreach (AnalystField field in _analyst.Script.Normalize.NormalizedFields) { if (_analystHeaders.Find(field.Name) != -1) { if (field.Output) { if (field.Classify) { // classification ClassItem cls = field.DetermineClass( outputIndex, output.Data); outputIndex += field.ColumnsNeeded; if (cls == null) { row.Data[index++] = "?Unknown?"; } else { row.Data[index++] = cls.Name; } } else { // regression double n = output[outputIndex++]; n = field.DeNormalize(n); row.Data[index++] = Format .Format(n, Precision); } } } } } WriteRow(tw, row); } ReportDone(false); tw.Close(); csv.Close(); }
private void button1_Click(object sender, EventArgs e)
{
    openFileDialog1 = new OpenFileDialog();
    openFileDialog1.InitialDirectory = "c:\\";
    openFileDialog1.Filter = "csv files (*.csv)|*.csv|All files (*.*)|*.*";
    openFileDialog1.FilterIndex = 2;
    openFileDialog1.RestoreDirectory = true;
    this.Visible = false;

    DialogResult result = this.openFileDialog1.ShowDialog(); // Show the dialog.
    if (result == DialogResult.OK) // Test result.
    {
        string file = openFileDialog1.FileName;
        try
        {
            Chosenfile = file;
            format = FormatDictionary[CSVFormatsCombo.Text];

            foreach (string item in MarketDataTypesListBox.SelectedItems)
            {
                TypesLoaded.Add((MarketDataType)Enum.Parse(typeof(MarketDataType), item));
            }

            ReadCSV csv = new ReadCSV(Chosenfile, true, format);
            var ColQuery = from Names in csv.ColumnNames select new { Names };

            //ComboBox comboxTypes = new ComboBox();
            //comboxTypes.Items.Add("DateTime");
            //comboxTypes.Items.Add("Double");
            //comboxTypes.Items.Add("Skip");
            //comboxTypes.SelectedIndex = 0;
            //DataGridViewRow dr = new DataGridViewRow();
            //DataGridViewComboBoxCell CellGrids = new DataGridViewComboBoxCell();
            //foreach (string item in comboxTypes.Items)
            //{
            //    CellGrids.Items.Add(item);
            //}
            //dr.Cells.Add(CellGrids);
            //newColumnsSetup.dataGridView1.Rows.Add(dr);
            //DataGridViewColumn cols = new DataGridViewColumn(CellGrids);
            //cols.Name = "Combo";
            //newColumnsSetup.dataGridView1.Columns.Add(cols);
            //DataGridViewColumn aCol = new DataGridViewColumn();
            //foreach (DataGridViewRow item in newColumnsSetup.dataGridView1.Rows)
            //{
            //    DataGridViewComboBoxCell cell = (DataGridViewComboBoxCell)(item.Cells[0]);
            //}
        }
        catch (Exception ex)
        {
            toolStripStatusLabel1.Text = "Error Loading the CSV:" + ex.Message;
        }
    }
}
/// <summary>
/// Load the buffer from the underlying file.
/// </summary>
/// <param name="csv">The CSV file to load from.</param>
private void LoadBuffer(ReadCSV csv)
{
    for (int i = 0; i < _buffer.Length; i++)
    {
        _buffer[i] = null;
    }

    int index = 0;
    while (csv.Next() && (index < _bufferSize) && !ShouldStop())
    {
        var row = new LoadedRow(csv);
        _buffer[index++] = row;
    }

    _remaining = index;
}
/// <summary>
/// Get the next row from the underlying CSV file.
/// </summary>
/// <param name="csv">The underlying CSV file.</param>
/// <returns>The loaded row.</returns>
private LoadedRow GetNextRow(ReadCSV csv)
{
    if (_remaining == 0)
    {
        LoadBuffer(csv);
    }

    while (_remaining > 0)
    {
        int index = RangeRandomizer.RandomInt(0, _bufferSize - 1);
        if (_buffer[index] != null)
        {
            LoadedRow result = _buffer[index];
            _buffer[index] = null;
            _remaining--;
            return result;
        }
    }

    return null;
}
/// <summary>
/// Process, and generate the output file.
/// </summary>
/// <param name="outputFile">The output file.</param>
public void Process(FileInfo outputFile)
{
    ValidateAnalyzed();

    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
    StreamWriter tw = PrepareOutputFile(outputFile);

    ResetStatus();
    LoadedRow row;
    while ((row = GetNextRow(csv)) != null)
    {
        WriteRow(tw, row);
        UpdateStatus(false);
    }

    ReportDone(false);
    tw.Close();
    csv.Close();
}
/// <summary>
/// Process the input file.
/// </summary>
/// <param name="outputFile">The output file to write to.</param>
public void Process(FileInfo outputFile)
{
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, InputFormat);
    StreamWriter tw = PrepareOutputFile(outputFile);
    _filteredCount = 0;

    ResetStatus();
    while (csv.Next() && !ShouldStop())
    {
        UpdateStatus(false);
        var row = new LoadedRow(csv);
        if (ShouldProcess(row))
        {
            WriteRow(tw, row);
            _filteredCount++;
        }
    }
    ReportDone(false);
    tw.Close();
    csv.Close();
}
/// <summary> /// Process the file. /// </summary> /// /// <param name="outputFile">The output file.</param> /// <param name="method">The method to use.</param> public void Process(FileInfo outputFile, IMLRegression method) { var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format); if (method.InputCount > _inputCount) { throw new AnalystError("This machine learning method has " + method.InputCount + " inputs, however, the data has " + _inputCount + " inputs."); } var input = new BasicMLData(method.InputCount); StreamWriter tw = AnalystPrepareOutputFile(outputFile); ResetStatus(); while (csv.Next()) { UpdateStatus(false); var row = new LoadedRow(csv, _idealCount); int dataIndex = 0; // load the input data for (int i = 0; i < _inputCount; i++) { String str = row.Data[i]; double d = Format.Parse(str); input[i] = d; dataIndex++; } // do we need to skip the ideal values? dataIndex += _idealCount; // compute the result IMLData output = method.Compute(input); // display the computed result for (int i = 0; i < _outputCount; i++) { double d = output[i]; row.Data[dataIndex++] = Format.Format(d, Precision); } WriteRow(tw, row); } ReportDone(false); tw.Close(); csv.Close(); }
/// <summary>
/// Process and balance the data.
/// </summary>
/// <param name="outputFile">The output file to write data to.</param>
/// <param name="targetField">The index of the class column to balance on.</param>
/// <param name="countPer">The desired count per class.</param>
public void Process(FileInfo outputFile, int targetField, int countPer)
{
    ValidateAnalyzed();
    StreamWriter tw = PrepareOutputFile(outputFile);

    _counts = new Dictionary<String, Int32>();

    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);

    ResetStatus();
    while (csv.Next() && !ShouldStop())
    {
        var row = new LoadedRow(csv);
        UpdateStatus(false);
        String key = row.Data[targetField];

        int count;
        if (!_counts.ContainsKey(key))
        {
            count = 0;
        }
        else
        {
            count = _counts[key];
        }

        if (count < countPer)
        {
            WriteRow(tw, row);
            count++;
        }

        _counts[key] = count;
    }
    ReportDone(false);
    csv.Close();
    tw.Close();
}