/// <summary>
/// Read the input file.
/// </summary>
private void ReadInputFile()
{
    ResetStatus();

    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);

    while (csv.Next() && !ShouldStop())
    {
        UpdateStatus("Reading input file");
        var row = new LoadedRow(csv);
        _data.Add(row);
    }

    Count = csv.ColumnCount;

    if (ExpectInputHeaders)
    {
        InputHeadings = new String[csv.ColumnCount];
        for (int i = 0; i < csv.ColumnCount; i++)
        {
            InputHeadings[i] = csv.ColumnNames[i];
        }
    }

    csv.Close();
}
/// <summary>
/// Process the file and cluster.
/// </summary>
/// <param name="outputFile">The output file.</param>
/// <param name="clusters">The number of clusters.</param>
/// <param name="theAnalyst">The analyst to use.</param>
/// <param name="iterations">The number of iterations to use.</param>
public void Process(FileInfo outputFile, int clusters, EncogAnalyst theAnalyst, int iterations)
{
    StreamWriter tw = PrepareOutputFile(outputFile);

    ResetStatus();

    var cluster = new KMeansClustering(clusters, _data);
    cluster.Iteration(iterations);

    int clusterNum = 0;
    foreach (IMLCluster cl in cluster.Clusters)
    {
        foreach (IMLData item in cl.Data)
        {
            var row = (ClusterRow)item;
            int clsIndex = row.Input.Count - 1;
            LoadedRow lr = row.Row;
            lr.Data[clsIndex] = "" + clusterNum;
            WriteRow(tw, lr);
        }
        clusterNum++;
    }

    ReportDone(false);
    tw.Close();
}
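// A minimal sketch of driving the k-means step above directly, using only the
// Encog calls that already appear in these snippets (BasicMLDataSet, BasicMLData,
// KMeansClustering, Iteration, Clusters); the sample values are illustrative.
var dataSet = new BasicMLDataSet();
dataSet.Add(new BasicMLData(new double[] { 0.1, 0.2 }));
dataSet.Add(new BasicMLData(new double[] { 0.9, 0.8 }));

var kmeans = new KMeansClustering(2, dataSet);
kmeans.Iteration(100);   // run a fixed number of iterations, as Process does

int clusterIndex = 0;
foreach (IMLCluster cl in kmeans.Clusters)
{
    foreach (IMLData item in cl.Data)
    {
        Console.WriteLine("Cluster " + clusterIndex + ": " + item[0] + ", " + item[1]);
    }
    clusterIndex++;
}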
/// <summary>
/// Process the input file and segregate into the output files.
/// </summary>
public void Process()
{
    Validate();

    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, InputFormat);

    ResetStatus();

    foreach (SegregateTargetPercent target in _targets)
    {
        StreamWriter tw = PrepareOutputFile(target.Filename);

        while ((target.NumberRemaining > 0) && csv.Next() && !ShouldStop())
        {
            UpdateStatus(false);
            var row = new LoadedRow(csv);
            WriteRow(tw, row);
            target.NumberRemaining = target.NumberRemaining - 1;
        }

        tw.Close();
    }

    ReportDone(false);
    csv.Close();
}
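// A minimal sketch of the segregation idea above, with the CSV and analyst
// plumbing stripped away: rows are handed to each target in order until that
// target's quota is used up. The SplitByQuota helper, its quota array, and the
// List-based types are illustrative (System.Collections.Generic is assumed).
static List<List<string[]>> SplitByQuota(IEnumerable<string[]> rows, int[] quotas)
{
    var result = new List<List<string[]>>();
    foreach (int q in quotas)
    {
        result.Add(new List<string[]>());
    }

    int targetIndex = 0;
    foreach (string[] row in rows)
    {
        // advance to the next target that still has room
        while (targetIndex < quotas.Length && result[targetIndex].Count >= quotas[targetIndex])
        {
            targetIndex++;
        }
        if (targetIndex >= quotas.Length)
        {
            break; // all quotas are full; remaining rows are not written
        }
        result[targetIndex].Add(row);
    }
    return result;
}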
public void LoadRow(LoadedRow row)
{
    data.Insert(0, row);
    if (data.Count > totalWindowSize)
    {
        data.RemoveAt(data.Count - 1);
    }
}
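// A minimal sketch of the sliding-window behaviour implemented by LoadRow above,
// using a plain List<int> so it can run on its own: the newest value goes to the
// front and the oldest value drops off the end once the window is full. The Push
// name and windowSize parameter are illustrative.
static void Push(List<int> window, int value, int windowSize)
{
    window.Insert(0, value);               // newest item first
    if (window.Count > windowSize)
    {
        window.RemoveAt(window.Count - 1); // evict the oldest item
    }
}
// Pushing 1, 2, 3, 4 with windowSize = 3 leaves the window as [4, 3, 2].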
/// <summary>
/// Process the file.
/// </summary>
/// <param name="outputFile">The output file.</param>
/// <param name="method">The method to use.</param>
public void Process(FileInfo outputFile, IMLRegression method)
{
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);

    if (method.InputCount != _inputCount)
    {
        throw new AnalystError("This machine learning method has "
                               + method.InputCount
                               + " inputs, however, the data has " + _inputCount
                               + " inputs.");
    }

    var input = new BasicMLData(method.InputCount);

    StreamWriter tw = AnalystPrepareOutputFile(outputFile);

    ResetStatus();
    while (csv.Next())
    {
        UpdateStatus(false);
        var row = new LoadedRow(csv, _idealCount);

        int dataIndex = 0;
        // load the input data
        for (int i = 0; i < _inputCount; i++)
        {
            String str = row.Data[i];
            double d = Format.Parse(str);
            input[i] = d;
            dataIndex++;
        }

        // do we need to skip the ideal values?
        dataIndex += _idealCount;

        // compute the result
        IMLData output = method.Compute(input);

        // display the computed result
        for (int i = 0; i < _outputCount; i++)
        {
            double d = output[i];
            row.Data[dataIndex++] = Format.Format(d, Precision);
        }

        WriteRow(tw, row);
    }
    ReportDone(false);
    tw.Close();
    csv.Close();
}
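// A minimal sketch of the column bookkeeping used above, outside the Encog types:
// the row holds the input columns first, then the ideal (expected) columns, and
// the computed outputs are written into the slots that follow. The FillOutputs
// name, the predict delegate, and the use of System / System.Globalization are
// illustrative assumptions; rowData must already be long enough to hold the outputs.
static void FillOutputs(string[] rowData, int inputCount, int idealCount,
                        Func<double[], double[]> predict)
{
    var input = new double[inputCount];
    for (int i = 0; i < inputCount; i++)
    {
        input[i] = double.Parse(rowData[i], CultureInfo.InvariantCulture);
    }

    // skip over the ideal columns so they are preserved in the output file
    int dataIndex = inputCount + idealCount;

    double[] output = predict(input);
    for (int i = 0; i < output.Length; i++)
    {
        rowData[dataIndex++] = output[i].ToString(CultureInfo.InvariantCulture);
    }
}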
/// <summary>
/// Analyze the data. This counts the records and prepares the data to be
/// processed.
/// </summary>
/// <param name="theAnalyst">The analyst to use.</param>
/// <param name="inputFile">The input file to analyze.</param>
/// <param name="headers">True, if the input file has headers.</param>
/// <param name="format">The format of the input file.</param>
public void Analyze(EncogAnalyst theAnalyst, FileInfo inputFile, bool headers, CSVFormat format)
{
    InputFilename = inputFile;
    ExpectInputHeaders = headers;
    InputFormat = format;

    Analyzed = true;
    _analyst = theAnalyst;

    if (OutputFormat == null)
    {
        OutputFormat = InputFormat;
    }

    _data = new BasicMLDataSet();
    ResetStatus();
    int recordCount = 0;

    int outputLength = _analyst.DetermineTotalColumns();
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, InputFormat);
    ReadHeaders(csv);

    _analystHeaders = new CSVHeaders(InputHeadings);

    while (csv.Next() && !ShouldStop())
    {
        UpdateStatus(true);

        var row = new LoadedRow(csv, 1);

        double[] inputArray = AnalystNormalizeCSV.ExtractFields(
            _analyst, _analystHeaders, csv, outputLength, true);
        var input = new ClusterRow(inputArray, row);
        _data.Add(input);

        recordCount++;
    }
    RecordCount = recordCount;
    Count = csv.ColumnCount;

    ReadHeaders(csv);
    csv.Close();
    ReportDone(true);
}
/// <summary>
/// Load the buffer from the underlying file.
/// </summary>
/// <param name="csv">The CSV file to load from.</param>
private void LoadBuffer(ReadCSV csv)
{
    for (int i = 0; i < _buffer.Length; i++)
    {
        _buffer[i] = null;
    }

    int index = 0;
    while (csv.Next() && (index < _bufferSize) && !ShouldStop())
    {
        var row = new LoadedRow(csv);
        _buffer[index++] = row;
    }

    _remaining = index;
}
/// <summary>
/// Get the next row from the underlying CSV file.
/// </summary>
/// <param name="csv">The underlying CSV file.</param>
/// <returns>The loaded row.</returns>
private LoadedRow GetNextRow(ReadCSV csv)
{
    if (_remaining == 0)
    {
        LoadBuffer(csv);
    }

    while (_remaining > 0)
    {
        int index = RangeRandomizer.RandomInt(0, _bufferSize - 1);
        if (_buffer[index] != null)
        {
            LoadedRow result = _buffer[index];
            _buffer[index] = null;
            _remaining--;
            return result;
        }
    }

    return null;
}
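// A minimal sketch of the buffered-shuffle approach used by LoadBuffer and
// GetNextRow above: fill a fixed-size buffer, then repeatedly pick a random slot
// and hand back (and clear) whatever is there until the buffer is empty.
// System.Random stands in for Encog's RangeRandomizer, the string element type is
// illustrative, and System.Linq / System.Collections.Generic are assumed.
static IEnumerable<string> DrainRandomly(string[] buffer, Random rnd)
{
    int remaining = buffer.Count(s => s != null);
    while (remaining > 0)
    {
        int index = rnd.Next(buffer.Length);   // random slot; may already be empty
        if (buffer[index] != null)
        {
            string result = buffer[index];
            buffer[index] = null;              // mark the slot as consumed
            remaining--;
            yield return result;
        }
    }
}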
/// <summary>
/// Process and balance the data.
/// </summary>
/// <param name="outputFile">The output file to write data to.</param>
/// <param name="targetField">The index of the field being balanced.</param>
/// <param name="countPer">The desired count per class.</param>
public void Process(FileInfo outputFile, int targetField, int countPer)
{
    ValidateAnalyzed();
    StreamWriter tw = PrepareOutputFile(outputFile);

    _counts = new Dictionary<String, Int32>();

    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);

    ResetStatus();
    while (csv.Next() && !ShouldStop())
    {
        var row = new LoadedRow(csv);
        UpdateStatus(false);
        String key = row.Data[targetField];
        int count;
        if (!_counts.ContainsKey(key))
        {
            count = 0;
        }
        else
        {
            count = _counts[key];
        }

        if (count < countPer)
        {
            WriteRow(tw, row);
            count++;
        }

        _counts[key] = count;
    }
    ReportDone(false);
    csv.Close();
    tw.Close();
}
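// A minimal sketch of the undersampling rule applied above, run over an in-memory
// list instead of a CSV stream: at most countPer rows are kept for each distinct
// class label. The Balance name and the string[] row type are illustrative;
// System.Collections.Generic is assumed.
static List<string[]> Balance(IEnumerable<string[]> rows, int targetField, int countPer)
{
    var counts = new Dictionary<string, int>();
    var kept = new List<string[]>();
    foreach (string[] row in rows)
    {
        string key = row[targetField];
        counts.TryGetValue(key, out int count);   // missing keys read as 0
        if (count < countPer)
        {
            kept.Add(row);
            count++;
        }
        counts[key] = count;
    }
    return kept;
}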
/// <summary>
/// Process the input file.
/// </summary>
/// <param name="outputFile">The output file to write to.</param>
public void Process(FileInfo outputFile)
{
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);

    StreamWriter tw = PrepareOutputFile(outputFile);
    _filteredCount = 0;

    ResetStatus();
    while (csv.Next() && !ShouldStop())
    {
        UpdateStatus(false);
        var row = new LoadedRow(csv);
        if (ShouldProcess(row))
        {
            WriteRow(tw, row);
            _filteredCount++;
        }
    }
    ReportDone(false);
    tw.Close();
    csv.Close();
}
/// <summary>
/// Determine if the specified row should be processed, or not.
/// </summary>
/// <param name="row">The row.</param>
/// <returns>True, if the row should be processed.</returns>
private bool ShouldProcess(LoadedRow row)
{
    return _excludedFields.All(
        field => !row.Data[field.FieldNumber].Trim().Equals(field.FieldValue.Trim()));
}
/// <summary>
/// Process the file.
/// </summary>
/// <param name="outputFile">The output file.</param>
/// <param name="method">The method to use.</param>
public void Process(FileInfo outputFile, IMLMethod method)
{
    var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);

    IMLData output;

    foreach (AnalystField field in _analyst.Script.Normalize.NormalizedFields)
    {
        field.Init();
    }

    int outputLength = _analyst.DetermineTotalInputFieldCount();

    StreamWriter tw = PrepareOutputFile(outputFile);

    ResetStatus();
    while (csv.Next())
    {
        UpdateStatus(false);
        var row = new LoadedRow(csv, _outputColumns);

        double[] inputArray = AnalystNormalizeCSV.ExtractFields(
            _analyst, _analystHeaders, csv, outputLength, true);
        if (_series.TotalDepth > 1)
        {
            inputArray = _series.Process(inputArray);
        }

        if (inputArray != null)
        {
            IMLData input = new BasicMLData(inputArray);

            // evaluate the row
            if ((method is IMLClassification) && !(method is IMLRegression))
            {
                // classification only?
                var tmp = new BasicMLData(1);
                tmp[0] = ((IMLClassification)method).Classify(input);
                output = tmp;
            }
            else
            {
                // regression
                output = ((IMLRegression)method).Compute(input);
            }

            // skip the file data
            int index = _fileColumns;
            int outputIndex = 0;

            // write the computed output fields
            foreach (AnalystField field in _analyst.Script.Normalize.NormalizedFields)
            {
                if (_analystHeaders.Find(field.Name) != -1)
                {
                    if (field.Output)
                    {
                        if (field.Classify)
                        {
                            // classification
                            ClassItem cls = field.DetermineClass(outputIndex, output);
                            outputIndex += field.ColumnsNeeded;
                            if (cls == null)
                            {
                                row.Data[index++] = "?Unknown?";
                            }
                            else
                            {
                                row.Data[index++] = cls.Name;
                            }
                        }
                        else
                        {
                            // regression
                            double n = output[outputIndex++];
                            n = field.DeNormalize(n);
                            row.Data[index++] = Format.Format(n, Precision);
                        }
                    }
                }
            }
        }

        WriteRow(tw, row);
    }
    ReportDone(false);
    tw.Close();
    csv.Close();
}
/// <summary>
/// Construct the cluster row.
/// </summary>
/// <param name="input">The input data.</param>
/// <param name="theRow">The CSV row.</param>
public ClusterRow(double[] input, LoadedRow theRow) : base(new BasicMLData(input))
{
    _row = theRow;
}
public bool LoadMap()
{
    // Check whether the save file exists
    if (File.Exists("SaveData.txt"))
    {
        // Create a reader for the save file
        StreamReader reader = File.OpenText("SaveData.txt");

        // Read the map length and width
        mapLength = Convert.ToInt32(reader.ReadLine());
        mapWidth = Convert.ToInt32(reader.ReadLine());

        // Temporary map to load into
        MapTile[,] TempMap = new MapTile[mapLength, mapWidth];

        // Skip the blank line
        reader.ReadLine();

        // Iterate through the map, reading each character
        for (int i = 0; i < mapLength - 1; i++)
        {
            // Save this row as a string
            string LoadedRow = reader.ReadLine();

            // Convert the loaded row into a character array  <-- problem line
            char[] LoadedTiles = LoadedRow.ToCharArray();

            if (i >= 0 && i < LoadedTiles.Length)
            {
                Console.WriteLine(LoadedTiles[i]);
            }

            // Go through the row, checking each character and loading the corresponding tile
            for (int j = 0; j < mapWidth - 1; j++)
            {
                if (LoadedTiles[j] == 'P')
                {
                    // Set the map's player to this new temporary player
                    Player tempPlayer = new Player();
                    tempPlayer.PlayerY = j;
                    tempPlayer.PlayerX = i;
                    player = tempPlayer;
                    TempMap[i, j] = player;
                }
                else if (LoadedTiles[j] == '-')
                {
                    TempMap[i, j] = new MapTile();
                }
                else if (LoadedTiles[j] == 'M')
                {
                    TempMap[i, j] = new Monster();
                }
                else if (LoadedTiles[j] == 'S')
                {
                    TempMap[i, j] = new Shop();
                }
                else
                {
                    TempMap[i, j] = new MapTile();
                }
            }
        }

        Console.WriteLine("Save loaded!");

        // Close the reader and set the map to the one we just loaded
        reader.Close();
        map = TempMap;
        return true;
    }
    else
    {
        // No save file was found
        Console.WriteLine("No save found");
        Console.ReadKey();
        return false;
    }
}
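// The line flagged as the problem above can throw: reader.ReadLine() returns null
// when the file has fewer rows than mapLength, and a row shorter than mapWidth
// makes LoadedTiles[j] go out of range. A minimal defensive sketch of that step,
// assuming the same mapWidth field and treating '-' (the blank tile) as padding:
string line = reader.ReadLine() ?? string.Empty;                  // treat a missing row as empty
char[] LoadedTiles = line.PadRight(mapWidth, '-').ToCharArray();  // pad short rows with blank tiles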