Next() Public Method

Read the next line.
public Next ( ) : bool
Returns bool
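Next() advances the reader to the following row and returns false once the end of the file has been reached, so it is normally called as the condition of a while loop; columns of the current row are then read with Get/GetDouble/GetDate and the reader is closed afterwards. A minimal sketch of that pattern (the file name and column names below are hypothetical):

        var csv = new ReadCSV("prices.csv", true, CSVFormat.English); // hypothetical file with a header row
        try
        {
            while (csv.Next()) // false once every line has been read
            {
                DateTime date = csv.GetDate("Date");   // hypothetical column names
                double close = csv.GetDouble("Close");
                Console.WriteLine(date.ToShortDateString() + " " + close);
            }
        }
        finally
        {
            csv.Close(); // always release the underlying file
        }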
Example #1
        protected override void LoadTestData(string testFile)
        {
            ReadCSV test_csv = new ReadCSV(testFile, true, CSVFormat.DecimalPoint);

            List<double[]> test_input = new List<double[]>();
            test_input_orig = new List<double[]>();

            while (test_csv.Next())
            {
                double x = test_csv.GetDouble(0);

                test_input.Add(new[] { x });
                test_input_orig.Add(new[] { x });
            }

            test_csv.Close();

            //Analyze(ref test_input);
            Normalize(ref test_input, ref vmin, ref vmax);

            testData = new List<IMLData>();
            foreach (var d in test_input)
            {
                testData.Add(new BasicMLData(d));
            }
        }
Example #2
        /// <summary>
        /// Load a CSV file into a memory dataset.  
        /// </summary>
        ///
        /// <param name="format">The CSV format to use.</param>
        /// <param name="filename">The filename to load.</param>
        /// <param name="headers">True if there is a header line.</param>
        /// <param name="inputSize">The input size.  Input always comes first in a file.</param>
        /// <param name="idealSize">The ideal size, 0 for unsupervised.</param>
        /// <returns>A NeuralDataSet that holds the contents of the CSV file.</returns>
        public static IMLDataSet LoadCSVTOMemory(CSVFormat format, String filename,
                                                bool headers, int inputSize, int idealSize)
        {
            var result = new BasicMLDataSet();
            var csv = new ReadCSV(filename, headers, format);
            while (csv.Next())
            {
                BasicMLData ideal = null;
                int index = 0;

                var input = new BasicMLData(inputSize);
                for (int i = 0; i < inputSize; i++)
                {
                    double d = csv.GetDouble(index++);
                    input[i] = d;
                }

                if (idealSize > 0)
                {
                    ideal = new BasicMLData(idealSize);
                    for (int i = 0; i < idealSize; i++)
                    {
                        double d = csv.GetDouble(index++);
                        ideal[i] = d;
                    }
                }

                IMLDataPair pair = new BasicMLDataPair(input, ideal);
                result.Add(pair);
            }

            return result;
        }
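For example, a supervised training file with two input columns followed by one ideal column could be loaded like this (the file name and column counts are hypothetical):

        IMLDataSet trainingSet = LoadCSVTOMemory(CSVFormat.English, "training.csv", true, 2, 1);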
Example #3
        /// <summary>
        /// Process the file and output to the target file.
        /// </summary>
        /// <param name="target">The target file to write to.</param>
        public void Process(string target)
        {
            var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
            TextWriter tw = new StreamWriter(target);

            ResetStatus();
            while (csv.Next())
            {
                var line = new StringBuilder();
                UpdateStatus(false);
                line.Append(GetColumnData(FileData.Date, csv));
                line.Append(" ");
                line.Append(GetColumnData(FileData.Time, csv));
                line.Append(";");
                line.Append(Format.Format(double.Parse(GetColumnData(FileData.Open, csv)), Precision));
                line.Append(";");
                line.Append(Format.Format(double.Parse(GetColumnData(FileData.High, csv)), Precision));
                line.Append(";");
                line.Append(Format.Format(double.Parse(GetColumnData(FileData.Low, csv)), Precision));
                line.Append(";");
                line.Append(Format.Format(double.Parse(GetColumnData(FileData.Close, csv)), Precision));
                line.Append(";");
                line.Append(Format.Format(double.Parse(GetColumnData(FileData.Volume, csv)), Precision));

                tw.WriteLine(line.ToString());
            }
            ReportDone(false);
            csv.Close();
            tw.Close();
        }
Example #4
        public ICollection<LoadedMarketData> ReadAndCallLoader(TickerSymbol symbol, IList<MarketDataType> neededTypes, DateTime from, DateTime to, string File)
        {
            try
            {
                //We got a file, lets load it.
                ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
                ReadCSV csv = new ReadCSV(File, true, CSVFormat.English);
                csv.DateFormat = "yyyy.MM.dd HH:mm:ss";

                DateTime ParsedDate = from;


                //  Time,Bid,Ask,AskVolume,BidVolume
                while (csv.Next() && ParsedDate >= from && ParsedDate <= to  )
                {
                    DateTime date = csv.GetDate("Time");
                    double Bid= csv.GetDouble("Bid");
                    double Ask = csv.GetDouble("Ask");
                    double AskVolume = csv.GetDouble("AskVolume");
                    double BidVolume= csv.GetDouble("BidVolume");
                    double _trade = ( Bid + Ask ) /2;
                    double _tradeSize = (AskVolume + BidVolume) / 2;
                    LoadedMarketData data = new LoadedMarketData(date, symbol);
                    data.SetData(MarketDataType.Trade, _trade);
                    data.SetData(MarketDataType.Volume, _tradeSize);
                    result.Add(data);

                    Console.WriteLine("Current DateTime:"+ParsedDate.ToShortDateString()+ " Time:"+ParsedDate.ToShortTimeString() +"  Start date was "+from.ToShortDateString());
                    Console.WriteLine("Stopping at date:" + to.ToShortDateString() );
                    ParsedDate = date;
                    //double open = csv.GetDouble("Open");
                    //double close = csv.GetDouble("High");
                    //double high = csv.GetDouble("Low");
                    //double low = csv.GetDouble("Close");
                    //double volume = csv.GetDouble("Volume");
                    //LoadedMarketData data = new LoadedMarketData(date, symbol);
                    //data.SetData(MarketDataType.Open, open);
                    //data.SetData(MarketDataType.High, high);
                    //data.SetData(MarketDataType.Low, low);
                    //data.SetData(MarketDataType.Close, close);
                    //data.SetData(MarketDataType.Volume, volume);
                }

                csv.Close();
                return result;
            }

            catch (Exception ex)
            {

                Console.WriteLine("Something went wrong reading the csv");
                Console.WriteLine("Something went wrong reading the csv:" + ex.Message);
            }

            Console.WriteLine("Something went wrong reading the csv");
            return null;
        }
Example #5
        /// <summary>
        /// Reads the CSV and call loader.
        /// Used internally to load the csv and place data in the marketdataset.
        /// </summary>
        /// <param name="symbol">The symbol.</param>
        /// <param name="neededTypes">The needed types.</param>
        /// <param name="from">From.</param>
        /// <param name="to">To.</param>
        /// <param name="File">The file.</param>
        /// <returns></returns>
        ICollection<LoadedMarketData> ReadAndCallLoader(TickerSymbol symbol, IEnumerable<MarketDataType> neededTypes, DateTime from, DateTime to, string File)
        {
                //We got a file, lets load it.

                ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
                ReadCSV csv = new ReadCSV(File, true, CSVFormat.English);
                //In case we want to use a different date format...and have used the SetDateFormat method, our DateFormat must then not be null..
                //We will use the ?? operator to check for nullables.
                csv.DateFormat = DateFormat ?? "yyyy-MM-dd HH:mm:ss";
                csv.TimeFormat = "HH:mm:ss";

                DateTime ParsedDate = from;
                bool writeonce = true;

                while (csv.Next())
                {
                    DateTime date = csv.GetDate(0);
                    ParsedDate = date;

                    if (writeonce)
                    {
                        Console.WriteLine(@"First parsed date in csv:" + ParsedDate.ToShortDateString());
                        Console.WriteLine(@"Stopping at date:" + to.ToShortDateString());
                        Console.WriteLine(@"Current DateTime:" + ParsedDate.ToShortDateString() + @" Time:" +
                                          ParsedDate.ToShortTimeString() + @"  Asked Start date was " +
                                          from.ToShortDateString());
                        writeonce = false;
                    }
                    if (ParsedDate >= from && ParsedDate <= to)
                    {
                        DateTime datex = csv.GetDate(0);
                        double open = csv.GetDouble(1);
                        double close = csv.GetDouble(2);
                        double high = csv.GetDouble(3);
                        double low = csv.GetDouble(4);
                        double volume = csv.GetDouble(5);
                        double range = Math.Abs(open - close);
                        double HighLowRange = Math.Abs(high - low);
                        double DirectionalRange = close - open;
                        LoadedMarketData data = new LoadedMarketData(datex, symbol);
                        data.SetData(MarketDataType.Open, open);
                        data.SetData(MarketDataType.High, high);
                        data.SetData(MarketDataType.Low, low);
                        data.SetData(MarketDataType.Close, close);
                        data.SetData(MarketDataType.Volume, volume);
                        data.SetData(MarketDataType.RangeHighLow, Math.Round(HighLowRange, 6));
                        data.SetData(MarketDataType.RangeOpenClose, Math.Round(range, 6));
                        data.SetData(MarketDataType.RangeOpenCloseNonAbsolute, Math.Round(DirectionalRange, 6));
                        result.Add(data);


                    }

                }

                csv.Close();
                return result;
        }
Example #6
 /// <summary>
 /// Parses one column of a CSV file and returns a list of doubles.
 /// Only one column can be returned per call.
 /// </summary>
 /// <param name="file">The file.</param>
 /// <param name="formatused">The CSV format used.</param>
 /// <param name="Name">The name of the column to parse.</param>
 /// <returns>The parsed column values.</returns>
 public static List<double> QuickParseCSV(string file, CSVFormat formatused, string Name)
 {
     List<double> returnedArrays = new List<double>();
     ReadCSV csv = new ReadCSV(file, true, formatused);
     while (csv.Next())
     {
         returnedArrays.Add(csv.GetDouble(Name));
     }
     return returnedArrays;
 }
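A hypothetical call that pulls a "Close" column out of a price file would look like this (file name and column name are assumptions):

        List<double> closes = QuickParseCSV("prices.csv", CSVFormat.English, "Close");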
Example #7
 /// <summary>
 /// Parses one column of a CSV file and returns a list of doubles.
 /// Only one column can be returned per call.
 /// CSVFormat.English is assumed in this overload.
 /// The size parameter limits the number of lines read.
 /// </summary>
 /// <param name="file">The file.</param>
 /// <param name="Name">The name of the column to parse.</param>
 /// <param name="size">The maximum number of lines to read.</param>
 /// <returns>The parsed column values.</returns>
 public static List<double> QuickParseCSV(string file, string Name, int size)
 {
     List<double> returnedArrays = new List<double>();
     ReadCSV csv = new ReadCSV(file, true, CSVFormat.English);
     int currentRead = 0;
     while (csv.Next() && currentRead < size)
     {
         returnedArrays.Add(csv.GetDouble(Name));
         currentRead++;
     }
     return returnedArrays;
 }
Example #8
        public ICollection<LoadedMarketData> ReadAndCallLoader(TickerSymbol symbol, IList<MarketDataType> neededTypes, DateTime from, DateTime to,string File)
        {
            try
            {


                //We got a file, lets load it.
                ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
                ReadCSV csv = new ReadCSV(File, true, LoadedFormat);
                csv.DateFormat = DateTimeFormat.Normalize();

                //  Time,Open,High,Low,Close,Volume
                while (csv.Next())
                {
                    DateTime date = csv.GetDate("Time");
                    double open = csv.GetDouble("Open");
                    double high = csv.GetDouble("High");
                    double low = csv.GetDouble("Low");
                    double close = csv.GetDouble("Close");
                    double volume = csv.GetDouble("Volume");
                    LoadedMarketData data = new LoadedMarketData(date, symbol);
                    data.SetData(MarketDataType.Open, open);
                    data.SetData(MarketDataType.High, high);
                    data.SetData(MarketDataType.Low, low);
                    data.SetData(MarketDataType.Close, close);
                    data.SetData(MarketDataType.Volume, volume);
                    result.Add(data);
                }

                csv.Close();
                return result;
            }
            
            catch (Exception ex)
            {
                
              Console.WriteLine("Something went wrong reading the csv");
              Console.WriteLine("Something went wrong reading the csv:"+ex.Message);
            }

            Console.WriteLine("Something went wrong reading the csv");
            return null;
        }
Example #9
        /// <summary>
        /// Load financial data from a CSV file.
        /// </summary>
        /// <param name="ticker">The ticker being loaded, ignored for a CSV load.</param>
        /// <param name="dataNeeded">The data needed.</param>
        /// <param name="from">The starting date.</param>
        /// <param name="to">The ending date.</param>
        /// <returns></returns>
        public ICollection<LoadedMarketData> Load(TickerSymbol ticker, IList<MarketDataType> dataNeeded, DateTime from,
                                                  DateTime to)
        {
            try
            {
                if (File.Exists(TheFile))
                {
                    //We got a file, lets load it.
                    ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
                    var csv = new ReadCSV(TheFile, true, CSVFormat.English);

                    //  Time,Open,High,Low,Close,Volume
                    while (csv.Next())
                    {
                        DateTime date = csv.GetDate("Time");
                        double open = csv.GetDouble("Open");
                        double close = csv.GetDouble("Close");
                        double high = csv.GetDouble("High");
                        double low = csv.GetDouble("Low");
                        double volume = csv.GetDouble("Volume");
                        var data = new LoadedMarketData(date, ticker);
                        data.SetData(MarketDataType.Open, open);
                        data.SetData(MarketDataType.Close, close);
                        data.SetData(MarketDataType.High, high);
                        data.SetData(MarketDataType.Low, low);
                        data.SetData(MarketDataType.Volume, volume);
                        result.Add(data);
                    }

                    csv.Close();
                    return result;
                }
            }
            catch (Exception ex)
            {
                throw new LoaderError(ex);
            }

            throw new LoaderError(@"Something went wrong reading the csv");
        }
Example #10
        /// <summary>
        /// Load financial data from Google.
        /// </summary>
        /// <param name="ticker">The ticker to load from.</param>
        /// <param name="dataNeeded">The data needed.</param>
        /// <param name="from">The starting time.</param>
        /// <param name="to">The ending time.</param>
        /// <returns>The loaded data.</returns>
        public ICollection<LoadedMarketData> Load(TickerSymbol ticker, IList<MarketDataType> dataNeeded, DateTime from,
                                                  DateTime to)
        {
            ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
            Uri url = BuildUrl(ticker, from, to);
            WebRequest http = WebRequest.Create(url);
            var response = (HttpWebResponse) http.GetResponse();

            if (response != null)
                using (Stream istream = response.GetResponseStream())
                {
                    var csv = new ReadCSV(istream, true, CSVFormat.DecimalPoint);

                    while (csv.Next())
                    {
                        DateTime date = csv.GetDate("date");

                        double open = csv.GetDouble("open");
                        double close = csv.GetDouble("close");
                        double high = csv.GetDouble("high");
                        double low = csv.GetDouble("low");
                        double volume = csv.GetDouble("volume");

                        var data =
                            new LoadedMarketData(date, ticker);

                        data.SetData(MarketDataType.Open, open);
                        data.SetData(MarketDataType.Close, close);
                        data.SetData(MarketDataType.High, high);
                        data.SetData(MarketDataType.Low, low);
                        data.SetData(MarketDataType.Volume, volume);
                        result.Add(data);
                    }

                    csv.Close();
                    if (istream != null) istream.Close();
                }
            return result;
        }
Example #11
        /// <summary>
        /// Construct the object.
        /// </summary>
        ///
        /// <param name="filename">The filename.</param>
        /// <param name="headers">False if headers are not extended.</param>
        /// <param name="format">The CSV format.</param>
        public CSVHeaders(FileInfo filename, bool headers,
                          CSVFormat format)
        {
            _headerList = new List<String>();
            _columnMapping = new Dictionary<String, Int32>();
            ReadCSV csv = null;
            try
            {
                csv = new ReadCSV(filename.ToString(), headers, format);
                if (csv.Next())
                {
                    if (headers)
                    {
                        foreach (String str  in  csv.ColumnNames)
                        {
                            _headerList.Add(str);
                        }
                    }
                    else
                    {
                        for (int i = 0; i < csv.ColumnCount; i++)
                        {
                            _headerList.Add("field:" + (i + 1));
                        }
                    }
                }

                Init();
            }
            finally
            {
                if (csv != null)
                {
                    csv.Close();
                }
            }
        }
Example #12
        /// <summary>
        /// Used to calibrate the training file. 
        /// </summary>
        /// <param name="file">The file to consider.</param>
        protected void CalibrateFile(string file)
        {
            var csv = new ReadCSV(file, true, CSVFormat.English);
            while (csv.Next())
            {
                var a = new double[1];
                double close = csv.GetDouble(1);

                const int fastIndex = 2;
                const int slowIndex = fastIndex + Config.InputWindow;
                a[0] = close;
                for (int i = 0; i < Config.InputWindow; i++)
                {
                    double fast = csv.GetDouble(fastIndex + i);
                    double slow = csv.GetDouble(slowIndex + i);

                    if (!double.IsNaN(fast) && !double.IsNaN(slow))
                    {
                        double diff = (fast - slow)/Config.PipSize;
                        _minDifference = Math.Min(_minDifference, diff);
                        _maxDifference = Math.Max(_maxDifference, diff);
                    }
                }
                _window.Add(a);

                if (_window.IsFull())
                {
                    double max = (_window.CalculateMax(0, Config.InputWindow) - close)/Config.PipSize;
                    double min = (_window.CalculateMin(0, Config.InputWindow) - close)/Config.PipSize;

                    double o = Math.Abs(max) > Math.Abs(min) ? max : min;

                    _maxPiPs = Math.Max(_maxPiPs, (int) o);
                    _minPiPs = Math.Min(_minPiPs, (int) o);
                }
            }
        }
Example #13
        /// <summary>
        ///     Process and balance the data.
        /// </summary>
        /// <param name="outputFile">The output file to write data to.</param>
        /// <param name="targetField"></param>
        /// <param name="countPer">The desired count per class.</param>
        public void Process(FileInfo outputFile, int targetField,
                            int countPer)
        {
            ValidateAnalyzed();
            StreamWriter tw = PrepareOutputFile(outputFile);

            _counts = new Dictionary<String, Int32>();

            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);

            ResetStatus();
            while (csv.Next() && !ShouldStop())
            {
                var row = new LoadedRow(csv);
                UpdateStatus(false);
                String key = row.Data[targetField];
                int count;
                if (!_counts.ContainsKey(key))
                {
                    count = 0;
                }
                else
                {
                    count = _counts[key];
                }

                if (count < countPer)
                {
                    WriteRow(tw, row);
                    count++;
                }

                _counts[key] = count;
            }
            ReportDone(false);
            csv.Close();
            tw.Close();
        }
Example #14
        /// <summary>
        /// Load the buffer from the underlying file.
        /// </summary>
        ///
        /// <param name="csv">The CSV file to load from.</param>
        private void LoadBuffer(ReadCSV csv)
        {
            for (int i = 0; i < _buffer.Length; i++)
            {
                _buffer[i] = null;
            }

            int index = 0;
            while (csv.Next() && (index < _bufferSize) && !ShouldStop())
            {
                var row = new LoadedRow(csv);
                _buffer[index++] = row;
            }

            _remaining = index;
        }
Example #15
        /// <summary>
        ///     Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            // Download the data that we will attempt to model.
            string filename = DownloadData(app.Args);

            // Define the format of the data file.
            // This area will change, depending on the columns and 
            // format of the file that you are trying to model.
            var format = new CSVFormat('.', ' '); // decimal point and space separated
            IVersatileDataSource source = new CSVDataSource(filename, false, format);

            var data = new VersatileMLDataSet(source);
            data.NormHelper.Format = format;

            ColumnDefinition columnMPG = data.DefineSourceColumn("mpg", 0, ColumnType.Continuous);
            ColumnDefinition columnCylinders = data.DefineSourceColumn("cylinders", 1, ColumnType.Ordinal);
            // It is very important to predefine ordinals, so that the order is known.
            columnCylinders.DefineClass(new[] {"3", "4", "5", "6", "8"});
            data.DefineSourceColumn("displacement", 2, ColumnType.Continuous);
            ColumnDefinition columnHorsePower = data.DefineSourceColumn("horsepower", 3, ColumnType.Continuous);
            data.DefineSourceColumn("weight", 4, ColumnType.Continuous);
            data.DefineSourceColumn("acceleration", 5, ColumnType.Continuous);
            ColumnDefinition columnModelYear = data.DefineSourceColumn("model_year", 6, ColumnType.Ordinal);
            columnModelYear.DefineClass(new[]
            {"70", "71", "72", "73", "74", "75", "76", "77", "78", "79", "80", "81", "82"});
            data.DefineSourceColumn("origin", 7, ColumnType.Nominal);

            // Define how missing values are represented.
            data.NormHelper.DefineUnknownValue("?");
            data.NormHelper.DefineMissingHandler(columnHorsePower, new MeanMissingHandler());

            // Analyze the data, determine the min/max/mean/sd of every column.
            data.Analyze();

            // Map the prediction column to the output of the model, and all
            // other columns to the input.
            data.DefineSingleOutputOthersInput(columnMPG);

            // Create feedforward neural network as the model type. MLMethodFactory.TYPE_FEEDFORWARD.
            // You could also use other model types, such as:
            // MLMethodFactory.SVM:  Support Vector Machine (SVM)
            // MLMethodFactory.TYPE_RBFNETWORK: RBF Neural Network
            // MLMethodFactor.TYPE_NEAT: NEAT Neural Network
            // MLMethodFactor.TYPE_PNN: Probabilistic Neural Network
            var model = new EncogModel(data);
            model.SelectMethod(data, MLMethodFactory.TypeFeedforward);

            // Send any output to the console.
            model.Report = new ConsoleStatusReportable();

            // Now normalize the data.  Encog will automatically determine the correct normalization
            // type based on the model you chose in the last step.
            data.Normalize();

            // Hold back some data for a final validation.
            // Shuffle the data into a random ordering.
            // Use a seed of 1001 so that we always use the same holdback and will get more consistent results.
            model.HoldBackValidation(0.3, true, 1001);

            // Choose whatever is the default training type for this model.
            model.SelectTrainingType(data);

            // Use a 5-fold cross-validated train.  Return the best method found.
            var bestMethod = (IMLRegression) model.Crossvalidate(5, true);

            // Display the training and validation errors.
            Console.WriteLine(@"Training error: " + model.CalculateError(bestMethod, model.TrainingDataset));
            Console.WriteLine(@"Validation error: " + model.CalculateError(bestMethod, model.ValidationDataset));

            // Display our normalization parameters.
            NormalizationHelper helper = data.NormHelper;
            Console.WriteLine(helper.ToString());

            // Display the final model.
            Console.WriteLine("Final model: " + bestMethod);

            // Loop over the entire, original, dataset and feed it through the model.
            // This also shows how you would process new data, that was not part of your
            // training set.  You do not need to retrain, simply use the NormalizationHelper
            // class.  After you train, you can save the NormalizationHelper to later
            // normalize and denormalize your data.
            source.Close();
            var csv = new ReadCSV(filename, false, format);
            var line = new String[7];
            IMLData input = helper.AllocateInputVector();

            while (csv.Next())
            {
                var result = new StringBuilder();

                line[0] = csv.Get(1);
                line[1] = csv.Get(2);
                line[2] = csv.Get(3);
                line[3] = csv.Get(4);
                line[4] = csv.Get(5);
                line[5] = csv.Get(6);
                line[6] = csv.Get(7);

                String correct = csv.Get(0);
                helper.NormalizeInputVector(line, ((BasicMLData) input).Data, false);
                IMLData output = bestMethod.Compute(input);
                String irisChosen = helper.DenormalizeOutputVectorToString(output)[0];

                result.Append(string.Join(",", line));
                result.Append(" -> predicted: ");
                result.Append(irisChosen);
                result.Append("(correct: ");
                result.Append(correct);
                result.Append(")");

                Console.WriteLine(result.ToString());
            }
            csv.Close();

            // Delete data file and shut down.
            File.Delete(filename);
            EncogFramework.Instance.Shutdown();
        }
Example #16
 // Decompiled helper: reads every input column of the CSV into the cached columns.
 private void x08af8e36ac9914b5()
 {
     ReadCSV dcsv = null;
     try
     {
         dcsv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
         base.ResetStatus();

         int row = 0;
         while (dcsv.Next() && !base.ShouldStop())
         {
             base.UpdateStatus("Reading data");
             foreach (BaseCachedColumn column in base.Columns)
             {
                 if (column is FileData && column.Input)
                 {
                     // Parse the cell for this input column and store it at the current row.
                     var data = (FileData) column;
                     string str = dcsv.Get(data.Index);
                     data.Data[row] = base.InputFormat.Parse(str);
                 }
             }
             row++;
         }
     }
     finally
     {
         base.ReportDone("Reading data");
         if (dcsv != null)
         {
             dcsv.Close();
         }
     }
 }
Example #17
        /// <summary>
        /// Reads and parses CSV data from file
        /// </summary>
        /// <param name="ticker">Ticker associated with CSV file</param>
        /// <param name="neededTypes">Columns to parse (headers)</param>
        /// <param name="from">DateTime from</param>
        /// <param name="to">DateTime to</param>
        /// <param name="File">Filepath to CSV</param>
        /// <returns>Marketdata</returns>
        public ICollection<LoadedMarketData> ReadAndCallLoader(TickerSymbol ticker, IList<MarketDataType> neededTypes, DateTime from, DateTime to, string File)
        {
            try
            {
                LoadedFile = File;
                Console.WriteLine("Loading instrument: " + ticker.Symbol + " from: " + File);
                //We got a file, lets load it.
                ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
                ReadCSV csv = new ReadCSV(File, true, LoadedFormat);
                if (DateTimeDualColumn)
                {
                    csv.DateFormat = DateFormat;
                    csv.TimeFormat =  TimeFormat;
                }
                else
                {
                    csv.DateFormat = DateFormat;
                }

                //"Date","Time","Open","High","Low","Close","Volume"
                while (csv.Next())
                {
                    string datetime = "";
                    if (DateTimeDualColumn)
                    {
                        datetime = csv.GetDate("Date").ToShortDateString() + " " +
                                          csv.GetTime("Time").ToShortTimeString();
                    }
                    else
                    {
                        datetime = csv.GetDate("Date").ToShortDateString();
                    }
                    DateTime date = DateTime.Parse(datetime);
                    if (date > from && date < to)
                    {
                        // CSV columns
                        double open = csv.GetDouble("Open");
                        double high = csv.GetDouble("High");
                        double low = csv.GetDouble("Low");
                        double close = csv.GetDouble("Close");
                        double volume = csv.GetDouble("Volume");

                        LoadedMarketData data = new LoadedMarketData(date, ticker);
                        foreach (MarketDataType marketDataType in neededTypes)
                        {
                            switch (marketDataType.ToString())
                            {
                                case "Open":
                                    data.SetData(MarketDataType.Open, open);
                                    break;
                                case "High":
                                    data.SetData(MarketDataType.High, high);
                                    break;
                                case "Low":
                                    data.SetData(MarketDataType.Low, low);
                                    break;
                                case "Close":
                                    data.SetData(MarketDataType.Close, close);
                                    break;
                                case "Volume":
                                    data.SetData(MarketDataType.Volume, volume);
                                    break;
                                case "RangeHighLow":
                                    data.SetData(MarketDataType.RangeHighLow, Math.Round(Math.Abs(high - low), 6));
                                    break;
                                case "RangeOpenClose":
                                    data.SetData(MarketDataType.RangeOpenClose, Math.Round(Math.Abs(close - open), 6));
                                    break;
                                case "RangeOpenCloseNonAbsolute":
                                    data.SetData(MarketDataType.RangeOpenCloseNonAbsolute, Math.Round(close - open, 6));
                                    break;
                                case "Weighted":
                                    data.SetData(MarketDataType.Weighted, Math.Round((high + low + 2 * close) / 4, 6));
                                    break;
                            }
                        }
                        result.Add(data);
                    }
                }
                csv.Close();
                return result;
            }

            catch (Exception ex)
            {
                Console.WriteLine("Something went wrong reading the csv: " + ex.Message);
            }
            return null;
        }
Example #18
 public void Process(EncogAnalyst target)
 {
     CSVFormat format = ConvertStringConst.ConvertToCSVFormat(this._x5786461d089b10a0);

     // Pass one: build the field list from the header row and gather basic statistics.
     ReadCSV dcsv = new ReadCSV(this._xb41a802ca5fde63b, this._x94e6ca5ac178dbd0, format);
     while (dcsv.Next())
     {
         if (this._xa942970cc8a85fd4 == null)
         {
             this.xd2a854890d89a856(dcsv);
         }

         for (int i = 0; i < dcsv.ColumnCount; i++)
         {
             if (this._xa942970cc8a85fd4 != null)
             {
                 this._xa942970cc8a85fd4[i].Analyze1(dcsv.Get(i));
             }
         }
     }

     if (this._xa942970cc8a85fd4 != null)
     {
         foreach (AnalyzedField field in this._xa942970cc8a85fd4)
         {
             field.CompletePass1();
         }
     }

     dcsv.Close();

     // Pass two: re-read the file and gather the remaining statistics.
     dcsv = new ReadCSV(this._xb41a802ca5fde63b, this._x94e6ca5ac178dbd0, format);
     while (dcsv.Next())
     {
         for (int i = 0; i < dcsv.ColumnCount; i++)
         {
             if (this._xa942970cc8a85fd4 != null)
             {
                 this._xa942970cc8a85fd4[i].Analyze2(dcsv.Get(i));
             }
         }
     }

     if (this._xa942970cc8a85fd4 != null)
     {
         foreach (AnalyzedField field in this._xa942970cc8a85fd4)
         {
             field.CompletePass2();
         }
     }

     dcsv.Close();

     // Determine which field types are allowed to become classes.
     string allowedClasses = this._x594135906c55045c.Properties.GetPropertyString("SETUP:CONFIG_allowedClasses") ?? "";
     bool allowInt = allowedClasses.Contains("int");
     bool allowReal = allowedClasses.Contains("real");
     bool allowString = allowedClasses.Contains("string");

     foreach (AnalyzedField field in this._xa942970cc8a85fd4)
     {
         if (field.Class)
         {
             if (!allowInt && field.Integer)
             {
                 field.Class = false;
             }
             if (!allowString && !field.Integer && !field.Real)
             {
                 field.Class = false;
             }
             if (!allowReal && field.Real && !field.Integer)
             {
                 field.Class = false;
             }
             if (field.Integer && field.AnalyzedClassMembers.Count <= 2)
             {
                 field.Class = false;
             }
         }
     }

     // Copy any existing field names and class-member names from the script.
     if (target.Script.Fields != null
         && this._xa942970cc8a85fd4.Length == target.Script.Fields.Length)
     {
         for (int i = 0; i < this._xa942970cc8a85fd4.Length; i++)
         {
             this._xa942970cc8a85fd4[i].Name = target.Script.Fields[i].Name;

             if (!this._xa942970cc8a85fd4[i].Class)
             {
                 continue;
             }

             IList<AnalystClassItem> analyzedClassMembers = this._xa942970cc8a85fd4[i].AnalyzedClassMembers;
             IList<AnalystClassItem> classMembers = target.Script.Fields[i].ClassMembers;

             if (classMembers.Count == analyzedClassMembers.Count)
             {
                 for (int j = 0; j < classMembers.Count; j++)
                 {
                     if (analyzedClassMembers[j].Code.Equals(classMembers[j].Code))
                     {
                         analyzedClassMembers[j].Name = classMembers[j].Name;
                     }
                 }
             }
         }
     }

     // Finalize the analyzed fields into the script.
     var fields = new DataField[this._xa942970cc8a85fd4.Length];
     for (int i = 0; i < fields.Length; i++)
     {
         fields[i] = this._xa942970cc8a85fd4[i].FinalizeField();
     }
     target.Script.Fields = fields;
 }
Example #19
        /// <summary>
        ///     Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            ErrorCalculation.Mode = ErrorCalculationMode.RMS;
            // Download the data that we will attempt to model.
            string filename = DownloadData(app.Args);

            // Define the format of the data file.
            // This area will change, depending on the columns and
            // format of the file that you are trying to model.
            var format = new CSVFormat('.', ' '); // decimal point and
            // space separated
            IVersatileDataSource source = new CSVDataSource(filename, true,
                format);

            var data = new VersatileMLDataSet(source);
            data.NormHelper.Format = format;

            ColumnDefinition columnSSN = data.DefineSourceColumn("SSN",
                ColumnType.Continuous);
            ColumnDefinition columnDEV = data.DefineSourceColumn("DEV",
                ColumnType.Continuous);

            // Analyze the data, determine the min/max/mean/sd of every column.
            data.Analyze();

            // Use SSN & DEV to predict SSN. For time-series it is okay to have
            // SSN both as
            // an input and an output.
            data.DefineInput(columnSSN);
            data.DefineInput(columnDEV);
            data.DefineOutput(columnSSN);

            // Create feedforward neural network as the model type.
            // MLMethodFactory.TYPE_FEEDFORWARD.
            // You could also use other model types, such as:
            // MLMethodFactory.SVM: Support Vector Machine (SVM)
            // MLMethodFactory.TYPE_RBFNETWORK: RBF Neural Network
            // MLMethodFactor.TYPE_NEAT: NEAT Neural Network
            // MLMethodFactor.TYPE_PNN: Probabilistic Neural Network
            var model = new EncogModel(data);
            model.SelectMethod(data, MLMethodFactory.TypeFeedforward);

            // Send any output to the console.
            model.Report = new ConsoleStatusReportable();

            // Now normalize the data. Encog will automatically determine the
            // correct normalization
            // type based on the model you chose in the last step.
            data.Normalize();

            // Set time series.
            data.LeadWindowSize = 1;
            data.LagWindowSize = WindowSize;

            // Hold back some data for a final validation.
            // Do not shuffle the data into a random ordering. (never shuffle
            // time series)
            // Use a seed of 1001 so that we always use the same holdback and
            // will get more consistent results.
            model.HoldBackValidation(0.3, false, 1001);

            // Choose whatever is the default training type for this model.
            model.SelectTrainingType(data);

            // Use a 5-fold cross-validated train. Return the best method found.
            // (never shuffle time series)
            var bestMethod = (IMLRegression) model.Crossvalidate(5,
                false);

            // Display the training and validation errors.
            Console.WriteLine(@"Training error: "
                              + model.CalculateError(bestMethod,
                                  model.TrainingDataset));
            Console.WriteLine(@"Validation error: "
                              + model.CalculateError(bestMethod,
                                  model.ValidationDataset));

            // Display our normalization parameters.
            NormalizationHelper helper = data.NormHelper;
            Console.WriteLine(helper.ToString());

            // Display the final model.
            Console.WriteLine(@"Final model: " + bestMethod);

            // Loop over the entire, original, dataset and feed it through the
            // model. This also shows how you would process new data, that was
            // not part of your training set. You do not need to retrain, simply
            // use the NormalizationHelper class. After you train, you can save
            // the NormalizationHelper to later normalize and denormalize your
            // data.
            source.Close();
            var csv = new ReadCSV(filename, true, format);
            var line = new String[2];

            // Create a vector to hold each time-slice, as we build them.
            // These will be grouped together into windows.
            var slice = new double[2];
            var window = new VectorWindow(WindowSize + 1);
            IMLData input = helper.AllocateInputVector(WindowSize + 1);

            // Only display the first 100
            int stopAfter = 100;

            while (csv.Next() && stopAfter > 0)
            {
                var result = new StringBuilder();

                line[0] = csv.Get(2); // ssn
                line[1] = csv.Get(3); // dev
                helper.NormalizeInputVector(line, slice, false);

                // enough data to build a full window?
                if (window.IsReady())
                {
                    window.CopyWindow(((BasicMLData) input).Data, 0);
                    String correct = csv.Get(2); // trying to predict SSN.
                    IMLData output = bestMethod.Compute(input);
                    String predicted = helper
                        .DenormalizeOutputVectorToString(output)[0];

                    result.Append(string.Join(",", line));
                    result.Append(" -> predicted: ");
                    result.Append(predicted);
                    result.Append("(correct: ");
                    result.Append(correct);
                    result.Append(")");

                    Console.WriteLine(result.ToString());
                }

                // Add the normalized slice to the window. We do this just after
                // checking whether the window is ready, so that the window is
                // always one behind the current row. This is because we are
                // trying to predict the next row.
                window.Add(slice);

                stopAfter--;
            }
            csv.Close();

            // Delete data file and shut down.
            File.Delete(filename);
            EncogFramework.Instance.Shutdown();
        }
Example #20
        /// <summary>
        /// Process the input file.
        /// </summary>
        ///
        /// <param name="outputFile">The output file to write to.</param>
        public void Process(FileInfo outputFile)
        {
            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, InputFormat);

            StreamWriter tw = PrepareOutputFile(outputFile);
            _filteredCount = 0;

            ResetStatus();
            while (csv.Next() && !ShouldStop())
            {
                UpdateStatus(false);
                var row = new LoadedRow(csv);
                if (ShouldProcess(row))
                {
                    WriteRow(tw, row);
                    _filteredCount++;
                }
            }
            ReportDone(false);
            tw.Close();
            csv.Close();
        }
Example #21
 public void Process()
 {
     // Decompiled: validate the configured targets before processing.
     this.x461c3bf969128260();

     ReadCSV dcsv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
     base.ResetStatus();

     // Write rows to each segregation target until its quota is filled.
     foreach (SegregateTargetPercent percent in this._x2ea7a1eff81ae7c0)
     {
         StreamWriter writer = base.PrepareOutputFile(percent.Filename);

         while (percent.NumberRemaining > 0 && dcsv.Next() && !base.ShouldStop())
         {
             base.UpdateStatus(false);
             LoadedRow row = new LoadedRow(dcsv);
             base.WriteRow(writer, row);
             percent.NumberRemaining--;
         }

         writer.Close();
     }

     base.ReportDone(false);
     dcsv.Close();
 }
Example #22
        /// <summary>
        /// Process the file.
        /// </summary>
        ///
        /// <param name="outputFile">The output file.</param>
        /// <param name="method">THe method to use.</param>
        public void Process(FileInfo outputFile, IMLMethod method)
        {
            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);

            IMLData output;

            int outputLength = _analyst.DetermineTotalInputFieldCount();

            StreamWriter tw = PrepareOutputFile(outputFile);

            ResetStatus();
            while (csv.Next())
            {
                UpdateStatus(false);
                var row = new LoadedRow(csv, _outputColumns);

                double[] inputArray = AnalystNormalizeCSV.ExtractFields(_analyst,
                                                                        _analystHeaders, csv, outputLength, true);
                if (_series.TotalDepth > 1)
                {
                    inputArray = _series.Process(inputArray);
                }

                if (inputArray != null)
                {
                    IMLData input = new BasicMLData(inputArray);

                    // evaluation data
                    if ((method is IMLClassification)
                        && !(method is IMLRegression))
                    {
                        // classification only?
                        output = new BasicMLData(1);
                        output[0] =
                            ((IMLClassification) method).Classify(input);
                    }
                    else
                    {
                        // regression
                        output = ((IMLRegression) method).Compute(input);
                    }

                    // skip file data
                    int index = _fileColumns;
                    int outputIndex = 0;


                    // display output
                    foreach (AnalystField field  in  _analyst.Script.Normalize.NormalizedFields)
                    {
                        if (_analystHeaders.Find(field.Name) != -1)
                        {
                            if (field.Output)
                            {
                                if (field.Classify)
                                {
                                    // classification
                                    ClassItem cls = field.DetermineClass(
                                        outputIndex, output.Data);
                                    outputIndex += field.ColumnsNeeded;
                                    if (cls == null)
                                    {
                                        row.Data[index++] = "?Unknown?";
                                    }
                                    else
                                    {
                                        row.Data[index++] = cls.Name;
                                    }
                                }
                                else
                                {
                                    // regression
                                    double n = output[outputIndex++];
                                    n = field.DeNormalize(n);
                                    row.Data[index++] = Format
                                        .Format(n, Precision);
                                }
                            }
                        }
                    }
                }

                WriteRow(tw, row);
            }
            ReportDone(false);
            tw.Close();
            csv.Close();
        }
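The branch in the middle of the loop is the important part: a model that implements only IMLClassification produces a single class index, while anything that implements IMLRegression produces a vector of numeric outputs. A minimal sketch of the two calls, assuming classifier and regressor are already-trained Encog models (both variable names are hypothetical):

            IMLData sample = new BasicMLData(new double[] { 0.25, 0.75 });

            // classification: one integer class index
            int predictedClass = ((IMLClassification) classifier).Classify(sample);

            // regression: a vector of numeric outputs, read by index
            IMLData predicted = ((IMLRegression) regressor).Compute(sample);
            double firstOutput = predicted[0];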
        /// <summary>
        /// Process the individual training file. 
        /// </summary>
        /// <param name="file">The training file to process.</param>
        /// <param name="output">The data set to output to.</param>
        protected void ProcessFile(string file, BufferedMLDataSet output)
        {
            var inputData = new BasicMLData(output.InputSize);
            var idealData = new BasicMLData(output.IdealSize);

            var csv = new ReadCSV(file, true, CSVFormat.English);
            while (csv.Next())
            {
                var a = new double[Config.InputWindow + 1];
                double close = csv.GetDouble(1);

                const int fastIndex = 2;
                const int slowIndex = fastIndex + Config.InputWindow;

                a[0] = close;
                for (int i = 0; i < 3; i++)
                {
                    double fast = csv.GetDouble(fastIndex + i);
                    double slow = csv.GetDouble(slowIndex + i);
                    double diff = _fieldDifference.Normalize((fast - slow)/Config.PipSize);
                    a[i + 1] = diff;
                }
                _window.Add(a);

                if (_window.IsFull())
                {
                    double max = (_window.CalculateMax(0, Config.InputWindow) - close)/Config.PipSize;
                    double min = (_window.CalculateMin(0, Config.InputWindow) - close)/Config.PipSize;

                    double o = Math.Abs(max) > Math.Abs(min) ? max : min;

                    a = _window.GetLast();
                    for (int i = 0; i < 3; i++)
                    {
                        inputData[i] = a[i + 1];
                    }

                    o = _fieldOutcome.Normalize(o);
                    idealData[0] = o;

                    output.Add(inputData, idealData);
                }
            }
        }
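ProcessFile only appends pairs; the BufferedMLDataSet passed in must already be open for loading, and its input and ideal sizes fix the shape of the pairs added above (three normalized differences in, one normalized outcome out). A minimal driver sketch, run from inside the same class and assuming the file names and the _window and normalization fields are already set up:

            var training = new BufferedMLDataSet("forex.egb");   // output file name assumed
            training.BeginLoad(3, 1);                            // 3 inputs, 1 ideal, matching the loop above
            ProcessFile("EURUSD_H1.csv", training);              // training file name assumed
            training.EndLoad();
            training.Close();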
        /// <summary>
        ///     Perform a basic analysis of the file. This method is used mostly
        ///     internally.
        /// </summary>
        public void PerformBasicCounts()
        {
            ResetStatus();
            int rc = 0;
            var csv = new ReadCSV(_inputFilename.ToString(),
                                  _expectInputHeaders, _format);
            while (csv.Next() && !_cancel)
            {
                UpdateStatus(true);
                rc++;
            }
            _recordCount = rc;
            _columnCount = csv.ColumnCount;

            ReadHeaders(csv);
            csv.Close();
            ReportDone(true);
        }
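When only the totals are needed and none of the status or header machinery, the same counts can be taken straight from ReadCSV; a minimal standalone sketch, assuming a file named data.csv with a header row:

            var csv = new ReadCSV("data.csv", true, CSVFormat.English);
            int recordCount = 0;
            while (csv.Next())
            {
                recordCount++;          // one record per data line
            }
            int columnCount = csv.ColumnCount;
            csv.Close();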
Example #25
0
        /// <summary>
        ///     Private constructor.
        /// </summary>
        private PropertyConstraints()
        {
            _data = new Dictionary<String, List<PropertyEntry>>();
            try
            {
                Stream mask0 = ResourceLoader.CreateStream("Encog.Resources.analyst.csv");
                var csv = new ReadCSV(mask0, false, CSVFormat.EgFormat);

                while (csv.Next())
                {
                    String sectionStr = csv.Get(0);
                    String nameStr = csv.Get(1);
                    String typeStr = csv.Get(2);

                    // determine type
                    PropertyType t;
                    if ("boolean".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeBoolean;
                    }
                    else if ("real".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeDouble;
                    }
                    else if ("format".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeFormat;
                    }
                    else if ("int".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeInteger;
                    }
                    else if ("list-string".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeListString;
                    }
                    else if ("string".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeString;
                    }
                    else
                    {
                        throw new AnalystError("Unknown type constraint: "
                                               + typeStr);
                    }

                    var entry = new PropertyEntry(t, nameStr,
                                                  sectionStr);
                    List<PropertyEntry> list;

                    if (_data.ContainsKey(sectionStr))
                    {
                        list = _data[sectionStr];
                    }
                    else
                    {
                        list = new List<PropertyEntry>();
                        _data[sectionStr] = list;
                    }

                    list.Add(entry);
                }

                csv.Close();
                mask0.Close();
            }
            catch (IOException e)
            {
                throw new EncogError(e);
            }
        }
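The if/else chain simply maps the type column of analyst.csv onto the PropertyType enum. The same dispatch can be written as a case-insensitive dictionary lookup, which stays flat as new types are added; the sketch below is an alternative, not the library's actual implementation:

            var typeNames = new Dictionary<string, PropertyType>(
                StringComparer.InvariantCultureIgnoreCase)
            {
                {"boolean", PropertyType.TypeBoolean},
                {"real", PropertyType.TypeDouble},
                {"format", PropertyType.TypeFormat},
                {"int", PropertyType.TypeInteger},
                {"list-string", PropertyType.TypeListString},
                {"string", PropertyType.TypeString}
            };

            PropertyType t;
            if (!typeNames.TryGetValue(typeStr, out t))
            {
                throw new AnalystError("Unknown type constraint: " + typeStr);
            }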
Example #26
0
        /// <summary>
        /// Read the input file.
        /// </summary>
        ///
        private void ReadInputFile()
        {
            ResetStatus();

            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, InputFormat);
            while (csv.Next() && !ShouldStop())
            {
                UpdateStatus("Reading input file");
                var row = new LoadedRow(csv);
                _data.Add(row);
            }

            Count = csv.ColumnCount;

            if (ExpectInputHeaders)
            {
                InputHeadings = new String[csv.ColumnCount];
                for (int i = 0; i < csv.ColumnCount; i++)
                {
                    InputHeadings[i] = csv.ColumnNames[i];
                }
            }

            csv.Close();
        }
        /// <summary>
        ///     Process the input file and segregate into the output files.
        /// </summary>
        public void Process()
        {
            Validate();

            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);
            ResetStatus();

            foreach (SegregateTargetPercent target  in  _targets)
            {
                StreamWriter tw = PrepareOutputFile(target.Filename);

                while ((target.NumberRemaining > 0) && csv.Next()
                       && !ShouldStop())
                {
                    UpdateStatus(false);
                    var row = new LoadedRow(csv);
                    WriteRow(tw, row);
                    target.NumberRemaining = target.NumberRemaining - 1;
                }

                tw.Close();
            }
            ReportDone(false);
            csv.Close();
        }
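Process assumes the targets have already been configured and the input analyzed, so each pass writes one target's share of the rows and closes that file before moving on. A usage sketch, assuming the enclosing class is Encog's SegregateCSV and that SegregateTargetPercent is constructed from an output file and a percentage (both assumptions worth checking against the library version in use):

            var seg = new SegregateCSV();
            seg.Targets.Add(new SegregateTargetPercent(new FileInfo("train.csv"), 75));
            seg.Targets.Add(new SegregateTargetPercent(new FileInfo("eval.csv"), 25));
            seg.Analyze(new FileInfo("all.csv"), true, CSVFormat.English);
            seg.Process();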
        /// <summary>
        ///     Read the CSV file.
        /// </summary>
        private void ReadFile()
        {
            ReadCSV csv = null;

            try
            {
                csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);

                ResetStatus();
                int row = 0;
                while (csv.Next() && !ShouldStop())
                {
                    UpdateStatus("Reading data");

                    foreach (BaseCachedColumn column  in  Columns)
                    {
                        if (column is FileData)
                        {
                            if (column.Input)
                            {
                                var fd = (FileData) column;
                                String str = csv.Get(fd.Index);
                                double d = Format.Parse(str);
                                fd.Data[row] = d;
                            }
                        }
                    }
                    row++;
                }
            }
            finally
            {
                ReportDone("Reading data");
                if (csv != null)
                {
                    csv.Close();
                }
            }
        }
Example #29
0
        /// <summary>
        /// Process the file.
        /// </summary>
        ///
        /// <param name="outputFile">The output file.</param>
        /// <param name="method">The method to use.</param>
        public void Process(FileInfo outputFile, IMLRegression method)
        {
            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);

            if (method.InputCount > _inputCount)
            {
                throw new AnalystError("This machine learning method has "
                                       + method.InputCount
                                       + " inputs, however, the data has " + _inputCount
                                       + " inputs.");
            }

            var input = new BasicMLData(method.InputCount);

            StreamWriter tw = AnalystPrepareOutputFile(outputFile);

            ResetStatus();
            while (csv.Next())
            {
                UpdateStatus(false);
                var row = new LoadedRow(csv, _idealCount);

                int dataIndex = 0;
                // load the input data
                for (int i = 0; i < _inputCount; i++)
                {
                    String str = row.Data[i];
                    double d = Format.Parse(str);
                    input[i] = d;
                    dataIndex++;
                }

                // skip over the ideal values present in the source row
                dataIndex += _idealCount;

                // compute the result
                IMLData output = method.Compute(input);

                // display the computed result
                for (int i = 0; i < _outputCount; i++)
                {
                    double d = output[i];
                    row.Data[dataIndex++] = Format.Format(d, Precision);
                }

                WriteRow(tw, row);
            }
            ReportDone(false);
            tw.Close();
            csv.Close();
        }
Example #30
0
        /// <summary>
        /// Convert a CSV file to binary.
        /// </summary>
        /// <param name="csvFile">The CSV file to convert.</param>
        /// <param name="format">The format.</param>
        /// <param name="binFile">The binary file.</param>
        /// <param name="input">The input.</param>
        /// <param name="ideal">The ideal.</param>
        /// <param name="headers">True, if headers are present.</param>
        public static void ConvertCSV2Binary(FileInfo csvFile, CSVFormat format,
                                             FileInfo binFile, int[] input, int[] ideal, bool headers)
        {
            binFile.Delete();
            var csv = new ReadCSV(csvFile.ToString(), headers, format);

            var buffer = new BufferedMLDataSet(binFile.ToString());
            buffer.BeginLoad(input.Length, ideal.Length);
            while (csv.Next())
            {
                var inputData = new BasicMLData(input.Length);
                var idealData = new BasicMLData(ideal.Length);

                // handle input data
                for (int i = 0; i < input.Length; i++)
                {
                    inputData[i] = csv.GetDouble(input[i]);
                }

                // handle ideal data
                for (int i = 0; i < ideal.Length; i++)
                {
                    idealData[i] = csv.GetDouble(ideal[i]);
                }

                // add to dataset
                buffer.Add(inputData, idealData);
            }
            buffer.EndLoad();
        }
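The input and ideal arrays are just lists of zero-based column indices in the source CSV, so a call only needs to describe the column layout. A hypothetical invocation (file names and layout assumed):

            ConvertCSV2Binary(new FileInfo("iris.csv"), CSVFormat.English,
                              new FileInfo("iris.egb"),
                              new[] {0, 1, 2, 3},   // four measurement columns become inputs
                              new[] {4},            // the class column becomes the ideal value
                              true);                // the CSV has a header row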