Close() Public Method

Close the file.
public Close ( ) : void
Returns: void
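A minimal usage sketch (not taken from the examples below, just the pattern they share): construct a ReadCSV, iterate rows with Next(), and call Close() in a finally block so the underlying file handle is released even if parsing throws. The file name "data.csv" and the column index are placeholders.

        ReadCSV csv = null;
        try
        {
            csv = new ReadCSV("data.csv", true, CSVFormat.English);
            while (csv.Next())
            {
                // read the first column of each row
                double value = csv.GetDouble(0);
                Console.WriteLine(value);
            }
        }
        finally
        {
            if (csv != null)
            {
                csv.Close(); // release the underlying file handle
            }
        }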
Example #1
        protected override void LoadTestData(string testFile)
        {
            ReadCSV test_csv = new ReadCSV(testFile, true, CSVFormat.DecimalPoint);

            List<double[]> test_input = new List<double[]>();
            test_input_orig = new List<double[]>();

            while (test_csv.Next())
            {
                double x = test_csv.GetDouble(0);

                test_input.Add(new[] { x });
                test_input_orig.Add(new[] { x });
            }

            test_csv.Close();

            //Analyze(ref test_input);
            Normalize(ref test_input, ref vmin, ref vmax);

            testData = new List<IMLData>();
            foreach (var d in test_input)
            {
                testData.Add(new BasicMLData(d));
            }
        }
Example #2
        /// <summary>
        /// Process the file and output to the target file.
        /// </summary>
        /// <param name="target">The target file to write to.</param>
        public void Process(string target)
        {
            var csv = new ReadCSV(InputFilename.ToString(), ExpectInputHeaders, Format);
            TextWriter tw = new StreamWriter(target);

            ResetStatus();
            while (csv.Next())
            {
                var line = new StringBuilder();
                UpdateStatus(false);
                line.Append(GetColumnData(FileData.Date, csv));
                line.Append(" ");
                line.Append(GetColumnData(FileData.Time, csv));
                line.Append(";");
                line.Append(Format.Format(double.Parse(GetColumnData(FileData.Open, csv)), Precision));
                line.Append(";");
                line.Append(Format.Format(double.Parse(GetColumnData(FileData.High, csv)), Precision));
                line.Append(";");
                line.Append(Format.Format(double.Parse(GetColumnData(FileData.Low, csv)), Precision));
                line.Append(";");
                line.Append(Format.Format(double.Parse(GetColumnData(FileData.Close, csv)), Precision));
                line.Append(";");
                line.Append(Format.Format(double.Parse(GetColumnData(FileData.Volume, csv)), Precision));

                tw.WriteLine(line.ToString());
            }
            ReportDone(false);
            csv.Close();
            tw.Close();
        }
Example #3
        public ICollection<LoadedMarketData> ReadAndCallLoader(TickerSymbol symbol, IList<MarketDataType> neededTypes, DateTime from, DateTime to, string File)
        {
            try
            {
                //We got a file, lets load it.
                ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
                ReadCSV csv = new ReadCSV(File, true, CSVFormat.English);
                csv.DateFormat = "yyyy.MM.dd HH:mm:ss";

                DateTime ParsedDate = from;


                //  Time,Bid,Ask,AskVolume,BidVolume
                while (csv.Next() && ParsedDate >= from && ParsedDate <= to  )
                {
                    DateTime date = csv.GetDate("Time");
                    double Bid= csv.GetDouble("Bid");
                    double Ask = csv.GetDouble("Ask");
                    double AskVolume = csv.GetDouble("AskVolume");
                    double BidVolume= csv.GetDouble("BidVolume");
                    double _trade = ( Bid + Ask ) /2;
                    double _tradeSize = (AskVolume + BidVolume) / 2;
                    LoadedMarketData data = new LoadedMarketData(date, symbol);
                    data.SetData(MarketDataType.Trade, _trade);
                    data.SetData(MarketDataType.Volume, _tradeSize);
                    result.Add(data);

                    Console.WriteLine("Current DateTime:"+ParsedDate.ToShortDateString()+ " Time:"+ParsedDate.ToShortTimeString() +"  Start date was "+from.ToShortDateString());
                    Console.WriteLine("Stopping at date:" + to.ToShortDateString() );
                    ParsedDate = date;
                    //double open = csv.GetDouble("Open");
                    //double close = csv.GetDouble("High");
                    //double high = csv.GetDouble("Low");
                    //double low = csv.GetDouble("Close");
                    //double volume = csv.GetDouble("Volume");
                    //LoadedMarketData data = new LoadedMarketData(date, symbol);
                    //data.SetData(MarketDataType.Open, open);
                    //data.SetData(MarketDataType.High, high);
                    //data.SetData(MarketDataType.Low, low);
                    //data.SetData(MarketDataType.Close, close);
                    //data.SetData(MarketDataType.Volume, volume);
                }

                csv.Close();
                return result;
            }

            catch (Exception ex)
            {

                Console.WriteLine("Something went wrong reading the csv");
                Console.WriteLine("Something went wrong reading the csv:" + ex.Message);
            }

            Console.WriteLine("Something went wrong reading the csv");
            return null;
        }
Example #4
File: csvloader.cs Project: jongh0/MTree
        public ICollection<LoadedMarketData> ReadAndCallLoader(TickerSymbol symbol, IList<MarketDataType> neededTypes, DateTime from, DateTime to, string File)
        {
            try
            {
                //We got a file, lets load it.
                ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
                ReadCSV csv = new ReadCSV(File, true, LoadedFormat);
                csv.DateFormat = DateTimeFormat.Normalize();

                //  Time,Open,High,Low,Close,Volume
                while (csv.Next())
                {
                    DateTime date = csv.GetDate("Time");
                    double open = csv.GetDouble("Open");
                    double high = csv.GetDouble("High");
                    double low = csv.GetDouble("Low");
                    double close = csv.GetDouble("Close");
                    double volume = csv.GetDouble("Volume");
                    LoadedMarketData data = new LoadedMarketData(date, symbol);
                    data.SetData(MarketDataType.Open, open);
                    data.SetData(MarketDataType.High, high);
                    data.SetData(MarketDataType.Low, low);
                    data.SetData(MarketDataType.Close, close);
                    data.SetData(MarketDataType.Volume, volume);
                    result.Add(data);
                }

                csv.Close();
                return result;
            }
            catch (Exception ex)
            {
                Console.WriteLine("Something went wrong reading the csv: " + ex.Message);
            }

            return null;
        }
Example #5
File: ShuffleCSV.cs Project: neismit/emds
 public void Process(FileInfo outputFile)
 {
     base.ValidateAnalyzed();
     ReadCSV dcsv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
     LoadedRow row;
     StreamWriter tw = base.PrepareOutputFile(outputFile);
     base.ResetStatus();
     while ((row = this.x75f8dae9674cb841(dcsv)) != null)
     {
         base.WriteRow(tw, row);
         base.UpdateStatus(false);
     }
     base.ReportDone(false);
     tw.Close();
     dcsv.Close();
 }
Example #6
        /// <summary>
        /// Load financial data from a CSV file.
        /// </summary>
        /// <param name="ticker">The ticker being loaded, ignored for a CSV load.</param>
        /// <param name="dataNeeded">The data needed.</param>
        /// <param name="from">The starting date.</param>
        /// <param name="to">The ending date.</param>
        /// <returns></returns>
        public ICollection<LoadedMarketData> Load(TickerSymbol ticker, IList<MarketDataType> dataNeeded, DateTime from,
                                                  DateTime to)
        {
            try
            {
                if (File.Exists(TheFile))
                {
                    //We got a file, lets load it.
                    ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
                    var csv = new ReadCSV(TheFile, true, CSVFormat.English);

                    //  Time,Open,High,Low,Close,Volume
                    while (csv.Next())
                    {
                        DateTime date = csv.GetDate("Time");
                        double open = csv.GetDouble("Open");
                        double close = csv.GetDouble("High");
                        double high = csv.GetDouble("Low");
                        double low = csv.GetDouble("Close");
                        double volume = csv.GetDouble("Volume");
                        var data = new LoadedMarketData(date, ticker);
                        data.SetData(MarketDataType.Open, open);
                        data.SetData(MarketDataType.Close, close);
                        data.SetData(MarketDataType.High, high);
                        data.SetData(MarketDataType.Low, low);
                        data.SetData(MarketDataType.Volume, volume);
                        result.Add(data);
                    }

                    csv.Close();
                    return result;
                }
            }
            catch (Exception ex)
            {
                throw new LoaderError(ex);
            }

            throw new LoaderError(@"Something went wrong reading the csv");
        }
Example #7
        /// <summary>
        ///     Construct the object.
        /// </summary>
        /// <param name="filename">The filename.</param>
        /// <param name="headers">False if headers are not extended.</param>
        /// <param name="format">The CSV format.</param>
        public CSVHeaders(FileInfo filename, bool headers,
                          CSVFormat format)
        {
            _headerList = new List<String>();
            _columnMapping = new Dictionary<String, Int32>();
            ReadCSV csv = null;
            try
            {
                csv = new ReadCSV(filename.ToString(), headers, format);
                if (csv.Next())
                {
                    if (headers)
                    {
                        foreach (String str  in  csv.ColumnNames)
                        {
                            _headerList.Add(str);
                        }
                    }
                    else
                    {
                        for (int i = 0; i < csv.ColumnCount; i++)
                        {
                            _headerList.Add("field:" + (i + 1));
                        }
                    }
                }

                Init();
            }
            finally
            {
                if (csv != null)
                {
                    csv.Close();
                }
            }
        }
Example #8
        /// <summary>
        ///     Analyze the data. This counts the records and prepares the data to be
        ///     processed.
        /// </summary>
        /// <param name="theAnalyst">The analyst to use.</param>
        /// <param name="inputFile">The input file to analyze.</param>
        /// <param name="headers">True, if the input file has headers.</param>
        /// <param name="format">The format of the input file.</param>
        public void Analyze(EncogAnalyst theAnalyst,
                            FileInfo inputFile, bool headers, CSVFormat format)
        {
            InputFilename = inputFile;
            ExpectInputHeaders = headers;
            Format = format;

            Analyzed = true;
            _analyst = theAnalyst;

            _data = new BasicMLDataSet();
            ResetStatus();
            int recordCount = 0;

            int outputLength = _analyst.DetermineTotalColumns();
            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);
            ReadHeaders(csv);

            _analystHeaders = new CSVHeaders(InputHeadings);

            while (csv.Next() && !ShouldStop())
            {
                UpdateStatus(true);

                double[] inputArray = AnalystNormalizeCSV.ExtractFields(
                    _analyst, _analystHeaders, csv, outputLength, true);

                IMLData input = new BasicMLData(inputArray);
                _data.Add(new BasicMLDataPair(input));

                recordCount++;
            }
            RecordCount = recordCount;
            Count = csv.ColumnCount;

            ReadHeaders(csv);
            csv.Close();
            ReportDone(true);
        }
Example #9
 private void x08af8e36ac9914b5()
 {
     ReadCSV dcsv = null;
     try
     {
         dcsv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
         base.ResetStatus();
         int row = 0;
         while (dcsv.Next() && !base.ShouldStop())
         {
             base.UpdateStatus("Reading data");
             foreach (BaseCachedColumn column in base.Columns)
             {
                 if (column is FileData && column.Input)
                 {
                     FileData data = (FileData) column;
                     string str = dcsv.Get(data.Index);
                     double d = base.InputFormat.Parse(str);
                     data.Data[row] = d;
                 }
             }
             row++;
         }
     }
     finally
     {
         base.ReportDone("Reading data");
         if (dcsv != null)
         {
             dcsv.Close();
         }
     }
 }
Example #10
        /// <summary>
        ///     Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            ErrorCalculation.Mode = ErrorCalculationMode.RMS;
            // Download the data that we will attempt to model.
            string filename = DownloadData(app.Args);

            // Define the format of the data file.
            // This area will change, depending on the columns and
            // format of the file that you are trying to model.
            var format = new CSVFormat('.', ' '); // decimal point and
            // space separated
            IVersatileDataSource source = new CSVDataSource(filename, true,
                format);

            var data = new VersatileMLDataSet(source);
            data.NormHelper.Format = format;

            ColumnDefinition columnSSN = data.DefineSourceColumn("SSN",
                ColumnType.Continuous);
            ColumnDefinition columnDEV = data.DefineSourceColumn("DEV",
                ColumnType.Continuous);

            // Analyze the data, determine the min/max/mean/sd of every column.
            data.Analyze();

            // Use SSN & DEV to predict SSN. For time-series it is okay to have
            // SSN both as
            // an input and an output.
            data.DefineInput(columnSSN);
            data.DefineInput(columnDEV);
            data.DefineOutput(columnSSN);

            // Create feedforward neural network as the model type.
            // MLMethodFactory.TYPE_FEEDFORWARD.
            // You could also use other model types, such as:
            // MLMethodFactory.SVM: Support Vector Machine (SVM)
            // MLMethodFactory.TYPE_RBFNETWORK: RBF Neural Network
            // MLMethodFactor.TYPE_NEAT: NEAT Neural Network
            // MLMethodFactor.TYPE_PNN: Probabilistic Neural Network
            var model = new EncogModel(data);
            model.SelectMethod(data, MLMethodFactory.TypeFeedforward);

            // Send any output to the console.
            model.Report = new ConsoleStatusReportable();

            // Now normalize the data. Encog will automatically determine the
            // correct normalization
            // type based on the model you chose in the last step.
            data.Normalize();

            // Set time series.
            data.LeadWindowSize = 1;
            data.LagWindowSize = WindowSize;

            // Hold back some data for a final validation.
            // Do not shuffle the data into a random ordering. (never shuffle
            // time series)
            // Use a seed of 1001 so that we always use the same holdback and
            // will get more consistent results.
            model.HoldBackValidation(0.3, false, 1001);

            // Choose whatever is the default training type for this model.
            model.SelectTrainingType(data);

            // Use a 5-fold cross-validated train. Return the best method found.
            // (never shuffle time series)
            var bestMethod = (IMLRegression) model.Crossvalidate(5,
                false);

            // Display the training and validation errors.
            Console.WriteLine(@"Training error: "
                              + model.CalculateError(bestMethod,
                                  model.TrainingDataset));
            Console.WriteLine(@"Validation error: "
                              + model.CalculateError(bestMethod,
                                  model.ValidationDataset));

            // Display our normalization parameters.
            NormalizationHelper helper = data.NormHelper;
            Console.WriteLine(helper.ToString());

            // Display the final model.
            Console.WriteLine(@"Final model: " + bestMethod);

            // Loop over the entire, original, dataset and feed it through the
            // model. This also shows how you would process new data, that was
            // not part of your training set. You do not need to retrain, simply
            // use the NormalizationHelper class. After you train, you can save
            // the NormalizationHelper to later normalize and denormalize your
            // data.
            source.Close();
            var csv = new ReadCSV(filename, true, format);
            var line = new String[2];

            // Create a vector to hold each time-slice, as we build them.
            // These will be grouped together into windows.
            var slice = new double[2];
            var window = new VectorWindow(WindowSize + 1);
            IMLData input = helper.AllocateInputVector(WindowSize + 1);

            // Only display the first 100
            int stopAfter = 100;

            while (csv.Next() && stopAfter > 0)
            {
                var result = new StringBuilder();

                line[0] = csv.Get(2); // ssn
                line[1] = csv.Get(3); // dev
                helper.NormalizeInputVector(line, slice, false);

                // enough data to build a full window?
                if (window.IsReady())
                {
                    window.CopyWindow(((BasicMLData) input).Data, 0);
                    String correct = csv.Get(2); // trying to predict SSN.
                    IMLData output = bestMethod.Compute(input);
                    String predicted = helper
                        .DenormalizeOutputVectorToString(output)[0];

                    result.Append(string.Join(",", line));
                    result.Append(" -> predicted: ");
                    result.Append(predicted);
                    result.Append("(correct: ");
                    result.Append(correct);
                    result.Append(")");

                    Console.WriteLine(result.ToString());
                }

                // Add the normalized slice to the window. We do this just after
                // checking whether the window is ready, so that the window is
                // always one row behind the current row. This is because we are
                // trying to predict the next row.
                window.Add(slice);

                stopAfter--;
            }
            csv.Close();

            // Delete data file and shut down.
            File.Delete(filename);
            EncogFramework.Instance.Shutdown();
        }
Example #11
        /// <summary>
        ///     Read the CSV file.
        /// </summary>
        private void ReadFile()
        {
            ReadCSV csv = null;

            try
            {
                csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);

                ResetStatus();
                int row = 0;
                while (csv.Next() && !ShouldStop())
                {
                    UpdateStatus("Reading data");

                    foreach (BaseCachedColumn column  in  Columns)
                    {
                        if (column is FileData)
                        {
                            if (column.Input)
                            {
                                var fd = (FileData) column;
                                String str = csv.Get(fd.Index);
                                double d = Format.Parse(str);
                                fd.Data[row] = d;
                            }
                        }
                    }
                    row++;
                }
            }
            finally
            {
                ReportDone("Reading data");
                if (csv != null)
                {
                    csv.Close();
                }
            }
        }
Example #12
 public void Normalize(FileInfo file)
 {
     if (this._x554f16462d8d4675 == null)
     {
         throw new EncogError("Can't normalize yet, file has not been analyzed.");
     }
     ReadCSV csv = null;
     StreamWriter writer = null;
     try
     {
         csv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
         file.Delete();
         writer = new StreamWriter(file.OpenWrite());
         if (base.ProduceOutputHeaders)
         {
             this.x6c260f7f6142106c(writer);
         }
         base.ResetStatus();
         int outputLength = this._x554f16462d8d4675.DetermineTotalColumns();
         while (csv.Next() && !base.ShouldStop())
         {
             base.UpdateStatus(false);
             double[] output = ExtractFields(this._x554f16462d8d4675, this._xc5416b6511261016, csv, outputLength, false);
             if (this._x7acb8518c8ed6133.TotalDepth > 1)
             {
                 output = this._x7acb8518c8ed6133.Process(output);
             }
             if (output != null)
             {
                 StringBuilder line = new StringBuilder();
                 NumberList.ToList(base.OutputFormat, line, output);
                 writer.WriteLine(line);
             }
         }
     }
     catch (IOException exception)
     {
         throw new QuantError(exception);
     }
     finally
     {
         base.ReportDone(false);
         if (csv != null)
         {
             try
             {
                 csv.Close();
             }
             catch (Exception exception2)
             {
                 EncogLogging.Log(exception2);
             }
         }
         if (writer != null)
         {
             try
             {
                 writer.Close();
             }
             catch (Exception exception3)
             {
                 EncogLogging.Log(exception3);
             }
         }
     }
 }
Example #13
        /// <summary>
        /// Process the input file.
        /// </summary>
        ///
        /// <param name="outputFile">The output file to write to.</param>
        public void Process(FileInfo outputFile)
        {
            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, InputFormat);

            StreamWriter tw = PrepareOutputFile(outputFile);
            _filteredCount = 0;

            ResetStatus();
            while (csv.Next() && !ShouldStop())
            {
                UpdateStatus(false);
                var row = new LoadedRow(csv);
                if (ShouldProcess(row))
                {
                    WriteRow(tw, row);
                    _filteredCount++;
                }
            }
            ReportDone(false);
            tw.Close();
            csv.Close();
        }
Example #14
        /// <summary>
        ///     Perform the analysis.
        /// </summary>
        /// <param name="target">The Encog analyst object to analyze.</param>
        public void Process(EncogAnalyst target)
        {
            int count = 0;
            CSVFormat csvFormat = ConvertStringConst
                .ConvertToCSVFormat(_format);
            var csv = new ReadCSV(_filename, _headers, csvFormat);

            // pass one, calculate the min/max
            while (csv.Next())
            {
                if (_fields == null)
                {
                    GenerateFields(csv);
                }

                for (int i = 0; i < csv.ColumnCount; i++)
                {
                    if (_fields != null)
                    {
                        _fields[i].Analyze1(csv.Get(i));
                    }
                }
                count++;
            }

            if (count == 0)
            {
                throw new AnalystError("Can't analyze file, it is empty.");
            }

            if (_fields != null)
            {
                foreach (AnalyzedField field in _fields)
                {
                    field.CompletePass1();
                }
            }

            csv.Close();

            // pass two, standard deviation
            csv = new ReadCSV(_filename, _headers, csvFormat);
           
            while (csv.Next())
            {
                for (int i = 0; i < csv.ColumnCount; i++)
                {
                    if (_fields != null)
                    {
                        _fields[i].Analyze2(csv.Get(i));
                    }
                }
            }


            if (_fields != null)
            {
                foreach (AnalyzedField field in _fields)
                {
                    field.CompletePass2();
                }
            }

            csv.Close();

            String str = _script.Properties.GetPropertyString(
                ScriptProperties.SetupConfigAllowedClasses) ?? "";

            bool allowInt = str.Contains("int");
            bool allowReal = str.Contains("real")
                             || str.Contains("double");
            bool allowString = str.Contains("string");


            // remove any classes that did not qualify
            foreach (AnalyzedField field  in  _fields)
            {
                if (field.Class)
                {
                    if (!allowInt && field.Integer)
                    {
                        field.Class = false;
                    }

                    if (!allowString && (!field.Integer && !field.Real))
                    {
                        field.Class = false;
                    }

                    if (!allowReal && field.Real && !field.Integer)
                    {
                        field.Class = false;
                    }
                }
            }

            // merge with existing
            if ((target.Script.Fields != null)
                && (_fields.Length == target.Script.Fields.Length))
            {
                for (int i = 0; i < _fields.Length; i++)
                {
                    // copy the old field name
                    _fields[i].Name = target.Script.Fields[i].Name;

                    if (_fields[i].Class)
                    {
                        IList<AnalystClassItem> t = _fields[i].AnalyzedClassMembers;
                        IList<AnalystClassItem> s = target.Script.Fields[i].ClassMembers;

                        if (s.Count == t.Count)
                        {
                            for (int j = 0; j < s.Count; j++)
                            {
                                if (t[j].Code.Equals(s[j].Code))
                                {
                                    t[j].Name = s[j].Name;
                                }
                            }
                        }
                    }
                }
            }

            // now copy the fields
            var df = new DataField[_fields.Length];

            for (int i_4 = 0; i_4 < df.Length; i_4++)
            {
                df[i_4] = _fields[i_4].FinalizeField();
            }

            target.Script.Fields = df;
        }
Example #15
        /// <summary>
        ///     Process and balance the data.
        /// </summary>
        /// <param name="outputFile">The output file to write data to.</param>
        /// <param name="targetField"></param>
        /// <param name="countPer">The desired count per class.</param>
        public void Process(FileInfo outputFile, int targetField,
                            int countPer)
        {
            ValidateAnalyzed();
            StreamWriter tw = PrepareOutputFile(outputFile);

            _counts = new Dictionary<String, Int32>();

            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);

            ResetStatus();
            while (csv.Next() && !ShouldStop())
            {
                var row = new LoadedRow(csv);
                UpdateStatus(false);
                String key = row.Data[targetField];
                int count;
                if (!_counts.ContainsKey(key))
                {
                    count = 0;
                }
                else
                {
                    count = _counts[key];
                }

                if (count < countPer)
                {
                    WriteRow(tw, row);
                    count++;
                }

                _counts[key] = count;
            }
            ReportDone(false);
            csv.Close();
            tw.Close();
        }
Example #16
        /// <summary>
        ///     Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            // Download the data that we will attempt to model.
            string filename = DownloadData(app.Args);

            // Define the format of the data file.
            // This area will change, depending on the columns and 
            // format of the file that you are trying to model.
            var format = new CSVFormat('.', ' '); // decimal point and space separated
            IVersatileDataSource source = new CSVDataSource(filename, false, format);

            var data = new VersatileMLDataSet(source);
            data.NormHelper.Format = format;

            ColumnDefinition columnMPG = data.DefineSourceColumn("mpg", 0, ColumnType.Continuous);
            ColumnDefinition columnCylinders = data.DefineSourceColumn("cylinders", 1, ColumnType.Ordinal);
            // It is very important to predefine ordinals, so that the order is known.
            columnCylinders.DefineClass(new[] {"3", "4", "5", "6", "8"});
            data.DefineSourceColumn("displacement", 2, ColumnType.Continuous);
            ColumnDefinition columnHorsePower = data.DefineSourceColumn("horsepower", 3, ColumnType.Continuous);
            data.DefineSourceColumn("weight", 4, ColumnType.Continuous);
            data.DefineSourceColumn("acceleration", 5, ColumnType.Continuous);
            ColumnDefinition columnModelYear = data.DefineSourceColumn("model_year", 6, ColumnType.Ordinal);
            columnModelYear.DefineClass(new[]
            {"70", "71", "72", "73", "74", "75", "76", "77", "78", "79", "80", "81", "82"});
            data.DefineSourceColumn("origin", 7, ColumnType.Nominal);

            // Define how missing values are represented.
            data.NormHelper.DefineUnknownValue("?");
            data.NormHelper.DefineMissingHandler(columnHorsePower, new MeanMissingHandler());

            // Analyze the data, determine the min/max/mean/sd of every column.
            data.Analyze();

            // Map the prediction column to the output of the model, and all
            // other columns to the input.
            data.DefineSingleOutputOthersInput(columnMPG);

            // Create feedforward neural network as the model type. MLMethodFactory.TYPE_FEEDFORWARD.
            // You could also use other model types, such as:
            // MLMethodFactory.SVM:  Support Vector Machine (SVM)
            // MLMethodFactory.TYPE_RBFNETWORK: RBF Neural Network
            // MLMethodFactor.TYPE_NEAT: NEAT Neural Network
            // MLMethodFactor.TYPE_PNN: Probabilistic Neural Network
            var model = new EncogModel(data);
            model.SelectMethod(data, MLMethodFactory.TypeFeedforward);

            // Send any output to the console.
            model.Report = new ConsoleStatusReportable();

            // Now normalize the data.  Encog will automatically determine the correct normalization
            // type based on the model you chose in the last step.
            data.Normalize();

            // Hold back some data for a final validation.
            // Shuffle the data into a random ordering.
            // Use a seed of 1001 so that we always use the same holdback and will get more consistent results.
            model.HoldBackValidation(0.3, true, 1001);

            // Choose whatever is the default training type for this model.
            model.SelectTrainingType(data);

            // Use a 5-fold cross-validated train.  Return the best method found.
            var bestMethod = (IMLRegression) model.Crossvalidate(5, true);

            // Display the training and validation errors.
            Console.WriteLine(@"Training error: " + model.CalculateError(bestMethod, model.TrainingDataset));
            Console.WriteLine(@"Validation error: " + model.CalculateError(bestMethod, model.ValidationDataset));

            // Display our normalization parameters.
            NormalizationHelper helper = data.NormHelper;
            Console.WriteLine(helper.ToString());

            // Display the final model.
            Console.WriteLine("Final model: " + bestMethod);

            // Loop over the entire, original, dataset and feed it through the model.
            // This also shows how you would process new data, that was not part of your
            // training set.  You do not need to retrain, simply use the NormalizationHelper
            // class.  After you train, you can save the NormalizationHelper to later
            // normalize and denormalize your data.
            source.Close();
            var csv = new ReadCSV(filename, false, format);
            var line = new String[7];
            IMLData input = helper.AllocateInputVector();

            while (csv.Next())
            {
                var result = new StringBuilder();

                line[0] = csv.Get(1);
                line[1] = csv.Get(2);
                line[2] = csv.Get(3);
                line[3] = csv.Get(4);
                line[4] = csv.Get(5);
                line[5] = csv.Get(6);
                line[6] = csv.Get(7);

                String correct = csv.Get(0);
                helper.NormalizeInputVector(line, ((BasicMLData) input).Data, false);
                IMLData output = bestMethod.Compute(input);
                String predictedMPG = helper.DenormalizeOutputVectorToString(output)[0];

                result.Append(string.Join(",", line));
                result.Append(" -> predicted: ");
                result.Append(predictedMPG);
                result.Append("(correct: ");
                result.Append(correct);
                result.Append(")");

                Console.WriteLine(result.ToString());
            }
            csv.Close();

            // Delete data file and shut down.
            File.Delete(filename);
            EncogFramework.Instance.Shutdown();
        }
Example #17
File: CSVFinal.cs Project: neismit/emds
        /// <summary>
        /// Reads the CSV and call loader.
        /// Used internally to load the csv and place data in the marketdataset.
        /// </summary>
        /// <param name="symbol">The symbol.</param>
        /// <param name="neededTypes">The needed types.</param>
        /// <param name="from">From.</param>
        /// <param name="to">To.</param>
        /// <param name="File">The file.</param>
        /// <returns></returns>
        ICollection<LoadedMarketData> ReadAndCallLoader(
            TickerSymbol symbol, 
            IEnumerable<MarketDataType> neededTypes, 
            DateTime from, 
            DateTime to, 
            string File)
        {
            //We got a file, lets load it.

                ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
                ReadCSV csv = new ReadCSV(File, true, CSVFormat.English);
                //In case we want to use a different date format and have called SetDateFormat, DateFormat will not be null;
                //we use the ?? operator to fall back to a default format when it is.
                csv.DateFormat = DateFormat ?? "yyyy-MM-dd HH:mm:ss";
                csv.TimeFormat = "HH:mm:ss";

                DateTime ParsedDate = from;
                bool writeonce = true;

                while (csv.Next())
                {
                    DateTime date = csv.GetDate(0);
                    ParsedDate = date;

                    if (writeonce)
                    {
                        Console.WriteLine(@"First parsed date in csv:" + ParsedDate.ToShortDateString());
                        Console.WriteLine(@"Stopping at date:" + to.ToShortDateString());
                        Console.WriteLine(@"Current DateTime:" + ParsedDate.ToShortDateString() + @" Time:" +
                                          ParsedDate.ToShortTimeString() + @"  Asked Start date was " +
                                          from.ToShortDateString());
                        writeonce = false;
                    }
                    if (ParsedDate >= from && ParsedDate <= to)
                    {
                        DateTime datex = csv.GetDate(0);
                        double open = csv.GetDouble(1);
                        double close = csv.GetDouble(2);
                        double high = csv.GetDouble(3);
                        double low = csv.GetDouble(4);
                        double volume = csv.GetDouble(5);
                        double range = Math.Abs(open - close);
                        double HighLowRange = Math.Abs(high - low);
                        double DirectionalRange = close - open;
                        LoadedMarketData data = new LoadedMarketData(datex, symbol);
                        data.SetData(MarketDataType.Open, open);
                        data.SetData(MarketDataType.High, high);
                        data.SetData(MarketDataType.Low, low);
                        data.SetData(MarketDataType.Close, close);
                        data.SetData(MarketDataType.Volume, volume);
                        data.SetData(MarketDataType.RangeHighLow, Math.Round(HighLowRange, 6));
                        data.SetData(MarketDataType.RangeOpenClose, Math.Round(range, 6));
                        data.SetData(MarketDataType.RangeOpenCloseNonAbsolute, Math.Round(DirectionalRange, 6));
                        result.Add(data);

                    }

                }

                csv.Close();
                return result;
        }
Example #18
        /// <summary>
        /// Normalize the input file. Write to the specified file.
        /// </summary>
        ///
        /// <param name="file">The file to write to.</param>
        public void Normalize(FileInfo file)
        {
            if (_analyst == null)
            {
                throw new EncogError(
                    "Can't normalize yet, file has not been analyzed.");
            }

            ReadCSV csv = null;
            StreamWriter tw = null;

            try
            {
                csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, InputFormat);

                file.Delete();
                tw = new StreamWriter(file.OpenWrite());

                // write headers, if needed
                if (ProduceOutputHeaders)
                {
                    WriteHeaders(tw);
                }

                ResetStatus();
                int outputLength = _analyst.DetermineUniqueColumns();

                // write file contents
                while (csv.Next() && !ShouldStop())
                {
                    UpdateStatus(false);

                    double[] output = ExtractFields(
                        _analyst, _analystHeaders, csv, outputLength,
                        false);

                    if (_series.TotalDepth > 1)
                    {
                        output = _series.Process(output);
                    }

                    if (output != null)
                    {
                        var line = new StringBuilder();
                        NumberList.ToList(OutputFormat, line, output);
                        tw.WriteLine(line);
                    }
                }
            }
            catch (IOException e)
            {
                throw new QuantError(e);
            }
            finally
            {
                ReportDone(false);
                if (csv != null)
                {
                    try
                    {
                        csv.Close();
                    }
                    catch (Exception ex)
                    {
                        EncogLogging.Log(ex);
                    }
                }

                if (tw != null)
                {
                    try
                    {
                        tw.Close();
                    }
                    catch (Exception ex)
                    {
                        EncogLogging.Log(ex);
                    }
                }
            }
        }
Example #19
        /// <summary>
        /// Load financial data from Google.
        /// </summary>
        /// <param name="ticker">The ticker to load from.</param>
        /// <param name="dataNeeded">The data needed.</param>
        /// <param name="from">The starting time.</param>
        /// <param name="to">The ending time.</param>
        /// <returns>The loaded data.</returns>
        public ICollection<LoadedMarketData> Load(TickerSymbol ticker, IList<MarketDataType> dataNeeded, DateTime from,
                                                  DateTime to)
        {
            ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
            Uri url = BuildUrl(ticker, from, to);
            WebRequest http = WebRequest.Create(url);
            var response = (HttpWebResponse) http.GetResponse();

            if (response != null)
                using (Stream istream = response.GetResponseStream())
                {
                    var csv = new ReadCSV(istream, true, CSVFormat.DecimalPoint);

                    while (csv.Next())
                    {
                        DateTime date = csv.GetDate("date");

                        double open = csv.GetDouble("open");
                        double close = csv.GetDouble("close");
                        double high = csv.GetDouble("high");
                        double low = csv.GetDouble("low");
                        double volume = csv.GetDouble("volume");

                        var data =
                            new LoadedMarketData(date, ticker);

                        data.SetData(MarketDataType.Open, open);
                        data.SetData(MarketDataType.Close, close);
                        data.SetData(MarketDataType.High, high);
                        data.SetData(MarketDataType.Low, low);
                        data.SetData(MarketDataType.Volume, volume);
                        result.Add(data);
                    }

                    csv.Close();
                    if (istream != null) istream.Close();
                }
            return result;
        }
Example #20
        /// <summary>
        /// Convert a CSV file to binary.
        /// </summary>
        /// <param name="csvFile">The CSV file to convert.</param>
        /// <param name="format">The format.</param>
        /// <param name="binFile">The binary file.</param>
        /// <param name="input">The input.</param>
        /// <param name="ideal">The ideal.</param>
        /// <param name="headers">True, if headers are present.</param>
        public static void ConvertCSV2Binary(FileInfo csvFile, CSVFormat format,
            FileInfo binFile, int[] input, int[] ideal, bool headers)
        {
            binFile.Delete();
            var csv = new ReadCSV(csvFile.ToString(), headers, format);

            var buffer = new BufferedMLDataSet(binFile.ToString());
            buffer.BeginLoad(input.Length, ideal.Length);
            while (csv.Next())
            {
                var inputData = new BasicMLData(input.Length);
                var idealData = new BasicMLData(ideal.Length);

                // handle input data
                for (int i = 0; i < input.Length; i++)
                {
                    inputData[i] = csv.GetDouble(input[i]);
                }

                // handle ideal data
                for (int i = 0; i < ideal.Length; i++)
                {
                    idealData[i] = csv.GetDouble(ideal[i]);
                }

                // add to dataset

                buffer.Add(inputData, idealData);
            }
            buffer.EndLoad();
            buffer.Close();
            csv.Close();
        }
Example #21
File: ShuffleCSV.cs Project: neismit/emds
        /// <summary>
        /// Process, and generate the output file.
        /// </summary>
        ///
        /// <param name="outputFile">The output file.</param>
        public void Process(FileInfo outputFile)
        {
            ValidateAnalyzed();

            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);
            LoadedRow row;

            StreamWriter tw = PrepareOutputFile(outputFile);

            ResetStatus();
            while ((row = GetNextRow(csv)) != null)
            {
                WriteRow(tw, row);
                UpdateStatus(false);
            }
            ReportDone(false);
            tw.Close();
            csv.Close();
        }
Example #22
        /// <summary>
        /// Process the file.
        /// </summary>
        ///
        /// <param name="outputFile">The output file.</param>
        /// <param name="method">THe method to use.</param>
        public void Process(FileInfo outputFile, IMLMethod method)
        {
            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);

            IMLData output;

            int outputLength = _analyst.DetermineTotalInputFieldCount();

            StreamWriter tw = PrepareOutputFile(outputFile);

            ResetStatus();
            while (csv.Next())
            {
                UpdateStatus(false);
                var row = new LoadedRow(csv, _outputColumns);

                double[] inputArray = AnalystNormalizeCSV.ExtractFields(_analyst,
                                                                        _analystHeaders, csv, outputLength, true);
                if (_series.TotalDepth > 1)
                {
                    inputArray = _series.Process(inputArray);
                }

                if (inputArray != null)
                {
                    IMLData input = new BasicMLData(inputArray);

                    // evaluation data
                    if ((method is IMLClassification)
                        && !(method is IMLRegression))
                    {
                        // classification only?
                        output = new BasicMLData(1);
                        output[0] =
                            ((IMLClassification) method).Classify(input);
                    }
                    else
                    {
                        // regression
                        output = ((IMLRegression) method).Compute(input);
                    }

                    // skip file data
                    int index = _fileColumns;
                    int outputIndex = 0;


                    // display output
                    foreach (AnalystField field  in  _analyst.Script.Normalize.NormalizedFields)
                    {
                        if (_analystHeaders.Find(field.Name) != -1)
                        {
                            if (field.Output)
                            {
                                if (field.Classify)
                                {
                                    // classification
                                    ClassItem cls = field.DetermineClass(
                                        outputIndex, output.Data);
                                    outputIndex += field.ColumnsNeeded;
                                    if (cls == null)
                                    {
                                        row.Data[index++] = "?Unknown?";
                                    }
                                    else
                                    {
                                        row.Data[index++] = cls.Name;
                                    }
                                }
                                else
                                {
                                    // regression
                                    double n = output[outputIndex++];
                                    n = field.DeNormalize(n);
                                    row.Data[index++] = Format
                                        .Format(n, Precision);
                                }
                            }
                        }
                    }
                }

                WriteRow(tw, row);
            }
            ReportDone(false);
            tw.Close();
            csv.Close();
        }
Example #23
 public void Process(EncogAnalyst target)
 {
     CSVFormat format = ConvertStringConst.ConvertToCSVFormat(this._x5786461d089b10a0);

     // Pass one: build the field list from the first row/headers and gather
     // min/max and class-membership information for every column.
     ReadCSV dcsv = new ReadCSV(this._xb41a802ca5fde63b, this._x94e6ca5ac178dbd0, format);
     while (dcsv.Next())
     {
         if (this._xa942970cc8a85fd4 == null)
         {
             // builds the AnalyzedField array for the columns of this file
             this.xd2a854890d89a856(dcsv);
         }

         for (int i = 0; i < dcsv.ColumnCount; i++)
         {
             if (this._xa942970cc8a85fd4 != null)
             {
                 this._xa942970cc8a85fd4[i].Analyze1(dcsv.Get(i));
             }
         }
     }

     if (this._xa942970cc8a85fd4 != null)
     {
         foreach (AnalyzedField field in this._xa942970cc8a85fd4)
         {
             field.CompletePass1();
         }
     }
     dcsv.Close();

     // Pass two: re-read the file to collect the statistics that depend on
     // the pass-one results (standard deviation, class counts, and so on).
     dcsv = new ReadCSV(this._xb41a802ca5fde63b, this._x94e6ca5ac178dbd0, format);
     while (dcsv.Next())
     {
         for (int i = 0; i < dcsv.ColumnCount; i++)
         {
             if (this._xa942970cc8a85fd4 != null)
             {
                 this._xa942970cc8a85fd4[i].Analyze2(dcsv.Get(i));
             }
         }
     }

     if (this._xa942970cc8a85fd4 != null)
     {
         foreach (AnalyzedField field in this._xa942970cc8a85fd4)
         {
             field.CompletePass2();
         }
     }
     dcsv.Close();

     // Determine which field types are allowed to become classes.
     string allowed = this._x594135906c55045c.Properties.GetPropertyString("SETUP:CONFIG_allowedClasses") ?? "";
     bool allowInt = allowed.Contains("int");
     bool allowReal = allowed.Contains("real") || allowed.Contains("double");
     bool allowString = allowed.Contains("string");

     // Remove the class flag from fields that do not qualify.
     foreach (AnalyzedField field in this._xa942970cc8a85fd4)
     {
         if (!field.Class)
         {
             continue;
         }
         if (!allowInt && field.Integer)
         {
             field.Class = false;
         }
         if (!allowString && !field.Integer && !field.Real)
         {
             field.Class = false;
         }
         if (!allowReal && field.Real && !field.Integer)
         {
             field.Class = false;
         }
         // an integer field with two or fewer distinct values is not treated as a class
         if (field.Integer && field.AnalyzedClassMembers.Count <= 2)
         {
             field.Class = false;
         }
     }

     // If the script already defines fields of the same shape, keep the old
     // field names and the old class-member names where the codes match.
     if (target.Script.Fields != null
         && this._xa942970cc8a85fd4.Length == target.Script.Fields.Length)
     {
         for (int i = 0; i < this._xa942970cc8a85fd4.Length; i++)
         {
             this._xa942970cc8a85fd4[i].Name = target.Script.Fields[i].Name;
             if (!this._xa942970cc8a85fd4[i].Class)
             {
                 continue;
             }

             IList<AnalystClassItem> analyzedClassMembers = this._xa942970cc8a85fd4[i].AnalyzedClassMembers;
             IList<AnalystClassItem> classMembers = target.Script.Fields[i].ClassMembers;
             if (classMembers.Count == analyzedClassMembers.Count)
             {
                 for (int j = 0; j < classMembers.Count; j++)
                 {
                     if (analyzedClassMembers[j].Code.Equals(classMembers[j].Code))
                     {
                         analyzedClassMembers[j].Name = classMembers[j].Name;
                     }
                 }
             }
         }
     }

     // Finalize the analyzed fields into the script.
     var fields = new DataField[this._xa942970cc8a85fd4.Length];
     for (int i = 0; i < fields.Length; i++)
     {
         fields[i] = this._xa942970cc8a85fd4[i].FinalizeField();
     }
     target.Script.Fields = fields;
 }
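The decompiled Process above appears to be the two-pass CSV analysis that EncogAnalyst runs over a source file. A minimal sketch of driving it through the public API, assuming Encog 3.x, the Encog.App.Analyst and System.IO namespaces, and a hypothetical "iris.csv" with headers:

        var analyst = new EncogAnalyst();
        // Analyze() performs the two passes shown above and stores the
        // finalized DataField[] on the analyst's script.
        analyst.Analyze(new FileInfo("iris.csv"), true, AnalystFileFormat.DecpntComma);
        Console.WriteLine("Fields found: " + analyst.Script.Fields.Length);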
Example #24
0
 public void Process()
 {
     // validation step (corresponds to Validate() in the non-obfuscated version of this method)
     this.x461c3bf969128260();

     ReadCSV dcsv = new ReadCSV(base.InputFilename.ToString(), base.ExpectInputHeaders, base.InputFormat);
     base.ResetStatus();

     // Copy rows into each target file until that target's row quota is used up.
     foreach (SegregateTargetPercent percent in this._x2ea7a1eff81ae7c0)
     {
         StreamWriter writer = base.PrepareOutputFile(percent.Filename);

         while (percent.NumberRemaining > 0 && dcsv.Next() && !base.ShouldStop())
         {
             base.UpdateStatus(false);
             LoadedRow row = new LoadedRow(dcsv);
             base.WriteRow(writer, row);
             percent.NumberRemaining--;
         }

         writer.Close();
     }

     base.ReportDone(false);
     dcsv.Close();
 }
Example #25
0
        /// <summary>
        ///     Private constructor.
        /// </summary>
        private PropertyConstraints()
        {
            _data = new Dictionary<String, List<PropertyEntry>>();
            try
            {
                Stream mask0 = ResourceLoader.CreateStream("Encog.Resources.analyst.csv");
                var csv = new ReadCSV(mask0, false, CSVFormat.EgFormat);

                while (csv.Next())
                {
                    String sectionStr = csv.Get(0);
                    String nameStr = csv.Get(1);
                    String typeStr = csv.Get(2);

                    // determine type
                    PropertyType t;
                    if ("boolean".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeBoolean;
                    }
                    else if ("real".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeDouble;
                    }
                    else if ("format".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeFormat;
                    }
                    else if ("int".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeInteger;
                    }
                    else if ("list-string".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeListString;
                    }
                    else if ("string".Equals(typeStr, StringComparison.InvariantCultureIgnoreCase))
                    {
                        t = PropertyType.TypeString;
                    }
                    else
                    {
                        throw new AnalystError("Unknown type constraint: "
                                               + typeStr);
                    }

                    var entry = new PropertyEntry(t, nameStr,
                                                  sectionStr);
                    List<PropertyEntry> list;

                    if (_data.ContainsKey(sectionStr))
                    {
                        list = _data[sectionStr];
                    }
                    else
                    {
                        list = new List<PropertyEntry>();
                        _data[sectionStr] = list;
                    }

                    list.Add(entry);
                }

                csv.Close();
                mask0.Close();
            }
            catch (IOException e)
            {
                throw new EncogError(e);
            }
        }
Example #26
0
        /// <summary>
        /// Reads and parses CSV data from file
        /// </summary>
        /// <param name="ticker">Ticker associated with CSV file</param>
        /// <param name="neededTypes">Columns to parse (headers)</param>
        /// <param name="from">DateTime from</param>
        /// <param name="to">DateTime to</param>
        /// <param name="File">Filepath to CSV</param>
        /// <returns>Market data</returns>
        public ICollection<LoadedMarketData> ReadAndCallLoader(TickerSymbol ticker, IList<MarketDataType> neededTypes, DateTime from, DateTime to, string File)
        {
            try
            {
                LoadedFile = File;
                Console.WriteLine("Loading instrument: " + ticker.Symbol + " from: " + File);
                //We got a file, lets load it.
                ICollection<LoadedMarketData> result = new List<LoadedMarketData>();
                ReadCSV csv = new ReadCSV(File, true, LoadedFormat);
                if (DateTimeDualColumn)
                {
                    csv.DateFormat = DateFormat;
                    csv.TimeFormat = TimeFormat;
                }
                else
                {
                    csv.DateFormat = DateFormat;
                }

                //"Date","Time","Open","High","Low","Close","Volume"
                while (csv.Next())
                {
                    string datetime = "";
                    if (DateTimeDualColumn)
                    {
                        datetime = csv.GetDate("Date").ToShortDateString() + " " +
                                          csv.GetTime("Time").ToShortTimeString();
                    }
                    else
                    {
                        datetime = csv.GetDate("Date").ToShortDateString();
                    }
                    DateTime date = DateTime.Parse(datetime);
                    if (date > from && date < to)
                    {
                        // CSV columns
                        double open = csv.GetDouble("Open");
                        double high = csv.GetDouble("High");
                        double low = csv.GetDouble("Low");
                        double close = csv.GetDouble("Close");
                        double volume = csv.GetDouble("Volume");

                        LoadedMarketData data = new LoadedMarketData(date, ticker);
                        foreach (MarketDataType marketDataType in neededTypes)
                        {
                            switch (marketDataType.ToString())
                            {
                                case "Open":
                                    data.SetData(MarketDataType.Open, open);
                                    break;
                                case "High":
                                    data.SetData(MarketDataType.High, high);
                                    break;
                                case "Low":
                                    data.SetData(MarketDataType.Low, low);
                                    break;
                                case "Close":
                                    data.SetData(MarketDataType.Close, close);
                                    break;
                                case "Volume":
                                    data.SetData(MarketDataType.Volume, volume);
                                    break;
                                case "RangeHighLow":
                                    data.SetData(MarketDataType.RangeHighLow, Math.Round(Math.Abs(high - low), 6));
                                    break;
                                case "RangeOpenClose":
                                    data.SetData(MarketDataType.RangeOpenClose, Math.Round(Math.Abs(close - open), 6));
                                    break;
                                case "RangeOpenCloseNonAbsolute":
                                    data.SetData(MarketDataType.RangeOpenCloseNonAbsolute, Math.Round(close - open, 6));
                                    break;
                                case "Weighted":
                                    data.SetData(MarketDataType.Weighted, Math.Round((high + low + 2 * close) / 4, 6));
                                    break;
                            }
                        }
                        result.Add(data);
                    }
                }
                csv.Close();
                return result;
            }
            catch (Exception ex)
            {
                Console.WriteLine("Something went wrong reading the csv: " + ex.Message);
            }
            return null;
        }
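A minimal usage sketch for the loader above. The enclosing loader class is not shown, so the CsvMarketLoader name, the ticker, and the file path are assumptions; the loader's LoadedFormat and DateFormat (plus TimeFormat when DateTimeDualColumn is set) are expected to be configured first. Requires System, System.Collections.Generic, and Encog.ML.Data.Market:

        var loader = new CsvMarketLoader();              // hypothetical class name
        var needed = new List<MarketDataType> { MarketDataType.Close, MarketDataType.Volume };
        ICollection<LoadedMarketData> bars = loader.ReadAndCallLoader(
            new TickerSymbol("EURUSD"),                   // hypothetical ticker
            needed,
            new DateTime(2016, 1, 1),
            new DateTime(2016, 12, 31),
            "eurusd.csv");                                // hypothetical file path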
Example #27
0
        /// <summary>
        ///     Process the input file and segregate into the output files.
        /// </summary>
        public void Process()
        {
            Validate();

            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);
            ResetStatus();

            foreach (SegregateTargetPercent target in _targets)
            {
                StreamWriter tw = PrepareOutputFile(target.Filename);

                while ((target.NumberRemaining > 0) && csv.Next()
                       && !ShouldStop())
                {
                    UpdateStatus(false);
                    var row = new LoadedRow(csv);
                    WriteRow(tw, row);
                    target.NumberRemaining = target.NumberRemaining - 1;
                }

                tw.Close();
            }
            ReportDone(false);
            csv.Close();
        }
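A minimal usage sketch for the segregation pass above, assuming the enclosing class is Encog's SegregateCSV utility (the class name is not shown here) and hypothetical file names:

        var seg = new SegregateCSV();
        // split the input roughly 75% / 25% into two output files
        seg.Targets.Add(new SegregateTargetPercent(new FileInfo("train.csv"), 75));
        seg.Targets.Add(new SegregateTargetPercent(new FileInfo("eval.csv"), 25));
        seg.Analyze(new FileInfo("input.csv"), true, CSVFormat.English);
        seg.Process();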
Example #28
0
        /// <summary>
        ///     Perform a basic analysis of the file. This method is used mostly
        ///     internally.
        /// </summary>
        public void PerformBasicCounts()
        {
            ResetStatus();
            int rc = 0;
            var csv = new ReadCSV(_inputFilename.ToString(),
                                  _expectInputHeaders, _format);
            while (csv.Next() && !_cancel)
            {
                UpdateStatus(true);
                rc++;
            }
            _recordCount = rc;
            _columnCount = csv.ColumnCount;

            ReadHeaders(csv);
            csv.Close();
            ReportDone(true);
        }
Example #29
0
        /// <summary>
        /// Process the file.
        /// </summary>
        ///
        /// <param name="outputFile">The output file.</param>
        /// <param name="method">The method to use.</param>
        public void Process(FileInfo outputFile, IMLRegression method)
        {
            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);

            if (method.InputCount > _inputCount)
            {
                throw new AnalystError("This machine learning method has "
                                       + method.InputCount
                                       + " inputs, however, the data has " + _inputCount
                                       + " inputs.");
            }

            var input = new BasicMLData(method.InputCount);

            StreamWriter tw = AnalystPrepareOutputFile(outputFile);

            ResetStatus();
            while (csv.Next())
            {
                UpdateStatus(false);
                var row = new LoadedRow(csv, _idealCount);

                int dataIndex = 0;
                // load the input data
                for (int i = 0; i < _inputCount; i++)
                {
                    String str = row.Data[i];
                    double d = Format.Parse(str);
                    input[i] = d;
                    dataIndex++;
                }

                // do we need to skip the ideal values?
                dataIndex += _idealCount;

                // compute the result
                IMLData output = method.Compute(input);

                // display the computed result
                for (int i = 0; i < _outputCount; i++)
                {
                    double d = output[i];
                    row.Data[dataIndex++] = Format.Format(d, Precision);
                }

                WriteRow(tw, row);
            }
            ReportDone(false);
            tw.Close();
            csv.Close();
        }
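A hedged usage sketch for the evaluation step above, assuming the enclosing class is Encog's AnalystEvaluateRawCSV (the class name is not shown) and that an analyst script and a trained regression method have been saved to the hypothetical files named below:

        var analyst = new EncogAnalyst();
        analyst.Load(new FileInfo("analyst.ega"));        // hypothetical script file
        var method = (IMLRegression) EncogDirectoryPersistence.LoadObject(new FileInfo("network.eg"));

        var eval = new AnalystEvaluateRawCSV();
        // Analyze() sets the input/ideal counts used by Process().
        eval.Analyze(analyst, new FileInfo("input.csv"), true, CSVFormat.English);
        eval.Process(new FileInfo("output.csv"), method);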
Example #30
0
        /// <summary>
        /// Read the input file.
        /// </summary>
        ///
        private void ReadInputFile()
        {
            ResetStatus();

            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, InputFormat);
            while (csv.Next() && !ShouldStop())
            {
                UpdateStatus("Reading input file");
                var row = new LoadedRow(csv);
                _data.Add(row);
            }

            Count = csv.ColumnCount;

            if (ExpectInputHeaders)
            {
                InputHeadings = new String[csv.ColumnCount];
                for (int i = 0; i < csv.ColumnCount; i++)
                {
                    InputHeadings[i] = csv.ColumnNames[i];
                }
            }

            csv.Close();
        }