Example #1
0
        /// <summary>
        /// Loads the trained network and the analyst script, runs every row of
        /// the normalized evaluation set through the network, and writes the
        /// de-normalized "Ideal,Actual" pairs to the validation result file,
        /// echoing each pair to the console.
        /// </summary>
        static void Evaluate()
        {
            var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkFile);
            var analyst = new EncogAnalyst();
            analyst.Load(Config.AnalystFile.ToString());

            var evaluationSet = EncogUtility.LoadCSV2Memory(Config.NormalizedEvaluateFile.ToString(),
                                                            network.InputCount, network.OutputCount, true, CSVFormat.English, false);

            // Field 8 is the target column; it de-normalizes both the network
            // output and the ideal value.
            var targetField = analyst.Script.Normalize.NormalizedFields[8];

            using (var writer = new System.IO.StreamWriter(Config.ValidationResult.ToString()))
            {
                writer.WriteLine("Ideal,Actual");
                foreach (var pair in evaluationSet)
                {
                    var computed = (BasicMLData)network.Compute(pair.Input);
                    var actual   = targetField.DeNormalize(computed.Data[0]);
                    var ideal    = targetField.DeNormalize(pair.Ideal[0]);

                    writer.WriteLine(ideal.ToString() + "," + actual.ToString());
                    Console.WriteLine("Ideal : {0}, Actual : {1}", ideal, actual);
                }
            }
        }
Example #2
0
        /// <summary>
        /// Evaluates the trained classification network against the normalized
        /// evaluation set and returns the class name predicted for the last row.
        /// </summary>
        /// <param name="TrainedNetworkFile">Persisted network file.</param>
        /// <param name="AnalystFile">Analyst (EGA) script file.</param>
        /// <param name="NormalizedEvaluateFile">Normalized evaluation CSV.</param>
        /// <returns>The predicted class name, or empty if the set is empty.</returns>
        public string Evaluate(FileInfo TrainedNetworkFile, FileInfo AnalystFile, FileInfo NormalizedEvaluateFile)
        {
            var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(TrainedNetworkFile);
            var analyst = new EncogAnalyst();
            analyst.Load(AnalystFile.ToString());

            var evaluationSet = EncogUtility.LoadCSV2Memory(NormalizedEvaluateFile.ToString(),
                                                            network.InputCount, network.OutputCount, true, CSVFormat.English, false);

            var career = string.Empty;

            foreach (var row in evaluationSet)
            {
                var output = network.Compute(row.Input);

                // Field 3 is the class column; decode the equilateral encoding
                // back to a class index.
                var targetField = analyst.Script.Normalize.NormalizedFields[3];
                var eq = new Encog.MathUtil.Equilateral(targetField.Classes.Count,
                                                        targetField.NormalizedHigh,
                                                        targetField.NormalizedLow);
                var predictedClassInt = eq.Decode(output);

                career = targetField.Classes[predictedClassInt].Name;
            }

            return career;
        }
Example #3
0
        /// <summary>
        /// End-to-end regression test: copies the data and script resources,
        /// runs the full analyst task, then checks that each prediction in the
        /// output file is within tolerance of the expected value.
        /// </summary>
        public void TestRegression()
        {
            FileInfo rawFile    = TEMP_DIR.CreateFile("EncogCSharp/Resources/simple.csv");
            FileInfo egaFile    = TEMP_DIR.CreateFile("EncogCSharp/Resources/simple.ega");
            FileInfo outputFile = TEMP_DIR.CreateFile("EncogCSharp/Resources/simple_output.csv");

            FileUtil.CopyResource("EncogCSharp/Resources/simple.csv", rawFile);
            FileUtil.CopyResource("EncogCSharp/Resources/simple-r.ega", egaFile);

            EncogAnalyst analyst = new EncogAnalyst();

            analyst.Load(egaFile);

            analyst.ExecuteTask("task-full");

            ReadCSV csv = new ReadCSV(outputFile.ToString(), true, CSVFormat.English);

            // FIX: the reader was previously left open when an assertion threw;
            // close it unconditionally via try/finally.
            try
            {
                while (csv.Next())
                {
                    // Column 2 holds the expected value, column 4 the prediction.
                    double diff = Math.Abs(csv.GetDouble(2) - csv.GetDouble(4));
                    Assert.IsTrue(diff < 1.5);
                }

                Assert.AreEqual(4, analyst.Script.Fields.Length);
                Assert.AreEqual(3, analyst.Script.Fields[3].ClassMembers.Count);
            }
            finally
            {
                csv.Close();
            }
        }
Example #4
0
        /// <summary>
        /// End-to-end classification test: copies the data and script resources,
        /// runs the full analyst task, then checks that the predicted class in
        /// the output file matches the expected class on every row.
        /// </summary>
        public void TestClassification()
        {
            FileInfo rawFile    = TEMP_DIR.CreateFile("simple.csv");
            FileInfo egaFile    = TEMP_DIR.CreateFile("simple.ega");
            FileInfo outputFile = TEMP_DIR.CreateFile("simple_output.csv");

            FileUtil.CopyResource("simple.csv", rawFile);
            FileUtil.CopyResource("simple.ega", egaFile);

            EncogAnalyst analyst = new EncogAnalyst();

            analyst.AddAnalystListener(new ConsoleAnalystListener());
            analyst.Load(egaFile);

            analyst.ExecuteTask("task-full");

            ReadCSV csv = new ReadCSV(outputFile.ToString(), true, CSVFormat.English);

            // FIX: the reader was previously left open when an assertion threw;
            // close it unconditionally via try/finally.
            try
            {
                while (csv.Next())
                {
                    // Column 3 is the expected class, column 4 the predicted one.
                    Assert.AreEqual(csv.Get(3), csv.Get(4));
                }

                Assert.AreEqual(4, analyst.Script.Fields.Length);
                Assert.AreEqual(3, analyst.Script.Fields[3].ClassMembers.Count);
            }
            finally
            {
                csv.Close();
            }
        }
Example #5
0
        /// <summary>
        ///     Analyze the data. This counts the records and prepares the data to be
        ///     processed.
        /// </summary>
        /// <param name="theAnalyst">The analyst to use.</param>
        /// <param name="inputFile">The input file.</param>
        /// <param name="headers">True if headers are present.</param>
        /// <param name="format">The format the file is in.</param>
        /// <exception cref="AnalystError">
        ///     Thrown when the column count matches neither the input count nor the
        ///     input+output count.
        /// </exception>
        public void Analyze(EncogAnalyst theAnalyst,
                            FileInfo inputFile, bool headers, CSVFormat format)
        {
            InputFilename      = inputFile;
            ExpectInputHeaders = headers;
            Format             = format;
            _analyst           = theAnalyst;

            Analyzed = true;

            PerformBasicCounts();

            // Column layout is derived from the analyst script.
            _inputCount  = _analyst.DetermineInputCount();
            _outputCount = _analyst.DetermineOutputCount();
            _idealCount  = InputHeadings.Length - _inputCount;

            int  columnCount      = InputHeadings.Length;
            bool inputsOnly       = columnCount == _inputCount;
            bool inputsAndOutputs = columnCount == (_inputCount + _outputCount);

            // The file must contain either inputs only, or inputs plus outputs.
            if (!inputsOnly && !inputsAndOutputs)
            {
                throw new AnalystError("Invalid number of columns("
                                       + columnCount + "), must match input("
                                       + _inputCount + ") count or input+output("
                                       + (_inputCount + _outputCount) + ") count.");
            }
        }
Example #6
0
        /// <summary>
        /// Step 3: builds an analyst script with the wizard, then normalizes
        /// both the training and evaluation CSV files and saves the script.
        /// </summary>
        private static void Step3()
        {
            Console.WriteLine("Step 3: Normalize Training and Evaluation Data");

            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(Config.BaseFile, true, AnalystFileFormat.DecpntComma);

            var normalizer = new AnalystNormalizeCSV();

            // Training set.
            normalizer.Analyze(Config.TrainingFile, true, CSVFormat.English, analyst);
            normalizer.ProduceOutputHeaders = true;
            normalizer.Normalize(Config.NormalizedTrainingFile);

            // Evaluation set.
            normalizer.Analyze(Config.EvaluateFile, true, CSVFormat.English, analyst);
            normalizer.ProduceOutputHeaders = true;
            normalizer.Normalize(Config.NormalizedEvaluateFile);

            // Persist the script so the evaluation step can de-normalize.
            analyst.Save(Config.AnalystFile);
        }
Example #7
0
        /// <summary>
        /// Generate the code from Encog Analyst. MQL4 and NinjaScript targets
        /// require the training data to be embedded.
        /// </summary>
        /// <param name="analyst">The Encog Analyst object to use for code generation.</param>
        /// <exception cref="AnalystCodeGenerationError">
        /// Thrown when the target language requires embedded data but
        /// <c>EmbedData</c> is false.
        /// </exception>
        public void Generate(EncogAnalyst analyst)
        {
            bool requiresEmbedding =
                targetLanguage == TargetLanguage.MQL4 ||
                targetLanguage == TargetLanguage.NinjaScript;

            if (requiresEmbedding && !EmbedData)
            {
                throw new AnalystCodeGenerationError(
                          "MQL4 and Ninjascript must be embedded.");
            }

            if (generator is IProgramGenerator)
            {
                // Program generators work from the persisted method and
                // training files referenced by the script.
                var methodID   = analyst.Script.Properties.GetPropertyString(ScriptProperties.MlConfigMachineLearningFile);
                var trainingID = analyst.Script.Properties.GetPropertyString(ScriptProperties.MlConfigTrainingFile);

                FileInfo methodFile   = analyst.Script.ResolveFilename(methodID);
                FileInfo trainingFile = analyst.Script.ResolveFilename(trainingID);

                Generate(methodFile, trainingFile);
            }
            else
            {
                // Template generators consume the analyst model directly.
                ((ITemplateGenerator)generator).Generate(analyst);
            }
        }
Example #8
0
        /// <summary>
        /// Normalizes the CSV at csvPath into csvPathNormalized. Every field is
        /// scaled into [-1, 1] except the "cls" column, which is passed through
        /// unchanged.
        /// </summary>
        public static void NormalizeData()
        {
            var sourceFile = new FileInfo(csvPath);
            var targetFile = new FileInfo(csvPathNormalized);
            var analyst    = new EncogAnalyst();
            var wizard     = new AnalystWizard(analyst);

            wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);

            var norm = new AnalystNormalizeCSV();
            norm.Analyze(sourceFile, true, CSVFormat.English, analyst);

            foreach (AnalystField field in analyst.Script.Normalize.NormalizedFields)
            {
                field.NormalizedHigh = 1;
                field.NormalizedLow  = -1;

                // The class label column is kept as-is; everything else is
                // range-normalized.
                field.Action = field.Name == "cls"
                    ? Encog.Util.Arrayutil.NormalizationAction.PassThrough
                    : Encog.Util.Arrayutil.NormalizationAction.Normalize;
            }

            norm.ProduceOutputHeaders = true;
            norm.Normalize(targetFile);
        }
Example #9
0
        /// <summary>
        /// Process the file and cluster.
        /// </summary>
        ///
        /// <param name="outputFile">The output file.</param>
        /// <param name="clusters">The number of clusters.</param>
        /// <param name="theAnalyst">The analyst to use.</param>
        /// <param name="iterations">The number of iterations to use.</param>
        public void Process(FileInfo outputFile, int clusters,
                            EncogAnalyst theAnalyst, int iterations)
        {
            StreamWriter tw = PrepareOutputFile(outputFile);

            // FIX: the writer was previously leaked if clustering or writing
            // threw; close it unconditionally via try/finally.
            try
            {
                ResetStatus();

                var cluster = new KMeansClustering(clusters,
                                                   _data);

                cluster.Iteration(iterations);

                int clusterNum = 0;

                foreach (IMLCluster cl in cluster.Clusters)
                {
                    foreach (IMLData item in cl.Data)
                    {
                        var       row      = (ClusterRow)item;
                        // The last input column receives the cluster number.
                        int       clsIndex = row.Input.Count - 1;
                        LoadedRow lr       = row.Row;
                        lr.Data[clsIndex] = "" + clusterNum;
                        WriteRow(tw, lr);
                    }
                    clusterNum++;
                }

                ReportDone(false);
            }
            finally
            {
                tw.Close();
            }
        }
Example #10
0
        /// <see cref="INetwork.Normalize"/>
        public INetwork Normalize()
        {
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);

            wizard.Wizard(BaseFile, true, AnalystFileFormat.DecpntComma);

            // Apply the project's field-range configuration before normalizing.
            ConfigureNormalizedFields(analyst);

            var normalizer = new AnalystNormalizeCSV();

            // Training data.
            normalizer.Analyze(TrainingFile, true, CSVFormat.English, analyst);
            normalizer.ProduceOutputHeaders = true;
            normalizer.Normalize(NormalizedTrainingFile);

            // Evaluation data.
            normalizer.Analyze(EvaluateFile, true, CSVFormat.English, analyst);
            normalizer.ProduceOutputHeaders = true;
            normalizer.Normalize(NormalizedEvaluateFile);

            // Persist the analyst script for the evaluation step.
            analyst.Save(AnalystFile);

            return this;
        }
Example #11
0
        /// <see cref="INetwork.Evaluate"/>
        public INetwork Evaluate()
        {
            var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(TrainedNetworkFile);
            var analyst = new EncogAnalyst();
            analyst.Load(AnalystFile.ToString());

            var evaluationSet = EncogUtility.LoadCSV2Memory(NormalizedEvaluateFile.ToString(),
                                                            network.InputCount, network.OutputCount, true, CSVFormat.English, false);

            // Field 43 is the prediction target; it de-normalizes both the
            // network output and the ideal value.
            var targetField = analyst.Script.Normalize.NormalizedFields[43];

            int count = 0;

            using (var file = new StreamWriter(EvaluateFileOutput.ToString())) {
                file.WriteLine("Target, Predict");

                foreach (var row in evaluationSet)
                {
                    count++;

                    var output  = network.Compute(row.Input);
                    var predict = targetField.DeNormalize(output[0]);
                    var target  = targetField.DeNormalize(row.Ideal[0]);

                    PredictResult.Add(new PredictResultModel {
                        Target = target, Predict = predict
                    });

                    file.WriteLine(string.Format("{0},{1}", target, predict));
                }
            }

            return this;
        }
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            Console.WriteLine("Running wizard...");

            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);

            wizard.TargetFieldName = "field:1";
            wizard.Wizard(sourceCSV,
                          false, AnalystFileFormat.DecpntComma);

            // Field 0 is the customer id — carry it through unmodified.
            analyst.Script.Normalize.NormalizedFields[0].Action = Encog.Util.Arrayutil.NormalizationAction.PassThrough;

            var norm = new AnalystNormalizeCSV();
            norm.Report = new ConsoleStatusReportable();

            Console.WriteLine("Analyze for normalize...");
            norm.Analyze(sourceCSV, false, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;

            Console.WriteLine("Normalize...");
            norm.Normalize(targetCSV);

            analyst.Save(scriptEGA);
        }
Example #13
0
        /// <summary>
        /// Normalizes the input CSV into the target CSV. Expects exactly two
        /// arguments: the input file and the target file; prints usage
        /// otherwise. CSV files must have headers.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            if (app.Args.Length != 2)
            {
                Console.WriteLine(@"Note: This example assumes that headers are present in the CSV files.");
                Console.WriteLine(@"NormalizeFile [input file] [target file]");
                return;
            }

            var sourceFile = new FileInfo(app.Args[0]);
            var targetFile = new FileInfo(app.Args[1]);

            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);

            DumpFieldInfo(analyst);

            var norm = new AnalystNormalizeCSV();
            norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            norm.Normalize(targetFile);

            EncogFramework.Instance.Shutdown();
        }
Example #14
0
        /// <summary>
        /// Runs an Encog Analyst script (EGA file) named on the command line.
        /// The first argument is the EGA file; the optional second argument is
        /// the task name (defaults to "task-full").
        /// </summary>
        private void AnalystCommand()
        {
            if (_cmd.Args.Count == 0)
            {
                Console.WriteLine(@"Must specify the EGA file to run");
                return;
            }

            String egaFile = _cmd.Args[0];
            String task    = _cmd.Args.Count > 1 ? _cmd.Args[1] : "task-full";

            _sw.Start();

            var analyst = new EncogAnalyst();
            analyst.AddAnalystListener(new ConsoleAnalystListener());
            analyst.Load(new FileInfo(egaFile));
            analyst.ExecuteTask(task);
        }
Example #15
0
        /// <summary>
        ///     Analyze the data. This counts the records and prepares the data to be
        ///     processed.
        /// </summary>
        /// <param name="theAnalyst">The analyst to use.</param>
        /// <param name="inputFile">The input file.</param>
        /// <param name="headers">True if headers are present.</param>
        /// <param name="format">The format the file is in.</param>
        /// <exception cref="AnalystError">
        ///     Thrown when the column count matches neither the input count nor the
        ///     input+output count.
        /// </exception>
        public void Analyze(EncogAnalyst theAnalyst,
                            FileInfo inputFile, bool headers, CSVFormat format)
        {
            // Record the source configuration for the later processing pass.
            InputFilename = inputFile;
            ExpectInputHeaders = headers;
            Format = format;
            _analyst = theAnalyst;

            Analyzed = true;

            // NOTE(review): presumably counts records and populates
            // InputHeadings, which is read below — confirm against its definition.
            PerformBasicCounts();

            // Column layout is derived from the analyst script.
            _inputCount = _analyst.DetermineInputCount();
            _outputCount = _analyst.DetermineOutputCount();
            // Columns beyond the inputs are treated as ideal (target) values.
            _idealCount = InputHeadings.Length - _inputCount;

            // The file must contain either inputs only, or inputs plus outputs.
            if ((InputHeadings.Length != _inputCount)
                && (InputHeadings.Length != (_inputCount + _outputCount)))
            {
                throw new AnalystError("Invalid number of columns("
                                       + InputHeadings.Length + "), must match input("
                                       + _inputCount + ") count or input+output("
                                       + (_inputCount + _outputCount) + ") count.");
            }
        }
        /// <summary>
        /// Runs every row of the normalized evaluation file through the trained
        /// network and writes the de-normalized "ideal, actual" pairs (via
        /// field 8) to the validation results file, echoing each to the console.
        /// </summary>
        /// <param name="networkFile">Persisted network file.</param>
        /// <param name="analystFile">Analyst (EGA) script file.</param>
        /// <param name="EvaluationFile">Normalized evaluation CSV.</param>
        /// <param name="validationResultsFile">Destination for results.</param>
        public void Evaluate(FileInfo networkFile, FileInfo analystFile, FileInfo EvaluationFile, FileInfo validationResultsFile)
        {
            var network = EncogDirectoryPersistence.LoadObject(networkFile) as BasicNetwork;
            var analyst = new EncogAnalyst();
            analyst.Load(analystFile);

            var evaluationSet = EncogUtility.LoadCSV2Memory(EvaluationFile.ToString(), network.InputCount,
                                                            network.OutputCount, true, CSVFormat.English, false);

            // Field 8 is the target column used for de-normalization.
            var targetField = analyst.Script.Normalize.NormalizedFields[8];

            using (var file = new StreamWriter(validationResultsFile.ToString()))
            {
                foreach (var row in evaluationSet)
                {
                    var computed = (BasicMLData)network.Compute(row.Input);
                    var actual   = targetField.DeNormalize(computed.Data[0]);
                    var ideal    = targetField.DeNormalize(row.Ideal[0]);

                    file.WriteLine($"{ideal}, {actual}");
                    Console.WriteLine($"Ideal: {ideal} | Actual: {actual}");
                }
            }
        }
Example #17
0
 /// <summary>
 /// Forces every normalized field into the [0, 1] output range.
 /// </summary>
 /// <param name="analyst">The analyst whose script is adjusted.</param>
 private static void ConfigureNormalizedFields(EncogAnalyst analyst)
 {
     foreach (var analystField in analyst.Script.Normalize.NormalizedFields)
     {
         analystField.NormalizedLow  = 0;
         analystField.NormalizedHigh = 1;
     }
 }
        /// <summary>
        ///     Construct the object.
        /// </summary>
        /// <param name="theAnalyst">The analyst.</param>
        /// <param name="theBackwardWindowSize">The backward window size.</param>
        /// <param name="theForwardWindowSize">The forward window size.</param>
        public AnalystProcess(EncogAnalyst theAnalyst, int theBackwardWindowSize, int theForwardWindowSize)
        {
            analyst = theAnalyst;

            backwardWindowSize = theBackwardWindowSize;
            forwardWindowSize = theForwardWindowSize;
            // Register the standard extensions on the shared program context.
            StandardExtensions.CreateAll(programContext);
        }
Example #19
0
        /// <summary>
        ///     Construct the object.
        /// </summary>
        /// <param name="theAnalyst">The analyst.</param>
        /// <param name="theBackwardWindowSize">The backward window size.</param>
        /// <param name="theForwardWindowSize">The forward window size.</param>
        public AnalystProcess(EncogAnalyst theAnalyst, int theBackwardWindowSize, int theForwardWindowSize)
        {
            analyst = theAnalyst;

            backwardWindowSize = theBackwardWindowSize;
            forwardWindowSize  = theForwardWindowSize;
            // Register the standard extensions on the shared program context.
            StandardExtensions.CreateAll(programContext);
        }
Example #20
0
 /// <inheritdoc/>
 public double[] HandleMissing(EncogAnalyst analyst, AnalystField stat)
 {
     var result = new double[stat.ColumnsNeeded];
     // BUG FIX: the original expression high - (high - low/2) reduces to low/2
     // because of operator precedence. The intended substitute value is the
     // midpoint of the normalized range: high - (high - low)/2.
     double n = stat.NormalizedHigh - (stat.NormalizedHigh - stat.NormalizedLow) / 2;
     // Replicate the midpoint across every column this field occupies.
     for (int i = 0; i < result.Length; i++)
     {
         result[i] = n;
     }
     return result;
 }
Example #21
0
        /// <summary>
        /// Construct the testing utility: clears the temp directory, copies the
        /// named resource into test.csv, and prepares a fresh analyst plus the
        /// path for the generated EGA script.
        /// </summary>
        /// <param name="theBaseDataFile">Resource name of the base data file.</param>
        public AnalystTestingUtility(String theBaseDataFile)
        {
            _tempDir.ClearContents();
            BaseDataFile = theBaseDataFile;
            _rawFile     = _tempDir.CreateFile("test.csv");
            FileUtil.CopyResource(theBaseDataFile, _rawFile);
            _analystFile = _tempDir.CreateFile("test.ega");
            EncogAnalyst = new EncogAnalyst();

            // Default file format; presumably decimal point with comma
            // separators — confirm against AnalystFileFormat docs.
            FileFormat = AnalystFileFormat.DecpntComma;
        }
        /// <inheritdoc/>
        public double[] HandleMissing(EncogAnalyst analyst, AnalystField stat)
        {
            var    result = new double[stat.ColumnsNeeded];
            // BUG FIX: the original expression high - (high - low/2) reduces to
            // low/2 because of operator precedence. The intended substitute is
            // the midpoint of the normalized range: high - (high - low)/2.
            double n      = stat.NormalizedHigh - (stat.NormalizedHigh - stat.NormalizedLow) / 2;

            // Replicate the midpoint across every column this field occupies.
            for (int i = 0; i < result.Length; i++)
            {
                result[i] = n;
            }
            return(result);
        }
Example #23
0
        /// <summary>
        /// Normalizes RawFile.csv into NormalizedFile.csv using a
        /// wizard-generated analyst script.
        /// </summary>
        static void EncogAnalyst()
        {
            var sourceFile = new FileInfo("RawFile.csv");
            var targetFile = new FileInfo("NormalizedFile.csv");
            var analyst    = new EncogAnalyst();
            var wizard     = new AnalystWizard(analyst);

            wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);
            var norm = new AnalystNormalizeCSV();

            // BUG FIX: the wizard was told the file uses a decimal point
            // (AnalystFileFormat.DecpntComma) but Analyze read it with
            // CSVFormat.DecimalComma. Use the matching CSVFormat.English so
            // both passes parse numbers the same way.
            norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
            norm.Normalize(targetFile);
        }
Example #24
0
        /// <summary>
        /// Runs the analyst wizard over the raw file with the given goal and
        /// method type, then round-trips the generated script through disk so
        /// later steps work from the persisted EGA file.
        /// </summary>
        /// <param name="goal">The analyst goal.</param>
        /// <param name="methodType">The ML method type to configure.</param>
        /// <param name="headers">True if the raw file has headers.</param>
        public void Wizard(AnalystGoal goal, WizardMethodType methodType, bool headers)
        {
            EncogAnalyst.MaxIteration = MaxIterations;

            var wiz = new AnalystWizard(EncogAnalyst);
            wiz.Goal              = goal;
            wiz.MethodType        = methodType;
            wiz.EvidenceSegements = 3;

            wiz.Wizard(_rawFile, headers, FileFormat);

            EncogAnalyst.Save(_analystFile);
            EncogAnalyst.Load(_analystFile);
        }
 /// <inheritdoc/>
 public double[] HandleMissing(EncogAnalyst analyst, AnalystField stat)
 {
     // Classification field: substitute the most frequent class (the mode).
     if (stat.Classify)
     {
         return stat.Encode(stat.DetermineMode(analyst));
     }

     // Numeric field: substitute the column mean from the script's stats.
     var dataField = analyst.Script.FindDataField(stat.Name);
     return new[] { dataField.Mean };
 }
        /// <inheritdoc />
        public double[] HandleMissing(EncogAnalyst analyst, AnalystField stat)
        {
            // Classification field: substitute the most frequent class (mode).
            if (stat.Classify)
            {
                int m = stat.DetermineMode(analyst);
                return(stat.Encode(m));
            }
            // Numeric field: substitute the column mean from the script's stats.
            DataField df     = analyst.Script.FindDataField(stat.Name);
            var       result = new double[1];

            result[0] = df.Mean;
            return(result);
        }
Example #27
0
        /// <summary>
        /// Step 3: builds an analyst script with the wizard, assigns an explicit
        /// normalization action per column, then normalizes the training and
        /// evaluation files and saves the script.
        /// </summary>
        static void Step3()
        {
            Console.WriteLine("Step 3: Normalize Training and Evaluation Data");

            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);

            wizard.Wizard(Config.BaseFile, true, AnalystFileFormat.DecpntComma);

            // Per-column actions, indexed to match the CSV layout.
            var actions = new[]
            {
                Encog.Util.Arrayutil.NormalizationAction.Equilateral,   // 0 cylinders: multivalued discrete
                Encog.Util.Arrayutil.NormalizationAction.Normalize,     // 1 displacement: continuous
                Encog.Util.Arrayutil.NormalizationAction.Normalize,     // 2 horsepower: continuous
                Encog.Util.Arrayutil.NormalizationAction.Normalize,     // 3 weight: continuous
                Encog.Util.Arrayutil.NormalizationAction.Normalize,     // 4 acceleration: continuous
                Encog.Util.Arrayutil.NormalizationAction.Equilateral,   // 5 year: multivalued discrete
                Encog.Util.Arrayutil.NormalizationAction.Equilateral,   // 6 origin: multivalued discrete
                Encog.Util.Arrayutil.NormalizationAction.Ignore,        // 7 name: not used
                Encog.Util.Arrayutil.NormalizationAction.Normalize      // 8 mpg: continuous target
            };

            for (int i = 0; i < actions.Length; i++)
            {
                analyst.Script.Normalize.NormalizedFields[i].Action = actions[i];
            }

            var norm = new AnalystNormalizeCSV();
            norm.ProduceOutputHeaders = true;

            // Training set.
            norm.Analyze(Config.TrainingFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedTrainingFile);

            // Evaluation set.
            norm.Analyze(Config.EvaluateFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedEvaluateFile);

            // Persist the script so the evaluation step can de-normalize.
            analyst.Save(Config.AnalystFile);
        }
Example #28
0
 /// <summary>
 /// Prints each normalized field's name, action, and observed min/max range
 /// to the console.
 /// </summary>
 /// <param name="analyst">The analyst whose script is inspected.</param>
 public static void DumpFieldInfo(EncogAnalyst analyst)
 {
     Console.WriteLine(@"Fields found in file:");
     foreach (AnalystField field in analyst.Script.Normalize.NormalizedFields)
     {
         Console.WriteLine(
             $"{field.Name},action={field.Action},min={field.ActualLow},max={field.ActualHigh}");
     }
 }
Example #29
0
        /// <summary>
        /// Analyze the data. This counts the records and prepares the data to be
        /// processed.
        /// </summary>
        ///
        /// <param name="theAnalyst">The analyst to use.</param>
        /// <param name="inputFile">The input file to analyze.</param>
        /// <param name="headers">True, if the input file has headers.</param>
        /// <param name="format">The format of the input file.</param>
        public void Analyze(EncogAnalyst theAnalyst,
                            FileInfo inputFile, bool headers, CSVFormat format)
        {
            // Record the source configuration for later processing.
            InputFilename      = inputFile;
            ExpectInputHeaders = headers;
            InputFormat        = format;

            Analyzed = true;
            _analyst = theAnalyst;

            // Default the output format to the input format if not set.
            if (OutputFormat == null)
            {
                OutputFormat = InputFormat;
            }

            _data = new BasicMLDataSet();
            ResetStatus();
            int recordCount = 0;

            int outputLength = _analyst.DetermineTotalColumns();
            var csv          = new ReadCSV(InputFilename.ToString(),
                                           ExpectInputHeaders, InputFormat);

            ReadHeaders(csv);

            _analystHeaders = new CSVHeaders(InputHeadings);

            // Load every row into _data as a ClusterRow so clustering can run
            // over the extracted fields while keeping the original row.
            while (csv.Next() && !ShouldStop())
            {
                UpdateStatus(true);

                // The extra column (second arg = 1) presumably reserves space
                // for the cluster assignment — TODO confirm against LoadedRow.
                var row = new LoadedRow(csv, 1);

                double[] inputArray = AnalystNormalizeCSV.ExtractFields(
                    _analyst, _analystHeaders, csv, outputLength, true);
                var input = new ClusterRow(inputArray, row);
                _data.Add(input);

                recordCount++;
            }
            RecordCount = recordCount;
            Count       = csv.ColumnCount;

            // NOTE(review): ReadHeaders is called a second time after the data
            // loop; looks redundant — confirm whether it has side effects.
            ReadHeaders(csv);
            csv.Close();
            ReportDone(true);
        }
        /// <summary>
        /// Builds an analyst script with the wizard, assigns an explicit
        /// normalization action per column, then normalizes the training,
        /// cross-validation, and evaluation files and saves the script.
        /// </summary>
        private void Normalize()
        {
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);

            wizard.Wizard(Config.BaseFile, true, AnalystFileFormat.DecpntComma);

            // Per-column actions, indexed to match the CSV layout.
            var actions = new[]
            {
                Encog.Util.Arrayutil.NormalizationAction.Equilateral,   // 0 cylinders
                Encog.Util.Arrayutil.NormalizationAction.Normalize,     // 1 displacement
                Encog.Util.Arrayutil.NormalizationAction.Normalize,     // 2 horsepower
                Encog.Util.Arrayutil.NormalizationAction.Normalize,     // 3 weight
                Encog.Util.Arrayutil.NormalizationAction.Normalize,     // 4 acceleration
                Encog.Util.Arrayutil.NormalizationAction.Equilateral,   // 5 year
                Encog.Util.Arrayutil.NormalizationAction.Equilateral,   // 6 origin
                Encog.Util.Arrayutil.NormalizationAction.Ignore,        // 7 name
                Encog.Util.Arrayutil.NormalizationAction.Normalize      // 8 mpg
            };

            for (int i = 0; i < actions.Length; i++)
            {
                analyst.Script.Normalize.NormalizedFields[i].Action = actions[i];
            }

            var norm = new AnalystNormalizeCSV();
            norm.ProduceOutputHeaders = true;

            // Training set.
            norm.Analyze(Config.TrainingFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedTrainingFile);

            // Cross-validation set.
            norm.Analyze(Config.CrossValidationFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedCrossValidationFile);

            // Evaluation set.
            norm.Analyze(Config.EvaluateFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedEvaluateFile);

            // Persist the script so the evaluation step can de-normalize.
            analyst.Save(Config.AnalystFile);
        }
        /// <summary>
        /// Analyze the data. This counts the records and prepares the data to be
        /// processed.
        /// </summary>
        ///
        /// <param name="theAnalyst">The analyst to use.</param>
        /// <param name="inputFile">The input file to analyze.</param>
        /// <param name="headers">True, if the input file has headers.</param>
        /// <param name="format">The format of the input file.</param>
        public void Analyze(EncogAnalyst theAnalyst,
                            FileInfo inputFile, bool headers, CSVFormat format)
        {
            // Record the source configuration for later processing.
            InputFilename = inputFile;
            ExpectInputHeaders = headers;
            InputFormat = format;

            Analyzed = true;
            _analyst = theAnalyst;

            // Default the output format to the input format if not set.
            if (OutputFormat == null)
            {
                OutputFormat = InputFormat;
            }

            _data = new BasicMLDataSet();
            ResetStatus();
            int recordCount = 0;

            int outputLength = _analyst.DetermineTotalColumns();
            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, InputFormat);
            ReadHeaders(csv);

            _analystHeaders = new CSVHeaders(InputHeadings);

            // Load every row into _data as a ClusterRow so clustering can run
            // over the extracted fields while keeping the original row.
            while (csv.Next() && !ShouldStop())
            {
                UpdateStatus(true);

                // The extra column (second arg = 1) presumably reserves space
                // for the cluster assignment — TODO confirm against LoadedRow.
                var row = new LoadedRow(csv, 1);

                double[] inputArray = AnalystNormalizeCSV.ExtractFields(
                    _analyst, _analystHeaders, csv, outputLength, true);
                var input = new ClusterRow(inputArray, row);
                _data.Add(input);

                recordCount++;
            }
            RecordCount = recordCount;
            Count = csv.ColumnCount;

            // NOTE(review): ReadHeaders is called a second time after the data
            // loop; looks redundant — confirm whether it has side effects.
            ReadHeaders(csv);
            csv.Close();
            ReportDone(true);
        }
Example #32
0
        /// <summary>
        /// Evaluate a trained classification network against a normalized
        /// evaluation CSV, printing each row's ideal vs. predicted class and a
        /// final accuracy summary.
        /// </summary>
        /// <param name="networkFile">File holding the persisted BasicNetwork.</param>
        /// <param name="analystFile">Analyst file with the normalization statistics.</param>
        /// <param name="EvaluationFile">The normalized evaluation CSV file.</param>
        public void Evaluate(FileInfo networkFile, FileInfo analystFile, FileInfo EvaluationFile)
        {
            // Direct cast instead of "as": fail fast with InvalidCastException
            // rather than a later NullReferenceException on network.InputCount.
            var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(networkFile);
            var analyst = new EncogAnalyst();

            analyst.Load(analystFile);

            var evaluationSet = EncogUtility.LoadCSV2Memory(EvaluationFile.ToString(), network.InputCount,
                                                            network.OutputCount, true, CSVFormat.English, false);

            // Field index 4 is the class field; its class list and normalized
            // range are constant per run, so hoist them (and the Equilateral
            // decoder) out of the per-row loop.
            var classField = analyst.Script.Normalize.NormalizedFields[4];
            int classCount = classField.Classes.Count;
            var resulter   = new Equilateral(classCount, classField.NormalizedHigh, classField.NormalizedLow);

            int count        = 0;
            int correctCount = 0;

            foreach (var item in evaluationSet)
            {
                var output            = network.Compute(item.Input);
                var predictedClassInt = resulter.Decode(output);
                var predictedClass    = classField.Classes[predictedClassInt].Name;

                var idealClassInt = resulter.Decode(item.Ideal);
                var idealClass    = classField.Classes[idealClassInt].Name;

                if (predictedClassInt == idealClassInt)
                {
                    ++correctCount;
                }

                Console.WriteLine($"Count: {++count} | Ideal: {idealClass} Predicted:{predictedClass}");
            }

            Console.WriteLine($"Total test count: {count}");
            Console.WriteLine($"Total correct test count: {correctCount}");
            // NOTE: if the evaluation set is empty, count is 0 and this prints NaN.
            Console.WriteLine($"% Success: {(correctCount*100.0)/count}");
        }
Example #33
0
        /// <summary>
        /// Run the analyst wizard over the base file, normalize the training and
        /// evaluation CSVs with the resulting statistics, and save the analyst
        /// script to disk.
        /// </summary>
        /// <param name="baseFile">The raw source file the wizard analyzes.</param>
        /// <param name="trainingFile">The raw training CSV.</param>
        /// <param name="normalizedTrainingFile">Where to write the normalized training data.</param>
        /// <param name="evaluateFile">The raw evaluation CSV.</param>
        /// <param name="normalizedEvaluateFile">Where to write the normalized evaluation data.</param>
        /// <param name="analystFile">Where to persist the analyst script.</param>
        public void Normalize(FileInfo baseFile, FileInfo trainingFile, FileInfo normalizedTrainingFile, FileInfo evaluateFile, FileInfo normalizedEvaluateFile, FileInfo analystFile)
        {
            var analyst = new EncogAnalyst();
            new AnalystWizard(analyst).Wizard(baseFile, true, AnalystFileFormat.DecpntComma);

            var csvNormalizer = new AnalystNormalizeCSV();

            // Training set: analyze, enable header output, then write it.
            csvNormalizer.Analyze(trainingFile, true, CSVFormat.English, analyst);
            csvNormalizer.ProduceOutputHeaders = true;
            csvNormalizer.Normalize(normalizedTrainingFile);

            // Evaluation set re-uses the same normalizer and analyst statistics.
            csvNormalizer.Analyze(evaluateFile, true, CSVFormat.English, analyst);
            csvNormalizer.Normalize(normalizedEvaluateFile);

            analyst.Save(analystFile);
        }
Example #34
0
        /// <summary>
        /// Prepare for processing: record the input-file settings, count the
        /// rows and columns, and build the header and time-series helpers.
        /// </summary>
        ///
        /// <param name="theAnalyst">The analyst supplying field definitions.</param>
        /// <param name="inputFile">The CSV file to analyze.</param>
        /// <param name="headers">True if the file's first row is a header row.</param>
        /// <param name="format">The CSV format of the input file.</param>
        public void Analyze(EncogAnalyst theAnalyst,
                            FileInfo inputFile, bool headers, CSVFormat format)
        {
            _analyst = theAnalyst;

            InputFilename      = inputFile;
            ExpectInputHeaders = headers;
            Format             = format;
            Analyzed           = true;

            // Row/column counting also captures InputHeadings.
            PerformBasicCounts();

            _fileColumns    = InputHeadings.Length;
            _outputColumns  = _analyst.DetermineOutputFieldCount();
            _analystHeaders = new CSVHeaders(InputHeadings);

            // Output fields excluded from the time-series window (includeOutput: false).
            _series = new TimeSeriesUtil(_analyst, false, _analystHeaders.Headers);
        }
        /// <summary>
        /// Analyze the data. This counts the records and prepares the data to be
        /// processed.
        /// </summary>
        ///
        /// <param name="theAnalyst">The analyst to use.</param>
        /// <param name="inputFile">The input file.</param>
        /// <param name="headers">True if headers are present.</param>
        /// <param name="format">The format.</param>
        public void Analyze(EncogAnalyst theAnalyst,
                            FileInfo inputFile, bool headers, CSVFormat format)
        {
            // Record the input configuration for later processing passes.
            InputFilename = inputFile;
            ExpectInputHeaders = headers;
            Format = format;

            Analyzed = true;
            _analyst = theAnalyst;

            // Count rows/columns and capture the headings.
            PerformBasicCounts();
            _fileColumns = InputHeadings.Length;
            _outputColumns = _analyst.DetermineOutputFieldCount();

            // Build helpers for header lookup and time-series windowing
            // (output fields excluded: includeOutput is false).
            _analystHeaders = new CSVHeaders(InputHeadings);
            _series = new TimeSeriesUtil(_analyst, false,
                                        _analystHeaders.Headers);
        }
Example #36
0
        /// <summary>
        /// Evaluate the trained classification network against the test set that
        /// was created by the Segregate step and normalized by the Normalization
        /// step. Prints each row's ideal vs. predicted class and an accuracy summary.
        /// </summary>
        private static void Evaluate()
        {
            var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkClassificationFile);
            var analyst = new EncogAnalyst();

            analyst.Load(Config.AnalystClassificationFile.ToString());
            var evaluationSet = EncogUtility.LoadCSV2Memory(Config.NormalizedEvaluateClassificationFile.ToString(),
                                                            network.InputCount, network.OutputCount, true, CSVFormat.English, false);

            int count        = 0;
            int CorrectCount = 0;

            foreach (var item in evaluationSet)
            {
                count++;
                var output = network.Compute(item.Input);

                // De-normalize the four input features back to their original
                // scale for display (iris-style sepal/petal measurements).
                var sepal_l = analyst.Script.Normalize.NormalizedFields[0].DeNormalize(item.Input[0]);
                var sepal_w = analyst.Script.Normalize.NormalizedFields[1].DeNormalize(item.Input[1]);
                var petal_l = analyst.Script.Normalize.NormalizedFields[2].DeNormalize(item.Input[2]);
                var petal_w = analyst.Script.Normalize.NormalizedFields[3].DeNormalize(item.Input[3]);

                // Field 4 is the class field; its class count and normalized
                // range parameterize the equilateral decoding below.
                int    classCount        = analyst.Script.Normalize.NormalizedFields[4].Classes.Count;
                double normalizationHigh = analyst.Script.Normalize.NormalizedFields[4].NormalizedHigh;
                double normalizationLow  = analyst.Script.Normalize.NormalizedFields[4].NormalizedLow;

                var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);
                var predictedClassInt = eq.Decode(output);
                var predictedClass    = analyst.Script.Normalize.NormalizedFields[4].Classes[predictedClassInt].Name;
                var idealClassInt     = eq.Decode(item.Ideal);
                var idealClass        = analyst.Script.Normalize.NormalizedFields[4].Classes[idealClassInt].Name;

                if (predictedClassInt == idealClassInt)
                {
                    CorrectCount++;
                }
                Console.WriteLine("Count :{0} Properties [{1},{2},{3},{4}] ,Ideal : {5} Predicted : {6} ",
                                  count, sepal_l, sepal_w, petal_l, petal_w, idealClass, predictedClass);
            }

            Console.WriteLine("Quantidade de itens: {0}", count);
            Console.WriteLine("Quantidade de acertos: {0}", CorrectCount);
            Console.WriteLine("Porcentagem de acertos: {0}", ((CorrectCount * 100.0) / count));
        }
        /// <summary>
        ///     Construct the time-series utility.
        /// </summary>
        /// <param name="theAnalyst">The analyst to use.</param>
        /// <param name="includeOutput">Should output fields be included.</param>
        /// <param name="headings">The column headings.</param>
        public TimeSeriesUtil(EncogAnalyst theAnalyst, bool includeOutput,
                              IEnumerable<string> headings)
        {
            _analyst = theAnalyst;
            _buffer = new List<double[]>();
            _headingMap = new Dictionary<String, Int32>();

            // Window depth is lag + lead plus the current row.
            _lagDepth = _analyst.LagDepth;
            _leadDepth = _analyst.LeadDepth;
            _totalDepth = _lagDepth + _leadDepth + 1;

            _inputSize = includeOutput
                ? _analyst.DetermineTotalColumns()
                : _analyst.DetermineTotalInputFieldCount();
            _outputSize = _analyst.DetermineInputCount() + _analyst.DetermineOutputCount();

            // Map each heading (upper-cased) to its column position.
            var position = 0;
            foreach (var column in headings)
            {
                _headingMap[column.ToUpper()] = position++;
            }
        }
 /// <summary>
 ///     Construct the process command. (Original comment said "randomize
 ///     command", which does not match this type's name.)
 /// </summary>
 /// <param name="analyst">The analyst to use.</param>
 public CmdProcess(EncogAnalyst analyst)
     : base(analyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
 /// <summary>
 ///     Construct the set command with the analyst.
 /// </summary>
 /// <param name="analyst">The analyst to use.</param>
 public CmdSet(EncogAnalyst analyst)
     : base(analyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
Example #40
0
 /// <summary>
 /// Construct the evaluate command.
 /// </summary>
 ///
 /// <param name="theAnalyst">The analyst to use.</param>
 public CmdEvaluate(EncogAnalyst theAnalyst)
     : base(theAnalyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
 /// <summary>
 ///     Construct this generate command.
 /// </summary>
 /// <param name="analyst">The analyst to use.</param>
 public CmdGenerate(EncogAnalyst analyst)
     : base(analyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
 /// <summary>
 ///     Construct the analyst wizard and install its default configuration.
 /// </summary>
 /// <param name="theAnalyst">The analyst to use.</param>
 public AnalystWizard(EncogAnalyst theAnalyst)
 {
     _analyst = theAnalyst;
     _script = _analyst.Script;

     // Default pipeline tasks.
     _taskSegregate = true;
     _taskRandomize = true;
     _taskNormalize = true;
     _taskBalance = false;
     _taskCluster = true;

     // Default method, goal and normalization settings.
     _directClassification = false;
     _methodType = WizardMethodType.FeedForward;
     _goal = AnalystGoal.Classification;
     _range = NormalizeRange.NegOne2One;
     _missing = new DiscardMissing();
     NaiveBayes = false;

     // Time-series defaults: no lead/lag windows, target field excluded.
     _leadWindowSize = 0;
     _lagWindowSize = 0;
     _includeTargetField = false;

     TargetFieldName = "";
     MaxError = DefaultTrainError;
 }
 /// <summary>
 /// Construct the bridge object.
 /// </summary>
 ///
 /// <param name="theAnalyst">The Encog analyst to use.</param>
 public AnalystReportBridge(EncogAnalyst theAnalyst)
 {
     // Keep a reference to the analyst; all reporting reads from it.
     _analyst = theAnalyst;
 }
        /// <summary>
        /// Determine the mode, this is the class item that has the most instances.
        /// </summary>
        /// <param name="analyst">The analyst to use.</param>
        /// <returns>The index of the most frequent class member; ties keep the
        /// earliest member.</returns>
        public int DetermineMode(EncogAnalyst analyst)
        {
            if (!Classify)
            {
                throw new AnalystError("Can only calculate the mode for a class.");
            }

            DataField field = analyst.Script.FindDataField(Name);

            // Scan for the class member with the strictly highest count.
            int? bestCount = null;
            var bestIndex = 0;
            var currentIndex = 0;

            foreach (AnalystClassItem member in field.ClassMembers)
            {
                if (bestCount == null || member.Count > bestCount)
                {
                    bestCount = member.Count;
                    bestIndex = currentIndex;
                }
                currentIndex++;
            }

            return bestIndex;
        }
 /// <summary>
 ///     Construct the balance command.
 /// </summary>
 /// <param name="analyst">The analyst to use with this command.</param>
 public CmdBalance(EncogAnalyst analyst)
     : base(analyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
Example #46
0
        /// <summary>
        ///     Process the file and cluster: run k-means over the previously
        ///     loaded data and write every row back out with its cluster number
        ///     appended as an extra trailing column.
        /// </summary>
        /// <param name="outputFile">The output file.</param>
        /// <param name="clusters">The number of clusters.</param>
        /// <param name="theAnalyst">The analyst to use.</param>
        /// <param name="iterations">The number of iterations to use.</param>
        public void Process(FileInfo outputFile, int clusters,
                            EncogAnalyst theAnalyst, int iterations)
        {
            StreamWriter tw = PrepareOutputFile(outputFile);

            ResetStatus();

            var cluster = new KMeansClustering(clusters,
                                               _data);
            cluster.Iteration(iterations);

            int clusterNum = 0;

            foreach (IMLCluster cl  in  cluster.Clusters)
            {
                foreach (IMLData item  in  cl.Data)
                {
                    // item.Count is the number of data columns, so it indexes the
                    // extra (appended) column of the loaded row.
                    int clsIndex = item.Count;
                    var lr = new LoadedRow(Format, item, 1);
                    lr.Data[clsIndex] = "" + clusterNum;
                    WriteRow(tw, lr);
                }
                clusterNum++;
            }

            ReportDone(false);
            tw.Close();
        }
        /// <summary>
        /// Extract fields from a file into a numeric array for machine learning.
        /// </summary>
        ///
        /// <param name="analyst">The analyst to use.</param>
        /// <param name="headers">The headers for the input data.</param>
        /// <param name="csv">The CSV that holds the input data.</param>
        /// <param name="outputLength">The length of the returned array.</param>
        /// <param name="skipOutput">True if the output should be skipped.</param>
        /// <returns>The encoded data, or null if the row should be discarded.</returns>
        public static double[] ExtractFields(EncogAnalyst analyst,
                                             CSVHeaders headers, ReadCSV csv,
                                             int outputLength, bool skipOutput)
        {
            var result = new double[outputLength];
            var cursor = 0;

            foreach (AnalystField field in analyst.Script.Normalize.NormalizedFields)
            {
                // Ignored fields contribute nothing; output fields are
                // optionally skipped by the caller.
                if (field.Action == NormalizationAction.Ignore)
                {
                    continue;
                }
                if (skipOutput && field.Output)
                {
                    continue;
                }

                var columnIndex = headers.Find(field.Name);
                var raw = csv.Get(columnIndex);

                if (raw.Length == 0 || raw.Equals("?"))
                {
                    // Missing value: delegate to the configured handler.
                    var missingHandler = analyst.Script.Normalize.MissingValues;
                    var substitutes = missingHandler.HandleMissing(analyst, field);

                    // A null substitute means the entire row must be discarded.
                    if (substitutes == null)
                    {
                        return null;
                    }

                    // Copy the substitute values in place of the missing ones.
                    foreach (var value in substitutes)
                    {
                        result[cursor++] = value;
                    }
                }
                else if (field.Action == NormalizationAction.Normalize)
                {
                    // Numeric field: parse and scale into the normalized range.
                    result[cursor++] = field.Normalize(csv.Format.Parse(raw.Trim()));
                }
                else
                {
                    // Class field: encode into one or more numeric outputs.
                    foreach (var encoded in field.Encode(raw.Trim()))
                    {
                        result[cursor++] = encoded;
                    }
                }
            }

            return result;
        }
 /// <summary>
 /// Construct the randomize command.
 /// </summary>
 ///
 /// <param name="analyst">The analyst to use.</param>
 public CmdRandomize(EncogAnalyst analyst) : base(analyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
Example #49
0
        /// <summary>
        ///     Perform the analysis: two passes over the CSV (min/max, then
        ///     standard deviation), class-qualification filtering, a merge with
        ///     any pre-existing field definitions, and finally publication of
        ///     the resulting DataField array onto the target analyst's script.
        /// </summary>
        /// <param name="target">The Encog analyst object to analyze.</param>
        public void Process(EncogAnalyst target)
        {
            int count = 0;
            CSVFormat csvFormat = ConvertStringConst
                .ConvertToCSVFormat(_format);
            var csv = new ReadCSV(_filename, _headers, csvFormat);

            // pass one, calculate the min/max
            while (csv.Next())
            {
                // Field objects are created lazily from the first data row.
                if (_fields == null)
                {
                    GenerateFields(csv);
                }

                for (int i = 0; i < csv.ColumnCount; i++)
                {
                    if (_fields != null)
                    {
                        _fields[i].Analyze1(csv.Get(i));
                    }
                }
                count++;
            }

            if (count == 0)
            {
                throw new AnalystError("Can't analyze file, it is empty.");
            }

            if (_fields != null)
            {
                foreach (AnalyzedField field in _fields)
                {
                    field.CompletePass1();
                }
            }

            csv.Close();

            // pass two, standard deviation
            csv = new ReadCSV(_filename, _headers, csvFormat);
           
            while (csv.Next())
            {
                for (int i = 0; i < csv.ColumnCount; i++)
                {
                    if (_fields != null)
                    {
                        _fields[i].Analyze2(csv.Get(i));
                    }
                }
            }


            if (_fields != null)
            {
                foreach (AnalyzedField field in _fields)
                {
                    field.CompletePass2();
                }
            }

            csv.Close();

            // Which data types may form classes is configured as a
            // comma-ish list such as "int,real,string"; default is empty.
            String str = _script.Properties.GetPropertyString(
                ScriptProperties.SetupConfigAllowedClasses) ?? "";

            bool allowInt = str.Contains("int");
            bool allowReal = str.Contains("real")
                             || str.Contains("double");
            bool allowString = str.Contains("string");


            // remove any classes that did not qualify
            foreach (AnalyzedField field  in  _fields)
            {
                if (field.Class)
                {
                    if (!allowInt && field.Integer)
                    {
                        field.Class = false;
                    }

                    if (!allowString && (!field.Integer && !field.Real))
                    {
                        field.Class = false;
                    }

                    if (!allowReal && field.Real && !field.Integer)
                    {
                        field.Class = false;
                    }
                }
            }

            // merge with existing
            // Only merge when the target script already has the same number of
            // fields; names (and matching class-member names) are carried over.
            if ((target.Script.Fields != null)
                && (_fields.Length == target.Script.Fields.Length))
            {
                for (int i = 0; i < _fields.Length; i++)
                {
                    // copy the old field name
                    _fields[i].Name = target.Script.Fields[i].Name;

                    if (_fields[i].Class)
                    {
                        IList<AnalystClassItem> t = _fields[i].AnalyzedClassMembers;
                        IList<AnalystClassItem> s = target.Script.Fields[i].ClassMembers;

                        if (s.Count == t.Count)
                        {
                            for (int j = 0; j < s.Count; j++)
                            {
                                if (t[j].Code.Equals(s[j].Code))
                                {
                                    t[j].Name = s[j].Name;
                                }
                            }
                        }
                    }
                }
            }

            // now copy the fields
            var df = new DataField[_fields.Length];

            for (int i_4 = 0; i_4 < df.Length; i_4++)
            {
                df[i_4] = _fields[i_4].FinalizeField();
            }

            target.Script.Fields = df;
        }
Example #50
0
 /// <summary>
 ///     Construct the train command.
 /// </summary>
 /// <param name="analyst">The analyst to use.</param>
 public CmdTrain(EncogAnalyst analyst)
     : base(analyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
Example #51
0
 /// <summary>
 ///     Construct the create command.
 /// </summary>
 /// <param name="theAnalyst">The analyst to use.</param>
 public CmdCreate(EncogAnalyst theAnalyst) : base(theAnalyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
 /// <summary>
 /// Construct the report.
 /// </summary>
 ///
 /// <param name="theAnalyst">The analyst to use.</param>
 public AnalystReport(EncogAnalyst theAnalyst)
 {
     // Keep a reference to the analyst; the report is generated from it.
     _analyst = theAnalyst;
 }
Example #53
0
 /// <summary>
 ///     Construct the code command. (Original comment said "generate
 ///     command", which does not match this type's name.)
 /// </summary>
 /// <param name="analyst">The analyst to use.</param>
 public CmdCode(EncogAnalyst analyst)
     : base(analyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
Example #54
0
 public double[] HandleMissing(EncogAnalyst analyst, AnalystField stat)
 {
     return null;
 }
 /// <summary>
 ///     Construct an evaluate raw command.
 /// </summary>
 /// <param name="analyst">The analyst object to use.</param>
 public CmdEvaluateRaw(EncogAnalyst analyst)
     : base(analyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
Example #56
0
File: Cmd.cs Project: neismit/emds
 /// <summary>
 /// Construct this command.
 /// </summary>
 ///
 /// <param name="theAnalyst">The analyst that this command belongs to.</param>
 protected Cmd(EncogAnalyst theAnalyst)
 {
     // Cache the analyst's script and properties so subclasses can read
     // configuration without re-dereferencing the analyst each time.
     _analyst = theAnalyst;
     _script = _analyst.Script;
     _properties = _script.Properties;
 }
 /// <summary>
 /// Construct the normalize command.
 /// </summary>
 ///
 /// <param name="theAnalyst">The analyst to use.</param>
 public CmdNormalize(EncogAnalyst theAnalyst)
     : base(theAnalyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
Example #58
0
 /// <summary>
 /// Construct the segregate command.
 /// </summary>
 ///
 /// <param name="analyst">The analyst to use.</param>
 public CmdSegregate(EncogAnalyst analyst)
     : base(analyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
 /// <summary>
 /// Construct the cluster command.
 /// </summary>
 ///
 /// <param name="analyst">The analyst object to use.</param>
 public CmdCluster(EncogAnalyst analyst) : base(analyst)
 {
     // No state beyond what the base Cmd constructor captures.
 }
        /// <summary>
        /// Analyze the file: record the input settings, read the headers, reset
        /// each normalized field, and build the time-series helper.
        /// </summary>
        ///
        /// <param name="inputFilename">The input file.</param>
        /// <param name="expectInputHeaders">True, if input headers are present.</param>
        /// <param name="inputFormat">The format.</param>
        /// <param name="theAnalyst">The analyst to use.</param>
        public void Analyze(FileInfo inputFilename,
                            bool expectInputHeaders, CSVFormat inputFormat,
                            EncogAnalyst theAnalyst)
        {
            InputFilename = inputFilename;
            InputFormat = inputFormat;
            ExpectInputHeaders = expectInputHeaders;
            _analyst = theAnalyst;
            Analyzed = true;

            // Headers are read directly from the file here, unlike the
            // InputHeadings-based variants elsewhere in this file.
            _analystHeaders = new CSVHeaders(inputFilename, expectInputHeaders,
                                            inputFormat);


            // Reset per-field normalization state before a new pass.
            foreach (AnalystField field  in  _analyst.Script.Normalize.NormalizedFields)
            {
                field.Init();
            }

            // NOTE(review): this uses a two-argument TimeSeriesUtil overload
            // (no includeOutput flag) — confirm it exists alongside the
            // three-argument constructor seen elsewhere in this file.
            _series = new TimeSeriesUtil(_analyst,
                                        _analystHeaders.Headers);
        }