Ejemplo n.º 1
0
        private static void Step3()
        {
            Console.WriteLine("Step 3: Normalize Training and Evaluation Data");

            // Build an analyst and let the wizard infer field metadata from the base file.
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(Config.BaseFile, true, AnalystFileFormat.DecpntComma);

            // One normalizer instance is reused for both data sets.
            var norm = new AnalystNormalizeCSV();
            norm.ProduceOutputHeaders = true;

            // Normalize the training data.
            norm.Analyze(Config.TrainingFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedTrainingFile);

            // Normalize the evaluation data with the same analyst statistics.
            norm.Analyze(Config.EvaluateFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedEvaluateFile);

            // Persist the analyst script for the later steps.
            analyst.Save(Config.AnalystFile);
        }
Ejemplo n.º 2
0
        /// <see cref="INetwork.Normalize"/>
        public INetwork Normalize()
        {
            // Analyst + wizard: derive the normalization script from the base file.
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(BaseFile, true, AnalystFileFormat.DecpntComma);

            // Apply per-field normalization settings before any output is produced.
            ConfigureNormalizedFields(analyst);

            var norm = new AnalystNormalizeCSV();
            norm.ProduceOutputHeaders = true;

            // Training data.
            norm.Analyze(TrainingFile, true, CSVFormat.English, analyst);
            norm.Normalize(NormalizedTrainingFile);

            // Evaluation data, normalized with the same statistics.
            norm.Analyze(EvaluateFile, true, CSVFormat.English, analyst);
            norm.Normalize(NormalizedEvaluateFile);

            // Persist the analyst script so later stages can reuse it.
            analyst.Save(AnalystFile);

            return this;
        }
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            Console.WriteLine("Running wizard...");

            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);

            // Target the second column ("field:1"); the source file has no headers.
            wizard.TargetFieldName = "field:1";
            wizard.Wizard(sourceCSV, false, AnalystFileFormat.DecpntComma);

            // Field 0 is the customer id: copy it through unchanged.
            analyst.Script.Normalize.NormalizedFields[0].Action = Encog.Util.Arrayutil.NormalizationAction.PassThrough;

            var norm = new AnalystNormalizeCSV();
            norm.Report = new ConsoleStatusReportable();

            Console.WriteLine("Analyze for normalize...");
            norm.Analyze(sourceCSV, false, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;

            Console.WriteLine("Normalize...");
            norm.Normalize(targetCSV);

            analyst.Save(scriptEGA);
        }
Ejemplo n.º 4
0
        public static void NormalizeData()
        {
            var rawFile    = new FileInfo(csvPath);
            var resultFile = new FileInfo(csvPathNormalized);

            // Let the wizard derive field statistics from the raw file.
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(rawFile, true, AnalystFileFormat.DecpntComma);

            var norm = new AnalystNormalizeCSV();
            norm.Analyze(rawFile, true, CSVFormat.English, analyst);

            // Scale every field into [-1, 1]; the "cls" column is copied verbatim.
            foreach (AnalystField field in analyst.Script.Normalize.NormalizedFields)
            {
                field.NormalizedHigh = 1;
                field.NormalizedLow  = -1;
                field.Action         = field.Name == "cls"
                    ? Encog.Util.Arrayutil.NormalizationAction.PassThrough
                    : Encog.Util.Arrayutil.NormalizationAction.Normalize;
            }

            norm.ProduceOutputHeaders = true;
            norm.Normalize(resultFile);
        }
Ejemplo n.º 5
0
        public void Execute(IExampleInterface app)
        {
            // Require exactly two arguments: input CSV and target CSV.
            if (app.Args.Length != 2)
            {
                Console.WriteLine(@"Note: This example assumes that headers are present in the CSV files.");
                Console.WriteLine(@"NormalizeFile [input file] [target file]");
                return;
            }

            var sourceFile = new FileInfo(app.Args[0]);
            var targetFile = new FileInfo(app.Args[1]);

            // Wizard-driven analysis of the source file.
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);

            DumpFieldInfo(analyst);

            // Normalize the source into the target, keeping a header row.
            var norm = new AnalystNormalizeCSV();
            norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            norm.Normalize(targetFile);

            EncogFramework.Instance.Shutdown();
        }
Ejemplo n.º 6
0
        /// <summary>
        /// Normalizes RawFile.csv into NormalizedFile.csv using a wizard-generated
        /// analyst script.
        /// </summary>
        static void EncogAnalyst()
        {
            var sourceFile = new FileInfo("RawFile.csv");
            var targetFile = new FileInfo("NormalizedFile.csv");
            var analyst    = new AnalystWizardHost();
            var wizard     = new AnalystWizard(analyst);

            wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);
            var norm = new AnalystNormalizeCSV();

            // FIX: the wizard was told the file uses a decimal point (DecpntComma),
            // but Analyze was called with CSVFormat.DecimalComma, which contradicts
            // that and would misparse the numeric columns. Use CSVFormat.English
            // (decimal point), matching the wizard call and the other examples.
            norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
            // Keep a header row in the output, consistent with the sibling examples.
            norm.ProduceOutputHeaders = true;
            norm.Normalize(targetFile);
        }
Ejemplo n.º 7
0
        static void Step3()
        {
            Console.WriteLine("Step 3: Normalize Training and Evaluation Data");

            // Analyst + wizard: build the normalization script from the base file.
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(Config.BaseFile, true, AnalystFileFormat.DecpntComma);

            // Per-field normalization actions; index order matches the CSV columns.
            var fields = analyst.Script.Normalize.NormalizedFields;
            fields[0].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral; // cylinders: multi-valued discrete
            fields[1].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // displacement: continuous
            fields[2].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // horsepower: continuous
            fields[3].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // weight: continuous
            fields[4].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // acceleration: continuous
            fields[5].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral; // year: multi-valued discrete
            fields[6].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral; // origin: multi-valued discrete
            fields[7].Action = Encog.Util.Arrayutil.NormalizationAction.Ignore;      // name: excluded from the network
            fields[8].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // mpg: continuous

            var norm = new AnalystNormalizeCSV();
            norm.ProduceOutputHeaders = true;

            // Training set.
            norm.Analyze(Config.TrainingFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedTrainingFile);

            // Evaluation set, normalized with the same statistics.
            norm.Analyze(Config.EvaluateFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedEvaluateFile);

            // Persist the analyst script for later steps.
            analyst.Save(Config.AnalystFile);
        }
Ejemplo n.º 8
0
        /// <summary>
        /// Analyze the data. This counts the records and prepares the data to be
        /// processed: every CSV row is normalized via the analyst and collected
        /// into the in-memory data set (_data).
        /// </summary>
        ///
        /// <param name="theAnalyst">The analyst to use.</param>
        /// <param name="inputFile">The input file to analyze.</param>
        /// <param name="headers">True, if the input file has headers.</param>
        /// <param name="format">The format of the input file.</param>
        public void Analyze(EncogAnalyst theAnalyst,
                            FileInfo inputFile, bool headers, CSVFormat format)
        {
            // Record the input configuration on this instance.
            InputFilename      = inputFile;
            ExpectInputHeaders = headers;
            InputFormat        = format;

            Analyzed = true;
            _analyst = theAnalyst;

            // Default the output format to the input format when not set explicitly.
            if (OutputFormat == null)
            {
                OutputFormat = InputFormat;
            }

            _data = new BasicMLDataSet();
            ResetStatus();
            int recordCount = 0;

            // Number of normalized columns the analyst produces per row.
            int outputLength = _analyst.DetermineTotalColumns();
            var csv          = new ReadCSV(InputFilename.ToString(),
                                           ExpectInputHeaders, InputFormat);

            ReadHeaders(csv);

            _analystHeaders = new CSVHeaders(InputHeadings);

            // Read every row, normalize its fields, and add it to _data.
            // The loop also honors a requested stop via ShouldStop().
            while (csv.Next() && !ShouldStop())
            {
                UpdateStatus(true);

                // NOTE(review): the second argument (1) appears to reserve one extra
                // column on the loaded row — confirm against LoadedRow's contract.
                var row = new LoadedRow(csv, 1);

                double[] inputArray = AnalystNormalizeCSV.ExtractFields(
                    _analyst, _analystHeaders, csv, outputLength, true);
                var input = new ClusterRow(inputArray, row);
                _data.Add(input);

                recordCount++;
            }
            RecordCount = recordCount;
            Count       = csv.ColumnCount;

            // NOTE(review): headers were already read before the loop; this second
            // ReadHeaders call after the data pass looks redundant — confirm intent.
            ReadHeaders(csv);
            csv.Close();
            ReportDone(true);
        }
Ejemplo n.º 9
0
        /// <summary>
        /// Normalizes the training, cross-validation, and evaluation files using a
        /// wizard-generated script with explicit per-field actions, then saves the
        /// analyst script.
        /// </summary>
        private void Normalize()
        {
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(Config.BaseFile, true, AnalystFileFormat.DecpntComma);

            // Per-field normalization actions; index order matches the CSV columns.
            var fields = analyst.Script.Normalize.NormalizedFields;
            fields[0].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral; // cylinders
            fields[1].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // displacement
            fields[2].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // horsepower
            fields[3].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // weight
            fields[4].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // acceleration
            fields[5].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral; // year
            fields[6].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral; // origin
            fields[7].Action = Encog.Util.Arrayutil.NormalizationAction.Ignore;      // name
            fields[8].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // mpg

            var norm = new AnalystNormalizeCSV();
            norm.ProduceOutputHeaders = true;

            // Training set.
            norm.Analyze(Config.TrainingFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedTrainingFile);

            // Cross-validation set.
            norm.Analyze(Config.CrossValidationFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedCrossValidationFile);

            // Evaluation set.
            norm.Analyze(Config.EvaluateFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedEvaluateFile);

            // Persist the analyst script.
            analyst.Save(Config.AnalystFile);
        }
Ejemplo n.º 10
0
        public void Normalize(FileInfo baseFile, FileInfo trainingFile, FileInfo normalizedTrainingFile, FileInfo evaluateFile, FileInfo normalizedEvaluateFile, FileInfo analystFile)
        {
            // Derive the normalization script from the base file.
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(baseFile, true, AnalystFileFormat.DecpntComma);

            var csvNormalizer = new AnalystNormalizeCSV();

            // Training set (a header row is emitted from this point on).
            csvNormalizer.Analyze(trainingFile, true, CSVFormat.English, analyst);
            csvNormalizer.ProduceOutputHeaders = true;
            csvNormalizer.Normalize(normalizedTrainingFile);

            // Evaluation set, using the same analyst statistics.
            csvNormalizer.Analyze(evaluateFile, true, CSVFormat.English, analyst);
            csvNormalizer.Normalize(normalizedEvaluateFile);

            // Persist the analyst script.
            analyst.Save(analystFile);
        }
Ejemplo n.º 11
0
        /// <summary>
        /// Normalizes the classification data so it is suitable for running the
        /// neural network.
        /// </summary>
        private static void Normalization()
        {
            // Build the normalization script from the classification base file.
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(Config.ClassificationFile, true, AnalystFileFormat.DecpntComma);

            var norm = new AnalystNormalizeCSV();

            // Training set (a header row is produced from this point on).
            norm.Analyze(Config.TrainingClassificationFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            norm.Normalize(Config.NormalizedTrainingClassificationFile);

            // Evaluation set, normalized with the same statistics.
            norm.Analyze(Config.EvaluateClassificationFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedEvaluateClassificationFile);

            // Persist the analyst script.
            analyst.Save(Config.AnalystClassificationFile);
        }
        /// <summary>
        /// Normalizes the regression data so it is suitable for running the
        /// neural network.
        /// </summary>
        private static void Normalization()
        {
            // Build the normalization script from the regression base file.
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(Config.RegressionFile, true, AnalystFileFormat.DecpntComma);

            // Per-field normalization actions; index order matches the CSV columns.
            var fields = analyst.Script.Normalize.NormalizedFields;
            fields[0].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral; // cylinders
            fields[1].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // displacement
            fields[2].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // horsepower
            fields[3].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // weight
            fields[4].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // acceleration
            fields[5].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral; // year
            fields[6].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral; // origin
            fields[7].Action = Encog.Util.Arrayutil.NormalizationAction.Ignore;      // name
            fields[8].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;   // mpg

            var norm = new AnalystNormalizeCSV();
            norm.ProduceOutputHeaders = true;

            // Training set.
            norm.Analyze(Config.TrainingRegressionFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedTrainingRegressionFile);

            // Evaluation set.
            norm.Analyze(Config.EvaluateRegressionFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedEvaluateRegressionFile);

            // Persist the analyst script.
            analyst.Save(Config.AnalystRegressionFile);
        }
Ejemplo n.º 13
0
        public void NormalizeData(string normalPath, string normalizedPath)
        {
            var rawFile    = new FileInfo(normalPath);
            var resultFile = new FileInfo(normalizedPath);

            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);

            // The target column differs between regression ("y") and classification ("cls").
            wizard.TargetFieldName = isRegression ? "y" : "cls";
            wizard.Wizard(rawFile, true, AnalystFileFormat.DecpntComma);

            var norm = new AnalystNormalizeCSV();
            norm.Analyze(rawFile, true, CSVFormat.English, analyst);

            // Scale every field into [0, 1]; the "cls" column is copied verbatim.
            foreach (AnalystField field in analyst.Script.Normalize.NormalizedFields)
            {
                field.NormalizedHigh = 1;
                field.NormalizedLow  = 0;
                field.Action         = field.Name == "cls"
                    ? Encog.Util.Arrayutil.NormalizationAction.PassThrough
                    : Encog.Util.Arrayutil.NormalizationAction.Normalize;
            }

            norm.ProduceOutputHeaders = true;
            norm.Normalize(resultFile);
        }
Ejemplo n.º 14
0
        /// <inheritdoc/>
        public override sealed bool ExecuteCommand(String args)
        {
            // Resolve the source and target filenames from the script properties.
            String sourceID = Prop.GetPropertyString(ScriptProperties.NormalizeConfigSourceFile);
            String targetID = Prop.GetPropertyString(ScriptProperties.NormalizeConfigTargetFile);

            FileInfo sourceFile = Script.ResolveFilename(sourceID);
            FileInfo targetFile = Script.ResolveFilename(targetID);

            EncogLogging.Log(EncogLogging.LevelDebug, "Beginning normalize");
            EncogLogging.Log(EncogLogging.LevelDebug, "source file:" + sourceID);
            EncogLogging.Log(EncogLogging.LevelDebug, "target file:" + targetID);

            // Record that the target file is produced by this command.
            Script.MarkGenerated(targetID);

            // Input/output CSV formats come from the script configuration.
            CSVFormat inputFormat  = Script.DetermineInputFormat(sourceID);
            CSVFormat outputFormat = Script.DetermineOutputFormat();

            // Prepare the normalizer and register it as the current quant task so
            // it can be cancelled and reported on.
            var norm = new AnalystNormalizeCSV {
                Script = Script
            };

            Analyst.CurrentQuantTask = norm;
            norm.Report = new AnalystReportBridge(Analyst);

            bool headers = Script.ExpectInputHeaders(sourceID);

            norm.Analyze(sourceFile, headers, inputFormat, Analyst);
            norm.OutputFormat         = outputFormat;
            norm.ProduceOutputHeaders = true;
            norm.Normalize(targetFile);

            // Clear the task registration and report whether a stop was requested.
            Analyst.CurrentQuantTask = null;
            return norm.ShouldStop();
        }
Ejemplo n.º 15
0
        private void Normalize(FileInfo BaseFile, FileInfo TrainingFile, FileInfo NormalizedTrainingFile, FileInfo EvaluateFile, FileInfo NormalizedEvaluateFile, FileInfo AnalystFile)
        {
            // Build the normalization script from the base file.
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(BaseFile, true, AnalystFileFormat.DecpntComma);

            var norm = new AnalystNormalizeCSV();

            // Training set (a header row is produced from this point on).
            norm.Analyze(TrainingFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            norm.Normalize(NormalizedTrainingFile);

            // Evaluation set, using the same analyst statistics.
            norm.Analyze(EvaluateFile, true, CSVFormat.English, analyst);
            norm.Normalize(NormalizedEvaluateFile);

            // Persist the analyst script.
            analyst.Save(AnalystFile);
        }
Ejemplo n.º 16
0
        public void Normalize(FileInfo baseFile, FileInfo trainingFile, FileInfo normalizedTrainingFile, FileInfo evaluateFile, FileInfo normalizedEvaluateFile, FileInfo analystFile)
        {
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(baseFile, true, AnalystFileFormat.DecpntComma);

            // Per-field normalization actions; index order matches the CSV columns.
            var fields = analyst.Script.Normalize.NormalizedFields;
            fields[0].Action = NormalizationAction.Equilateral; // cylinders
            fields[1].Action = NormalizationAction.Normalize;   // displacement
            fields[2].Action = NormalizationAction.Normalize;   // horsepower
            fields[3].Action = NormalizationAction.Normalize;   // weight
            fields[4].Action = NormalizationAction.Normalize;   // acceleration
            fields[5].Action = NormalizationAction.Equilateral; // year
            fields[6].Action = NormalizationAction.Equilateral; // origin
            fields[7].Action = NormalizationAction.Ignore;      // name
            fields[8].Action = NormalizationAction.Normalize;   // mpg

            var norm = new AnalystNormalizeCSV();

            // Training set (a header row is produced from this point on).
            norm.Analyze(trainingFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            norm.Normalize(normalizedTrainingFile);

            // Evaluation set, using the same analyst statistics.
            norm.Analyze(evaluateFile, true, CSVFormat.English, analyst);
            norm.Normalize(normalizedEvaluateFile);

            // Persist the analyst script.
            analyst.Save(analystFile);
        }
Ejemplo n.º 17
0
        /// <summary>
        /// Process the file: run each input row through the supplied model and
        /// write the row, with the model's (de-normalized) predictions filled in,
        /// back out as CSV.
        /// </summary>
        ///
        /// <param name="outputFile">The output file.</param>
        /// <param name="method">The method (model) to use for prediction.</param>
        public void Process(FileInfo outputFile, IMLMethod method)
        {
            var csv = new ReadCSV(InputFilename.ToString(),
                                  ExpectInputHeaders, Format);

            IMLData output;

            // Reset per-field state before de-normalizing any values.
            foreach (AnalystField field in _analyst.Script.Normalize.NormalizedFields)
            {
                field.Init();
            }

            int outputLength = _analyst.DetermineTotalInputFieldCount();

            StreamWriter tw = PrepareOutputFile(outputFile);

            ResetStatus();
            while (csv.Next())
            {
                UpdateStatus(false);
                // Load the row with _outputColumns extra slots for the predictions.
                var row = new LoadedRow(csv, _outputColumns);

                double[] inputArray = AnalystNormalizeCSV.ExtractFields(_analyst,
                                                                        _analystHeaders, csv, outputLength, true);
                // Time-series handling: fold the row into the series window.
                // NOTE(review): presumably _series.Process returns null until the
                // window is full, which the null check below relies on — confirm.
                if (_series.TotalDepth > 1)
                {
                    inputArray = _series.Process(inputArray);
                }

                if (inputArray != null)
                {
                    IMLData input = new BasicMLData(inputArray);

                    // Pure classifiers get a single-element output holding the
                    // class index; everything else is treated as regression.
                    if ((method is IMLClassification) &&
                        !(method is IMLRegression))
                    {
                        // classification only?
                        var tmp = new BasicMLData(1);
                        tmp[0] = ((IMLClassification)method).Classify(input);
                        output = tmp;
                    }
                    else
                    {
                        // regression
                        output = ((IMLRegression)method).Compute(input);
                    }

                    // index walks the row's columns, starting after the original
                    // file data; outputIndex walks the model's output vector.
                    int index       = _fileColumns;
                    int outputIndex = 0;


                    // Write each output field's prediction into the row.
                    foreach (AnalystField field  in  _analyst.Script.Normalize.NormalizedFields)
                    {
                        if (_analystHeaders.Find(field.Name) != -1)
                        {
                            if (field.Output)
                            {
                                if (field.Classify)
                                {
                                    // Classification: map the output columns back
                                    // to a class label (or a placeholder if none).
                                    ClassItem cls = field.DetermineClass(
                                        outputIndex, output);
                                    outputIndex += field.ColumnsNeeded;
                                    if (cls == null)
                                    {
                                        row.Data[index++] = "?Unknown?";
                                    }
                                    else
                                    {
                                        row.Data[index++] = cls.Name;
                                    }
                                }
                                else
                                {
                                    // Regression: de-normalize the raw prediction
                                    // and format it with the configured precision.
                                    double n = output[outputIndex++];
                                    n = field.DeNormalize(n);
                                    row.Data[index++] = Format
                                                        .Format(n, Precision);
                                }
                            }
                        }
                    }
                }

                WriteRow(tw, row);
            }
            ReportDone(false);
            tw.Close();
            csv.Close();
        }
Ejemplo n.º 18
0
        /// <summary> Predict network outputs. </summary>
        private void NetworkBtn_Click(object sender, EventArgs e)
        {
            if (networkSaveDataList.Count == 0)
            {
                MessageBox.Show("You must select a network file first. If you do not have one you can create one in the network creater window", "No Network File Error");
                return;
            }

            notIncluded.Clear();

            //Setup network.
            NeuralNetwork network;

            //Switch to set network type based on type stored in json file.
            switch (networkSaveDataList[selectedNetwork].NetworkType)
            {
            case "EncogNeuralNetwork":
            {
                network = new EncogNeuralNetwork();
                break;
            }

            case "EncogDeepNeuralNetwork":
            {
                network = new EncogDeepNeuralNetwork();
                break;
            }

            case "AccordNeuralNetwork":
            {
                network = new AccordNeuralNetwork();
                break;
            }

            default:
            {
                network = new AccordDeepNeuralNetwork();
                break;
            }
            }

            //Initialize variables
            info = new Data();
            string dataFile = networkSaveDataList[selectedNetwork].CsvFile;
            string path     = networkSaveDataList[selectedNetwork].Path;

            double[][] answers;

            if (!Directory.Exists(path + "normal"))
            {
                MessageBox.Show("You have no \"normal\" folder, please ensure that you are selecting the same csv as the one used in network creation. It may also be the case that you have moved the working directory of the csv file without also moving its dependant folders.", "File Access Error");
                return;
            }

            if (!Directory.Exists(path + "networks"))
            {
                MessageBox.Show("You have no \"networks\" folder, please ensure that you have created at least one network. It may also be the case that you have moved the working directory of the csv file without also moving its dependant folders.", "File Access Error");
                return;
            }

            //Load analyst from earlier.
            analyst = new EncogAnalyst();
            analyst.Load(new FileInfo(path + @"normal\normalizationData" + dataFile.Replace(".csv", ".ega")));

            FileInfo sourcefile = new FileInfo(path + dataFile);

            AnalystNormalizeCSV norm = new AnalystNormalizeCSV();

            //norm.InputHeadings = networkSaveDataList[selectedNetwork].Headings.ToArray();
            network.Load(networkSaveDataList[selectedNetwork].NetworkFile);

            List <bool> vb = new List <bool>();

            foreach (string heading in networkSaveDataList[selectedNetwork].InputHeadings)
            {
                vb.Add(false);
            }

            notIncluded.Add(vb);

            if (!csvBox.Checked)
            {
                string outString = "";

                for (int i = 0; i < items[selectedNetwork].Count; i++)
                {
                    notIncluded[0][i] = string.IsNullOrWhiteSpace(items[selectedNetwork][i]);

                    outString += items[selectedNetwork][i] + ",";
                }

                outString += ",";

                outString.Remove(outString.Length - 1);

                if (File.Exists(path + dataFile.Replace(".csv", "Temp.csv")))
                {
                    File.Delete(path + dataFile.Replace(".csv", "Temp.csv"));
                }

                using (var sw = new StreamWriter(path + dataFile.Replace(".csv", "Temp.csv"), true))
                {
                    for (int i = 0; i < networkSaveDataList[selectedNetwork].InputHeadings.Count; i++)
                    {
                        sw.Write(networkSaveDataList[selectedNetwork].InputHeadings[i] + ",");
                    }

                    for (int i = 0; i < networkSaveDataList[selectedNetwork].OutputHeadings.Count; i++)
                    {
                        if (i != networkSaveDataList[selectedNetwork].OutputHeadings.Count - 1)
                        {
                            sw.Write(networkSaveDataList[selectedNetwork].OutputHeadings[i] + ",");
                        }
                        else
                        {
                            sw.Write(networkSaveDataList[selectedNetwork].OutputHeadings[i]);
                        }
                    }

                    sw.WriteLine();
                    sw.WriteLine(outString);
                }

                var inputFile     = new FileInfo(path + dataFile.Replace(".csv", "Temp.csv"));
                var inputFileNorm = new FileInfo(path + @"normal\" + dataFile.Replace(".csv", "TempNormal.csv"));
                norm.Analyze(inputFile, true, CSVFormat.English, analyst);

                try
                {
                    norm.Normalize(inputFileNorm);
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message + "\n\nThe inputs you have entered do not correctly follow the current normalisation data. This may because you " +
                                    "have entered values above or below the currently stored highs and lows for numbers. It is also possible you have " +
                                    "entered a textual word that has not been used before. Finally it is possible that you are have not used the correct " +
                                    "case for a word as they are case sensitive.", "Normalisation Failure");
                    return;
                }

                info    = info.ReturnInfo(path + @"normal\" + dataFile.Replace(".csv", "TempNormal.csv"), networkSaveDataList[selectedNetwork].OutputHeadings, 0, true);
                answers = Data.CreateArray <double>(1, info.OutputNumber);
            }
            else
            {
                //Reset paths.
                path = null; dataFile = null;

                //Get csv path.
                openFileDialog1.Filter = "csv files (*.csv)|*.csv";
                DialogResult dialogResult = openFileDialog1.ShowDialog();

                //Exit function if file selection was cancelled.
                if (dialogResult == DialogResult.Cancel)
                {
                    output.Text += "Cancelling File Selection. . .\n";
                    return;
                }

                //Check the file is .csv format.
                if (!openFileDialog1.SafeFileName.EndsWith(".csv"))
                {
                    MessageBox.Show("The file you have selected is not in the correct format (.csv)", "File Access Error");
                    return;
                }

                //Setup paths from file.
                dataFile = openFileDialog1.SafeFileName;
                path     = openFileDialog1.FileName.Replace(openFileDialog1.SafeFileName, "");
                var sourceFile = new FileInfo(openFileDialog1.FileName);
                var normalFile = new FileInfo(openFileDialog1.FileName.Replace(openFileDialog1.SafeFileName, @"normal\" + openFileDialog1.SafeFileName.Replace(".csv", "Normal.csv")));

                output.Text += "Loading File: " + dataFile + ". . .\n";

                Data.Normalise(sourceFile, normalFile, path, dataFile, networkSaveDataList[selectedNetwork].OutputHeadings.Count, false);

                norm.Analyze(sourceFile, true, CSVFormat.English, analyst);

                try
                {
                    norm.Normalize(normalFile);
                }
                catch (Exception ex)
                {
                    MessageBox.Show(ex.Message + "\n\nThe inputs you have entered do not correctly follow the current normalisation data. This may because you " +
                                    "have entered values above or below the currently stored highs and lows for numbers. It is also possible you have " +
                                    "entered a textual word that has not been used before. Finally it is possible that you are have not used the correct" +
                                    "case for a word as they are case sensitive.", "Normalisation Failure");
                    return;
                }

                info = info.ReturnInfo(path + @"normal\" + dataFile.Replace(".csv", "Normal.csv"), networkSaveDataList[selectedNetwork].OutputHeadings, 0, true);

                answers = Data.CreateArray <double>(info.InputData.Length, networkSaveDataList[selectedNetwork].OutputHeadings.Count);

                //Get length of CSV, Inputs and Outputs.
                using (var reader = new StreamReader(openFileDialog1.FileName))
                {
                    reader.ReadLine();
                    int lineNo = 0;

                    while (!reader.EndOfStream)
                    {
                        var line   = reader.ReadLine();
                        var values = line.Split(',');

                        if (lineNo > 0)
                        {
                            notIncluded.Add(new List <bool>(vb));
                        }

                        for (int i = 0; i < notIncluded[0].Count; i++)
                        {
                            notIncluded[lineNo][i] = string.IsNullOrWhiteSpace(values[i]);
                        }

                        lineNo++;
                    }
                }
            }

            //Compute network predictions.
            for (int i = 0; i < answers.Length; i++)
            {
                //Switch to set network type based on type stored in json file.
                switch (networkSaveDataList[selectedNetwork].NetworkType)
                {
                case "AccordNeuralNetwork":
                {
                    answers[i] = network.AccordNetwork.Compute(info.InputData[i]);
                    break;
                }

                case "AccordDeepNeuralNetwork":
                {
                    answers[i] = network.DeepAccordNetwork.Compute(info.InputData[i]);
                    break;
                }

                default:
                {
                    network.EncogNetwork.Compute(info.InputData[i], answers[i]);
                    break;
                }
                }
            }

            info.Prediction = answers;

            //Output answers to text box.
            output.Text += network.Display(answers, analyst, networkSaveDataList[selectedNetwork].OutputHeadings, path + @"normal\" + dataFile.Replace(".csv", "Normal.csv"));
        }
Ejemplo n.º 19
0
        /// <summary>
        /// Creates normalised training and testing CSV files and saves the Encog analyst file.
        /// </summary>
        /// <param name="baseFile">Source CSV analysed by the wizard to determine field ranges.</param>
        /// <param name="trainingFile">Raw training CSV.</param>
        /// <param name="testingFile">Raw testing CSV.</param>
        /// <param name="normalisedTrainingFile">Destination for the normalised training data.</param>
        /// <param name="normalisedTestingFile">Destination for the normalised testing data.</param>
        /// <param name="analystFile">Destination for the saved analyst (.ega) configuration.</param>
        public void Normalise(FileInfo baseFile, FileInfo trainingFile, FileInfo testingFile, FileInfo normalisedTrainingFile, FileInfo normalisedTestingFile, FileInfo analystFile)
        {
            try
            {
                //Analyst holds the normalisation script produced by the wizard.
                var analyst = new EncogAnalyst();

                //Wizard analyses the base file to configure the fields.
                var wizard = new AnalystWizard(analyst);
                wizard.Wizard(baseFile, true, AnalystFileFormat.DecpntComma);

                //Inputs (wine-quality dataset): fields 0-10 (fixed acidity, volatile acidity,
                //citric acid, residual sugar, chlorides, free/total sulfur dioxide, density,
                //pH, sulphates, alcohol) are numeric and range-normalised.
                for (int i = 0; i <= 10; i++)
                {
                    analyst.Script.Normalize.NormalizedFields[i].Action = NormalizationAction.Normalize;
                }

                //Output: field 11 (quality) is a discrete class, encoded equilaterally.
                analyst.Script.Normalize.NormalizedFields[11].Action = NormalizationAction.Equilateral;

                var norm = new AnalystNormalizeCSV
                {
                    ProduceOutputHeaders = true
                };

                //Normalise the training data.
                norm.Analyze(trainingFile, true, CSVFormat.English, analyst);
                norm.Normalize(normalisedTrainingFile);

                //Normalise the testing data.
                //BUG FIX: previously analysed Config.TestingFile instead of the testingFile
                //parameter, silently ignoring the caller's argument.
                norm.Analyze(testingFile, true, CSVFormat.English, analyst);
                norm.Normalize(normalisedTestingFile);

                //Save the analyst configuration for later denormalisation.
                analyst.Save(analystFile);
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                throw;
            }
        }
Ejemplo n.º 20
0
        /// <summary>
        /// Normalises a CSV file with the Encog analyst and returns the heading names of
        /// either the trailing output columns (inputs == false) or the leading input
        /// columns (inputs == true). Saves the analyst configuration for later reuse.
        /// </summary>
        /// <param name="sourceFile">Raw CSV to analyse and normalise.</param>
        /// <param name="normalFile">Destination file for the normalised data.</param>
        /// <param name="path">Directory containing the source file; the .ega file is saved under its "normal" subfolder.</param>
        /// <param name="dataFile">File name of the source CSV, used to name the saved .ega file.</param>
        /// <param name="outputs">Number of trailing columns treated as network outputs.</param>
        /// <param name="inputs">False to return output headings, true to return input headings.</param>
        /// <returns>The requested heading names, or null if validation or normalisation failed.</returns>
        public static List <string> Normalise(FileInfo sourceFile, FileInfo normalFile, string path, string dataFile, int outputs, bool inputs)
        {
            List <string> titles = new List <string>();

            //Setup analyst with original csv.
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);

            //Additional validation to check that the file is not empty.
            try
            {
                wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);
            }
            catch (Exception)
            {
                MessageBox.Show("The file you have selected is empty.", "File Access Error");
                return(null);
            }

            //At least one column must remain as an input.
            if (analyst.Script.Fields.Length - 1 < outputs)
            {
                MessageBox.Show("You have specified " + outputs + " outputs but there are only " + analyst.Script.Fields.Length + " headings in the file.", "Too Many Outputs Error");
                return(null);
            }

            //Setup max and min range for normalization.
            foreach (AnalystField field in analyst.Script.Normalize.NormalizedFields)
            {
                field.NormalizedHigh = 1;
                field.NormalizedLow  = -1;
            }

            //Handle missing values with Encog's NegateMissing strategy instead of failing.
            analyst.Script.Normalize.MissingValues = new NegateMissing();

            //Normalization.
            var norm = new AnalystNormalizeCSV();

            norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;

            try
            {
                norm.Normalize(normalFile);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message + "\n\nUnknown application failure, please report this bug with a screenshot of" +
                                " the message to the relevant engineer.", "Normalisation Failure");
                return(null);
            }

            if (!inputs)
            {
                //Collect the last 'outputs' field names, in file order.
                //(Original condition "i + Fields.Length > Fields.Length" simplifies to "i > 0".)
                for (int i = outputs; i > 0; i--)
                {
                    titles.Add(analyst.Script.Fields[analyst.Script.Fields.Length - i].Name);
                }
            }
            else
            {
                //Collect every field name except the trailing output columns.
                for (int i = 0; i < analyst.Script.Fields.Length - outputs; i++)
                {
                    titles.Add(analyst.Script.Fields[i].Name);
                }
            }

            //Save configuration to be used later.
            analyst.Save(new FileInfo(path + @"\normal\" + "normalizationData" + dataFile.Replace(".csv", ".ega")));

            return(titles);
        }
Ejemplo n.º 21
0
        /// <summary>
        /// Button to save current network settings: normalises the chosen CSV, builds the
        /// selected network type, trains it, and persists both the network file and its
        /// metadata record (appended to networks.json).
        /// </summary>
        private void NetworkSaveBtn_Click(object sender, EventArgs e)
        {
            //Setup paths and lists.
            string dataFile = "";
            string path     = "";

            //Normalise file.
            List <string> outputTitles = GetFile(false, ref path, ref dataFile, (int)(outputsUpDown.Value));
            List <string> inputTitles  = new List <string>();

            if (outputTitles == null)
            {
                return;
            }

            //Also rejects a null Text (the original "?.Length == 0" silently accepted null).
            if (string.IsNullOrEmpty(nameTxt.Text))
            {
                MessageBox.Show("You must give your network a name.", "No Network Name.");
                return;
            }

            //Reject names that cannot be used as a file name.
            char[] dirty = Path.GetInvalidFileNameChars();

            foreach (char c in dirty)
            {
                if (nameTxt.Text.Contains(c.ToString()))
                {
                    MessageBox.Show("Your name contains invalid characters. Error at character " + c, "Invalid Network Name.");
                    return;
                }
            }

            //False when percentage split, true when cross validation.
            const bool validation = true;

            //Setup dataset.
            Data info = new Data();

            info = info.ReturnInfo(path + @"normal\" + dataFile.Replace(".csv", "Normal.csv"), outputTitles, 0, validation);

            //Load analyst saved during normalisation.
            var analyst = new EncogAnalyst();

            analyst.Load(new FileInfo(path + @"normal\" + "normalizationData" + dataFile.Replace(".csv", "") + ".ega"));

            var sourcefile = new FileInfo(path + dataFile);

            var norm = new AnalystNormalizeCSV();

            norm.Analyze(sourcefile, true, CSVFormat.English, analyst);

            //Store input headings from analyst (every field except the trailing outputs).
            for (int i = 0; i < analyst.Script.Fields.Length - outputTitles.Count; i++)
            {
                inputTitles.Add(analyst.Script.Fields[i].Name);
            }

            //Setup network.
            NeuralNetwork network;

            //Decide which network implementation to use from the UI selection.
            if (radBtnEncog.Checked)
            {
                if (!deepNetworkBox.Checked)
                {
                    output.Text += "\n@Encog:\n\n";
                    network      = new EncogNeuralNetwork();
                }
                else
                {
                    output.Text += "\n@Deep Encog:\n\n";
                    network      = new EncogDeepNeuralNetwork();
                }
            }
            else
            {
                if (!deepNetworkBox.Checked)
                {
                    output.Text += "\n@Accord:\n\n";
                    network      = new AccordNeuralNetwork();
                }
                else
                {
                    output.Text += "\n@Deep Accord:\n\n";
                    network      = new AccordDeepNeuralNetwork();
                }
            }

            //Create network.
            network.Create(info.InputNumber, layersBar.Value, neuronsBar.Value, info.OutputNumber);

            //Ensure the networks directory exists before saving.
            if (!Directory.Exists(path + "networks"))
            {
                Directory.CreateDirectory(path + "networks");
            }

            string networkFile = path + @"networks\" + nameTxt.Text;

            //Guard against the Windows 260-character path limit.
            if (networkFile.Length < 260)
            {
                network.Save(networkFile);
            }
            else
            {
                MessageBox.Show("Your file name or total file path is too long for the windows limit of 260.", "Invalid Network Name Size.");
                return;
            }

            //Save network data to object.
            NetworkSaveData networkSave = new NetworkSaveData
            {
                NetworkFile    = networkFile,
                NetworkType    = network.GetType().ToString().Replace("ENP1.", ""), //"ENP1" must change to reflect solution name (name.) if ever changed.
                //BUG FIX: previously stored "normalizationData.ega", which is not the file
                //actually written — the analyst file is named after the data file
                //(e.g. normalizationDataFoo.ega), matching the Load call above.
                AnalystFile    = path + @"normal\" + "normalizationData" + dataFile.Replace(".csv", ".ega"),
                CsvFile        = dataFile,
                Path           = path,
                InputHeadings  = inputTitles,
                OutputHeadings = outputTitles,
                Name           = nameTxt.Text,

                //Train network and record the final training inaccuracy.
                Inaccuracy = Math.Round(network.Train(info, (float)(learningRateBar.Value) / 10, (float)(momentumBar.Value) / 10), 5).ToString()
            };

            //Append the network record to the json index file (one serialized object per line).
            using (var sw = new StreamWriter(path + @"networks\networks.json", true))
            {
                using (var jsw = new JsonTextWriter(sw))
                {
                    JsonSerializer serial = new JsonSerializer();
                    serial.Serialize(jsw, networkSave);
                    sw.WriteLine();
                }
            }

            output.Text += "Successfully saved network " + nameTxt.Text + " with a training inaccuracy of: " + networkSave.Inaccuracy;
        }
Ejemplo n.º 22
0
        /// <summary>
        /// Button to test network settings: normalises the chosen CSV, builds the selected
        /// network type and evaluates it either by repeated random cross-validation pools
        /// or by a single percentage split, writing results to the output text box.
        /// </summary>
        private void NetworkBtn_Click(object sender, EventArgs e)
        {
            //Setup paths and lists.
            string dataFile = "";
            string path     = "";

            //Normalise file.
            List <string> outputTitles = GetFile(false, ref path, ref dataFile, (int)(outputsUpDown.Value));

            if (outputTitles == null)
            {
                return;
            }

            //False when percentage split, true when cross validation.
            bool validation = !radBtnSplit.Checked;

            //Setup training dataset.
            Data info = new Data(); info = info.ReturnInfo(path + @"normal\" + dataFile.Replace(".csv", "Normal.csv"), outputTitles, sampleBar.Value, validation);

            if (info == null)
            {
                return;
            }

            //Load analyst from earlier (saved during normalisation, named after the data file).
            var analyst = new EncogAnalyst();

            analyst.Load(new FileInfo(path + @"normal\" + "normalizationData" + dataFile.Replace(".csv", "") + ".ega"));

            var sourcefile = new FileInfo(path + dataFile);

            var norm = new AnalystNormalizeCSV();

            norm.Analyze(sourcefile, true, CSVFormat.English, analyst);

            //Setup network.
            NeuralNetwork network;

            //If to decide which network type to use.
            if (radBtnEncog.Checked)
            {
                if (!deepNetworkBox.Checked)
                {
                    output.Text += "\n@Encog:\n\n";
                    network      = new EncogNeuralNetwork();
                }
                else
                {
                    output.Text += "\n@Deep Encog:\n\n";
                    network      = new EncogDeepNeuralNetwork();
                }
            }
            else
            {
                if (!deepNetworkBox.Checked)
                {
                    output.Text += "\n@Accord:\n\n";
                    network      = new AccordNeuralNetwork();
                }
                else
                {
                    output.Text += "\n@Deep Accord:\n\n";
                    network      = new AccordDeepNeuralNetwork();
                }
            }

            //If using cross-validation.
            if (validation)
            {
                //Setup pool size: sampleBar percent of the dataset rows, truncated to int.
                decimal tmpPoolSize = info.InputData.Length * decimal.Divide(sampleBar.Value, 100);
                int     poolSize    = (int)tmpPoolSize;

                //Keep the full dataset aside; info's arrays are reused for the training pool.
                double[][] arrayIn = info.InputData; double[][] arrayOut = info.OutputData;

                info.InputData  = Data.CreateArray <double>(poolSize, info.InputData[0].Length);
                info.OutputData = Data.CreateArray <double>(poolSize, info.OutputData[0].Length);

                //Random to randomise pool selection.
                Random rnd = new Random();

                int[] index = new int[poolSize];

                //Radomly allocate items for training pool.
                //NOTE(review): rnd.Next may return the same index more than once, so a row
                //can be copied into the pool multiple times — confirm this is intended.
                for (int j = 0; j < info.InputData.Length; j++)
                {
                    index[j]          = rnd.Next(0, arrayIn.Length);
                    info.InputData[j] = arrayIn[index[j]]; info.OutputData[j] = arrayOut[index[j]];
                }

                //Remove pooled items from array.
                arrayIn  = Data.RemoveFromArray(arrayIn, index, poolSize);
                arrayOut = Data.RemoveFromArray(arrayOut, index, poolSize);

                //Start allocating sample pools: one train/evaluate round per remaining pool.
                for (int i = 0; i <= arrayIn.Length / poolSize; i++)
                {
                    info.InputDataSample  = Data.CreateArray <double>(poolSize, arrayIn[0].Length);
                    info.OutputDataSample = Data.CreateArray <double>(poolSize, arrayOut[0].Length);

                    //Radomly allocate items for [i] sample pool.
                    for (int j = 0; j < info.InputDataSample.Length; j++)
                    {
                        index[j] = rnd.Next(0, arrayIn.Length);
                        info.InputDataSample[j] = arrayIn[index[j]]; info.OutputDataSample[j] = arrayOut[index[j]];
                    }

                    arrayIn  = Data.RemoveFromArray(arrayIn, index, poolSize);
                    arrayOut = Data.RemoveFromArray(arrayOut, index, poolSize);

                    //Create network (fresh network each round, trained on the training pool).
                    network.Create(info.InputNumber, layersBar.Value, neuronsBar.Value, info.OutputNumber);
                    output.Text += "Training complete with an inaccuracy of: " + Math.Round(network.Train(info, (float)(learningRateBar.Value) / 10, (float)(momentumBar.Value) / 10), 10) + "\n\n";

                    //NOTE(review): answer rows are sized by the input width, not the output
                    //count — verify this is wide enough for the network's outputs.
                    double[][] answers = Data.CreateArray <double>(poolSize, info.InputData[0].Length);

                    //Compute outputs.
                    for (int j = 0; j < answers.Length; j++)
                    {
                        if (radBtnAccord.Checked)
                        {
                            if (!deepNetworkBox.Checked)
                            {
                                answers[j] = network.AccordNetwork.Compute(info.InputDataSample[j]);
                            }
                            else if (deepNetworkBox.Checked)
                            {
                                answers[j] = network.DeepAccordNetwork.Compute(info.InputDataSample[j]);
                            }
                        }
                        else
                        {
                            //Encog writes the outputs into the pre-allocated answers row.
                            network.EncogNetwork.Compute(info.InputDataSample[j], answers[j]);
                        }
                    }

                    //Display network.
                    output.Text += network.Display(answers, analyst, info, outputTitles, path + @"normal\" + dataFile.Replace(".csv", "Normal.csv"));
                }
            }
            //Else percentage split.
            else
            {
                //Create network.
                network.Create(info.InputNumber, layersBar.Value, neuronsBar.Value, info.OutputNumber);
                output.Text += "Training complete with an inaccuracy of: " + Math.Round(network.Train(info, (float)(learningRateBar.Value) / 10, (float)(momentumBar.Value) / 10), 5) + "\n\n";

                //NOTE(review): answer rows sized by input width here as well — see note above.
                double[][] answers = Data.CreateArray <double>(info.InputDataSample.Length, info.InputDataSample[0].Length);

                //Compute outputs.
                for (int i = 0; i < answers.Length; i++)
                {
                    if (radBtnAccord.Checked)
                    {
                        if (!deepNetworkBox.Checked)
                        {
                            answers[i] = network.AccordNetwork.Compute(info.InputDataSample[i]);
                        }
                        else if (deepNetworkBox.Checked)
                        {
                            answers[i] = network.DeepAccordNetwork.Compute(info.InputDataSample[i]);
                        }
                    }
                    else
                    {
                        network.EncogNetwork.Compute(info.InputDataSample[i], answers[i]);
                    }
                }

                //Display network.
                output.Text += network.Display(answers, analyst, info, outputTitles, path + @"normal\" + dataFile.Replace(".csv", "Normal.csv"));
            }
        }
Ejemplo n.º 23
0
        /// <summary> Maximum number of training epochs; read from console input in Main. </summary>
        public static int epoch;           // to be provided by user



        private static void Main(string[] args)
        {
            Console.WriteLine("Press 1 for selecting  Regresssion and 2 for classification");
            int whatToperform = int.Parse(Console.ReadLine());


            Console.WriteLine("Please provide number of layers assuming first layer is input layer and last is output layer");
            int numberOfLayers = int.Parse(Console.ReadLine());



            var network = new BasicNetwork();

            for (int i = 1; i <= numberOfLayers; i++)
            {
                Console.WriteLine("Please select the activation function for layer- {0}", i);      // Activtion function Input
                Console.WriteLine("Press 1 for ActivationBiPolar ");
                Console.WriteLine("Press 2 for ActivationCompetitive  ");
                Console.WriteLine("Press 3 for ActivationLinear ");
                Console.WriteLine("Press 4 for ActivationLog  ");
                Console.WriteLine("Press 5 for ActivationSigmoid  ");
                Console.WriteLine("Press 6 for ActivationSoftMax ");
                Console.WriteLine("Press 7 for ActivationTanh  ");
                Console.WriteLine("Press 8 for default  ");
                int whichActivation = int.Parse(Console.ReadLine());


                Console.WriteLine("Please the bias for this layer : 1 for True and 0 for false ");       // Bias input
                int whichBias = int.Parse(Console.ReadLine());



                Console.WriteLine("Please the enter the neuron count for this layer");       // Neuron count input
                int countNeuron = int.Parse(Console.ReadLine());


                switch (whichActivation)       // building the network
                {
                case 1: network.AddLayer(new BasicLayer(new ActivationBiPolar(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 2: network.AddLayer(new BasicLayer(new ActivationCompetitive(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 3: network.AddLayer(new BasicLayer(new ActivationLinear(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 4: network.AddLayer(new BasicLayer(new ActivationLOG(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 5: network.AddLayer(new BasicLayer(new ActivationSigmoid(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 6: network.AddLayer(new BasicLayer(new ActivationSoftMax(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 7: network.AddLayer(new BasicLayer(new ActivationTANH(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 8: network.AddLayer(new BasicLayer(null, Convert.ToBoolean(whichBias), countNeuron));
                    break;

                default:
                    Console.WriteLine("Wrong data entered - Application will stop   ");
                    break;
                }
            }

            network.Structure.FinalizeStructure();      //complete the newtork settings
            network.Reset();

            Console.WriteLine("Please enter the learning rate ");       // learning rate input
            learningRate = double.Parse(Console.ReadLine());

            Console.WriteLine("Please enter the momentum value");       // Momentum input
            UMomentum = double.Parse(Console.ReadLine());

            Console.WriteLine("Please the enter the number of epochs ");       // epoch input
            epoch = int.Parse(Console.ReadLine());


            // For Regression we do this piece of code

            if (whatToperform == 1)
            {
                var sourceFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\regression_train.csv");                                               //fetch training file
                var targetFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\Result\khicharNormClassificationTrainData.csv"); //save train normalized file


                var sourceFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\regression_train.csv");                                              //fetch testing file
                var targetFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\Result\khicharNormClassificationTestData.csv"); //Save test normalized file

                //Analyst
                var analyst = new EncogAnalyst();
                //Wizard
                var wizard = new AnalystWizard(analyst);
                wizard.TargetFieldName = "y";  //set the output variable  for regression . it is not necessary when using mutliple attributes
                wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);

                //norm for Training
                var norm = new AnalystNormalizeCSV();
                norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
                norm.ProduceOutputHeaders = true;
                norm.Normalize(targetFile);

                //norm for testing

                norm.Analyze(sourceFileTest, true, CSVFormat.English, analyst);
                norm.Normalize(targetFileTest);


                analyst.Save(new FileInfo("stt.ega"));



                var trainingset1 = EncogUtility.LoadCSV2Memory(targetFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);


                var train = new Backpropagation(network, trainingset1);
                int epo   = 1;
                do
                {
                    train.Iteration();
                    Console.WriteLine(@"Epoch #" + epo + @" Error:" + train.Error);
                    epo++;

                    if (epo > epoch)
                    {
                        break;
                    }
                } while (train.Error > 0.05);


                var evaluationSet = EncogUtility.LoadCSV2Memory(targetFileTest.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);



                List <Tuple <double, double> > inputExcel = new List <Tuple <double, double> >();

                foreach (var item in evaluationSet)
                {
                    var output = network.Compute(item.Input);

                    inputExcel.Add(new Tuple <double, double>(item.Input[0], output[0]));
                }


                PlotRegressionTest(inputExcel);

                Console.WriteLine("----------------Execution over - check the Regression output excel ------------------------------------");
                Console.ReadKey();
                EncogFramework.Instance.Shutdown();
            }

            //End of Regression


            //     For classification we do this piece of code

            if (whatToperform == 2)
            {
                // Source CSV used to drive the wizard and produce the normalized training set.
                // NOTE(review): hard-coded absolute user paths — should come from configuration.
                var sourceFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\data.circles.test.1000.csv");
                var targetFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\result\khicharNormClassificationTrainData.csv");


                // Source CSV for the evaluation (test) set.
                // NOTE(review): this is the SAME file as sourceFile above, so the network is
                // trained and evaluated on identical data — the reported accuracy is not an
                // out-of-sample measure. The train source was probably meant to be a separate file.
                var sourceFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\data.circles.test.1000.csv");
                var targetFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\result\khicharNormClassificationTestData.csv");

                // Analyst holds the normalization statistics gathered by the wizard.
                var analyst = new EncogAnalyst();
                // Wizard infers field types/ranges from the source CSV (headers present).
                var wizard = new AnalystWizard(analyst);
                wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);

                // Normalize the training data using the analyst's statistics.
                var norm = new AnalystNormalizeCSV();
                norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
                norm.ProduceOutputHeaders = true;
                norm.Normalize(targetFile);

                // Normalize the test data with the SAME statistics so both sets share one scale.
                // ProduceOutputHeaders is still true from the assignment above.
                norm.Analyze(sourceFileTest, true, CSVFormat.English, analyst);
                norm.Normalize(targetFileTest);

                // Persist the analyst script (normalization stats) for later reuse.
                // NOTE(review): relative path — lands in the process working directory.
                analyst.Save(new FileInfo("stt.ega"));

                // Load the normalized training data into memory (headers skipped, no significance column).
                var trainingset1 = EncogUtility.LoadCSV2Memory(targetFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);

                // Train with plain backpropagation until the error drops below 0.05
                // or the configured epoch cap is reached, whichever comes first.
                var train = new Backpropagation(network, trainingset1);
                int epo   = 1;
                do
                {
                    train.Iteration();
                    Console.WriteLine(@"Epoch #" + epo + @" Error:" + train.Error);
                    epo++;

                    if (epo > epoch)
                    {
                        break;
                    }
                } while (train.Error > 0.05);

                // Load the normalized evaluation set for accuracy measurement.
                var evaluationSet = EncogUtility.LoadCSV2Memory(targetFileTest.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);

                int count        = 0;
                int CorrectCount = 0;

                // (input0, input1, predicted class) triples exported for plotting.
                List <Tuple <double, double, double> > inputExcel = new List <Tuple <double, double, double> >();

                foreach (var item in evaluationSet)
                {
                    count++;
                    var output = network.Compute(item.Input);

                    // NOTE(review): index [2] assumes the class field is the third normalized
                    // field in the analyst script — confirm against the CSV column layout.
                    int    classCount        = analyst.Script.Normalize.NormalizedFields[2].Classes.Count;
                    double normalizationHigh = analyst.Script.Normalize.NormalizedFields[2].NormalizedHigh;
                    double normalizationLow  = analyst.Script.Normalize.NormalizedFields[2].NormalizedLow;

                    // Decode the equilateral-encoded network output back to a class index,
                    // for both the prediction and the ideal (expected) value.
                    var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);
                    var predictedClassInt = eq.Decode(output);
                    var predictedClass    = analyst.Script.Normalize.NormalizedFields[2].Classes[predictedClassInt].Name;
                    var idealClassInt     = eq.Decode(item.Ideal);
                    var idealClass        = analyst.Script.Normalize.NormalizedFields[2].Classes[idealClassInt].Name;

                    if (predictedClassInt == idealClassInt)
                    {
                        CorrectCount++;
                    }

                    // NOTE(review): Convert.ToDouble(predictedClass) assumes class names are
                    // numeric strings; a non-numeric label would throw FormatException here.
                    inputExcel.Add(new Tuple <double, double, double>(item.Input[0], item.Input[1], Convert.ToDouble(predictedClass)));
                }


                // Report raw counts and percentage accuracy, then export the plot data.
                Console.WriteLine("Total Test Count : {0}", count);
                Console.WriteLine("Total Correct Prediction Count : {0}", CorrectCount);
                Console.WriteLine("% Success : {0}", ((CorrectCount * 100.0) / count));
                PlotClassificationTest(inputExcel);

                Console.WriteLine("----------------Execution over - check the Classification output excel ------------------------------------");
                Console.ReadKey();
                EncogFramework.Instance.Shutdown();
            }        //End of classification
        }