Example #1
        private static void Step3()
        {
            Console.WriteLine("Step 3: Normalize Training and Evaluation Data");

            //Analyst
            var analyst = new EncogAnalyst();

            //Wizard
            var wizard = new AnalystWizard(analyst);

            wizard.Wizard(Config.BaseFile, true, AnalystFileFormat.DecpntComma);

            //Norm for Training
            var norm = new AnalystNormalizeCSV();

            norm.Analyze(Config.TrainingFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            norm.Normalize(Config.NormalizedTrainingFile);

            //Norm for evaluation
            norm.Analyze(Config.EvaluateFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            norm.Normalize(Config.NormalizedEvaluateFile);

            analyst.Save(Config.AnalystFile);
        }
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            Console.WriteLine("Running wizard...");
            var analyst = new EncogAnalyst();

            var wizard = new AnalystWizard(analyst);

            wizard.TargetFieldName = "field:1";
            wizard.Wizard(sourceCSV,
                          false, AnalystFileFormat.DecpntComma);


            // customer id
            analyst.Script.Normalize.NormalizedFields[0].Action = Encog.Util.Arrayutil.NormalizationAction.PassThrough;

            var norm = new AnalystNormalizeCSV();

            norm.Report = new ConsoleStatusReportable();
            Console.WriteLine("Analyze for normalize...");
            norm.Analyze(sourceCSV, false, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            Console.WriteLine("Normalize...");
            norm.Normalize(targetCSV);
            analyst.Save(scriptEGA);
        }
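Several of the snippets on this page reference a Config helper that is not shown. A minimal sketch of what such a class might look like, assuming it simply exposes the file locations used above (the paths are hypothetical; other snippets extend it with further entries such as CrossValidationFile in the same way):

        using System.IO;

        public static class Config
        {
            // Hypothetical locations; point these at your own data directory.
            public static readonly FileInfo BaseFile               = new FileInfo(@"data\base.csv");
            public static readonly FileInfo TrainingFile           = new FileInfo(@"data\training.csv");
            public static readonly FileInfo EvaluateFile           = new FileInfo(@"data\evaluate.csv");
            public static readonly FileInfo NormalizedTrainingFile = new FileInfo(@"data\training_norm.csv");
            public static readonly FileInfo NormalizedEvaluateFile = new FileInfo(@"data\evaluate_norm.csv");
            public static readonly FileInfo AnalystFile            = new FileInfo(@"data\analyst.ega");
        }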
Example #3
        /// <see cref="INetwork.Normalize"/>
        public INetwork Normalize()
        {
            //Analyst
            var analyst = new EncogAnalyst();

            //Wizard
            var wizard = new AnalystWizard(analyst);

            wizard.Wizard(BaseFile, true, AnalystFileFormat.DecpntComma);

            // Configure normalized fields
            ConfigureNormalizedFields(analyst);

            //Norm for Training
            var norm = new AnalystNormalizeCSV();

            norm.Analyze(TrainingFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            norm.Normalize(NormalizedTrainingFile);

            //Norm of evaluation
            norm.Analyze(EvaluateFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            norm.Normalize(NormalizedEvaluateFile);

            //Save the analyst file
            analyst.Save(AnalystFile);

            return(this);
        }
Example #4
        public void Wizard(AnalystGoal goal, WizardMethodType methodType, bool headers)
        {
            EncogAnalyst.MaxIteration = MaxIterations;
            var wiz = new AnalystWizard(EncogAnalyst)
            {
                Goal = goal, MethodType = methodType, EvidenceSegements = 3
            };

            wiz.Wizard(_rawFile, headers, FileFormat);

            EncogAnalyst.Save(_analystFile);
            EncogAnalyst.Load(_analystFile);
        }
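A possible call into the helper above, using the goal and method enums that appear in the other examples on this page (a sketch, not part of the original snippet):

            // Classification with a feed-forward network; the raw CSV has a header row.
            Wizard(AnalystGoal.Classification, WizardMethodType.FeedForward, true);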
Example #5
        static void Step3()
        {
            Console.WriteLine("Step 3: Normalize Training and Evaluation Data");

            //Analyst
            var analyst = new EncogAnalyst();


            //Wizard
            var wizard = new AnalystWizard(analyst);

            wizard.Wizard(Config.BaseFile, true, AnalystFileFormat.DecpntComma);
            //Cylinders
            analyst.Script.Normalize.NormalizedFields[0].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral;     // multivalued discrete field type
            //displacement
            analyst.Script.Normalize.NormalizedFields[1].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;       // continuous
            //HorsePower
            analyst.Script.Normalize.NormalizedFields[2].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //weight
            analyst.Script.Normalize.NormalizedFields[3].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //Acceleration
            analyst.Script.Normalize.NormalizedFields[4].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //year
            analyst.Script.Normalize.NormalizedFields[5].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral;     // multivalued discrete field type
            //Origin
            analyst.Script.Normalize.NormalizedFields[6].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral;     // multivalued discrete field type
            //Name
            analyst.Script.Normalize.NormalizedFields[7].Action = Encog.Util.Arrayutil.NormalizationAction.Ignore;          // will not be used
            //mpg
            analyst.Script.Normalize.NormalizedFields[8].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;       // continuous



            //Norm for Training
            var norm = new AnalystNormalizeCSV();

            norm.ProduceOutputHeaders = true;

            norm.Analyze(Config.TrainingFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedTrainingFile);

            //Norm of evaluation
            norm.Analyze(Config.EvaluateFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedEvaluateFile);

            //save the analyst file
            analyst.Save(Config.AnalystFile);
        }
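After a Step3 like the one above, the normalized CSV and the saved .ega file are typically fed into training; a condensed sketch along the lines of Example #14 below (the layer sizes are hypothetical and must match the normalized column counts):

        static void Step4()
        {
            // Reload the analyst script saved by Step3 (needed later to decode predictions).
            var analyst = new EncogAnalyst();
            analyst.Load(Config.AnalystFile);

            // Hypothetical topology: input size depends on the equilateral encodings produced above.
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 25));
            network.AddLayer(new BasicLayer(new ActivationTANH(), true, 10));
            network.AddLayer(new BasicLayer(new ActivationTANH(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            // Train on the normalized training file with backpropagation.
            var trainingSet = EncogUtility.LoadCSV2Memory(Config.NormalizedTrainingFile.ToString(),
                network.InputCount, network.OutputCount, true, CSVFormat.English, false);
            var train = new Backpropagation(network, trainingSet);

            int epoch = 1;
            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01 && epoch <= 100);
        }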
Example #6
        private void Normalize()
        {
            //Analyst
            var analyst = new EncogAnalyst();
            //Wizard
            var wizard = new AnalystWizard(analyst);

            wizard.Wizard(Config.BaseFile, true, AnalystFileFormat.DecpntComma);
            //Cylinders
            analyst.Script.Normalize.NormalizedFields[0].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral;
            //displacement
            analyst.Script.Normalize.NormalizedFields[1].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //HorsePower
            analyst.Script.Normalize.NormalizedFields[2].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //weight
            analyst.Script.Normalize.NormalizedFields[3].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //Acceleration
            analyst.Script.Normalize.NormalizedFields[4].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //year
            analyst.Script.Normalize.NormalizedFields[5].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral;
            //Origin
            analyst.Script.Normalize.NormalizedFields[6].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral;
            //Name
            analyst.Script.Normalize.NormalizedFields[7].Action = Encog.Util.Arrayutil.NormalizationAction.Ignore;
            //mpg
            analyst.Script.Normalize.NormalizedFields[8].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //Norm for Training
            var norm = new AnalystNormalizeCSV();

            norm.ProduceOutputHeaders = true;
            norm.Analyze(Config.TrainingFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedTrainingFile);

            //Norm of Cross Validation
            norm.Analyze(Config.CrossValidationFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedCrossValidationFile);

            //Norm of evaluation
            norm.Analyze(Config.EvaluateFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedEvaluateFile);



            //save the analyst file
            analyst.Save(Config.AnalystFile);
        }
Example #7
        public void Normalize(FileInfo baseFile, FileInfo trainingFile, FileInfo normalizedTrainingFile, FileInfo evaluateFile, FileInfo normalizedEvaluateFile, FileInfo analystFile)
        {
            var encogAnalyst  = new EncogAnalyst();
            var analystWizard = new AnalystWizard(encogAnalyst);

            analystWizard.Wizard(baseFile, true, AnalystFileFormat.DecpntComma);

            var normalizer = new AnalystNormalizeCSV();

            normalizer.Analyze(trainingFile, true, CSVFormat.English, encogAnalyst);
            normalizer.ProduceOutputHeaders = true;
            normalizer.Normalize(normalizedTrainingFile);

            normalizer.Analyze(evaluateFile, true, CSVFormat.English, encogAnalyst);
            normalizer.Normalize(normalizedEvaluateFile);

            encogAnalyst.Save(analystFile);
        }
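A possible call site for the helper above; the six files follow the parameter order of the method and the names are hypothetical:

            Normalize(new FileInfo(@"data\auto.csv"),
                      new FileInfo(@"data\auto_train.csv"),
                      new FileInfo(@"data\auto_train_norm.csv"),
                      new FileInfo(@"data\auto_eval.csv"),
                      new FileInfo(@"data\auto_eval_norm.csv"),
                      new FileInfo(@"data\auto.ega"));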
Example #8
        /// <summary>
        /// Method responsible for normalizing the data so it is suitable for running the neural network
        /// </summary>
        private static void Normalization()
        {
            var analyst = new EncogAnalyst();

            var wizard = new AnalystWizard(analyst);

            wizard.Wizard(Config.ClassificationFile, true, AnalystFileFormat.DecpntComma);

            var norm = new AnalystNormalizeCSV();

            norm.Analyze(Config.TrainingClassificationFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            norm.Normalize(Config.NormalizedTrainingClassificationFile);

            norm.Analyze(Config.EvaluateClassificationFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedEvaluateClassificationFile);

            analyst.Save(Config.AnalystClassificationFile);
        }
        /// <summary>
        /// Method responsible for normalizing the data so it is suitable for running the neural network
        /// </summary>
        private static void Normalization()
        {
            var analyst = new EncogAnalyst();

            //Wizard
            var wizard = new AnalystWizard(analyst);

            wizard.Wizard(Config.RegressionFile, true, AnalystFileFormat.DecpntComma);

            //Cylinders
            analyst.Script.Normalize.NormalizedFields[0].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral;
            //displacement
            analyst.Script.Normalize.NormalizedFields[1].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //HorsePower
            analyst.Script.Normalize.NormalizedFields[2].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //Weight
            analyst.Script.Normalize.NormalizedFields[3].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //Acceleration
            analyst.Script.Normalize.NormalizedFields[4].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
            //Year
            analyst.Script.Normalize.NormalizedFields[5].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral;
            //Origin
            analyst.Script.Normalize.NormalizedFields[6].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral;
            //Name
            analyst.Script.Normalize.NormalizedFields[7].Action = Encog.Util.Arrayutil.NormalizationAction.Ignore;
            //MPG
            analyst.Script.Normalize.NormalizedFields[8].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;

            var norm = new AnalystNormalizeCSV();

            norm.ProduceOutputHeaders = true;

            norm.Analyze(Config.TrainingRegressionFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedTrainingRegressionFile);

            //Norm of evaluation
            norm.Analyze(Config.EvaluateRegressionFile, true, CSVFormat.English, analyst);
            norm.Normalize(Config.NormalizedEvaluateRegressionFile);

            //save the analyst file
            analyst.Save(Config.AnalystRegressionFile);
        }
        public void IrisExample(FileInfo dir)
        {
            Console.WriteLine("Starting Iris dataset example.");
            var      url         = new Uri(IRIS_SOURCE);
            FileInfo analystFile = FileUtil.CombinePath(dir, "iris.ega");
            FileInfo rawFile     = FileUtil.CombinePath(dir, "iris_raw.csv");

            var encog = new EncogAnalyst();

            encog.AddAnalystListener(new ConsoleAnalystListener());
            var wiz = new AnalystWizard(encog);

            wiz.Wizard(url, analystFile, rawFile, false, AnalystFileFormat.DecpntComma);
            encog.Save(analystFile);

            encog.ExecuteTask("task-full");

            var report = new AnalystReport(encog);

            report.ProduceReport(FileUtil.CombinePath(dir, "report.html"));
        }
Example #11
        private void Normalize(FileInfo BaseFile, FileInfo TrainingFile, FileInfo NormalizedTrainingFile, FileInfo EvaluateFile, FileInfo NormalizedEvaluateFile, FileInfo AnalystFile)
        {
            //Analyst
            var analyst = new EncogAnalyst();
            //Wizard
            var wizard = new AnalystWizard(analyst);

            wizard.Wizard(BaseFile, true, AnalystFileFormat.DecpntComma);

            //Norm for Training
            var norm = new AnalystNormalizeCSV();

            norm.Analyze(TrainingFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;
            norm.Normalize(NormalizedTrainingFile);

            //Norm of evaluation
            norm.Analyze(EvaluateFile, true, CSVFormat.English, analyst);
            norm.Normalize(NormalizedEvaluateFile);

            //save the analyst file
            analyst.Save(AnalystFile);
        }
Example #12
        public void Normalize(FileInfo baseFile, FileInfo trainingFile, FileInfo normalizedTrainingFile, FileInfo evaluateFile, FileInfo normalizedEvaluateFile, FileInfo analystFile)
        {
            var encogAnalyst  = new EncogAnalyst();
            var analystWizard = new AnalystWizard(encogAnalyst);

            analystWizard.Wizard(baseFile, true, AnalystFileFormat.DecpntComma);

            //Cylinders:
            encogAnalyst.Script.Normalize.NormalizedFields[0].Action = NormalizationAction.Equilateral;
            //Displacement:
            encogAnalyst.Script.Normalize.NormalizedFields[1].Action = NormalizationAction.Normalize;
            //Horsepower:
            encogAnalyst.Script.Normalize.NormalizedFields[2].Action = NormalizationAction.Normalize;
            //Weight:
            encogAnalyst.Script.Normalize.NormalizedFields[3].Action = NormalizationAction.Normalize;
            //Acceleration:
            encogAnalyst.Script.Normalize.NormalizedFields[4].Action = NormalizationAction.Normalize;
            //Year:
            encogAnalyst.Script.Normalize.NormalizedFields[5].Action = NormalizationAction.Equilateral;
            //Origin:
            encogAnalyst.Script.Normalize.NormalizedFields[6].Action = NormalizationAction.Equilateral;
            //Name:
            encogAnalyst.Script.Normalize.NormalizedFields[7].Action = NormalizationAction.Ignore;
            //Mpg:
            encogAnalyst.Script.Normalize.NormalizedFields[8].Action = NormalizationAction.Normalize;

            var normalizer = new AnalystNormalizeCSV();

            normalizer.Analyze(trainingFile, true, CSVFormat.English, encogAnalyst);
            normalizer.ProduceOutputHeaders = true;
            normalizer.Normalize(normalizedTrainingFile);

            normalizer.Analyze(evaluateFile, true, CSVFormat.English, encogAnalyst);
            normalizer.Normalize(normalizedEvaluateFile);

            encogAnalyst.Save(analystFile);
        }
        public void ForestExample(FileInfo dir)
        {
            Console.WriteLine("Starting forest cover dataset example.");
            var      url         = new Uri(FOREST_SOURCE);
            FileInfo analystFile = FileUtil.CombinePath(dir, "forest.ega");
            FileInfo rawFile     = FileUtil.CombinePath(dir, "forest_raw.csv");

            var encog = new EncogAnalyst();

            encog.AddAnalystListener(new ConsoleAnalystListener());
            var wiz = new AnalystWizard(encog);

            wiz.TaskBalance = true;

            wiz.Wizard(url, analystFile, rawFile, false, AnalystFileFormat.DecpntComma);
            encog.MaxIteration = 300;
            encog.ExecuteTask("task-full");

            encog.Save(analystFile);

            var report = new AnalystReport(encog);

            report.ProduceReport(FileUtil.CombinePath(dir, "report.html"));
        }
Example #14
        public static int epoch;           // to be provided by user



        private static void Main(string[] args)
        {
            Console.WriteLine("Press 1 for selecting  Regresssion and 2 for classification");
            int whatToperform = int.Parse(Console.ReadLine());


            Console.WriteLine("Please provide number of layers assuming first layer is input layer and last is output layer");
            int numberOfLayers = int.Parse(Console.ReadLine());



            var network = new BasicNetwork();

            for (int i = 1; i <= numberOfLayers; i++)
            {
                Console.WriteLine("Please select the activation function for layer- {0}", i);      // Activtion function Input
                Console.WriteLine("Press 1 for ActivationBiPolar ");
                Console.WriteLine("Press 2 for ActivationCompetitive  ");
                Console.WriteLine("Press 3 for ActivationLinear ");
                Console.WriteLine("Press 4 for ActivationLog  ");
                Console.WriteLine("Press 5 for ActivationSigmoid  ");
                Console.WriteLine("Press 6 for ActivationSoftMax ");
                Console.WriteLine("Press 7 for ActivationTanh  ");
                Console.WriteLine("Press 8 for default  ");
                int whichActivation = int.Parse(Console.ReadLine());


                Console.WriteLine("Please the bias for this layer : 1 for True and 0 for false ");       // Bias input
                int whichBias = int.Parse(Console.ReadLine());



                Console.WriteLine("Please the enter the neuron count for this layer");       // Neuron count input
                int countNeuron = int.Parse(Console.ReadLine());


                switch (whichActivation)       // building the network
                {
                case 1: network.AddLayer(new BasicLayer(new ActivationBiPolar(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 2: network.AddLayer(new BasicLayer(new ActivationCompetitive(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 3: network.AddLayer(new BasicLayer(new ActivationLinear(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 4: network.AddLayer(new BasicLayer(new ActivationLOG(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 5: network.AddLayer(new BasicLayer(new ActivationSigmoid(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 6: network.AddLayer(new BasicLayer(new ActivationSoftMax(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 7: network.AddLayer(new BasicLayer(new ActivationTANH(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 8: network.AddLayer(new BasicLayer(null, Convert.ToBoolean(whichBias), countNeuron));
                    break;

                default:
                    Console.WriteLine("Wrong data entered - the application will stop.");
                    return;
                }
            }

            network.Structure.FinalizeStructure();      //complete the network setup
            network.Reset();

            Console.WriteLine("Please enter the learning rate ");       // learning rate input
            learningRate = double.Parse(Console.ReadLine());

            Console.WriteLine("Please enter the momentum value");       // Momentum input
            UMomentum = double.Parse(Console.ReadLine());

            Console.WriteLine("Please the enter the number of epochs ");       // epoch input
            epoch = int.Parse(Console.ReadLine());


            // For Regression we do this piece of code

            if (whatToperform == 1)
            {
                var sourceFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\regression_train.csv");                                               //fetch training file
                var targetFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\Result\khicharNormClassificationTrainData.csv"); //save train normalized file


                var sourceFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\regression_train.csv");                                              //fetch testing file
                var targetFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\Result\khicharNormClassificationTestData.csv"); //Save test normalized file

                //Analyst
                var analyst = new EncogAnalyst();
                //Wizard
                var wizard = new AnalystWizard(analyst);
                wizard.TargetFieldName = "y";  //set the output variable  for regression . it is not necessary when using mutliple attributes
                wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);

                //norm for Training
                var norm = new AnalystNormalizeCSV();
                norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
                norm.ProduceOutputHeaders = true;
                norm.Normalize(targetFile);

                //norm for testing

                norm.Analyze(sourceFileTest, true, CSVFormat.English, analyst);
                norm.Normalize(targetFileTest);


                analyst.Save(new FileInfo("stt.ega"));



                var trainingset1 = EncogUtility.LoadCSV2Memory(targetFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);


                var train = new Backpropagation(network, trainingset1);
                int epo   = 1;
                do
                {
                    train.Iteration();
                    Console.WriteLine(@"Epoch #" + epo + @" Error:" + train.Error);
                    epo++;

                    if (epo > epoch)
                    {
                        break;
                    }
                } while (train.Error > 0.05);


                var evaluationSet = EncogUtility.LoadCSV2Memory(targetFileTest.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);



                List <Tuple <double, double> > inputExcel = new List <Tuple <double, double> >();

                foreach (var item in evaluationSet)
                {
                    var output = network.Compute(item.Input);

                    inputExcel.Add(new Tuple <double, double>(item.Input[0], output[0]));
                }


                PlotRegressionTest(inputExcel);

                Console.WriteLine("----------------Execution over - check the Regression output excel ------------------------------------");
                Console.ReadKey();
                EncogFramework.Instance.Shutdown();
            }

            //End of Regression


            //     For classification we do this piece of code

            if (whatToperform == 2)
            {
                // fetch train file
                var sourceFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\data.circles.test.1000.csv");
                var targetFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\result\khicharNormClassificationTrainData.csv");


                ///fetch test file
                var sourceFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\data.circles.test.1000.csv");
                var targetFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\result\khicharNormClassificationTestData.csv");

                //Analyst
                var analyst = new EncogAnalyst();
                //Wizard
                var wizard = new AnalystWizard(analyst);
                wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);

                //norm for Training
                var norm = new AnalystNormalizeCSV();
                norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
                norm.ProduceOutputHeaders = true;
                norm.Normalize(targetFile);

                //norm for testing

                norm.Analyze(sourceFileTest, true, CSVFormat.English, analyst);
                norm.Normalize(targetFileTest);


                analyst.Save(new FileInfo("stt.ega"));


                var trainingset1 = EncogUtility.LoadCSV2Memory(targetFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);


                var train = new Backpropagation(network, trainingset1);
                int epo   = 1;
                do
                {
                    train.Iteration();
                    Console.WriteLine(@"Epoch #" + epo + @" Error:" + train.Error);
                    epo++;

                    if (epo > epoch)
                    {
                        break;
                    }
                } while (train.Error > 0.05);


                var evaluationSet = EncogUtility.LoadCSV2Memory(targetFileTest.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);

                int count        = 0;
                int CorrectCount = 0;

                List <Tuple <double, double, double> > inputExcel = new List <Tuple <double, double, double> >();

                foreach (var item in evaluationSet)
                {
                    count++;
                    var output = network.Compute(item.Input);
                    // int classCount = analyst.Script.Normalize.NormalizedFields[4].Classes.Count;

                    int    classCount        = analyst.Script.Normalize.NormalizedFields[2].Classes.Count;
                    double normalizationHigh = analyst.Script.Normalize.NormalizedFields[2].NormalizedHigh;
                    double normalizationLow  = analyst.Script.Normalize.NormalizedFields[2].NormalizedLow;

                    var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);
                    var predictedClassInt = eq.Decode(output);
                    var predictedClass    = analyst.Script.Normalize.NormalizedFields[2].Classes[predictedClassInt].Name;
                    var idealClassInt     = eq.Decode(item.Ideal);
                    var idealClass        = analyst.Script.Normalize.NormalizedFields[2].Classes[idealClassInt].Name;

                    if (predictedClassInt == idealClassInt)
                    {
                        CorrectCount++;
                    }


                    inputExcel.Add(new Tuple <double, double, double>(item.Input[0], item.Input[1], Convert.ToDouble(predictedClass)));
                }



                Console.WriteLine("Total Test Count : {0}", count);
                Console.WriteLine("Total Correct Prediction Count : {0}", CorrectCount);
                Console.WriteLine("% Success : {0}", ((CorrectCount * 100.0) / count));
                PlotClassificationTest(inputExcel);

                Console.WriteLine("----------------Execution over - check the Classification output excel ------------------------------------");
                Console.ReadKey();
                EncogFramework.Instance.Shutdown();
            }        //End of classification
        }
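Example #14 reads learningRate and UMomentum but never shows their declarations, and the two-argument Backpropagation constructor it uses ignores them. Presumably they are static fields alongside epoch, and the collected values would only take effect through the rate/momentum overload; a sketch under that assumption:

        public static double learningRate;   // to be provided by user
        public static double UMomentum;      // to be provided by user

        // ...inside Main, after the values have been read from the console:
        // var train = new Backpropagation(network, trainingset1, learningRate, UMomentum);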
Example #15
        /// <summary>
        /// Create normalised training and testing files, together with the Encog analyst file
        /// </summary>
        /// <param name="baseFile"></param>
        /// <param name="trainingFile"></param>
        /// <param name="testingFile"></param>
        /// <param name="normalisedTrainingFile"></param>
        /// <param name="normalisedTestingFile"></param>
        /// <param name="analystFile"></param>
        public void Normalise(FileInfo baseFile, FileInfo trainingFile, FileInfo testingFile, FileInfo normalisedTrainingFile, FileInfo normalisedTestingFile, FileInfo analystFile)
        {
            try
            {
                //Analyst
                var analyst = new EncogAnalyst();

                //Wizard
                var wizard = new AnalystWizard(analyst);
                wizard.Wizard(baseFile, true, AnalystFileFormat.DecpntComma);

                // inputs
                // 1 - fixed acidity
                analyst.Script.Normalize.NormalizedFields[0].Action = NormalizationAction.Normalize;  // continuous
                // 2 - volatile acidity
                analyst.Script.Normalize.NormalizedFields[1].Action = NormalizationAction.Normalize;  // continuous
                // 3 - citric acid
                analyst.Script.Normalize.NormalizedFields[2].Action = NormalizationAction.Normalize;  // continuous
                // 4 - residual sugar
                analyst.Script.Normalize.NormalizedFields[3].Action = NormalizationAction.Normalize;  // continuous
                // 5 - chlorides
                analyst.Script.Normalize.NormalizedFields[4].Action = NormalizationAction.Normalize;  // continuous
                // 6 - free sulfur dioxide
                analyst.Script.Normalize.NormalizedFields[5].Action = NormalizationAction.Normalize;  // discrete
                // 7 - total sulfur dioxide
                analyst.Script.Normalize.NormalizedFields[6].Action = NormalizationAction.Normalize;  // discrete
                // 8 - density
                analyst.Script.Normalize.NormalizedFields[7].Action = NormalizationAction.Normalize;  // continuous
                // 9 - pH
                analyst.Script.Normalize.NormalizedFields[8].Action = NormalizationAction.Normalize;  // continuous
                // 10 - sulphates
                analyst.Script.Normalize.NormalizedFields[9].Action = NormalizationAction.Normalize;  // continuous
                // 11 - alcohol
                analyst.Script.Normalize.NormalizedFields[10].Action = NormalizationAction.Normalize; // continuous
                // output
                // 12 - quality
                analyst.Script.Normalize.NormalizedFields[11].Action = NormalizationAction.Equilateral; // discrete


                //Norm for Training
                var norm = new AnalystNormalizeCSV
                {
                    ProduceOutputHeaders = true
                };

                norm.Analyze(trainingFile, true, CSVFormat.English, analyst);
                norm.Normalize(normalisedTrainingFile);

                //Norm for testing
                norm.Analyze(testingFile, true, CSVFormat.English, analyst);
                norm.Normalize(normalisedTestingFile);

                //save the analyst file
                analyst.Save(analystFile);
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                throw;
            }
        }
Example #16
        private void WizardCommand()
        {
            String targetCSVFile = _cmd.Args[0];

            String egaFile = FileUtil.ForceExtension(targetCSVFile, "ega");

            var  analyst = new EncogAnalyst();
            var  wizard  = new AnalystWizard(analyst);
            bool headers = _cmd.PromptBoolean("headers", true);

            AnalystFileFormat format =
                ConvertStringConst.String2AnalystFileFormat(_cmd.PromptString("format", "decpnt|comma"));

            String goal = _cmd.PromptString("goal", "c").ToLower().Trim();

            if (goal.Equals("c"))
            {
                wizard.Goal = AnalystGoal.Classification;
            }
            else if (goal.Equals("r"))
            {
                wizard.Goal = AnalystGoal.Regression;
            }
            else
            {
                Console.WriteLine(@"Invalid goal: " + goal);
                return;
            }

            wizard.TargetFieldName = _cmd.PromptString("targetField", "");

            String m = _cmd.PromptString("method", "ff").ToLower();

            if (m.Equals("ff"))
            {
                wizard.MethodType = WizardMethodType.FeedForward;
            }
            else if (m.Equals("neat"))
            {
                wizard.MethodType = WizardMethodType.NEAT;
            }
            else if (m.Equals("pnn"))
            {
                wizard.MethodType = WizardMethodType.PNN;
            }
            else if (m.Equals("rbf"))
            {
                wizard.MethodType = WizardMethodType.RBF;
            }
            else if (m.Equals("som"))
            {
                wizard.MethodType = WizardMethodType.SOM;
            }
            else if (m.Equals("svm"))
            {
                wizard.MethodType = WizardMethodType.SVM;
            }
            else
            {
                Console.WriteLine(@"Invalid method: " + m);
                return;
            }

            String r = _cmd.PromptString("range", "-1t1").Trim().ToLower();

            if (r.Equals("-1t1"))
            {
                wizard.Range = NormalizeRange.NegOne2One;
            }
            else if (r.Equals("0t1"))
            {
                wizard.Range = NormalizeRange.Zero2One;
            }

            wizard.Missing = TranslateMissing(_cmd.PromptString("missing", "DiscardMissing"));

            wizard.LagWindowSize      = _cmd.PromptInteger("lagWindow", 0);
            wizard.LeadWindowSize     = _cmd.PromptInteger("leadWindow", 0);
            wizard.IncludeTargetField = _cmd.PromptBoolean("includeTarget", false);
            wizard.TaskNormalize      = _cmd.PromptBoolean("normalize", true);
            wizard.TaskRandomize      = _cmd.PromptBoolean("randomize", true);
            wizard.TaskSegregate      = _cmd.PromptBoolean("segregate", true);
            wizard.TaskBalance        = _cmd.PromptBoolean("balance", false);
            wizard.TaskCluster        = _cmd.PromptBoolean("cluster", false);

            _sw.Start();
            Console.WriteLine(@"Analyzing data");
            wizard.Wizard(new FileInfo(targetCSVFile), headers, format);
            Console.WriteLine(@"Saving analyst file");
            analyst.Save(egaFile);
        }
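The same wizard configuration can be set up directly, without the interactive prompts, by assigning the properties shown above; a condensed sketch (the file name and target field are hypothetical):

            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst)
            {
                Goal            = AnalystGoal.Classification,
                MethodType      = WizardMethodType.FeedForward,
                Range           = NormalizeRange.NegOne2One,
                TargetFieldName = "class",
                TaskNormalize   = true,
                TaskRandomize   = true,
                TaskSegregate   = true
            };

            wizard.Wizard(new FileInfo("my_data.csv"), true, AnalystFileFormat.DecpntComma);
            analyst.Save(FileUtil.ForceExtension("my_data.csv", "ega"));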
Example #17
        /// <summary> need comments </summary>
        public static List <string> Normalise(FileInfo sourceFile, FileInfo normalFile, string path, string dataFile, int outputs, bool inputs)
        {
            List <string> titles = new List <string>();

            //Set up the analyst with the original CSV.
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);

            //Additional validation to check that the file is not empty.
            try
            {
                wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);
            }
            catch (Exception)
            {
                MessageBox.Show("The file you have selected is empty.", "File Access Error");
                return(null);
            }

            if (analyst.Script.Fields.Length - 1 < outputs)
            {
                MessageBox.Show("You have specified " + outputs + " outputs but there are only " + analyst.Script.Fields.Length + " headings in the file.", "Too Many Outputs Error");
                return(null);
            }

            //Setup max and min range for normalization.
            foreach (AnalystField field in analyst.Script.Normalize.NormalizedFields)
            {
                field.NormalizedHigh = 1;
                field.NormalizedLow  = -1;
                //field.Action = Encog.Util.Arrayutil.NormalizationAction.OneOf; //Use this to change the normalization type.
            }

            analyst.Script.Normalize.MissingValues = new NegateMissing();

            //Normalization.
            var norm = new AnalystNormalizeCSV();

            norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
            norm.ProduceOutputHeaders = true;

            try
            {
                norm.Normalize(normalFile);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message + "\n\nUnknown application failure, please report this bug with a screenshot of" +
                                " the message to the relevant engineer.", "Normalisation Failure");
                return(null);
            }

            if (!inputs)
            {
                for (int i = outputs; i > 0; i--)
                {
                    titles.Add(analyst.Script.Fields[analyst.Script.Fields.Length - i].Name);
                }
            }
            else
            {
                for (int i = 0; i < analyst.Script.Fields.Length - outputs; i++)
                {
                    titles.Add(analyst.Script.Fields[i].Name);
                }
            }

            //Save configuration to be used later.
            analyst.Save(new FileInfo(path + @"\normal\" + "normalizationData" + dataFile.Replace(".csv", ".ega")));

            return(titles);
        }
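A possible call into Normalise above: with inputs set to false the returned list holds the output column titles, with true the input titles (the paths and counts are hypothetical):

            List<string> outputTitles = Normalise(
                new FileInfo(@"C:\data\wine.csv"),
                new FileInfo(@"C:\data\normal\wine_norm.csv"),
                @"C:\data", "wine.csv",
                outputs: 1,
                inputs: false);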