Example #1
        public string Evaluate(FileInfo TrainedNetworkFile, FileInfo AnalystFile, FileInfo NormalizedEvaluateFile)
        {
            var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(TrainedNetworkFile);
            var analyst = new EncogAnalyst();

            analyst.Load(AnalystFile.ToString());
            var evaluationSet = EncogUtility.LoadCSV2Memory(NormalizedEvaluateFile.ToString(),
                                                            network.InputCount, network.OutputCount, true, CSVFormat.English, false);
            var career = string.Empty;

            foreach (var item in evaluationSet)
            {
                var output = network.Compute(item.Input);

                int    classCount        = analyst.Script.Normalize.NormalizedFields[3].Classes.Count;
                double normalizationHigh = analyst.Script.Normalize.NormalizedFields[3].NormalizedHigh;
                double normalizationLow  = analyst.Script.Normalize.NormalizedFields[3].NormalizedLow;

                // Decode the equilateral-encoded output back to a class index,
                // then look up the corresponding class name (the predicted career).
                var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);
                var predictedClassInt = eq.Decode(output);

                career = analyst.Script.Normalize.NormalizedFields[3].Classes[predictedClassInt].Name;
            }

            return career;
        }
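
For context, a minimal call site for the Evaluate method above might look like the following sketch; the file paths and the CareerEvaluator class holding the method are placeholders, not part of the original example.

        // Hypothetical caller: the three files would come from earlier training and
        // normalization steps; the paths and the CareerEvaluator type are placeholders.
        var trainedNetworkFile    = new FileInfo("trainedNetwork.eg");
        var analystFile           = new FileInfo("analyst.ega");
        var normalizedEvaluateCsv = new FileInfo("normalizedEvaluate.csv");

        var evaluator       = new CareerEvaluator();
        var predictedCareer = evaluator.Evaluate(trainedNetworkFile, analystFile, normalizedEvaluateCsv);
        Console.WriteLine("Predicted career: {0}", predictedCareer);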
Example #2
        /// <summary>
        /// Evaluates the trained neural network against the test set created by the Segregate method and normalized by the Normalization method.
        /// </summary>
        private static void Evaluate()
        {
            var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkClassificationFile);
            var analyst = new EncogAnalyst();

            analyst.Load(Config.AnalystClassificationFile.ToString());
            var evaluationSet = EncogUtility.LoadCSV2Memory(Config.NormalizedEvaluateClassificationFile.ToString(),
                                                            network.InputCount, network.OutputCount, true, CSVFormat.English, false);

            int count        = 0;
            int correctCount = 0;

            foreach (var item in evaluationSet)
            {
                count++;
                var output = network.Compute(item.Input);

                var sepal_l = analyst.Script.Normalize.NormalizedFields[0].DeNormalize(item.Input[0]);
                var sepal_w = analyst.Script.Normalize.NormalizedFields[1].DeNormalize(item.Input[1]);
                var petal_l = analyst.Script.Normalize.NormalizedFields[2].DeNormalize(item.Input[2]);
                var petal_w = analyst.Script.Normalize.NormalizedFields[3].DeNormalize(item.Input[3]);

                int    classCount        = analyst.Script.Normalize.NormalizedFields[4].Classes.Count;
                double normalizationHigh = analyst.Script.Normalize.NormalizedFields[4].NormalizedHigh;
                double normalizationLow  = analyst.Script.Normalize.NormalizedFields[4].NormalizedLow;

                var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);
                var predictedClassInt = eq.Decode(output);
                var predictedClass    = analyst.Script.Normalize.NormalizedFields[4].Classes[predictedClassInt].Name;
                var idealClassInt     = eq.Decode(item.Ideal);
                var idealClass        = analyst.Script.Normalize.NormalizedFields[4].Classes[idealClassInt].Name;

                if (predictedClassInt == idealClassInt)
                {
                    correctCount++;
                }
                Console.WriteLine("Count :{0} Properties [{1},{2},{3},{4}] ,Ideal : {5} Predicted : {6} ",
                                  count, sepal_l, sepal_w, petal_l, petal_w, idealClass, predictedClass);
            }

            Console.WriteLine("Quantidade de itens: {0}", count);
            Console.WriteLine("Quantidade de acertos: {0}", CorrectCount);
            Console.WriteLine("Porcentagem de acertos: {0}", ((CorrectCount * 100.0) / count));
        }
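
Example #2 reads its file locations from a Config class that is not shown. A minimal sketch of such a class, using the member names the example expects (the file names themselves are placeholders), could be:

        // Hypothetical Config holder matching the members referenced above;
        // requires System.IO for FileInfo, and the file names are placeholders.
        public static class Config
        {
            public static readonly FileInfo TrainedNetworkClassificationFile     = new FileInfo("trainedNetwork.eg");
            public static readonly FileInfo AnalystClassificationFile            = new FileInfo("analyst.ega");
            public static readonly FileInfo NormalizedEvaluateClassificationFile = new FileInfo("normalizedEvaluate.csv");
        }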
Example #3
        private static void Evaluate()
        {
            var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkFile);
            var analyst = new EncogAnalyst();

            analyst.Load(Config.AnalystFile.ToString());
            var evaluationSet = EncogUtility.LoadCSV2Memory(Config.NormalizedEvaluateFile.ToString(), network.InputCount,
                                                            network.OutputCount, true, CSVFormat.English, false);

            var count        = 0;
            var correctCount = 0;

            foreach (var item in evaluationSet)
            {
                count++;
                var output             = network.Compute(item.Input);
                var analystNormalize   = analyst.Script.Normalize;
                var normalizedFields   = analystNormalize.NormalizedFields;
                var sourceElement      = normalizedFields[0].DeNormalize(item.Input[0]);
                var destinationElement = normalizedFields[1].DeNormalize(item.Input[1]);

                var classField        = normalizedFields[2];
                var classCount        = classField.Classes.Count;
                var normalizationHigh = classField.NormalizedHigh;
                var normalizationLow  = classField.NormalizedLow;

                var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);
                var predictedClassInt = eq.Decode(output);
                var predictedClass    = classField.Classes[predictedClassInt].Name;
                var idealClassInt     = eq.Decode(item.Ideal);
                var idealClass        = classField.Classes[idealClassInt].Name;

                if (predictedClassInt == idealClassInt)
                {
                    correctCount++;
                }
                Console.WriteLine("Count :{0} Properties [{1},{2}] ,Ideal : {3} Predicted : {4}", count, sourceElement, destinationElement, idealClass, predictedClass);
            }
            Console.WriteLine("Total Test Count : {0}", count);
            Console.WriteLine("Total Correct Predicted Count  : {0}", correctCount);
            Console.WriteLine("% Success : {0}", ((correctCount * 100.0) / count));
        }
Example #4
        public void EvaluateNetwork(FileInfo trainedNetwork, FileInfo analystFile, FileInfo normalisedTestFile, FileInfo finalResultsFile)
        {
            try
            {
                var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(trainedNetwork);
                var analyst = new EncogAnalyst();
                analyst.Load(analystFile.ToString());
                var evaluationSet = EncogUtility.LoadCSV2Memory(normalisedTestFile.ToString(),
                                                                network.InputCount, network.OutputCount, true, CSVFormat.English, false);

                using (var file = new StreamWriter(finalResultsFile.ToString()))
                {
                    foreach (var item in evaluationSet)
                    {
                        var normalizedActualOutput = (BasicMLData)network.Compute(item.Input);
                        //var actualOutput = analyst.Script.Normalize.NormalizedFields[11].DeNormalize(normalizedActualOutput.Data[0]);
                        //var idealOutput  = analyst.Script.Normalize.NormalizedFields[11].DeNormalize(item.Ideal[0]);

                        int    classCount        = analyst.Script.Normalize.NormalizedFields[11].Classes.Count;
                        double normalizationHigh = analyst.Script.Normalize.NormalizedFields[11].NormalizedHigh;
                        double normalizationLow  = analyst.Script.Normalize.NormalizedFields[11].NormalizedLow;

                        var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);
                        var predictedClassInt = eq.Decode(normalizedActualOutput);
                        var idealClassInt     = eq.Decode(item.Ideal);

                        //Write to File
                        var resultLine = idealClassInt.ToString() + "," + predictedClassInt.ToString();
                        file.WriteLine(resultLine);
                        Console.WriteLine("Ideal : {0}, Actual : {1}", idealClassInt, predictedClassInt);
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                throw;
            }
        }
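
Since this method only writes ideal/predicted class-index pairs to finalResultsFile, accuracy has to be computed afterwards. A minimal, hypothetical post-processing sketch over that file (each line has the form "ideal,predicted", exactly as written above) might be:

        // Hypothetical post-processing of the results file written above;
        // "results.csv" is a placeholder for the file passed as finalResultsFile.
        var lines   = File.ReadAllLines("results.csv");
        var total   = 0;
        var correct = 0;
        foreach (var line in lines)
        {
            var parts = line.Split(',');
            total++;
            if (parts[0] == parts[1])
            {
                correct++;
            }
        }
        Console.WriteLine("% Success : {0}", (correct * 100.0) / total);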
Example #5
        public static int    epoch;           // number of epochs, to be provided by user
        public static double learningRate;    // learning rate, set in Main from user input
        public static double UMomentum;       // momentum, set in Main from user input



        private static void Main(string[] args)
        {
            Console.WriteLine("Press 1 for selecting  Regresssion and 2 for classification");
            int whatToperform = int.Parse(Console.ReadLine());


            Console.WriteLine("Please provide number of layers assuming first layer is input layer and last is output layer");
            int numberOfLayers = int.Parse(Console.ReadLine());



            var network = new BasicNetwork();

            for (int i = 1; i <= numberOfLayers; i++)
            {
                Console.WriteLine("Please select the activation function for layer- {0}", i);      // Activtion function Input
                Console.WriteLine("Press 1 for ActivationBiPolar ");
                Console.WriteLine("Press 2 for ActivationCompetitive  ");
                Console.WriteLine("Press 3 for ActivationLinear ");
                Console.WriteLine("Press 4 for ActivationLog  ");
                Console.WriteLine("Press 5 for ActivationSigmoid  ");
                Console.WriteLine("Press 6 for ActivationSoftMax ");
                Console.WriteLine("Press 7 for ActivationTanh  ");
                Console.WriteLine("Press 8 for default  ");
                int whichActivation = int.Parse(Console.ReadLine());


                Console.WriteLine("Please the bias for this layer : 1 for True and 0 for false ");       // Bias input
                int whichBias = int.Parse(Console.ReadLine());



                Console.WriteLine("Please the enter the neuron count for this layer");       // Neuron count input
                int countNeuron = int.Parse(Console.ReadLine());


                switch (whichActivation)       // building the network
                {
                case 1: network.AddLayer(new BasicLayer(new ActivationBiPolar(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 2: network.AddLayer(new BasicLayer(new ActivationCompetitive(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 3: network.AddLayer(new BasicLayer(new ActivationLinear(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 4: network.AddLayer(new BasicLayer(new ActivationLOG(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 5: network.AddLayer(new BasicLayer(new ActivationSigmoid(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 6: network.AddLayer(new BasicLayer(new ActivationSoftMax(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 7: network.AddLayer(new BasicLayer(new ActivationTANH(), Convert.ToBoolean(whichBias), countNeuron));
                    break;

                case 8: network.AddLayer(new BasicLayer(null, Convert.ToBoolean(whichBias), countNeuron));
                    break;

                default:
                    Console.WriteLine("Wrong data entered - the application will stop");
                    return;
                }
            }

            network.Structure.FinalizeStructure();      // complete the network structure
            network.Reset();

            Console.WriteLine("Please enter the learning rate ");       // learning rate input
            learningRate = double.Parse(Console.ReadLine());

            Console.WriteLine("Please enter the momentum value");       // Momentum input
            UMomentum = double.Parse(Console.ReadLine());

            Console.WriteLine("Please the enter the number of epochs ");       // epoch input
            epoch = int.Parse(Console.ReadLine());


            // For Regression we do this piece of code

            if (whatToperform == 1)
            {
                var sourceFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\regression_train.csv");                                               //fetch training file
                var targetFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\Result\khicharNormClassificationTrainData.csv"); //save train normalized file


                var sourceFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\regression_train.csv");                                              //fetch testing file
                var targetFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\Result\khicharNormClassificationTestData.csv"); //Save test normalized file

                //Analyst
                var analyst = new EncogAnalyst();
                //Wizard
                var wizard = new AnalystWizard(analyst);
                wizard.TargetFieldName = "y";  // set the output variable for regression; not necessary when using multiple attributes
                wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);

                //norm for Training
                var norm = new AnalystNormalizeCSV();
                norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
                norm.ProduceOutputHeaders = true;
                norm.Normalize(targetFile);

                //norm for testing

                norm.Analyze(sourceFileTest, true, CSVFormat.English, analyst);
                norm.Normalize(targetFileTest);


                analyst.Save(new FileInfo("stt.ega"));



                var trainingset1 = EncogUtility.LoadCSV2Memory(targetFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);


                var train = new Backpropagation(network, trainingset1, learningRate, UMomentum);    // apply the user-supplied learning rate and momentum
                int epo   = 1;
                do
                {
                    train.Iteration();
                    Console.WriteLine(@"Epoch #" + epo + @" Error:" + train.Error);
                    epo++;

                    if (epo > epoch)
                    {
                        break;
                    }
                } while (train.Error > 0.05);


                var evaluationSet = EncogUtility.LoadCSV2Memory(targetFileTest.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);



                var inputExcel = new List<Tuple<double, double>>();

                foreach (var item in evaluationSet)
                {
                    var output = network.Compute(item.Input);

                    inputExcel.Add(new Tuple<double, double>(item.Input[0], output[0]));
                }


                PlotRegressionTest(inputExcel);

                Console.WriteLine("----------------Execution over - check the Regression output excel ------------------------------------");
                Console.ReadKey();
                EncogFramework.Instance.Shutdown();
            }

            //End of Regression


            //     For classification we do this piece of code

            if (whatToperform == 2)
            {
                // fetch train file
                var sourceFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\data.circles.test.1000.csv");
                var targetFile = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\result\khicharNormClassificationTrainData.csv");


                // fetch test file
                var sourceFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\data.circles.test.1000.csv");
                var targetFileTest = new FileInfo(@"C:\Users\smandia\Desktop\Attachments_20161015\Attachments_20161029\result\khicharNormClassificationTestData.csv");

                //Analyst
                var analyst = new EncogAnalyst();
                //Wizard
                var wizard = new AnalystWizard(analyst);
                wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);

                //norm for Training
                var norm = new AnalystNormalizeCSV();
                norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
                norm.ProduceOutputHeaders = true;
                norm.Normalize(targetFile);

                //norm for testing

                norm.Analyze(sourceFileTest, true, CSVFormat.English, analyst);
                norm.Normalize(targetFileTest);


                analyst.Save(new FileInfo("stt.ega"));


                var trainingset1 = EncogUtility.LoadCSV2Memory(targetFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);


                var train = new Backpropagation(network, trainingset1, learningRate, UMomentum);    // apply the user-supplied learning rate and momentum
                int epo   = 1;
                do
                {
                    train.Iteration();
                    Console.WriteLine(@"Epoch #" + epo + @" Error:" + train.Error);
                    epo++;

                    if (epo > epoch)
                    {
                        break;
                    }
                } while (train.Error > 0.05);


                var evaluationSet = EncogUtility.LoadCSV2Memory(targetFileTest.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);

                int count        = 0;
                int correctCount = 0;

                var inputExcel = new List<Tuple<double, double, double>>();

                foreach (var item in evaluationSet)
                {
                    count++;
                    var output = network.Compute(item.Input);
                    // int classCount = analyst.Script.Normalize.NormalizedFields[4].Classes.Count;

                    int    classCount        = analyst.Script.Normalize.NormalizedFields[2].Classes.Count;
                    double normalizationHigh = analyst.Script.Normalize.NormalizedFields[2].NormalizedHigh;
                    double normalizationLow  = analyst.Script.Normalize.NormalizedFields[2].NormalizedLow;

                    var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);
                    var predictedClassInt = eq.Decode(output);
                    var predictedClass    = analyst.Script.Normalize.NormalizedFields[2].Classes[predictedClassInt].Name;
                    var idealClassInt     = eq.Decode(item.Ideal);
                    var idealClass        = analyst.Script.Normalize.NormalizedFields[2].Classes[idealClassInt].Name;

                    if (predictedClassInt == idealClassInt)
                    {
                        correctCount++;
                    }


                    inputExcel.Add(new Tuple<double, double, double>(item.Input[0], item.Input[1], Convert.ToDouble(predictedClass)));
                }



                Console.WriteLine("Total Test Count : {0}", count);
                Console.WriteLine("Total Correct Prediction Count : {0}", CorrectCount);
                Console.WriteLine("% Success : {0}", ((CorrectCount * 100.0) / count));
                PlotClassificationTest(inputExcel);

                Console.WriteLine("----------------Execution over - check the Classification output excel ------------------------------------");
                Console.ReadKey();
                EncogFramework.Instance.Shutdown();
            }        //End of classification
        }
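
Main calls PlotRegressionTest and PlotClassificationTest, which are not included in the example. Purely illustrative stubs that dump the collected points to CSV files (the output file names are made up, and a real implementation would produce the Excel output the messages refer to) might look like:

        // Hypothetical stubs for the plotting helpers referenced in Main;
        // they write the collected tuples to CSV files (placeholder paths).
        private static void PlotRegressionTest(List<Tuple<double, double>> points)
        {
            using (var writer = new StreamWriter("regressionOutput.csv"))
            {
                writer.WriteLine("x,predicted");
                foreach (var p in points)
                {
                    writer.WriteLine("{0},{1}", p.Item1, p.Item2);
                }
            }
        }

        private static void PlotClassificationTest(List<Tuple<double, double, double>> points)
        {
            using (var writer = new StreamWriter("classificationOutput.csv"))
            {
                writer.WriteLine("x,y,predictedClass");
                foreach (var p in points)
                {
                    writer.WriteLine("{0},{1},{2}", p.Item1, p.Item2, p.Item3);
                }
            }
        }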