Example #1
        /// <param name="args"> the command line arguments </param>
        public static void Main(string[] args)
        {
            // test subsampling here too with some small dataset

//JAVA TO C# CONVERTER TODO TASK: Java wildcard generics are not converted to .NET:
//ORIGINAL LINE: org.neuroph.core.NeuralNetwork<?> nnet = org.neuroph.core.NeuralNetwork.createFromFile("MIcrNet1.nnet");
            NeuralNetwork nnet    = NeuralNetwork.createFromFile("MIcrNet1.nnet");
            DataSet       dataSet = DataSet.load("MICRData.tset");

            // get class labels from output neurons
            string[] classNames = new string[nnet.OutputsCount];             // = {"LeftHand", "RightHand", "Foot", "Rest"};
            int      i          = 0;

            foreach (Neuron n in nnet.OutputNeurons)
            {
                classNames[i] = n.Label;
                i++;
            }


            CrossValidation crossval = new CrossValidation(nnet, dataSet, 5);

            crossval.addEvaluator(new ClassifierEvaluator.MultiClass(classNames));             // add the multi-class evaluator here manually to make it independent from the data set
            // data set should have column names when loading/creating, not hardcoded
            //   crossval.setSampling(null);

            crossval.run();
            CrossValidationResult results = crossval.Result;


            System.Console.WriteLine(results);

            // think about how to get the results and how to display them in Neuroph Studio - pull them directly from the evaluators
            // and how to integrate this through the training dialog - just add a KFold option to the training dialog
            // during k-folding, the error and training progress should be displayed - expose the learning rule; add some event listening for cross-validation!!!
            // save every trained network, and maybe the test results somewhere as well

            // at the end, the mean value/statistics of all classification measures also needs to be calculated
            // also implement bootstrapping - do the same thing, just with a different sampling algorithm


            //        System.out.println("MeanSquare Error: " + crossval.getEvaluator(ErrorEvaluator.class).getResult());
            //
            //        ClassificationEvaluator evaluator = crossval.getEvaluator(ClassificationEvaluator.MultiClass.class);
            //        ConfusionMatrix confusionMatrix = evaluator.getResult();
            //
            //        System.out.println("Confusion Matrix: \r\n"+confusionMatrix.toString());
            //
            //        System.out.println("Classification metrics: ");
            //
            //        ClassificationMetrics[] metrics = ClassificationMetrics.createFromMatrix(confusionMatrix);     // add all of these to result
            //        // create average statistics from ClassificationMetrics
            //
            //        for(ClassificationMetrics cm : metrics)
            //            System.out.println(cm.toString());
            //
            //
        }
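
A condensed sketch of the same flow as Example #1. It uses only the members that already appear above (createFromFile, load, OutputNeurons, Label, CrossValidation, addEvaluator, run, Result), assumes OutputNeurons is a generic IEnumerable&lt;Neuron&gt; so LINQ can replace the manual index loop, and omits the Neuroph using directives just as the listing does; the file names are the same placeholders.

using System;
using System.Linq;
// using directives for the Neuroph types (e.g. org.neuroph.core) omitted, as in the listing above

public static class CrossValidationSketch
{
    public static void Run()
    {
        // load a previously trained network and a data set (placeholder file names from Example #1)
        NeuralNetwork nnet    = NeuralNetwork.createFromFile("MIcrNet1.nnet");
        DataSet       dataSet = DataSet.load("MICRData.tset");

        // collect class labels from the output neurons; assumes OutputNeurons is IEnumerable<Neuron>
        string[] classNames = nnet.OutputNeurons.Select(n => n.Label).ToArray();

        // 5-fold cross-validation with a manually added multi-class evaluator, as above
        CrossValidation crossval = new CrossValidation(nnet, dataSet, 5);
        crossval.addEvaluator(new ClassifierEvaluator.MultiClass(classNames));
        crossval.run();

        Console.WriteLine(crossval.Result);
    }
}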
Example #2
        // k-folding is done here
        // provide a neural network and a data set - this is the main entry point for cross-validation
        public virtual void run()
        {
            //   evaluation.addEvaluator(ClassificationEvaluator.createForDataSet(dataSet)); // this should be added elsewhere

            // create subsets of the entire data set that will be used for k-folding
            List <DataSet> dataSets = sampling.sample(dataSet);

            results = new CrossValidationResult();

            // TODO: good place for parallelization - but in order to make this possible NeuralNetwork must be cloneable or immutable
            for (int i = 0; i < dataSets.Count; i++)
            {
                neuralNetwork.randomizeWeights();                 // we shouldn't do this - we should clone the original network
                dataSets[i].Label = dataSet.Label + "-subset-" + i;
                neuralNetwork.learn(dataSets[i]);                 // train neural network with i-th data set fold

                for (int j = 0; j < dataSets.Count; j++)          // next do the testing with all other dataset folds
                {
                    if (j == i)
                    {
                        continue;                         // don't use the same data set fold for testing that was used for training
                    }

                    // testNetwork(neuralNetwork, dataSets.get(j));
                    EvaluationResult evaluationResult = evaluation.evaluateDataSet(neuralNetwork, dataSets[j]);                     // this method should return all evaluation results
                    results.addEvaluationResult(evaluationResult);
                    //       results.add(result);
                    // get all the results from the single evaluation - for each evaluator, Classification and Error
                    // store it somewhere with the neural network

                    // save evaluation results from multiple runs and then calculate averages

                    // we should also save all these trained networks along with their evaluation results, or at least store them into an array...
                    // we need to store the evaluation results and the neural network for each run
                }
            }
            results.calculateStatistics();
        }
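
The sampling.sample(dataSet) call above is not shown in this listing, so here is a minimal, self-contained sketch of what a k-fold splitter could look like in plain C#, assuming the goal is simply to partition the rows into k non-overlapping, roughly equal folds; KFoldSampling, KFoldSplit and Demo are hypothetical names, not part of the Neuroph API. The Demo method mirrors the loop structure of run(): fold i is used for training and every other fold for testing.

using System;
using System.Collections.Generic;

public static class KFoldSampling
{
    // Partition 'items' into k non-overlapping folds of roughly equal size.
    // Hypothetical helper - not the Neuroph sampling implementation.
    public static List<List<T>> KFoldSplit<T>(IReadOnlyList<T> items, int k)
    {
        if (k < 2 || k > items.Count)
            throw new ArgumentOutOfRangeException(nameof(k));

        var folds = new List<List<T>>(k);
        for (int f = 0; f < k; f++)
            folds.Add(new List<T>());

        // round-robin assignment keeps fold sizes within one element of each other
        for (int i = 0; i < items.Count; i++)
            folds[i % k].Add(items[i]);

        return folds;
    }

    public static void Demo()
    {
        var rows  = new List<int> { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
        var folds = KFoldSplit(rows, 5);

        for (int i = 0; i < folds.Count; i++)
        {
            Console.WriteLine("train on fold " + i + " (" + folds[i].Count + " rows)");
            for (int j = 0; j < folds.Count; j++)
            {
                if (j == i)
                    continue;          // don't test on the fold used for training
                Console.WriteLine("  test on fold " + j);
            }
        }
    }
}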