Example #1
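        /// <summary> Searches candidate multilayer perceptron topologies and returns the one with the best cross-validated F-measure. </summary>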
        /// <param name="dataSet"> training set used for error estimation </param>
        /// <returns> neural network model with an architecture optimized for the provided data set </returns>
        public virtual NeuralNetwork createOptimalModel(DataSet dataSet)
        {
            List<int> neurons = new List<int>();

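            // enumerate every candidate hidden-layer configuration, starting from the minimum number of neurons per layer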
            neurons.Add(minNeuronsPerLayer);
            findArchitectures(1, minNeuronsPerLayer, neurons);

            LOG.info("Total [{}] different network topologies found", allArchitectures.Count);

            foreach (List<int> architecture in allArchitectures)
            {
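                // complete the topology: prepend the data set's input size and append its output size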
                architecture.Insert(0, dataSet.InputSize);
                architecture.Add(dataSet.OutputSize);

                LOG.info("Architecture: [{}]", architecture);

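                // build an MLP with the candidate topology and attach the shared learning rule plus a progress-logging listener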
                MultiLayerPerceptron network  = new MultiLayerPerceptron(architecture);
                LearningListener     listener = new LearningListener(10, learningRule.MaxIterations);
                learningRule.addListener(listener);
                network.LearningRule = learningRule;

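                // estimate generalization performance with 10-fold cross-validation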
                errorEstimationMethod = new CrossValidation(network, dataSet, 10);
                errorEstimationMethod.run();
                // retrieve the multi-class evaluator that was populated during cross-validation
                var evaluator = errorEstimationMethod.getEvaluator<ClassifierEvaluator.MultiClass>(typeof(ClassifierEvaluator.MultiClass));

                ClassificationMetrics[] result = ClassificationMetrics.createFromMatrix(evaluator.Result);

                // keep the architecture with the highest F-measure
                if (optimalResult == null || optimalResult.FMeasure < result[0].FMeasure)
                {
                    LOG.info("Architecture [{}] became optimal architecture  with metrics {}", architecture, result);
                    optimalResult      = result[0];
                    optimalClassifier  = network;
                    optimalArchitecure = architecture;
                }

                LOG.info("#################################################################");
            }


            LOG.info("Optimal Architecture: {}", optimalArchitecure);
            return optimalClassifier;
        }
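
A minimal usage sketch of the method above. The optimizer class name (ArchitectureOptimizer), its parameterless constructor, and the DataSet.createFromFile factory call are assumptions made for illustration; they are not part of the original example and may differ in the actual library port.

        // Hypothetical driver code; class and factory names are assumed, not taken from the original example.
        // Using directives for the ported Neuroph namespaces are omitted; adjust them to the port you are using.
        public class OptimizerDemo
        {
            public static void Main(string[] args)
            {
                // Load a labeled data set (4 input columns, 3 output columns, comma-delimited) - assumed factory method.
                DataSet dataSet = DataSet.createFromFile("iris_data.txt", 4, 3, ",");

                // ArchitectureOptimizer is a placeholder for the class that owns createOptimalModel.
                var optimizer = new ArchitectureOptimizer();

                NeuralNetwork optimalNetwork = optimizer.createOptimalModel(dataSet);
                System.Console.WriteLine("Optimal model created for the given data set.");
            }
        }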