/// <summary>
/// Example entry point: loads a previously trained iris classifier and the
/// normalized iris data set, runs 5-fold cross-validation with a multi-class
/// evaluator, and prints the aggregated result to the console.
/// </summary>
/// <param name="args"> command line arguments (unused) </param>
public static void Main(string[] args)
{
    // Load the saved network and the normalized data set
    // (4 input columns, 3 output columns, comma-delimited).
    MultiLayerPerceptron irisNetwork = (MultiLayerPerceptron)NeuralNetwork.createFromFile("irisNet.nnet");
    DataSet irisData = DataSet.createFromFile("data_sets/iris_data_normalised.txt", 4, 3, ",");

    // Class labels, in the same order the evaluator expects them.
    string[] labels = new string[] { "Virginica", "Setosa", "Versicolor" };

    // 5-fold cross-validation with per-class classification metrics.
    CrossValidation validation = new CrossValidation(irisNetwork, irisData, 5);
    validation.addEvaluator(new ClassifierEvaluator.MultiClass(labels));
    validation.run();

    CrossValidationResult summary = validation.Result;
    Console.WriteLine(summary);
}
/// <summary>
/// Searches over candidate hidden-layer topologies (generated by
/// findArchitectures starting from minNeuronsPerLayer), trains and
/// cross-validates a MultiLayerPerceptron for each one, and returns the
/// network whose first-class F-measure was best across all candidates.
/// Side effects: updates the optimalResult, optimalClassifier,
/// optimalArchitecure and errorEstimationMethod fields.
/// </summary>
/// <param name="dataSet"> training set used for error estimation </param>
/// <returns> neural network model with optimized architecture for provided data set </returns>
public virtual NeuralNetwork createOptimalModel(DataSet dataSet)
{
    // Seed the recursive topology search with the smallest allowed layer.
    List<int> neurons = new List<int>();
    neurons.Add(minNeuronsPerLayer);
    findArchitectures(1, minNeuronsPerLayer, neurons);

    LOG.info("Total [{}] different network topologies found", allArchitectures.Count);

    foreach (List<int> architecture in allArchitectures)
    {
        // Mutates the stored architecture in place: prepend the input layer
        // size and append the output layer size from the data set.
        architecture.Insert(0, dataSet.InputSize);
        architecture.Add(dataSet.OutputSize);

        LOG.info("Architecture: [{}]", architecture);

        MultiLayerPerceptron network = new MultiLayerPerceptron(architecture);
        LearningListener listener = new LearningListener(10, learningRule.MaxIterations);
        // NOTE(review): a new listener is added to the shared learningRule on
        // every loop iteration and never removed — listeners appear to
        // accumulate across candidate architectures; confirm addListener
        // semantics in the learning-rule API.
        learningRule.addListener(listener);
        network.LearningRule = learningRule;

        // 10-fold cross-validation provides the error estimate for this candidate.
        errorEstimationMethod = new CrossValidation(network, dataSet, 10);
        errorEstimationMethod.run();
        // FIX
        var evaluator = errorEstimationMethod.getEvaluator<ClassifierEvaluator.MultiClass>(typeof(ClassifierEvaluator.MultiClass));
        ClassificationMetrics[] result = ClassificationMetrics.createFromMatrix(evaluator.Result);

        // Keep the candidate with the higher F-measure. (Original comment said
        // "find the one with the smallest f measure", but the comparison below
        // selects the LARGEST — TODO confirm intent.)
        // NOTE(review): only result[0] (the first class) is compared, not an
        // average over all classes — verify this is deliberate.
        if (optimalResult == null || optimalResult.FMeasure < result[0].FMeasure)
        {
            LOG.info("Architecture [{}] became optimal architecture with metrics {}", architecture, result);
            optimalResult = result[0];
            optimalClassifier = network;
            optimalArchitecure = architecture;
        }

        LOG.info("#################################################################");
    }

    LOG.info("Optimal Architecture: {}", optimalArchitecure);
    return (optimalClassifier);
}