Example No. 1
        /// <summary>
        /// Creates and returns a new neural network for image recognition.
        /// Assumes that all of the FractionRgbData objects in the given map contain
        /// arrays of identical length, so that the input layer of the neural network
        /// can be created here.
        /// </summary>
        /// <param name="label"> neural network label </param>
        /// <param name="samplingResolution"> sampling resolution (image size) </param>
        /// <param name="imageLabels"> image labels </param>
        /// <param name="layersNeuronsCount"> neuron counts in hidden layers </param>
        /// <param name="transferFunctionType"> type of transfer function to use for neurons in network </param>
        /// <param name="colorMode"> color mode </param>
        /// <returns> the newly created neural network </returns>
        public static NeuralNetwork createNewNeuralNetwork(string label, Dimension samplingResolution, ColorMode colorMode, List <string> imageLabels, List <int?> layersNeuronsCount, TransferFunctionType transferFunctionType)
        {
            int numberOfInputNeurons;

            if ((colorMode == ColorMode.COLOR_RGB) || (colorMode == ColorMode.COLOR_HSL))
            {
                // full color RGB or HSL: three input values per pixel
                numberOfInputNeurons = 3 * samplingResolution.Width * samplingResolution.Height;
            }
            else
            {
                // black and white: one input value per pixel
                numberOfInputNeurons = samplingResolution.Width * samplingResolution.Height;
            }

            int numberOfOutputNeurons = imageLabels.Count;

            layersNeuronsCount.Insert(0, numberOfInputNeurons);
            layersNeuronsCount.Add(numberOfOutputNeurons);

            Console.WriteLine("Neuron layer size counts vector = " + layersNeuronsCount);

            NeuralNetwork neuralNetwork = new MultiLayerPerceptron(layersNeuronsCount, transferFunctionType);

            neuralNetwork.Label = label;
            PluginBase imageRecognitionPlugin = new ImageRecognitionPlugin(samplingResolution, colorMode);

            neuralNetwork.addPlugin(imageRecognitionPlugin);

            assignLabelsToOutputNeurons(neuralNetwork, imageLabels);
            neuralNetwork.LearningRule = new MomentumBackpropagation();

            return neuralNetwork;
        }
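A minimal invocation sketch for the factory above; the label, resolution, labels, and hidden-layer size are illustrative values (not from the original sample), and a (width, height) Dimension constructor is assumed:

        // Hypothetical usage of createNewNeuralNetwork (illustrative values only).
        // For a 10x10 RGB image the input layer gets 3 * 10 * 10 = 300 neurons,
        // and the two image labels produce 2 output neurons.
        List<string> labels = new List<string> { "cat", "dog" };
        List<int?> hidden = new List<int?> { 12 };
        NeuralNetwork net = createNewNeuralNetwork(
            "demo-net",                      // label
            new Dimension(10, 10),           // sampling resolution (assumed width, height)
            ColorMode.COLOR_RGB,             // color mode
            labels, hidden,
            TransferFunctionType.SIGMOID);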
Example No. 2
        public static void Main(string[] args)
        {
            Evaluation evaluation = new Evaluation();

            evaluation.addEvaluator(new ErrorEvaluator(new MeanSquaredError()));

            string[] classNames = new string[] { "Virginica", "Setosa", "Versicolor" };


            MultiLayerPerceptron neuralNet = (MultiLayerPerceptron)NeuralNetwork.createFromFile("irisNet.nnet");
            DataSet dataSet = DataSet.createFromFile("data_sets/iris_data_normalised.txt", 4, 3, ",");

            evaluation.addEvaluator(new ClassifierEvaluator.MultiClass(classNames));
            evaluation.evaluateDataSet(neuralNet, dataSet);

            ClassifierEvaluator evaluator       = evaluation.getEvaluator(typeof(ClassifierEvaluator.MultiClass));
            ConfusionMatrix     confusionMatrix = evaluator.Result;

            Console.WriteLine("Confusion matrrix:\r\n");
            Console.WriteLine(confusionMatrix.ToString() + "\r\n\r\n");
            Console.WriteLine("Classification metrics\r\n");
            ClassificationMetrics[]     metrics = ClassificationMetrics.createFromMatrix(confusionMatrix);
            ClassificationMetrics.Stats average = ClassificationMetrics.average(metrics);
            foreach (ClassificationMetrics cm in metrics)
            {
                Console.WriteLine(cm.ToString() + "\r\n");
            }
            Console.WriteLine(average.ToString());
        }
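For reference, the per-class metrics printed above follow the standard confusion-matrix definitions; a minimal, library-independent sketch of those formulas:

        // Standard per-class metrics from raw confusion-matrix counts.
        static double Precision(int tp, int fp) => tp / (double)(tp + fp);
        static double Recall(int tp, int fn)    => tp / (double)(tp + fn);
        static double F1(double precision, double recall) =>
            2.0 * precision * recall / (precision + recall);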
Example No. 3
        public static void Main(string[] args)
        {
            //create training set from Data.DIGITS
            DataSet dataSet = generateTraining();

            int inputCount    = Data.CHAR_HEIGHT * Data.CHAR_WIDTH;
            int outputCount   = Data.DIGITS.Length;
            int hiddenNeurons = 19;


            //create neural network
            MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputCount, hiddenNeurons, outputCount);
            //get backpropagation learning rule from network
            BackPropagation learningRule = neuralNet.LearningRule;

            learningRule.LearningRate  = 0.5;
            learningRule.MaxError      = 0.001;
            learningRule.MaxIterations = 5000;

            //add learning listener in order to print out training info
            learningRule.addListener(new LearningEventListenerAnonymousInnerClassHelper());

            //train neural network
            neuralNet.learn(dataSet);

            //test the trained network with the training set
            testNeuralNetwork(neuralNet, dataSet);
        }
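LearningEventListenerAnonymousInnerClassHelper is a converter-generated stand-in for a Java anonymous class and is not shown in this sample; a plausible shape for it, assuming the C# port keeps Neuroph's LearningEventListener interface and BackPropagation properties, would be:

        // Assumed shape only; property and method names follow the Java Neuroph API.
        private class LearningEventListenerAnonymousInnerClassHelper : LearningEventListener
        {
            public void handleLearningEvent(LearningEvent @event)
            {
                BackPropagation bp = (BackPropagation)@event.Source;
                Console.WriteLine("Iteration: " + bp.CurrentIteration
                                  + ", error: " + bp.TotalNetworkError);
            }
        }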
Example No. 4
    /// <summary>
    /// Builds a neural network from the genome
    /// </summary>
    /// <returns>Neural network for the genome</returns>
    public MultiLayerPerceptron buildNeuralNetwork()
    {
        MultiLayerPerceptron neuralNetwork = new MultiLayerPerceptron(id, inputLayerSize, hiddenLayerSize, outputLayerSize);

        int weightIndex = 0;

        for (int i = 0; i < neuralNetwork.HiddenLayer1Size; i++)
        {
            for (int j = 0; j < neuralNetwork.HiddenLayer1.NeuronList[i].WeightSize; j++)
            {
                neuralNetwork.HiddenLayer1.NeuronList[i].Weights[j] = this.weight[weightIndex++];
            }
        }

        for (int i = 0; i < neuralNetwork.HiddenLayer2Size; i++)
        {
            for (int j = 0; j < neuralNetwork.HiddenLayer2.NeuronList[i].WeightSize; j++)
            {
                neuralNetwork.HiddenLayer2.NeuronList[i].Weights[j] = this.weight[weightIndex++];
            }
        }

        for (int i = 0; i < neuralNetwork.OutputLayerSize; i++)
        {
            for (int j = 0; j < neuralNetwork.OutputLayer.NeuronList[i].WeightSize; j++)
            {
                neuralNetwork.OutputLayer.NeuronList[i].Weights[j] = this.weight[weightIndex++];
            }
        }

        return neuralNetwork;
    }
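The loops above consume this.weight strictly in order, so the genome must supply exactly as many weights as the three layers expect; a bookkeeping sketch, under the assumption that each neuron's WeightSize equals the size of the preceding layer (no bias term):

    // Expected genome length under the stated assumption.
    int expectedLength = hiddenLayer1Size * inputLayerSize
                       + hiddenLayer2Size * hiddenLayer1Size
                       + outputLayerSize * hiddenLayer2Size;
    // A shorter genome would throw IndexOutOfRangeException inside
    // buildNeuralNetwork(); a longer one leaves trailing genes unused.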
Example No. 5
        /// <summary>
        /// Runs this sample
        /// </summary>
        public static void Main(string[] args)
        {
            // create training set (logical XOR function)
            DataSet trainingSet = new DataSet(2, 1);

            trainingSet.addRow(new DataSetRow(new double[] { 0, 0 }, new double[] { 0 }));
            trainingSet.addRow(new DataSetRow(new double[] { 0, 1 }, new double[] { 1 }));
            trainingSet.addRow(new DataSetRow(new double[] { 1, 0 }, new double[] { 1 }));
            trainingSet.addRow(new DataSetRow(new double[] { 1, 1 }, new double[] { 0 }));

            // create multi layer perceptron
            MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);

            // set ResilientPropagation learning rule
            myMlPerceptron.LearningRule = new ResilientPropagation();

            // learn the training set
            Console.WriteLine("Training neural network...");
            myMlPerceptron.learn(trainingSet);

            int iterations = ((SupervisedLearning)myMlPerceptron.LearningRule).CurrentIteration;

            Console.WriteLine("Learned in " + iterations + " iterations");

            // test perceptron
            Console.WriteLine("Testing trained neural network");
            testNeuralNetwork(myMlPerceptron, trainingSet);
        }
Example No. 6
        /// <summary>
        /// Creates and returns a new instance of Multi Layer Perceptron </summary>
        /// <param name="layersStr"> space separated number of neurons in layers </param>
        /// <param name="transferFunctionType"> transfer function type for neurons </param>
        /// <returns> instance of Multi Layer Perceptron </returns>
        public static MultiLayerPerceptron createMLPerceptron(string layersStr, TransferFunctionType transferFunctionType)
        {
            List <int>           layerSizes = VectorParser.parseInteger(layersStr);
            MultiLayerPerceptron nnet       = new MultiLayerPerceptron(layerSizes, transferFunctionType);

            return nnet;
        }
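A short usage sketch for this factory; the layer string "4 16 3" is an illustrative value meaning 4 input, 16 hidden, and 3 output neurons:

        // Space-separated layer sizes are parsed into a list internally.
        MultiLayerPerceptron nnet = createMLPerceptron("4 16 3", TransferFunctionType.SIGMOID);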
Example No. 7
        /// <summary>
        /// Benchmark preparation consists of training set and neural network creation.
        /// This method generates a training set with 100 rows, where every row has 10 input and 5 output elements.
        /// The neural network has two hidden layers with 8 and 7 neurons, and runs its learning rule for 2000 iterations.
        /// </summary>
        public override void prepareTest()
        {
            int trainingSetSize = 100;
            int inputSize       = 10;
            int outputSize      = 5;

            this.trainingSet = new DataSet(inputSize, outputSize);

            Random random = new Random(1); // one seeded generator; re-seeding inside the loop would repeat the same value

            for (int i = 0; i < trainingSetSize; i++)
            {
                double[] input = new double[inputSize];
                for (int j = 0; j < inputSize; j++)
                {
                    input[j] = random.NextDouble();
                }

                double[] output = new double[outputSize];
                for (int j = 0; j < outputSize; j++)
                {
                    output[j] = random.NextDouble();
                }

                DataSetRow trainingSetRow = new DataSetRow(input, output);
                trainingSet.addRow(trainingSetRow);
            }


            network = new MultiLayerPerceptron(inputSize, 8, 7, outputSize);
            ((MomentumBackpropagation)network.LearningRule).MaxIterations = 2000;
        }
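prepareTest() only builds the data and network; a companion benchmark body would simply run the learning rule. A minimal sketch, hedged because the benchmark base class and its hook name are not shown in this sample:

        // Assumed benchmark hook; the actual override name comes from the base class.
        public override void runTest()
        {
            network.learn(trainingSet);
        }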
Example No. 8
        /// <summary>
        /// Test JMLNeurophClassifier
        /// </summary>
        /// <param name="jmlDataset"> Dataset Java-ML data set </param>
        private static void testJMLNeurophClassifier(Dataset jmlDataset)
        {
            MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(4, 16, 3);

            // set labels for output neurons
            neuralNet.OutputNeurons[0].Label = "Setosa";
            neuralNet.OutputNeurons[1].Label = "Versicolor";
            neuralNet.OutputNeurons[2].Label = "Virginica";

            // initialize NeurophJMLClassifier
            JMLNeurophClassifier jmlnClassifier = new JMLNeurophClassifier(neuralNet);

            // Process Java-ML data set
            jmlnClassifier.buildClassifier(jmlDataset);

            // test item
            //double[] item = {5.1, 3.5, 1.4, 0.2}; // normalized item is below
            double[] item = new double[] { -0.27777777777777773, 0.1249999999999999, -0.4322033898305085, -0.45833333333333337 };

            // Java-ML instance out of test item
            Instance instance = new DenseInstance(item);

            Console.WriteLine("NeurophJMLClassifier - classify of {" + string.Join(", ", item) + "}");
            Console.WriteLine(jmlnClassifier.classify(instance));
            Console.WriteLine("NeurophJMLClassifier - classDistribution of {" + string.Join(", ", item) + "}");
            Console.WriteLine(jmlnClassifier.classDistribution(instance));
        }
Example No. 9
        public void LearnXORTest()
        {
            float[][] inputs  = new float[4][];
            float[][] targets = new float[4][];
            inputs[0]  = new float[] { 0, 0 };
            inputs[1]  = new float[] { 0, 1 };
            inputs[2]  = new float[] { 1, 0 };
            inputs[3]  = new float[] { 1, 1 };
            targets[0] = new float[] { 0 };
            targets[1] = new float[] { 1 };
            targets[2] = new float[] { 1 };
            targets[3] = new float[] { 0 };
            MultiLayerPerceptron mlp = new MultiLayerPerceptron(new int[] { 2, 2, 1 });

            mlp.set_learning_rate(0.2f);
            mlp.set_eligibility(0.1f);
            for (int i = 0; i < 10000; i++)
            {
                mlp.train_batch(inputs, targets);
            }
            ToolsArray.print(mlp.estimate(inputs[0]));
            ToolsArray.print(mlp.estimate(inputs[1]));
            ToolsArray.print(mlp.estimate(inputs[2]));
            ToolsArray.print(mlp.estimate(inputs[3]));
        }
Example No. 10
        public static void Main(string[] args)
        {
            System.Console.WriteLine("Time stamp N1:" + DateTime.Now.ToString("dd-MMM-yyyy HH:mm:ss:MM"));

            int           maxIterations = 10000;
            NeuralNetwork neuralNet     = new MultiLayerPerceptron(4, 9, 1);

            ((LMS)neuralNet.LearningRule).MaxError      = 0.001;         // 0-1
            ((LMS)neuralNet.LearningRule).LearningRate  = 0.7;           // 0-1
            ((LMS)neuralNet.LearningRule).MaxIterations = maxIterations;
            DataSet trainingSet = new DataSet(4, 1);

            double daxmax = 10000.0D;

            trainingSet.addRow(new DataSetRow(new double[] { 3710.0D / daxmax, 3690.0D / daxmax, 3890.0D / daxmax, 3695.0D / daxmax }, new double[] { 3666.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 3690.0D / daxmax, 3890.0D / daxmax, 3695.0D / daxmax, 3666.0D / daxmax }, new double[] { 3692.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 3890.0D / daxmax, 3695.0D / daxmax, 3666.0D / daxmax, 3692.0D / daxmax }, new double[] { 3886.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 3695.0D / daxmax, 3666.0D / daxmax, 3692.0D / daxmax, 3886.0D / daxmax }, new double[] { 3914.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 3666.0D / daxmax, 3692.0D / daxmax, 3886.0D / daxmax, 3914.0D / daxmax }, new double[] { 3956.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 3692.0D / daxmax, 3886.0D / daxmax, 3914.0D / daxmax, 3956.0D / daxmax }, new double[] { 3953.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 3886.0D / daxmax, 3914.0D / daxmax, 3956.0D / daxmax, 3953.0D / daxmax }, new double[] { 4044.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 3914.0D / daxmax, 3956.0D / daxmax, 3953.0D / daxmax, 4044.0D / daxmax }, new double[] { 3987.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 3956.0D / daxmax, 3953.0D / daxmax, 4044.0D / daxmax, 3987.0D / daxmax }, new double[] { 3996.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 3953.0D / daxmax, 4044.0D / daxmax, 3987.0D / daxmax, 3996.0D / daxmax }, new double[] { 4043.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 4044.0D / daxmax, 3987.0D / daxmax, 3996.0D / daxmax, 4043.0D / daxmax }, new double[] { 4068.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 3987.0D / daxmax, 3996.0D / daxmax, 4043.0D / daxmax, 4068.0D / daxmax }, new double[] { 4176.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 3996.0D / daxmax, 4043.0D / daxmax, 4068.0D / daxmax, 4176.0D / daxmax }, new double[] { 4187.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 4043.0D / daxmax, 4068.0D / daxmax, 4176.0D / daxmax, 4187.0D / daxmax }, new double[] { 4223.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 4068.0D / daxmax, 4176.0D / daxmax, 4187.0D / daxmax, 4223.0D / daxmax }, new double[] { 4259.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 4176.0D / daxmax, 4187.0D / daxmax, 4223.0D / daxmax, 4259.0D / daxmax }, new double[] { 4203.0D / daxmax }));
            trainingSet.addRow(new DataSetRow(new double[] { 4187.0D / daxmax, 4223.0D / daxmax, 4259.0D / daxmax, 4203.0D / daxmax }, new double[] { 3989.0D / daxmax }));
            neuralNet.learn(trainingSet);
            System.Console.WriteLine("Time stamp N2:" + DateTime.Now.ToString("dd-MMM-yyyy HH:mm:ss:MM"));

            DataSet testSet = new DataSet(4, 1);

            testSet.addRow(new DataSetRow(new double[] { 4223.0D / daxmax, 4259.0D / daxmax, 4203.0D / daxmax, 3989.0D / daxmax }));

            foreach (DataSetRow testDataRow in testSet.Rows)
            {
                neuralNet.Input = testDataRow.Input;
                neuralNet.calculate();
                double[] networkOutput = neuralNet.Output;
                Console.Write("Input: " + testDataRow.Input.Aggregate("", (x, y) => x + ", " + y).Trim(','));
                System.Console.WriteLine(" Output: " + networkOutput.Aggregate("", (x, y) => x + ", " + y).Trim(','));
            }

            //Experiments:
            //                   calculated
            //31;3;2009;4084,76 -> 4121 Error=0.01 Rate=0.7 Iterat=100
            //31;3;2009;4084,76 -> 4096 Error=0.01 Rate=0.7 Iterat=1000
            //31;3;2009;4084,76 -> 4093 Error=0.01 Rate=0.7 Iterat=10000
            //31;3;2009;4084,76 -> 4108 Error=0.01 Rate=0.7 Iterat=100000
            //31;3;2009;4084,76 -> 4084 Error=0.001 Rate=0.7 Iterat=10000

            System.Console.WriteLine("Time stamp N3:" + DateTime.Now.ToString("dd-MMM-yyyy HH:mm:ss:MM"));
            Environment.Exit(0);
        }
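Since every input and target above is divided by daxmax = 10000, reading the prediction as an index level requires scaling the output back up, which is how the "calculated" column in the experiment notes was produced; a one-line sketch:

        // Recover the forecast on the original DAX scale.
        double predictedDax = networkOutput[0] * daxmax;   // e.g. 0.4084 -> 4084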
Example No. 11
        private void InitializeMlp()
        {
            var numberOfLayers     = Convert.ToInt32(numLayers.Value);
            var enabledNumLayers   = _numLayersList.Where(numControl => numControl.Enabled).ToList();
            var inputsPerLayer     = ReadInputsPerLayer(enabledNumLayers);
            var neuronsPerLayer    = ReadNeuronsPerLayer(enabledNumLayers);
            var activationFunction = GetActivationFunctionType();

            _mlp = new MultiLayerPerceptron(numberOfLayers, inputsPerLayer, neuronsPerLayer, activationFunction);
        }
Example No. 12
        public IActionResult TestNeuralNetwork(Guid neuralNetworkId)
        {
            if (!_mlpRepository.NeuralNetworkExists(neuralNetworkId))
            {
                return NotFound();
            }
            var neuralNetworkFromRepo          = _mlpRepository.GetFullNeuralNetwork(neuralNetworkId);
            List <TrainingDataDto> trainingSet = TrainingSet.GetTestingSet(neuralNetworkFromRepo.TrainingConfig);

            return Ok(MultiLayerPerceptron.TestNetwork(neuralNetworkFromRepo, trainingSet));
        }
Example No. 13
        public virtual void run()
        {
            Console.WriteLine("Creating training and test set from file...");
            string trainingSetFileName = "data_sets/segment challenge.txt";
            string testSetFileName     = "data_sets/segment test.txt";
            int    inputsCount         = 19;
            int    outputsCount        = 7;

            //Create training data set from file
            DataSet trainingSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, ",");

            Console.WriteLine("Training set size: " + trainingSet.Rows.Count);
            trainingSet.shuffle();
            trainingSet.shuffle();

            //Normalizing training data set
            Normalizer normalizer = new MaxNormalizer();

            normalizer.normalize(trainingSet);

            //Create test data set from file
            DataSet testSet = DataSet.createFromFile(testSetFileName, inputsCount, outputsCount, ",");

            Console.WriteLine("Test set size: " + testSet.Rows.Count);
            Console.WriteLine("--------------------------------------------------");
            testSet.shuffle();
            testSet.shuffle();

            //Normalizing test data set
            normalizer.normalize(testSet);

            Console.WriteLine("Creating neural network...");
            //Create MultiLayerPerceptron neural network
            MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 17, 10, outputsCount);
            //attach listener to learning rule
            MomentumBackpropagation learningRule = (MomentumBackpropagation)neuralNet.LearningRule;

            learningRule.addListener(this);

            learningRule.LearningRate  = 0.01;
            learningRule.MaxError      = 0.001;
            learningRule.MaxIterations = 12000;

            Console.WriteLine("Training network...");
            //train the network with training set
            neuralNet.learn(trainingSet);

            Console.WriteLine("Testing network...\n\n");
            testNeuralNetwork(neuralNet, testSet);

            Console.WriteLine("Done.");
            Console.WriteLine("**************************************************");
        }
Example No. 14
        public virtual void evaluate()
        {
            Console.WriteLine("Evaluating neural network...");
            //Loading neural network from file
            MultiLayerPerceptron neuralNet = (MultiLayerPerceptron)NeuralNetwork.createFromFile(config.TrainedNetworkFileName);

            //Load normalized balanced data set from file
            DataSet dataSet = DataSet.load(config.TestFileName);

            //Testing neural network
            testNeuralNetwork(neuralNet, dataSet);
        }
Example No. 15
        //Training neural network with normalized balanced training data set
        public virtual void train()
        {
            Console.WriteLine("Training neural network... ");
            MultiLayerPerceptron neuralNet = (MultiLayerPerceptron)NeuralNetwork.createFromFile(config.TrainedNetworkFileName);

            DataSet dataSet = DataSet.load(config.NormalizedBalancedFileName);

            neuralNet.LearningRule.addListener(this);
            neuralNet.learn(dataSet);
            Console.WriteLine("Saving trained neural network to file... ");
            neuralNet.save(config.TrainedNetworkFileName);
            Console.WriteLine("Neural network successfully saved!");
        }
Example No. 16
        /// <summary>
        /// Runs this sample
        /// </summary>
        public static void Main(string[] args)
        {
            MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(2, 3, 1);

            // neuralNet.randomizeWeights(new WeightsRandomizer());
            // neuralNet.randomizeWeights(new RangeRandomizer(0.1, 0.9));
            // neuralNet.randomizeWeights(new GaussianRandomizer(0.4, 0.3));
            neuralNet.randomizeWeights(new NguyenWidrowRandomizer(0.3, 0.7));
            printWeights(neuralNet);

            neuralNet.randomizeWeights(new DistortRandomizer(0.5));
            printWeights(neuralNet);
        }
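printWeights is referenced but not shown in this sample; a plausible implementation, assuming the port exposes Neuroph's usual Layers/Neurons/Weights traversal (property names here are assumptions), would be:

        // Assumed traversal API; adjust property names to the actual port.
        private static void printWeights(NeuralNetwork neuralNet)
        {
            foreach (Layer layer in neuralNet.Layers)
            {
                foreach (Neuron neuron in layer.Neurons)
                {
                    foreach (Weight weight in neuron.Weights)
                    {
                        Console.Write(weight.Value + " ");
                    }
                }
                Console.WriteLine();
            }
        }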
Example No. 17
        //Creating and saving neural network to file
        public virtual void createNeuralNetwork()
        {
            Console.WriteLine("Creating neural network... ");
            MultiLayerPerceptron    neuralNet    = new MultiLayerPerceptron(config.InputCount, config.FirstHiddenLayerCount, config.SecondHiddenLayerCount, config.OutputCount);
            MomentumBackpropagation learningRule = (MomentumBackpropagation)neuralNet.LearningRule;

            learningRule.LearningRate  = 0.01;
            learningRule.MaxError      = 0.1;
            learningRule.MaxIterations = 1000;
            Console.WriteLine("Saving neural network to file... ");
            neuralNet.save(config.TrainedNetworkFileName);
            Console.WriteLine("Neural network successfully saved!");
        }
Example No. 18
        // The Java original declared 'throws java.io.FileNotFoundException';
        // .NET has no checked exceptions, so the clause is dropped.
        public virtual void run()
        {
            // the Java original resolved this file via Class.getResource();
            // a plain relative path serves the same purpose here
            string inputFileName = "data/iris_data_normalised.txt";

            // create MultiLayerPerceptron neural network
            MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(4, 16, 3);

            BufferedDataSet irisDataSet = new BufferedDataSet(new File(inputFileName), 4, 3, ",");

            neuralNet.LearningRule.addListener(this);
            neuralNet.learn(irisDataSet);

            // neuralNet.getLearningRule().setMaxError(0.001);
        }
Example No. 19
        static void Real()
        {
            var inputLayer  = new InputLayer3D(1, 1, 1);
            var outputLayer = new OutputLayer(1)
            {
                ActivationFunction = new ConstOutputArrayFunction()
            };
            var dataProvider = new FunctionProvider();

            InitializeTrainingData(dataProvider.TrainData);
            var perceptron1 = new PerceptronLayer(10, 2)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var perceptron2 = new PerceptronLayer(10, 10)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var perceptron3 = new PerceptronLayer(8, 10)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var perceptron4 = new PerceptronLayer(6, 8)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var perceptron5 = new PerceptronLayer(2, 6)
            {
                ActivationFunction = new SigmoidFunction()
            };

            MultiLayerPerceptron network = new MultiLayerPerceptron
            {
                InputLayer   = inputLayer,
                OutputLayer  = outputLayer,
                DataProvider = dataProvider
            };

            network.HiddenLayers.Add(perceptron1);
            network.HiddenLayers.Add(perceptron2);
            network.HiddenLayers.Add(perceptron3);
            network.HiddenLayers.Add(perceptron4);
            network.HiddenLayers.Add(perceptron5);

            var trainer = new FCTrainer(network, 10, 1, dataProvider);

            trainer.Train(1);
            var error = network.Test(1);
        }
Example No. 20
        public void TestTrain()
        {
            var multiLayerPerceptron          = new MultiLayerPerceptron();
            var multiLayerPerceptronParameter =
                new MultiLayerPerceptronParameter(1, 0.1, 0.99, 0.2, 100, 3, ActivationFunction.SIGMOID);

            multiLayerPerceptron.Train(iris.GetInstanceList(), multiLayerPerceptronParameter);
            Assert.AreEqual(5.33, 100 * multiLayerPerceptron.Test(iris.GetInstanceList()).GetErrorRate(), 0.01);
            multiLayerPerceptronParameter = new MultiLayerPerceptronParameter(1, 0.01, 0.99, 0.2, 100, 30, ActivationFunction.SIGMOID);
            multiLayerPerceptron.Train(bupa.GetInstanceList(), multiLayerPerceptronParameter);
            Assert.AreEqual(28.69, 100 * multiLayerPerceptron.Test(bupa.GetInstanceList()).GetErrorRate(), 0.01);
            multiLayerPerceptronParameter = new MultiLayerPerceptronParameter(1, 0.01, 0.99, 0.2, 100, 20, ActivationFunction.SIGMOID);
            multiLayerPerceptron.Train(dermatology.GetInstanceList(), multiLayerPerceptronParameter);
            Assert.AreEqual(1.91, 100 * multiLayerPerceptron.Test(dermatology.GetInstanceList()).GetErrorRate(), 0.01);
        }
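The positional MultiLayerPerceptronParameter arguments are opaque at the call site; reading the first configuration against what the library's constructor appears to declare (an assumption stated here for readability, not confirmed by this sample):

        // Assumed parameter order:
        // new MultiLayerPerceptronParameter(
        //     1,        // random seed
        //     0.1,      // learning rate
        //     0.99,     // learning-rate decay per epoch
        //     0.2,      // cross-validation ratio
        //     100,      // number of epochs
        //     3,        // hidden-layer node count
        //     ActivationFunction.SIGMOID);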
Example No. 21
        static void OneTrainingData()
        {
            var inputLayer  = new InputLayer3D(1, 1, 3);
            var outputLayer = new OutputLayer(2)
            {
                ActivationFunction = new ConstOutputArrayFunction()
            };
            var weight1 = new List <Array3D> {
                new Array3D(0.1, 0.3), new Array3D(0.3, 0.1)
            };
            var weight2 = new List <Array3D> {
                new Array3D(0.4, 0.5), new Array3D(0.3, 0.5)
            };
            var perceptron1 = new PerceptronLayer(weight1)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var perceptron2 = new PerceptronLayer(weight2)
            {
                ActivationFunction = new SigmoidFunction()
            };
            var dataProvider = new FunctionProvider
            {
                TrainData =
                {
                    new TrainingData <Array3D, Array3D> {
                        Input = new Array3D(0.3, 0.4, 0.5), Expected = new Array3D(0.2, 0.6)
                    },
                    new TrainingData <Array3D, Array3D> {
                        Input = new Array3D(0.2, 0.4, 0.7), Expected = new Array3D(0.1, 0.8)
                    }
                }
            };

            var network = new MultiLayerPerceptron
            {
                InputLayer   = inputLayer,
                OutputLayer  = outputLayer,
                DataProvider = dataProvider
            };

            network.HiddenLayers.Add(perceptron1);
            network.HiddenLayers.Add(perceptron2);

            var trainer = new FCTrainer(network, 2, 1, dataProvider);

            trainer.Train(100);
        }
Example No. 22
 public FormLearningPerceptron(int inC, int outC, int hiddenLC, int neuronC)
 {
     inCount          = inC;
     outCount         = outC;
     hiddenLayerCount = hiddenLC;
     neuronCount      = neuronC;
     Xmins            = new Vector(inC);
     Xmaxs            = new Vector(inC);
     Ymins            = new Vector(outC);
     Ymaxs            = new Vector(outC);
     //perceptron = new MultiLayerPerceptron(2, 1, 4, 3);
     //perceptron = new MultiLayerPerceptron(inC, outC, hiddenLC, neuronC);
     //perceptron.Train(X, Y, 0.5f, 0.5f, 0.005f, 10000000);
     perceptron = new MultiLayerPerceptron(inCount, outCount, hiddenLayerCount, neuronCount, 1.0f);
     InitializeComponent();
 }
Example No. 23
        public virtual void run()
        {
            Console.WriteLine("Creating training and test set from file...");
            string trainingSetFileName = "data_sets/diabetes.txt";
            int    inputsCount         = 8;
            int    outputsCount        = 2;

            //Create data set from file
            DataSet dataSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, ",");

            dataSet.shuffle();

            //Normalizing data set
            Normalizer normalizer = new MaxNormalizer();

            normalizer.normalize(dataSet);

            //Creating training set (70%) and test set (30%)
            DataSet[] trainingAndTestSet = dataSet.createTrainingAndTestSubsets(70, 30);
            DataSet   trainingSet        = trainingAndTestSet[0];
            DataSet   testSet            = trainingAndTestSet[1];

            //        for (int i = 0; i < 21; i++) {
            Console.WriteLine("Creating neural network...");
            //Create MultiLayerPerceptron neural network
            MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 9, outputsCount);
            //            System.out.println("HIDDEN COUNT: " + i);
            //attach listener to learning rule
            MomentumBackpropagation learningRule = (MomentumBackpropagation)neuralNet.LearningRule;

            learningRule.addListener(this);

            learningRule.LearningRate  = 0.05;
            learningRule.MaxError      = 0.01;
            learningRule.MaxIterations = 10000;

            Console.WriteLine("Training network...");
            //train the network with training set
            neuralNet.learn(trainingSet);

            Console.WriteLine("Testing network...\n\n");
            testNeuralNetwork(neuralNet, testSet);

            Console.WriteLine("Done.");
            Console.WriteLine("**************************************************");
            //        }
        }
Example No. 24
        public static void Main(string[] args)
        {
            // load trained network and create data set from csv file
            MultiLayerPerceptron neuralNet = (MultiLayerPerceptron)NeuralNetwork.createFromFile("irisNet.nnet");
            DataSet dataSet = DataSet.createFromFile("data_sets/iris_data_normalised.txt", 4, 3, ",");

            string[] classNames = new string[] { "Virginica", "Setosa", "Versicolor" };

            CrossValidation crossval = new CrossValidation(neuralNet, dataSet, 5);

            crossval.addEvaluator(new ClassifierEvaluator.MultiClass(classNames));

            crossval.run();
            CrossValidationResult results = crossval.Result;

            Console.WriteLine(results);
        }
Example No. 25
        public IActionResult GetImageProcessed(Guid neuralNetworkId, [FromBody] ImageDto image)
        {
            var neuralNetworkFromRepo = _mlpRepository.GetFullNeuralNetwork(neuralNetworkId);

            if (neuralNetworkFromRepo == null)
            {
                return(NotFound("Red no encontrada"));
            }

            var imageProcessingConfigActiveFromRepo = _mlpRepository.GetActiveImageProcessingConfigByNeuralNetwork(neuralNetworkId);

            ImageProcessing.MLP mlp   = new ImageProcessing.MLP();
            List <double>       input = mlp.ProcessImageMLP(image, imageProcessingConfigActiveFromRepo);

            //b.("");
            return Ok(MultiLayerPerceptron.GetNetworkPrediction(neuralNetworkFromRepo, input));
        }
Example No. 26
        public IActionResult GetImageProcessedRasp(Guid neuralNetworkId, [FromBody] ImageRaspDto image)
        {
            var neuralNetworkFromRepo = _mlpRepository.GetFullNeuralNetwork(neuralNetworkId);

            if (neuralNetworkFromRepo == null)
            {
                return(NotFound("Red no encontrada"));
            }

            var imageProcessingConfigActiveFromRepo = _mlpRepository.GetActiveImageProcessingConfigByNeuralNetwork(neuralNetworkId);

            ImageProcessing.MLP mlp = new ImageProcessing.MLP();
            WriteFilesHelper.WriteImageFromBytes(WriteFilesHelper.GetStartupFolder(), "Test.bmp", Convert.FromBase64String(image.ImageBase64WithMetadata));
            List <double> input = mlp.ProcessLocalImageMLP(WriteFilesHelper.GetLocalRaspImage(), imageProcessingConfigActiveFromRepo);

            //b.("");
            return Ok(MultiLayerPerceptron.GetNetworkPrediction(neuralNetworkFromRepo, input));
        }
Example No. 27
        public void SmallCaseTest()
        {
            MultiLayerPerceptron p = new MultiLayerPerceptron(
                2,
                new int[] { 3, 2, 2 },
                new LayerType[] { LayerType.Sigmoid, LayerType.Sigmoid, LayerType.None }
                );

            p.SetWeights(
                new double[3][, ]
            {
                new double[, ] {
                    { 0.1, 0.3, 0.5 },
                    { 0.2, 0.4, 0.6 }
                },
                new double[, ] {
                    { 0.1, 0.4 },
                    { 0.2, 0.5 },
                    { 0.3, 0.6 }
                },
                new double[, ]
                {
                    { 0.1, 0.3 },
                    { 0.2, 0.4 }
                }
            }
                );
            p.SetBias(
                new double[3][]
            {
                new double[] { 0.1, 0.2, 0.3 },
                new double[] { 0.1, 0.2 },
                new double[] { 0.1, 0.2 }
            }
                );
            var result = p.Predict(new double[, ] {
                { 1.0, 0.5 }
            });

            // valid result: 0.31682708, 0.69627909
            Console.WriteLine("result: {0}, {1}", result[0, 0], result[0, 1]);
            Assert.IsTrue(0.31682707 < result[0, 0] && result[0, 0] < 0.31682709);
            Assert.IsTrue(0.69627908 < result[0, 1] && result[0, 1] < 0.69627910);
        }
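The expected values in SmallCaseTest can be reproduced by hand; a self-contained forward pass over the same weights and biases (sigmoid, sigmoid, then identity) yields exactly 0.31682708 and 0.69627909:

        // Library-independent check of the SmallCaseTest forward pass.
        static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

        static double[] Forward(double[] x, double[,] w, double[] b, bool applySigmoid)
        {
            var y = new double[b.Length];
            for (int j = 0; j < b.Length; j++)
            {
                double sum = b[j];
                for (int i = 0; i < x.Length; i++)
                {
                    sum += x[i] * w[i, j];
                }
                y[j] = applySigmoid ? Sigmoid(sum) : sum;
            }
            return y;
        }

        // (1.0, 0.5) -> sigmoid layer 1 -> (0.5744, 0.6682, 0.7503)
        //            -> sigmoid layer 2 -> (0.6263, 0.7710)
        //            -> identity layer 3 -> (0.31682708, 0.69627909)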
Example No. 28
        /// <param name="dataSet"> training set used for error estimation </param>
        /// <returns> neural network model with optimized architecture for provided data set </returns>
        public virtual NeuralNetwork createOptimalModel(DataSet dataSet)
        {
            List <int> neurons = new List <int>();

            neurons.Add(minNeuronsPerLayer);
            findArchitectures(1, minNeuronsPerLayer, neurons);

            LOG.info("Total [{}] different network topologies found", allArchitectures.Count);

            foreach (List <int> architecture in allArchitectures)
            {
                architecture.Insert(0, dataSet.InputSize);
                architecture.Add(dataSet.OutputSize);

                LOG.info("Architecture: [{}]", architecture);

                MultiLayerPerceptron network  = new MultiLayerPerceptron(architecture);
                LearningListener     listener = new LearningListener(10, learningRule.MaxIterations);
                learningRule.addListener(listener);
                network.LearningRule = learningRule;

                errorEstimationMethod = new CrossValidation(network, dataSet, 10);
                errorEstimationMethod.run();
                // retrieve the multi-class evaluator to read its confusion matrix
                var evaluator = errorEstimationMethod.getEvaluator <ClassifierEvaluator.MultiClass>(typeof(ClassifierEvaluator.MultiClass));

                ClassificationMetrics[] result = ClassificationMetrics.createFromMatrix(evaluator.Result);

                // keep the architecture with the highest F-measure so far
                if (optimalResult == null || optimalResult.FMeasure < result[0].FMeasure)
                {
                    LOG.info("Architecture [{}] became optimal architecture  with metrics {}", architecture, result);
                    optimalResult      = result[0];
                    optimalClassifier  = network;
                    optimalArchitecure = architecture;
                }

                LOG.info("#################################################################");
            }


            LOG.info("Optimal Architecture: {}", optimalArchitecure);
            return optimalClassifier;
        }
Example No. 29
        public virtual void run()
        {
            Console.WriteLine("Creating training set...");
            string trainingSetFileName = "data_sets/cpu_data.txt";
            int    inputsCount         = 7;
            int    outputsCount        = 1;

            // create training set from file
            DataSet    dataSet    = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, ",", false);
            Normalizer normalizer = new MaxNormalizer();

            normalizer.normalize(dataSet);


            Console.WriteLine("Creating neural network...");
            // create MultiLayerPerceptron neural network
            MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 16, outputsCount);


            // attach listener to learning rule
            MomentumBackpropagation learningRule = (MomentumBackpropagation)neuralNet.LearningRule;

            learningRule.addListener(this);

            // set learning rate and max error
            learningRule.LearningRate = 0.2;
            learningRule.MaxError     = 0.01;

            Console.WriteLine("Training network...");
            // train the network with training set
            neuralNet.learn(dataSet);

            Console.WriteLine("Training completed.");
            Console.WriteLine("Testing network...");

            testNeuralNetwork(neuralNet, dataSet);

            Console.WriteLine("Saving network");
            // save neural network to file
            neuralNet.save("MyNeuralNetCPU.nnet");

            Console.WriteLine("Done.");
        }
Example No. 30
    void Start()
    {
        this.spaceshipRigidBody = this.GetComponent <Rigidbody>();

        this.evolutionController = this.transform.parent.parent.GetComponent <IEvolutionaryController>();
        this.instanceIndex       = this.evolutionController.getInstanceIndex();
        this.simulationInstance  = this.evolutionController.getInstanceByIndex(instanceIndex);
        this.neuralNetwork       = this.evolutionController.getGenomeByIndex(instanceIndex).buildNeuralNetwork();

        if (evolutionController.isRunning())
        {
            Vector3 padPosition = this.evolutionController.getPadPosition();
            this.targetObject.transform.position += padPosition;
        else
        {
            this.targetObject.transform.position = this.evolutionController.getPadPosition();
        }
    }
Example No. 31
        public static void Main(string[] args)
        {
            ITunableParameterService parameters = TunableParameterService.Instance;
            int numInputs = parameters.NumberOfInputs;
            int numOutputs = parameters.NumberOfOutputs;
            NetworkType type = parameters.NeuralNetworkType;
            int numHiddenLayers = parameters.NumberOfHiddenLayers;
            int neuronsPerHiddenLayer = parameters.NumberOfNodesPerHiddenLayer;

            FunctionApproximator approximator = new FunctionApproximator();
            NeuralNetwork network;

            if (type == NetworkType.MLP) {
                network = new MultiLayerPerceptron(numInputs, numOutputs, numHiddenLayers, neuronsPerHiddenLayer);
            }
            else {
                network = new RadialBasisFunctionNetwork(numInputs, numOutputs);
            }

            approximator.ApproximateFunction(network);
        }
Example No. 32
        public MultiLayerPerceptron BuildMultiLayerPerceptron()
        {
            // 4 inputs, 3 hidden neurons, 2 outputs, fully connected

            var factory = new SigmoidNeuronFactory();

            var network = new MultiLayerPerceptron(factory);

            // input values
            var i1 = new Input();
            var i2 = new Input();
            var i3 = new Input();
            var i4 = new Input();

            // hidden layer
            var hn1 = factory.Create();
            var hn2 = factory.Create();
            var hn3 = factory.Create();
            //var hiddenLayer = new Layer(new List<INeuron> { hn1, hn2, hn3 });
            
            // output neurons
            var on1 = factory.Create();
            var on2 = factory.Create();
            //var outputLayer = new Layer(new List<INeuron> {on1, on2});

            network.AddInput(i1);
            network.AddInput(i2);
            network.AddInput(i3);
            network.AddInput(i4);

            // hidden layer
            network.AddHiddenNeuron(hn1);
            network.AddHiddenNeuron(hn2);
            network.AddHiddenNeuron(hn3);

            // output layer
            network.AddOutput(on1);
            network.AddOutput(on2);


            // hidden connections
            var dh11 = new Dendrite(learningRate: 1);
            var dh12 = new Dendrite(learningRate: 1);
            var dh21 = new Dendrite(learningRate: 1);
            var dh22 = new Dendrite(learningRate: 1);
            var dh31 = new Dendrite(learningRate: 1);
            var dh32 = new Dendrite(learningRate: 1);

            dh11.SetConnection(hn1);
            dh12.SetConnection(hn1);
            dh21.SetConnection(hn2);
            dh22.SetConnection(hn2);
            dh31.SetConnection(hn3);
            dh32.SetConnection(hn3);

            on1.Connect(dh11);
            on1.Connect(dh21);
            on1.Connect(dh31);
            on2.Connect(dh12);
            on2.Connect(dh22);
            on2.Connect(dh32);


            // input dendrite connections: one dendrite per input/hidden-neuron
            // pair, so four inputs x three hidden neurons = twelve dendrites
            var d11 = new Dendrite(learningRate: 1);
            var d12 = new Dendrite(learningRate: 1);
            var d13 = new Dendrite(learningRate: 1);
            var d21 = new Dendrite(learningRate: 1);
            var d22 = new Dendrite(learningRate: 1);
            var d23 = new Dendrite(learningRate: 1);
            var d31 = new Dendrite(learningRate: 1);
            var d32 = new Dendrite(learningRate: 1);
            var d33 = new Dendrite(learningRate: 1);
            var d41 = new Dendrite(learningRate: 1);
            var d42 = new Dendrite(learningRate: 1);
            var d43 = new Dendrite(learningRate: 1);

            d11.SetConnection(i1);
            d12.SetConnection(i1);
            d13.SetConnection(i1);
            d21.SetConnection(i2);
            d22.SetConnection(i2);
            d23.SetConnection(i2);
            d31.SetConnection(i3);
            d32.SetConnection(i3);
            d33.SetConnection(i3);
            d41.SetConnection(i4);
            d42.SetConnection(i4);
            d43.SetConnection(i4);

            hn1.Connect(d11);
            hn1.Connect(d21);
            hn1.Connect(d31);
            hn1.Connect(d41);

            hn2.Connect(d12);
            hn2.Connect(d22);
            hn2.Connect(d32);
            hn2.Connect(d42);

            hn3.Connect(d13);
            hn3.Connect(d23);
            hn3.Connect(d33);
            hn3.Connect(d43);

            return network;
        }