public void Run()
        {
            // Create training set for the logical XOR function:
            // 2 inputs, 1 output, all four truth-table rows.
            TrainingSet trainingSet = new TrainingSet(2, 1);
            trainingSet.Add(new SupervisedTrainingElement(new double[] { 0, 0 }, new double[] { 0 }));
            trainingSet.Add(new SupervisedTrainingElement(new double[] { 0, 1 }, new double[] { 1 }));
            trainingSet.Add(new SupervisedTrainingElement(new double[] { 1, 0 }, new double[] { 1 }));
            trainingSet.Add(new SupervisedTrainingElement(new double[] { 1, 1 }, new double[] { 0 }));

            // Create a multi-layer perceptron: 2 inputs, 3 hidden neurons, 1 output,
            // with tanh activations (XOR is not linearly separable, so a hidden layer is required).
            MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.TANH, 2, 3, 1);

            // Train on the XOR set (blocking call — runs in the current thread).
            Console.WriteLine("Training neural network...");
            myMlPerceptron.LearnInSameThread(trainingSet);

            // Verify the freshly trained network against the training set.
            Console.WriteLine("Testing trained neural network");
            TestNeuralNetwork(myMlPerceptron, trainingSet);

            // Persist the trained network to disk.
            myMlPerceptron.Save("myMlPerceptron.nnet");

            // Reload it to confirm the save/load round trip preserves behavior.
            NeuralNetwork loadedMlPerceptron = NeuralNetwork.Load("myMlPerceptron.nnet");

            // Test the reloaded network — previously this was commented out (and used a
            // wrongly-cased method name), leaving the round trip unverified.
            Console.WriteLine("Testing loaded neural network");
            TestNeuralNetwork(loadedMlPerceptron, trainingSet);
        }
        /// <summary>
        /// Creates and returns a new instance of Multi Layer Perceptron.
        /// </summary>
        /// <param name="layersStr">space separated number of neurons in layers</param>
        /// <param name="transferFunctionType">transfer function type for neurons</param>
        /// <param name="learningRule">learning rule type to attach to the network; one of
        /// <see cref="BackPropagation"/>, <see cref="MomentumBackpropagation"/> or
        /// <see cref="DynamicBackPropagation"/>. Any other value (including null) leaves
        /// the network's default learning rule in place.</param>
        /// <param name="useBias">whether neurons should include a bias input</param>
        /// <param name="connectIO">whether to add direct connections from the input layer to the output layer</param>
        /// <returns>a configured instance of Multi Layer Perceptron</returns>
        public static MultiLayerPerceptron CreateMLPerceptron(String layersStr, TransferFunctionType transferFunctionType, Type learningRule, bool useBias, bool connectIO)
        {
            IList<int> layerSizes = VectorParser.ParseInteger(layersStr);
            NeuronProperties neuronProperties = new NeuronProperties(transferFunctionType, useBias);
            MultiLayerPerceptron nnet = new MultiLayerPerceptron(layerSizes, neuronProperties);

            // Set the learning rule. Compare Type instances directly instead of the
            // previous Name-string comparison: Type equality cannot collide with a
            // same-named type from another namespace, and it is null-safe (a null
            // learningRule previously threw NullReferenceException here).
            if (learningRule == typeof(BackPropagation))
            {
                nnet.LearningRule = new BackPropagation();
            }
            else if (learningRule == typeof(MomentumBackpropagation))
            {
                nnet.LearningRule = new MomentumBackpropagation();
            }
            else if (learningRule == typeof(DynamicBackPropagation))
            {
                nnet.LearningRule = new DynamicBackPropagation();
            }

            // Optionally connect inputs directly to outputs (skip-layer connections).
            if (connectIO)
            {
                nnet.ConnectInputsToOutputs();
            }

            return nnet;
        }
 /// <summary>
 /// Creates and returns a new instance of Multi Layer Perceptron
 /// with the given layer layout and transfer function.
 /// </summary>
 /// <param name="layersStr">space separated number of neurons in layers</param>
 /// <param name="transferFunctionType">transfer function type for neurons</param>
 /// <returns>instance of Multi Layer Perceptron</returns>
 public static MultiLayerPerceptron CreateMLPerceptron(String layersStr, TransferFunctionType transferFunctionType)
 {
     // Parse the "a b c ..." layer-size string and build the network directly.
     IList<int> neuronsPerLayer = VectorParser.ParseInteger(layersStr);
     return new MultiLayerPerceptron(neuronsPerLayer, transferFunctionType);
 }
// ----- Beispiel #4 (example separator from the original listing) -----
        public void Run()
        {
            // Sliding-window sunspot predictor: WINDOW_SIZE inputs, 10 hidden
            // neurons, 1 output, sigmoid activations throughout.
            NeuralNetwork net = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, WINDOW_SIZE, 10, 1);
            // NOTE(review): the original carried commented-out FlatNetworkPlugin
            // flatten/shutdown calls around this method; they are intentionally not active.

            // Scale the raw sunspot series into the [0.1, 0.9] range so it fits
            // the sigmoid's useful output region.
            NormalizeSunspots(0.1, 0.9);

            // Observe training progress (this class receives learning-rule updates).
            net.LearningRule.AddObserver(this);

            // Build the windowed training set and train synchronously, then
            // run the prediction pass over the series.
            TrainingSet trainingData = GenerateTraining();
            net.LearnInSameThread(trainingData);
            Predict(net);
        }