public void TrainXor()
        {
            // Builds a 2-2-1 sigmoid network, trains it on the XOR truth table,
            // and verifies both that training converged and that the trained
            // network reproduces all four rows within tolerance.
            NeuralNetworkBuilder builder = new NeuralNetworkBuilder();
            NeuralNetwork        network = builder.CreateNew()
                                           .AddInputLayer(2, ActivationFunctions.Sigmoid, false)
                                           .AddHiddenLayer(2, ActivationFunctions.Sigmoid, false)
                                           .AddOutputLayer(1, ActivationFunctions.Sigmoid)
                                           .GetNetwork();
            BackPropagationTrainer trainer = new BackPropagationTrainer(network);

            // Full XOR truth table: output is 1 exactly when the inputs differ.
            TrainingData[] data = new TrainingData[]
            {
                new TrainingData(new double[] { 1, 1 }, new double[] { 0 }),
                new TrainingData(new double[] { 1, 0 }, new double[] { 1 }),
                new TrainingData(new double[] { 0, 1 }, new double[] { 1 }),
                new TrainingData(new double[] { 0, 0 }, new double[] { 0 }),
            };
            var result = trainer.Train(data, 100000, 0.7, 0.0, 0.005);

            // Debugging aid: the per-iteration error curve can be inspected here
            // when the convergence assertion below fails.
            string csv = result.Errors.ToCsvString();

            Assert.IsTrue(result.IsSuccessful, "could not train against expected error");

            double[] result1 = network.Run(new double[] { 1.0, 1.0 });
            double[] result2 = network.Run(new double[] { 1.0, 0.0 });
            double[] result3 = network.Run(new double[] { 0.0, 1.0 });
            double[] result4 = network.Run(new double[] { 0.0, 0.0 });

            Assert.IsTrue(result1[0].IsEqual(0, 0.0005));
            Assert.IsTrue(result2[0].IsEqual(1, 0.0005));
            Assert.IsTrue(result3[0].IsEqual(1, 0.0005));
            Assert.IsTrue(result4[0].IsEqual(0, 0.0005));
        }
        public void Test_Train_SimpleRow()
        {
            // Trains a 3-4-2 network on a single row and checks the resulting
            // first-layer weights and the network's outputs against known values.
            var net = new Network(3, 4, 2);
            Helper_InitWeights(net);

            var row          = new BackPropagationTrainingRow(new[] { 1.0, -2.0, 3.0 }, new[] { 0.1234, 0.8766 });
            var trainingRows = new[] { row };

            new BackPropagationTrainer(net).Train(trainingRows, 0.5, 10000);

            const double eps = 0.00001;

            // Weights previously recorded after 10000 rounds at rate 0.5.
            Assert.AreEqual(0.07770, net.HiddenNodes[0].Incoming[0].Weight, eps);
            Assert.AreEqual(0.08118, net.HiddenNodes[1].Incoming[0].Weight, eps);
            Assert.AreEqual(0.08441, net.HiddenNodes[2].Incoming[0].Weight, eps);

            // Feed the training inputs back through and verify the outputs.
            net.Invalidate();
            for (var index = 0; index < row.Inputs.Length; index++)
            {
                net.InputNodes[index].Value = row.Inputs[index];
            }

            Assert.AreEqual(row.Outputs[0], net.OutputNodes[0].Value, eps);
            Assert.AreEqual(row.Outputs[1], net.OutputNodes[1].Value, eps);
        }
// Example (Exemplo) n.º 3 — score: 0
        public void Run()
        {
            // Trains a 2-2-1 feed-forward network on the XOR truth table using
            // the standard back-propagation algorithm.
            double[][] xorInputs = new double[][]
            {
                new double[] { 0, 0 },
                new double[] { 1, 0 },
                new double[] { 0, 1 },
                new double[] { 1, 1 }
            };
            double[][] xorOutputs = new double[][]
            {
                new double[] { 0 },
                new double[] { 1 },
                new double[] { 1 },
                new double[] { 0 }
            };

            var trainingSet = new TrainingSet(xorInputs, xorOutputs);

            // Hyper-parameters for the run.
            IBackPropagationConstants constants = new BackPropagationConstants(
                learningRate: 0.25,
                momentum: 0.9,
                maxInitWeight: 0.5,
                outputTolerance: 0.1,
                maxIterations: 10000);

            // Topology: 2 input, 2 hidden, 1 output node.
            FeedForwardNet            net       = new FeedForwardNet(new int[] { 2, 2, 1 }, constants.MaxInitWeight);
            IBackPropagationAlgorithm algorithm = new BackPropagationStandardAlgorithm(constants);
            IBackPropagationTrainer   trainer   = new BackPropagationTrainer(net, algorithm, trainingSet);

            trainer.FeedForwardTrain();
        }
// Example (Exemplo) n.º 4 — score: 0
        public static Network CreateAndTrainNetwork(Scenario scenario, double adjust, int rounds)
        {
            // Convenience factory: builds the network for the scenario and runs
            // back-propagation over its training rows before handing it back.
            var result       = CreateNetwork(scenario);
            var trainingRows = GetTrainingRows(scenario);

            new BackPropagationTrainer(result).Train(trainingRows, adjust, rounds);

            return result;
        }
// Example (Exemplo) n.º 5 — score: 0
 private async Task Train()
 {
     // Trains the current network on the collected rows off the UI thread,
     // then persists the learned weights to isolated storage.
     //
     // Returns Task instead of the original `async void` so exceptions thrown
     // during training are observable by the caller instead of crashing the
     // process; direct `Train();` call sites remain source-compatible.
     if (_trainingRows.Count == 0)
     {
         return; // nothing to train on
     }

     using (BusyScope())
     {
         // Task.Run is the idiomatic way to offload CPU-bound work
         // (Task.Factory.StartNew without explicit scheduler/options has
         // subtle default-behavior pitfalls).
         await Task.Run(() =>
         {
             var trainer = new BackPropagationTrainer(_network);
             trainer.Train(_trainingRows, 0.5, 50);
             IsolatedStorageHelper.Save("weights.xml", _network.GetWeights());
         });
     }
 }
// Example (Exemplo) n.º 6 — score: 0
        public void Test_Train_SimpleRow()
        {
            // Single-row training regression test: after a fixed number of
            // rounds the first incoming weight of each hidden node must match
            // previously recorded values, and re-running the inputs must yield
            // the training targets.
            const double tolerance = 0.00001;

            var network = new Network(3, 4, 2);
            Helper_InitWeights(network);

            var trainer = new BackPropagationTrainer(network);
            var rows = new[]
            {
                new BackPropagationTrainingRow(new[] { 1.0, -2.0, 3.0 }, new[] { 0.1234, 0.8766 })
            };

            trainer.Train(rows, 0.5, 10000);

            Assert.AreEqual(0.07770, network.HiddenNodes[0].Incoming[0].Weight, tolerance);
            Assert.AreEqual(0.08118, network.HiddenNodes[1].Incoming[0].Weight, tolerance);
            Assert.AreEqual(0.08441, network.HiddenNodes[2].Incoming[0].Weight, tolerance);

            // Push the training inputs back through the trained network.
            network.Invalidate();
            for (var i = 0; i < rows[0].Inputs.Length; i++)
            {
                network.InputNodes[i].Value = rows[0].Inputs[i];
            }

            Assert.AreEqual(rows[0].Outputs[0], network.OutputNodes[0].Value, tolerance);
            Assert.AreEqual(rows[0].Outputs[1], network.OutputNodes[1].Value, tolerance);
        }
// Example (Exemplo) n.º 7 — score: 0
        private Tuple <Type, object> EvaluateCreateTrainer(ParseTreeNode node)
        {
            // Builds a trainer instance from a "create trainer" parse-tree node:
            // child 0 carries the trainer class name, child 1 the key/value options.
            // Returns the trainer paired with typeof(Trainer) as its declared type.
            var trainerClassName = node.ChildNodes[0];
            var trainerParams    = EvaluateKeyValuePair(node.ChildNodes[1]);

            // Defaults applied when an option is not supplied in the script.
            double learnRate           = 0.05;
            double minError            = 0.01;
            int    maxEpochs           = 100;
            int    maxHiddenLayers     = 2;
            int    show                = 10;
            IPerformanceFunction pFunc = null;
            TrainingModes        tMode = TrainingModes.OnLine;

            // Option values are machine-written script text, so parse them
            // culture-invariantly: a locale-sensitive double.Parse would read
            // "0.05" incorrectly (or throw) under comma-decimal cultures.
            var invariant = System.Globalization.CultureInfo.InvariantCulture;

            if (trainerParams.ContainsKey("learnRate"))
            {
                learnRate = double.Parse(trainerParams["learnRate"], invariant);
            }
            if (trainerParams.ContainsKey("minError"))
            {
                minError = double.Parse(trainerParams["minError"], invariant);
            }
            if (trainerParams.ContainsKey("maxEpochs"))
            {
                maxEpochs = int.Parse(trainerParams["maxEpochs"], invariant);
            }
            if (trainerParams.ContainsKey("maxHiddenLayers"))
            {
                maxHiddenLayers = int.Parse(trainerParams["maxHiddenLayers"], invariant);
            }
            if (trainerParams.ContainsKey("show"))
            {
                show = int.Parse(trainerParams["show"], invariant);
            }
            if (trainerParams.ContainsKey("performanceFunction"))
            {
                // Look up the named static field; fail with a clear error instead
                // of a NullReferenceException when the name is unknown.
                var field = typeof(PerformanceFunctions).GetField(trainerParams["performanceFunction"]);
                if (field == null)
                {
                    throw new ArgumentException(
                        "Unknown performance function: " + trainerParams["performanceFunction"]);
                }
                pFunc = (IPerformanceFunction)field.GetValue(null);
            }
            if (trainerParams.ContainsKey("mode"))
            {
                tMode = (TrainingModes)Enum.Parse(typeof(TrainingModes), trainerParams["mode"]);
            }

            Trainer trainer = null;

            // NOTE(review): the literal 0.01 third argument appears in every
            // constructor below; its meaning is not visible here — confirm
            // against the trainer constructors before renaming it.
            switch (trainerClassName.Token.Text)
            {
            case "BackPropagationTrainer":
                trainer = new BackPropagationTrainer(learnRate, minError, 0.01, maxEpochs, show, pFunc, tMode);
                break;

            case "PerceptronTrainer":
                trainer = new PerceptronTrainer(learnRate, minError, 0.01, maxEpochs, show, pFunc, tMode);
                break;

            case "ConstructiveTrainer":
                trainer = new ConstructiveTrainer(learnRate, minError, 0.01, maxEpochs, show, maxHiddenLayers,
                                                  pFunc, tMode);
                break;

            default:
                // More specific than the bare Exception thrown previously;
                // still caught by any existing catch (Exception) callers.
                throw new NotSupportedException(
                    "Trainer of kind " + trainerClassName.Token.Text + " does not exist.");
            }
            return(new Tuple <Type, object>(typeof(Trainer), trainer));
        }