Ejemplo n.º 1
0
 public static bool Test3()
 {
     // 2 inputs -> 2 hidden neurons -> 1 output.
     var mlp = new MLP(2, 2, 1);

     // Assign the weights 1..9 in order. For each weight layer, the source
     // index runs over the two inputs plus the bias unit (index 2), and the
     // destination index runs over the neurons of the next layer.
     var fanOut = new[] { 2, 1 };   // neurons in the hidden layer and output layer
     var weight = 1;
     for (var layer = 0; layer < fanOut.Length; layer++)
     {
         for (var from = 0; from <= 2; from++)          // 0,1 = neurons; 2 = bias
         {
             for (var to = 0; to < fanOut[layer]; to++)
             {
                 mlp[layer, from, to][0] = weight++;
             }
         }
     }
     return true;
 }
Ejemplo n.º 2
0
        /// <summary>
        /// Trains a 2-4-1 MLP on the XOR truth table via back-propagation
        /// (cross-entropy criterion, AdaGrad) and reports per-epoch progress.
        /// </summary>
        /// <returns><c>true</c> when training runs to completion.</returns>
        public static bool Test4()
        {
            var xorinput = new[] {new[]{0.0, 0.0}, new[]{1.0, 0.0}, new[]{0.0, 1.0}, new[]{1.0, 1.0}};
            var xoroutput = new[] { new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 } };
            var mlp = new MLP(2, 4, 1);
            var stsp = new StandardTrainingSetProvider(xorinput, xoroutput);
            stsp.Split();
            var gdbp = new BackPropagation(mlp, stsp);

            // Hoist the repeated cast; configure the gradient-descent solver once.
            var solver = (GD)gdbp.Solver;
            solver.Criterion = LearningCriterion.CrossEntropy;
            solver.AdaGrad = true;

            var minerr = double.MaxValue;

            gdbp.ReportReady += optimizer =>
            {
                Console.WriteLine("Epoch = {0}, Error = {1}", optimizer.CurrentEpoch,
                    optimizer.TrainingSetProvider.TrainError);
                minerr = Math.Min(minerr, optimizer.Solver.Error);

                // Report the best error seen instead of blocking on Console.ReadLine(),
                // which would hang any non-interactive (automated) run.
                if (optimizer.Done)
                {
                    Console.WriteLine("Done. Min error = {0}", minerr);
                }
            };

            // GetAwaiter().GetResult() surfaces the original exception rather than
            // wrapping it in an AggregateException as Wait() would.
            gdbp.RunAsync().GetAwaiter().GetResult();
            return true;
        }