Example #1
        public static void TestTanhLearningOnSinus()
        {
            NNetwork network = NNetwork.HyperbolicNetwork(new int[] { 1, 2, 1 });

            network.RandomizeWeights(1, 2);
            NetworkTrainer trainer = new NetworkTrainer(network);

            double[][] inputs  = SinusTrainSet()[0];
            double[][] outputs = SinusTrainSet()[1];
            double     error   = 1;
            double     delta   = 1;
            int        j       = 0;

            // Keep training until the error is small enough or it stops
            // improving (the "j == 1" term forces at least a second pass).
            for (; (error > 0.01 && !(delta <= 0.000001)) || j == 1; j++)
            {
                trainer.TrainClassification(inputs, outputs);
                double new_cost = trainer.GetError();
                delta = error - new_cost;
                error = new_cost;
            }
            double[][] input_test  = SinusTrainSet(20)[0];
            double[][] output_test = SinusTrainSet(20)[1];
            // Measure the error on a fresh test set with learning disabled.
            trainer.IsLearning = false;
            trainer.TrainClassification(input_test, output_test);
            error = trainer.GetError();
            Console.Out.WriteLine(error);
            // Print input, network output and the true sin(x) side by side.
            for (int i = 0; i < input_test.Length; i++)
            {
                network.SetInput(input_test[i]);
                Show(new [] { input_test[i][0], network.GetOutput()[0], Math.Sin(input_test[i][0]) });
            }
        }
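
Examples 1 and 5 call a SinusTrainSet helper that does not appear on this page. Below is a minimal sketch of what such a helper might look like, assuming it samples Math.Sin on a small interval and returns the inputs at index 0 and the expected outputs at index 1 (the layout follows how the tests index the result; the sampling range and step are assumptions):

        // Hypothetical helper, not part of the original listing: builds "count"
        // (x, sin(x)) pairs. Index 0 holds the network inputs, index 1 the
        // expected outputs, matching the way the tests above consume it.
        private static double[][][] SinusTrainSet(int count = 10)
        {
            var inputs  = new double[count][];
            var outputs = new double[count][];
            for (int i = 0; i < count; i++)
            {
                double x = (double)i / count;       // sample points in [0, 1), assumed range
                inputs[i]  = new[] { x };
                outputs[i] = new[] { Math.Sin(x) };
            }
            return new[] { inputs, outputs };
        }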
Example #2
        private void buttonRandomize_Click(object sender, EventArgs e)
        {
            int seed = int.Parse(textSeed.Text);

            network.RandomizeWeights(seed);
            groupData.Enabled = true;
        }
Example #3
        public void TestRandomInit()
        {
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 2, 3, 2 });

            // The same seed must reproduce exactly the same weights.
            n.RandomizeWeights(seed: 0);
            var first = n.GetWeightMatrix();

            n.RandomizeWeights(seed: 0);
            var equal = n.GetWeightMatrix();

            Assert.AreEqual(first, equal);
            // A different seed should produce different weights.
            n.RandomizeWeights(seed: 1);
            var not_equal = n.GetWeightMatrix();

            Assert.AreNotEqual(first, not_equal);
        }
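
TestRandomInit pins down the contract of the seeded overload: the same seed must reproduce the same weight matrix, and a different seed must not. A minimal sketch of how such an overload could work, assuming the network keeps its weights in a flat array (the weights field below is an assumption, not NNetwork's actual layout):

        // Hypothetical sketch, not NNetwork's real code: a seeded System.Random
        // makes the initialization fully reproducible.
        public void RandomizeWeights(int seed)
        {
            var rng = new Random(seed);                 // same seed, same sequence
            for (int i = 0; i < weights.Length; i++)    // "weights" is an assumed flat array
            {
                weights[i] = rng.NextDouble() * 2 - 1;  // uniform in [-1, 1)
            }
        }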
Example #4
        public void TestTanhDerivative()
        {
            // Weak test: it only checks that the analytic derivatives are
            // proportional to the numeric estimates (constant ratio), not equal to them.
            NNetwork n = NNetwork.HyperbolicNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);
            Random random = new Random();
            double x;
            double y;
            double z;

            x = random.NextDouble();
            y = random.NextDouble();
            z = some_function(x, y);
            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();
            double[] ders  = n.Derivatives();
            double[] ests  = n.Estimation(0.0001);
            var      koeff = ests[0] / ders[0];

            for (int i = 0; i < ders.Length; i++)
            {
                MyAssert.CloseTo(ests[i] / ders[i], koeff, 0.00001);
            }
        }
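
The derivative tests depend on two helpers that are not shown on this page: some_function, the target the network is checked against, and MyAssert.CloseTo. Hypothetical stand-ins are sketched below, assuming NUnit-style assertions (any smooth two-argument function whose values fit the output range works for a gradient check):

        // Hypothetical stand-ins for helpers the tests assume but do not show.
        private static double some_function(double x, double y)
        {
            return 0.3 * x + 0.7 * y;   // any smooth target inside the output range works
        }

        public static class MyAssert
        {
            public static void CloseTo(double actual, double expected, double eps)
            {
                // passes when the two values differ by at most eps
                Assert.LessOrEqual(Math.Abs(actual - expected), eps);
            }
        }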
Example #5
        public void TestTanhLearningOnSinus()
        {
            NNetwork network = NNetwork.HyperbolicNetwork(new int[] { 1, 2, 1 });

            network.RandomizeWeights(1, 2);
            NetworkTrainer trainer = new NetworkTrainer(network);

            double[][] inputs  = SinusTrainSet()[0];
            double[][] outputs = SinusTrainSet()[1];
            double     error   = 1;
            double     delta   = 1;
            int        j       = 0;

            // Keep training until the error is small enough or it stops
            // improving (the "j == 1" term forces at least a second pass).
            for (; (error > 0.01 && !(delta <= 0.000001)) || j == 1; j++)
            {
                trainer.TrainClassification(inputs, outputs);
                double new_cost = trainer.GetError();
                delta = error - new_cost;
                error = new_cost;
            }
            double[][] input_test  = SinusTrainSet(20)[0];
            double[][] output_test = SinusTrainSet(20)[1];
            trainer.IsLearning = false;
            trainer.TrainClassification(input_test, output_test);
            error = trainer.GetError();
            Assert.Less(error, 0.53);
        }
Example #6
        public static void TestTanhDerivative()
        {
            NNetwork n = NNetwork.HyperbolicNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);
            Random random = new Random();
            double x;
            double y;
            double z;

            x = random.NextDouble();
            y = random.NextDouble();
            z = some_function(x, y);
            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();
            double[] ders = n.Derivatives();
            double[] ests = n.Estimation(0.0001);
            // Print each analytic derivative, its numeric estimate and their ratio.
            for (int i = 0; i < ders.Length; i++)
            {
                Show(new[] { ders[i], ests[i], ests[i] / ders[i] });
            }
        }
Example #7
//        public static void Main()
//        {
//            TrainPrediction();
////            Sinus();
////            TestTanhLearningOnSinus();
////            TestTanhDerivative();
//
//        }

        public static void TrainPrediction()
        {
            NNetwork network = NNetwork.SigmoidNetwork(new int[] { 5, 1 });

            network.RandomizeWeights(-1, 20);
            NetworkTrainer trainer = new NetworkTrainer(network);
            List <double>  tr      = new List <double>();

            for (double i = 0; i <= 1; i = i + 0.05)
            {
                tr.Add(i);
            }
            double[] train_set = tr.ToArray(); // the series 0, 0.05, 0.10, ... built above
            double   error     = 1;
            double   delta     = 1;
            int      j         = 0;

            // Keep training until the error is small enough or it stops
            // improving (the "j == 1" term forces at least a second pass).
            for (; (error > 0.01 && !(delta <= 0.00001)) || j == 1; j++)
            {
                trainer.TrainPrediction(train_set, 0.0001, 0.2);
                double new_cost = trainer.GetError();
                delta = error - new_cost;
                error = new_cost;
            }
            Console.Out.WriteLine(j + ": " + error);
            for (double i = 0; i <= 0.5; i = i + 0.05)
            {
                network.SetInput(new double[] { i + 0.0, i + 0.1, i + 0.2, i + 0.3, i + 0.4 });
                Show(new double[]
                {
                    i + 0.5,
                    network.GetOutput()[0],
//                        network.GetOutput()[1]
                });
            }
        }
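
TrainPrediction receives a single series (0, 0.05, ..., 1) while the network has five inputs and one output, so the trainer presumably slides a five-value window over the series and uses the value right after the window as the target. The helper below sketches that slicing under this assumption; it is illustrative, not the trainer's actual code (assumes using System and System.Collections.Generic):

        // Hypothetical sketch of the sliding-window slicing a prediction trainer
        // could perform: each "width" consecutive values form an input vector,
        // and the value immediately after the window is the expected output.
        private static IEnumerable<Tuple<double[], double>> SlidingWindows(double[] series, int width)
        {
            for (int i = 0; i + width < series.Length; i++)
            {
                double[] window = new double[width];
                Array.Copy(series, i, window, 0, width);
                yield return Tuple.Create(window, series[i + width]);
            }
        }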
Example #8
        public void TestDerivative()
        {
            // Fails when the squared-error cost function is used.
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);
            Random random = new Random();
            double x;
            double y;
            double z;

            x = random.NextDouble();
            y = random.NextDouble();
            z = some_function(x, y);
            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();
            double[] ders = n.Derivatives();
            double[] ests = n.Estimation(0.0001);
            for (int i = 0; i < ders.Length; i++)
            {
                MyAssert.CloseTo(ders[i], ests[i], 0.0001);
            }
        }
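
Example 8 is a standard gradient check: the analytic derivatives from BackPropagate are compared against numeric estimates from Estimation(h). The sketch below shows what such a finite-difference estimate typically computes; GetWeights, SetWeights and GetCost are assumed accessors used only for illustration, and the real NNetwork API may differ:

        // Hypothetical central finite-difference gradient check. Assumes the
        // input and answers were already set on the network, as in the test
        // above, and that GetWeights/SetWeights/GetCost exist (an assumption).
        private static double[] EstimateDerivatives(NNetwork n, double h)
        {
            double[] w    = n.GetWeights();
            double[] ests = new double[w.Length];
            for (int i = 0; i < w.Length; i++)
            {
                double saved = w[i];

                w[i] = saved + h; n.SetWeights(w);
                double costPlus = n.GetCost();

                w[i] = saved - h; n.SetWeights(w);
                double costMinus = n.GetCost();

                ests[i] = (costPlus - costMinus) / (2 * h);   // d(cost)/d(weight i)

                w[i] = saved; n.SetWeights(w);                // restore the original weight
            }
            return ests;
        }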