Example #1
        public void TestTanhNetwork()
        {
            NNetwork n = NNetwork.HyperbolicNetwork(new int[] { 2, 4, 4, 2 });

            INeuron[][] neurons = n.Neurons;
            Assert.IsInstanceOf(typeof(TanhNeuron), neurons[2][2]);
        }
Example #2
        public void TestTanhDerivative()
        {
            // So-so test: only checks that the numeric estimates and the analytic derivatives stay in a constant ratio =(
            NNetwork n = NNetwork.HyperbolicNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);
            Random random = new Random();
            double x;
            double y;
            double z;

            x = random.NextDouble();
            y = random.NextDouble();
            z = some_function(x, y);
            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();
            double[] ders  = n.Derivatives();
            double[] ests  = n.Estimation(0.0001);
            var      koeff = ests[0] / ders[0];

            for (int i = 0; i < ders.Length; i++)
            {
                MyAssert.CloseTo(ests[i] / ders[i], koeff, 0.00001);
            }
        }
Example #3
    private NNetwork[] PickBestPopulation()
    {
        NNetwork[] newPopulation = new NNetwork[initialPopulation];

        // elitism: copy the best agents into the new population and add their indices to the gene pool in proportion to performance
        for (int i = 0; i < bestAgentSelection; i++)
        {
            newPopulation[naturallySelected]             = population[i].InitialiseCopy(controller.LAYER, controller.NEURON);
            newPopulation[naturallySelected].performance = 0;
            naturallySelected++;

            int f = Mathf.RoundToInt(population[i].performance * 10);
            for (int c = 0; c < f; c++)
            {
                genePool.Add(i);
            }
        }

        // the worst agents also contribute gene-pool entries in proportion to their performance
        for (int i = 0; i < worstAgentSelection; i++)
        {
            int last = population.Length - 1;
            last -= i;

            int f = Mathf.RoundToInt(population[last].performance * 10);

            for (int c = 0; c < f; c++)
            {
                genePool.Add(last);
            }
        }
        return(newPopulation);
    }
Example #4
        private void buttonCreateNetwork_Click(object sender, EventArgs e)
        {
            String[] layers_string = textLayers.Text.Split(";".ToCharArray());
            int[]    layers        = new int[layers_string.Length];
            for (int i = 0; i < layers_string.Length; i++)
            {
                layers[i] = int.Parse(layers_string[i]);
            }
            if (radioHyperbolic.Checked)
            {
                network       = NNetwork.HyperbolicNetwork(layers);
                is_hyperbolic = true;
                is_sigmoid    = false;
            }
            if (radioSigmoid.Checked)
            {
                network       = NNetwork.SigmoidNetwork(layers);
                is_hyperbolic = false;
                is_sigmoid    = true;
            }
            if (radioCombined.Checked)
            {
                network       = NNetwork.CombinedNetwork(layers);
                is_hyperbolic = false;
                is_sigmoid    = true;
            }
            bool two_steps   = network.OutputCount() >= 2;
            bool three_steps = network.OutputCount() >= 3;

            checkTrain2.Enabled  = two_steps;
            checkTest2.Enabled   = two_steps;
            checkTrain3.Enabled  = three_steps;
            checkTest3.Enabled   = three_steps;
            groupWeights.Enabled = true;
        }
Example #5
        public void TestTanhLearningOnSinus()
        {
            NNetwork network = NNetwork.HyperbolicNetwork(new int[] { 1, 2, 1 });

            network.RandomizeWeights(1, 2);
            NetworkTrainer trainer = new NetworkTrainer(network);

            double[][] inputs  = SinusTrainSet()[0];
            double[][] outputs = SinusTrainSet()[1];
            double     error   = 1;
            double     delta   = 1;
            int        j       = 0;

            for (; error > 0.01 && !(delta <= 0.000001) || j == 1; j++)
            {
                trainer.TrainClassification(inputs, outputs);
                double new_cost = trainer.GetError();
                delta = error - new_cost;
                error = new_cost;
            }
            double[][] input_test  = SinusTrainSet(20)[0];
            double[][] output_test = SinusTrainSet(20)[1];
            trainer.IsLearning = false;
            trainer.TrainClassification(input_test, output_test);
            error = trainer.GetError();
            Assert.Less(error, 0.53);
        }
Example #6
        static void Main(string[] args)
        {
            // create an instance of the activation function
            Sigmoid sigmoid = new Sigmoid();

            network = new NNetwork(sigmoid, new int[] { 2, 4, 2 });

            TrainNetwork();

            TestNetwork();

            Console.WriteLine();

            Console.WriteLine("Save Load Test");

            NNetworkSaver saver = new NNetworkSaver(network);

            saver.SaveNetwork("network.nwk");

            NNetworkLoader loader = new NNetworkLoader("network.nwk", new ConsoleLogger());

            network = loader.LoadNNetwork(sigmoid);

            TestNetwork();

            Console.ReadLine();
        }
Example #7
        public static void TestTanhLearningOnSinus()
        {
            NNetwork network = NNetwork.HyperbolicNetwork(new int[] { 1, 2, 1 });

            network.RandomizeWeights(1, 2);
            NetworkTrainer trainer = new NetworkTrainer(network);

            double[][] inputs  = SinusTrainSet()[0];
            double[][] outputs = SinusTrainSet()[1];
            double     error   = 1;
            double     delta   = 1;
            int        j       = 0;

            for (; error > 0.01 && !(delta <= 0.000001) || j == 1; j++)
            {
                trainer.TrainClassification(inputs, outputs);
                double new_cost = trainer.GetError();
                delta = error - new_cost;
                error = new_cost;
            }
            double[][] input_test  = SinusTrainSet(20)[0];
            double[][] output_test = SinusTrainSet(20)[1];
            trainer.IsLearning = false;
            trainer.TrainClassification(input_test, output_test);
            error = trainer.GetError();
            Console.Out.WriteLine(error);
            for (int i = 0; i < input_test.Length; i++)
            {
                network.SetInput(input_test[i]);
                Show(new [] { input_test[i][0], network.GetOutput()[0], Math.Sin(input_test[i][0]) });
            }
        }
Example #8
        public void TestAlternativeMatrix()
        {
            double[][] from_l1 = new double[][]
            {
                new double[] { -30, 20, 20 },
                new double[] { 10, -20, -20 }
            };
            double[][] from_l2 = new double[][]
            {
                new double[] { -10, 20, 20 }
            };
            double[][][] weights = new double[][][]
            {
                from_l1,
                from_l2
            };
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 2, 2, 1 });

            n.SetWeightMatrix(weights);

            double[]   from_l1_expected = new double[] { -30, 20, 20, 10, -20, -20 };
            double[]   from_l2_expected = new double[] { -10, 20, 20 };
            double[][] weights_expected = new double[][]
            {
                from_l1_expected,
                from_l2_expected
            };
            Assert.AreEqual(weights_expected, n.GetWeightMatrix());
        }
Example #9
    public NNetwork InitialiseCopy(int hiddenLayerCount, int hiddenNeuronCount)
    {
        NNetwork n = new NNetwork();
        List <Matrix <float> > newWeights = new List <Matrix <float> >();

        // deep-copy every weight matrix so the copy can be mutated independently of this network
        for (int i = 0; i < this.weights.Count; i++)
        {
            Matrix <float> currentWeight = Matrix <float> .Build.Dense(weights[i].RowCount, weights[i].ColumnCount);

            for (int x = 0; x < currentWeight.RowCount; x++)
            {
                for (int y = 0; y < currentWeight.ColumnCount; y++)
                {
                    currentWeight[x, y] = weights[i][x, y];
                }
            }
            newWeights.Add(currentWeight);
        }
        List <float> newBiases = new List <float>();

        newBiases.AddRange(biases);

        n.weights = newWeights;
        n.biases  = newBiases;

        n.InitialiseHidden(hiddenLayerCount, hiddenNeuronCount);
        return(n);
    }
Example #10
        public void TestDimensions()
        {
            int[]    neurons_in_layers = new int[] { 3, 4, 2, 1 };
            NNetwork network           = NNetwork.SigmoidNetwork(neurons_in_layers);

            Assert.AreEqual(network.LayerCount, neurons_in_layers.Length);
            Assert.AreEqual(network.NeuronsInLayersWithoutBias, neurons_in_layers);
        }
Example #11
 private void FillPopulationWithRandomValues(NNetwork[] newPopulation, int startingIndex)
 {
     while (startingIndex < initialPopulation)
     {
         newPopulation[startingIndex] = new NNetwork();
         newPopulation[startingIndex].Initialise(controller.LAYER, controller.NEURON);
         startingIndex++;
     }
 }
Example #12
    private void Awake()
    {
        SP = transform.position;
        SR = transform.eulerAngles;
        EnvironmentControler = GetComponent <NNetwork>();


        EnvironmentControler.Initialise(LAYER, NEURON);
    }
Example #13
        public void TrainPrediction()
        {
            NNetwork       network = NNetwork.SigmoidNetwork(new int[] { 5, 2, 2 });
            NetworkTrainer trainer = new NetworkTrainer(network);

            double[] train_set = new double[] { 0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1 };
            trainer.TrainPrediction(train_set);
            //todo
        }
Example #14
        public void TestInputEqualsOutput()
        {
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 1 });

            n.SetWeightMatrix(new double[][]
            {
                new double[] { 1, 1 }
            });
            n.SetInput(new double[] { 9 });
            Assert.AreEqual(n.GetOutput()[0], 9);
        }
Example #15
 public void Death(float performance, NNetwork network)
 {
     if (currentGenome < population.Length - 1)
     {
         population[currentGenome].performance = performance;
         currentGenome++;
         ResetToCurrentGenome();
     }
     else
     {
         RePopulate();
     }
 }
Example #16
        public void TestBackPropWithKnownValues()
        {
            NNetwork n = NetworkTest.XorNetwork();

            n.SetInput(new double[] { 1, 1 });
            n.SetAnswers(new double[] { 0 });
            n.BackPropagate();
            double[] deltas = n.GetDeltasForLayer(2);
            Assert.AreNotEqual(deltas[0], 0);
            Assert.AreNotEqual(deltas[1], 0);
            MyAssert.CloseTo(deltas[0], 0, 0.001);
            MyAssert.CloseTo(deltas[1], 0, 0.001);
        }
Example #17
        public void IfInputNeuronsWereChangedThanBeforeCalculatingOutputInvalidateCache()
        {
            NNetwork n = XorNetwork();

            n.CacheEnabled = true;
            n.SetInput(new double[] { 0, 0 });
            var first = n.GetOutput();

            n.SetInput(new double[] { 1, 0 });
            var second = n.GetOutput();

            Assert.AreNotEqual(first, second);
        }
Example #18
        public void ActivateCachingAcrossAllNetwork()
        {
            NNetwork n = XorNetwork();

            n.CacheEnabled = false;
            n.SetInput(new double[] { 0, 0 });
            long without_cache = MyAssert.MeasureMethod(() => n.GetOutput(), 400);

            n.CacheEnabled = true;
            long with_cache = MyAssert.MeasureMethod(() => n.GetOutput(), 400);

            Assert.Greater(without_cache / with_cache, 1.9);
        }
Example #19
        public static NNetwork XorNetwork()
        {
            double[]   from_l1 = new double[] { -30, 20, 20, 10, -20, -20 };
            double[]   from_l2 = new double[] { -10, 20, 20 };
            double[][] weights = new double[][]
            {
                from_l1,
                from_l2
            };
            NNetwork xor_network = NNetwork.SigmoidNetwork(new int[] { 2, 2, 1 });

            xor_network.SetWeightMatrix(weights);
            return(xor_network);
        }
Example #20
        public void CanApplyTrainingForWholeNetwork()
        {
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 1, 2, 2, 1 });

            n.SetInput(new double[] { 0.3 });
            n.SetAnswers(new double[] { 0.8 });
            n.BackPropagate();
            var output_before = n.GetOutput();

            n.ApplyTraining();
            var output_after = n.GetOutput();

            Assert.AreNotEqual(output_after, output_before);
        }
Example #21
        public void TestWeightMatrix()
        {
            double[]   from_l1 = new double[] { -30, 20, 20, 10, -20, -20 };
            double[]   from_l2 = new double[] { -10, 20, 20 };
            double[][] weights = new double[][]
            {
                from_l1,
                from_l2
            };
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 2, 2, 1 });

            n.SetWeightMatrix(weights);
            Assert.AreEqual(n.GetWeightMatrix(), weights);
        }
Example #22
        public void DimensionTestCheck()
        {
            NNetwork       network = NNetwork.SigmoidNetwork(new int[] { 2, 4, 3 });
            NetworkTrainer trainer = new NetworkTrainer(network);

            double[][] incorrect_input  = new double[1][] { new double[3] };
            double[][] correct_input    = new double[1][] { new double[2] };
            double[][] incorrect_output = new double[1][] { new double[4] };
            double[][] correct_output   = new double[1][] { new double[3] };
            Assert.Throws(typeof(IncorrectInputDimensionException),
                          () => trainer.TrainClassification(incorrect_input, correct_output));
            Assert.Throws(typeof(IncorrectOutputDimensionException),
                          () => trainer.TrainClassification(correct_input, incorrect_output));
        }
Example #23
        public void TestCostFunctionAccumulation()
        {
            NNetwork       network = NNetwork.SigmoidNetwork(new int[] { 2, 4, 3 });
            NetworkTrainer trainer = new NetworkTrainer(network);

            double[] train_set = new[] { 0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1 };
            Assert.Throws(typeof(NoErrorInfoYetException), () => trainer.GetError());
            double error;

            trainer.TrainPrediction(train_set);
            error = trainer.GetError();
            Assert.AreNotEqual(error, 0);
            trainer.TrainPrediction(train_set);
            Assert.AreNotEqual(error, trainer.GetError());
        }
Example #24
 private void SortPopulation()
 {
     // simple exchange sort: order the population by performance, descending
     for (int i = 0; i < population.Length; i++)
     {
         for (int j = i; j < population.Length; j++)
         {
             if (population[i].performance < population[j].performance)
             {
                 NNetwork temp = population[i];
                 population[i] = population[j];
                 population[j] = temp;
             }
         }
     }
 }
Example #25
        public void TestSimplestConnection()
        {
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 1, 1 });

            n.SetWeightMatrix(new double[][]
            {
                new double[] { 1, 1 },
                new double[] { 1, 1 }
            });
            n.SetInput(new double[] { 1 });
            var output  = n.GetOutput()[0];
            var desired = 1 / (1 + Math.Pow(Math.E, -2));

            MyAssert.CloseTo(output, desired);
        }
Example #26
        public void TestNetworkSetAnswerAndGetDelta()
        {
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 2, 3, 2 });

            n.SetInput(new double[] { 0, 0 });
            double[] outputs = n.GetOutput();
            double[] answers = new double[] { 0.1, 0.9 };
            n.SetAnswers(answers);
            n.BackPropagate();
            double[] deltas = n.GetDeltasForLayer(3);
            for (int i = 0; i < answers.Length; i++)
            {
                MyAssert.CloseTo(deltas[i], answers[i] - outputs[i]);
            }
        }
Example #27
        public void TestRandomInit()
        {
            NNetwork n = NNetwork.SigmoidNetwork(new int[] { 2, 3, 2 });

            n.RandomizeWeights(seed: 0);
            var first = n.GetWeightMatrix();

            n.RandomizeWeights(seed: 0);
            var equal = n.GetWeightMatrix();

            Assert.AreEqual(first, equal);
            n.RandomizeWeights(seed: 1);
            var not_equal = n.GetWeightMatrix();

            Assert.AreNotEqual(first, not_equal);
        }
Example #28
        public static void Main(string[] args)
        {
            var numberOfInputNeurons  = 2;
            var numberOfOutputNeurons = 1;
            var network = new NNetwork(numberOfInputNeurons, numberOfOutputNeurons, 1);

            var layerFactory = new NeuralLayerFactory();

            //// TODO: Mix these 2 lines. eliminate the foreach
            //var inputLayer = layerFactory.CreateNeuralLayer(numberOfInputNeurons, new RectifiedActivationFuncion());
            //foreach (var x in inputLayer.Neurons)
            //{
            //    x.AddInputSynapse(0);
            //}

            //network.AddLayer(inputLayer);
            //network.AddLayer(layerFactory.CreateNeuralLayer(3, new SigmoidActivationFunction(0.7)));
            //network.AddLayer(layerFactory.CreateNeuralLayer(1, new SigmoidActivationFunction(0.7)));

            //network.PushExpectedValues(
            //    new double[][] {
            //    new double[] { 0 },
            //    new double[] { 1 },
            //    new double[] { 1 },
            //    new double[] { 1 },
            //    });

            //network.Train(
            //    new double[][] {
            //    new double[] { 0, 0},
            //    new double[] { 0, 1},
            //    new double[] { 1, 0},
            //    new double[] { 1, 1},
            //    }, 10000);

            //network.PushInputValues(new double[] { 1, 1 });
            //var outputs = network.GetOutput();

            //foreach (var output in outputs)
            //{
            //    System.Console.WriteLine(output);
            //}
        }
Example #29
        public static void TestTanhDerivative()
        {
            NNetwork n = NNetwork.HyperbolicNetwork(new int[] { 2, 2, 1 });

            n.RandomizeWeights(-1, 10);
            Random random = new Random();
            double x;
            double y;
            double z;

            x = random.NextDouble();
            y = random.NextDouble();
            z = some_function(x, y);
            n.SetInput(new double[] { x, y });
            n.SetAnswers(new double[] { z });
            n.BackPropagate();
            double[] ders = n.Derivatives();
            double[] ests = n.Estimation(0.0001);
            for (int i = 0; i < ders.Length; i++)
            {
                Show(new[] { ders[i], ests[i], ests[i] / ders[i] });
            }
        }
Example #30
//        public static void Main()
//        {
//            TrainPrediction();
////            Sinus();
////            TestTanhLearningOnSinus();
////            TestTanhDerivative();
//
//        }

        public static void TrainPrediction()
        {
            NNetwork network = NNetwork.SigmoidNetwork(new int[] { 5, 1 });

            network.RandomizeWeights(-1, 20);
            NetworkTrainer trainer = new NetworkTrainer(network);
            List <double>  tr      = new List <double>();

            for (double i = 0; i <= 1; i = i + 0.05)
            {
                tr.Add(i);
            }
            double[] train_set = tr.ToArray();//new double[] { 0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1 };
            double   error     = 1;
            double   delta     = 1;
            int      j         = 0;

            for (; error > 0.01 && !(delta <= 0.00001) || j == 1; j++)
            {
                trainer.TrainPrediction(train_set, 0.0001, 0.2);
                double new_cost = trainer.GetError();
                delta = error - new_cost;
                error = new_cost;
            }
            Console.Out.WriteLine(j + ": " + error);
            for (double i = 0; i <= 0.5; i = i + 0.05)
            {
                network.SetInput(new double[] { i + 0.0, i + 0.1, i + 0.2, i + 0.3, i + 0.4 });
                Show(new double[]
                {
                    i + 0.5,
                    network.GetOutput()[0],
//                        network.GetOutput()[1]
                });
            }
        }
Example #31
 public BackPropagationLearningAlgorithm(NNetwork nn)
     : base(nn)
 {
 }
Example #32
 public GeneticLearningAlgorithm(NNetwork nn)
     : base(nn)
 {
 }
Example #33
 public LearningAlgorithm(NNetwork nn, Perform per)
     : this(nn)
 {
     Per = Performance.Create(per);
 }
Example #34
 public LearningAlgorithm(NNetwork nn)
 {
     Error = 1;
     this.NN = nn;
 }