Example #1
        static void Main(string[] args)
        {
            var nnXor = new NeuralNetwork.NeuralNetwork();

            Console.WriteLine("Xor:");
            nnXor.Train(new[]
            {
                new TrainData(new [] { 0, 0.0 }, new [] { 0 }),
                new TrainData(new [] { 0, 1.0 }, new [] { 1 }),
                new TrainData(new [] { 1, 0.0 }, new [] { 1 }),
                new TrainData(new [] { 1, 1.0 }, new [] { 0 })
            });

            Console.WriteLine($"0, 0: {nnXor.Calculate(new[] { 0.0, 0 })[0]}");
            Console.WriteLine($"0, 1: {nnXor.Calculate(new[] { 0.0, 1 })[0]}");
            Console.WriteLine($"1, 0: {nnXor.Calculate(new[] { 1.0, 0 })[0]}");
            Console.WriteLine($"1, 1: {nnXor.Calculate(new[] { 1.0, 1 })[0]}");

            Console.WriteLine("img:");
            var nnImg = new NeuralNetwork.NeuralNetwork();

            List <TrainData>[] trainData = new List <TrainData> [10];
            Parallel.For(0, 10, i =>
            {
                trainData[i] = GetTrainDataFromFile(i);
            });
            nnImg.Train(trainData.SelectMany(x => x).ToArray(), maxEpoch: 2);

            Console.ReadKey();
        }
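
GetTrainDataFromFile is not shown in this example. A minimal sketch of what such a helper could look like, assuming each digit's samples are stored one per line as semicolon-separated pixel values in a file named "<digit>.csv", with one-hot encoded outputs (the file name, format and encoding are assumptions, not taken from the source):

        static List <TrainData> GetTrainDataFromFile(int digit)
        {
            // Hypothetical layout: one sample per line, pixel values separated by ';'.
            var samples = new List <TrainData>();

            foreach (string line in System.IO.File.ReadLines($"{digit}.csv"))
            {
                double[] inputs = line.Split(';').Select(double.Parse).ToArray();

                // One-hot encode the expected digit (assumed output format).
                var outputs = new int[10];
                outputs[digit] = 1;

                samples.Add(new TrainData(inputs, outputs));
            }

            return(samples);
        }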
        public void BuildFromDNA_Should_Set_Next_Layers_Dendrites_Weights_From_DNA()
        {
            NeuralNetwork.NeuralNetwork nn      = new NeuralNetwork.NeuralNetwork();
            int           inputCount            = 2;
            int           outputCount           = 2;
            int           depth                 = 1;
            int           hiddenNeuronsPerLayer = 2;
            List <double> weights               = new List <double>
            {
                1, 2,
                3, 4,

                5, 6,
                7, 8,
            };
            string DNA = $"{inputCount}||{outputCount}||{depth}||{hiddenNeuronsPerLayer}||{string.Join("||", weights)}";

            nn.BuildFromDNA(DNA);

            nn.HiddenLayers[0].Neurons[0].Dendrites[0].Weight.Should().Be(weights[0]);
            nn.HiddenLayers[0].Neurons[0].Dendrites[1].Weight.Should().Be(weights[1]);
            nn.HiddenLayers[0].Neurons[1].Dendrites[0].Weight.Should().Be(weights[2]);
            nn.HiddenLayers[0].Neurons[1].Dendrites[1].Weight.Should().Be(weights[3]);
            nn.OutputLayer.Neurons[0].Dendrites[0].Weight.Should().Be(weights[4]);
            nn.OutputLayer.Neurons[0].Dendrites[1].Weight.Should().Be(weights[5]);
            nn.OutputLayer.Neurons[1].Dendrites[0].Weight.Should().Be(weights[6]);
            nn.OutputLayer.Neurons[1].Dendrites[1].Weight.Should().Be(weights[7]);
        }
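
For reference, the DNA string this test assembles is just the four size parameters followed by one weight per dendrite, all joined with "||":

            2||2||1||2||1||2||3||4||5||6||7||8

i.e. inputCount, outputCount, depth, hiddenNeuronsPerLayer, then the hidden-layer weights followed by the output-layer weights, in the order the assertions above check them.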
Example #3
        static void Main(string[] args)
        {
            int epochcount = 6000;
            int logPointC  = 4000;

            /* basic XOR test in a few updates
             * O - O \
             *   X    O
             * O - O /
             * */
            NeuralDataSet dataForLearning = CreateNewData(1);

            NeuralNetwork.NeuralNetwork neuralNetwork = new NeuralNetwork.NeuralNetwork(new int[] { 40 }, 2, 1);
            DateTime time = DateTime.Now;

            neuralNetwork.Initialise();
            neuralNetwork.StudySpeed  = 0.1;
            neuralNetwork.studyMoment = 0.1;
            neuralNetwork.RandomiseWeights();
            neuralNetwork.SetDataSet(dataForLearning);

            string log = neuralNetwork.StudyHyperbola(epochcount, logPointC);

            TimeSpan dif = DateTime.Now - time;

            Console.WriteLine("Initialising and studiing for {1} epoches succesful, time spent: {0}", dif.ToString(), epochcount);
            System.IO.StreamWriter writer = new StreamWriter("C:\\Users\\Aleksandr\\Desktop\\log.csv", false);
            writer.WriteLine(log);
            writer.Close();
            TestNN(neuralNetwork);
            Console.ReadKey();
        }
        public void ProcessInput_Should_Process_Input_Pulses_To_Output_Neurons()
        {
            NeuralNetwork.NeuralNetwork nn      = new NeuralNetwork.NeuralNetwork();
            int           inputCount            = 2;
            int           outputCount           = 2;
            int           depth                 = 1;
            int           hiddenNeuronsPerLayer = 2;
            List <double> weights               = new List <double>
            {
                1, -1,
                -1, 1,

                1, -1,
                -1, 1,
            };
            string DNA = $"{inputCount}||{outputCount}||{depth}||{hiddenNeuronsPerLayer}||{string.Join("||", weights)}";

            nn.BuildFromDNA(DNA);
            double A = 1;
            double B = -1;

            nn.ProcessInput(new List <double> {
                A, B
            });

            nn.OutputLayer.Neurons[0].PulseValue.Should().Be(Math.Tanh(Math.Tanh(Math.Tanh(A) - Math.Tanh(B)) - Math.Tanh(Math.Tanh(B) - Math.Tanh(A))));
            nn.OutputLayer.Neurons[1].PulseValue.Should().Be(Math.Tanh(Math.Tanh(Math.Tanh(B) - Math.Tanh(A)) - Math.Tanh(Math.Tanh(A) - Math.Tanh(B))));
        }
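
The expected values follow directly from the weight layout above, assuming (as the assertions imply) that tanh is applied at every layer, including the input layer. A standalone recomputation of the asserted pulses:

            double A = 1, B = -1;
            double inPulse0 = Math.Tanh(A);                   // input neuron 0
            double inPulse1 = Math.Tanh(B);                   // input neuron 1
            double hidden0  = Math.Tanh(inPulse0 - inPulse1); // hidden neuron 0, weights (1, -1)
            double hidden1  = Math.Tanh(inPulse1 - inPulse0); // hidden neuron 1, weights (-1, 1)
            double output0  = Math.Tanh(hidden0 - hidden1);   // output neuron 0, weights (1, -1)
            double output1  = Math.Tanh(hidden1 - hidden0);   // output neuron 1, weights (-1, 1)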
Example #5
        public static NeuralNetwork.NeuralNetwork CreateNeuralNetwork()
        {
            var neuralNetwork = new NeuralNetwork.NeuralNetwork
            {
                Bias               = Settings.Default.Bias,
                MinWeight          = Settings.Default.MinWeight,
                MaxWeight          = Settings.Default.MaxWeight,
                ActivationFunction = ActivationFunctionExtension.GetFunction(Settings.Default.ActivationFunction)
            };

            if (neuralNetwork.ActivationFunction is SigmoidFunction)
            {
                neuralNetwork.ActivationFunction = new SigmoidFunction(Settings.Default.T);
            }

            if (Settings.Default.HiddenLayers.Length > 0)
            {
                var prevSize = Settings.Default.InputLayerSize;

                foreach (var hiddenLayer in Settings.Default.HiddenLayers)
                {
                    neuralNetwork.AddLayer(new Layer(hiddenLayer, prevSize));
                    prevSize = hiddenLayer;
                }

                neuralNetwork.AddLayer(new Layer(Settings.Default.OutputLayerSize, prevSize));
            }
            else
            {
                neuralNetwork.AddLayer(new Layer(Settings.Default.OutputLayerSize, Settings.Default.InputLayerSize));
            }

            return(neuralNetwork);
        }
Example #6
 public SmartGenAlgorithm(GeneticAlgorithm.GeneticAlgorithm geneticAlgorithm,
                          NeuralNetwork.NeuralNetwork neuralNetwork)
 {
     _geneticAlgorithm = geneticAlgorithm;
     _neuralNetwork    = neuralNetwork;
     IsPaused          = false;
 }
Example #7
        private NeuralNetwork.NeuralNetwork CreateNn()
        {
            NeuralNetwork.NeuralNetwork nn;

            nn = new NeuralNetwork.NeuralNetwork();

            InputLayer inputLayer = nn.CreateInputLayer();

            //inHeading = inputLayer.CreateNeuron("heading");
            //inVelocityAngle = inputLayer.CreateNeuron("v_angle");
            //inVelocityLength = inputLayer.CreateNeuron("v_length");
            inNextCheckpointDistance0  = inputLayer.CreateNeuron("c_dist0");
            inNextCheckpointDistance1  = inputLayer.CreateNeuron("c_dist1");
            inNextCheckpointDistance2  = inputLayer.CreateNeuron("c_dist2");
            inNextCheckpointDistance3  = inputLayer.CreateNeuron("c_dist3");
            inNextCheckpointAngle0     = inputLayer.CreateNeuron("c_angle0");
            inNextCheckpointAngle1     = inputLayer.CreateNeuron("c_angle1");
            inNextCheckpointAngle2     = inputLayer.CreateNeuron("c_angle2");
            inNextCheckpointAngle3     = inputLayer.CreateNeuron("c_angle3");
            inNextCheckpointAngle4     = inputLayer.CreateNeuron("c_angle4");
            inNextCheckpointAngle5     = inputLayer.CreateNeuron("c_angle5");
            inNextNextCheckpointAngle0 = inputLayer.CreateNeuron("nnc_angle0");
            inNextNextCheckpointAngle1 = inputLayer.CreateNeuron("nnc_angle1");
            inNextNextCheckpointAngle2 = inputLayer.CreateNeuron("nnc_angle2");
            inNextNextCheckpointAngle3 = inputLayer.CreateNeuron("nnc_angle3");
            inNextNextCheckpointAngle4 = inputLayer.CreateNeuron("nnc_angle4");
            inNextNextCheckpointAngle5 = inputLayer.CreateNeuron("nnc_angle5");
            //inNextCheckpointDistance = inputLayer.CreateNeuron("c_dist");

            OutputLayer outputLayer = nn.CreateOutputLayer();

            outHeading0 = outputLayer.CreateNeuron("o_heading0");
            outHeading1 = outputLayer.CreateNeuron("o_heading1");
            outHeading2 = outputLayer.CreateNeuron("o_heading2");
            outHeading3 = outputLayer.CreateNeuron("o_heading3");
            outHeading4 = outputLayer.CreateNeuron("o_heading4");
            outHeading5 = outputLayer.CreateNeuron("o_heading5");
            outThrust0  = outputLayer.CreateNeuron("o_thrust0");
            outThrust1  = outputLayer.CreateNeuron("o_thrust1");
            outThrust2  = outputLayer.CreateNeuron("o_thrust2");
            outThrust3  = outputLayer.CreateNeuron("o_thrust3");
            outThrust4  = outputLayer.CreateNeuron("o_thrust4");
            outThrust5  = outputLayer.CreateNeuron("o_thrust5");
            outThrust6  = outputLayer.CreateNeuron("o_thrust6");

            for (int i = 0; i < 3; i++)
            {
                HiddenLayer hiddenLayer = nn.CreateHiddenLayer();

                for (int j = 0; j < 32; j++)
                {
                    HiddenNeuron hiddenNeuron = hiddenLayer.CreateNeuron(string.Format("hidden[{0}][{1}]", i, j));
                }
            }

            nn.CreateFullConnections();
            nn.InitWithRandomValues();

            return(nn);
        }
Example #8
 private void NeuralNetwork_Click(object sender, EventArgs e)
 {
     numInputs  = (int)numericUpDown1.Value;
     numHidden  = (int)numericUpDown2.Value;
     numOutputs = (int)numericUpDown3.Value;
     nn         = new NeuralNetwork.NeuralNetwork(numInputs, numHidden, numOutputs);
     TrainingData_Click(null, null);
 }
Example #9
 public ShapeNet()
 {
     Name            = "";
     lNumberExamples = 0;
     SearchFolders   = true;
     ShapeList       = new List <ShapeListEntry>(0);
     NeuralNetwork   = new NeuralNetwork.NeuralNetwork();
 }
Example #10
 void OnTriggerEnter2D(Collider2D collision)
 {
     if (!collision.CompareTag("point"))
     {
         population.SetFitnessOfCurrIndividual(dist);
         currNN = population.Next();
         ResetCarPosition();
     }
 }
Example #11
    public JsonResult UploadVector([FromBody] byte[] vector)
    {
        var nn = new NeuralNetwork.NeuralNetwork(784, 20, 10, 0.1, ActivationFunction.Sigmoid);

        nn.InitNetwork("test.dat");
        int result = nn.TestNetwork(vector);

        return(Json(result));
    }
 void Create()
 {
     network = new NeuralNetwork.NeuralNetwork(input.Length, layers);
     if (genetic)
         learning = new GeneticLearningAlgorithm(network);
     else
         learning = new BackPropagationLearningAlgorithm(network);
     network.randomizeAll();
     network.LearningAlg = learning;
 }
        public void BuildFromDNA_Should_Return_InputLayer_And_OutputLayer_With_Given_Sizes_In_DNA()
        {
            NeuralNetwork.NeuralNetwork nn = new NeuralNetwork.NeuralNetwork();
            int    inputCount  = 5;
            int    outputCount = 10;
            string DNA         = $"{inputCount}||{outputCount}";

            nn.BuildFromDNA(DNA);

            nn.InputLayer.Neurons.Should().HaveCount(inputCount);
            nn.OutputLayer.Neurons.Should().HaveCount(outputCount);
        }
Example #14
 // Use this for initialization
 void Start()
 {
     dist            = 0;
     population      = new Population(10, new int[] { 6, 64, 1 }, 1f, 30);
     raycastPoint    = transform.Find("RaycastPoint2");
     topSensorPos    = transform.Find("RaycastPoint1");
     bottomSensorPos = transform.Find("RaycastPoint3");
     startPosition   = transform.position;
     currCarPos      = lastCarPos = startPosition;
     currNN          = population.Next();
     startPosCave1   = GameControl.Instance.background1.GetComponent <Rigidbody2D>().position;
     startPosCave2   = GameControl.Instance.background2.GetComponent <Rigidbody2D>().position;
 }
        public void BuildFromDNA_Should_Create_HiddenLayers_From_Depth_And_HiddenNeuronsPerLayer_In_DNA()
        {
            NeuralNetwork.NeuralNetwork nn = new NeuralNetwork.NeuralNetwork();
            int    inputCount            = 5;
            int    outputCount           = 8;
            int    depth                 = 5;
            int    hiddenNeuronsPerLayer = 10;
            string DNA = $"{inputCount}||{outputCount}||{depth}||{hiddenNeuronsPerLayer}";

            nn.BuildFromDNA(DNA);

            nn.HiddenLayers.Should().HaveCount(depth);
        }
Example #16
        public void RunXORNeuralNetwork()
        {
            var activationFunctionTanH = new TanHActivationFunction( );
            var learningRate = .05;

            XORNetwork = new NeuralNetwork.NeuralNetwork(new int[] { 2, 3, 1 }, activationFunctionTanH, learningRate);

            var random = new Random(( int )DateTime.Now.Ticks);

            var pass = 0;
            var fail = 0;

            for (double i = 1; i < 100000; i++)
            {
                var j = ( int )(random.NextDouble( ) * 2);
                var k = ( int )(random.NextDouble( ) * 2);

                var inputs          = new double[] { j, k };
                var expectedOutputs = new double[] { j ^ k };

                var actualOutputs = XORNetwork.RunNetwork(inputs);
                XORNetwork.Train(inputs, expectedOutputs);

                if (((j ^ k) == 1 && actualOutputs[0] > .55) || ((j ^ k) == 0 && actualOutputs[0] < .45))
                {
                    pass++;
                }
                else
                {
                    fail++;
                }

                //Console.WriteLine( $"{j} | {k} = {j | k}, {actualOutputs[ 0 ]}" );

                if (i % 100 == 0)
                {
                    Console.WriteLine($"Pass: {pass}, Fail: {fail}, {( double )pass*100 / ( pass + fail )}% ; ");
                    Console.WriteLine($"{j} ^ {k} = {j ^ k}, {actualOutputs[ 0 ]}");

                    if (fail == 0)
                    {
                        Console.WriteLine($"Learning complete, learned in {i}");
                        return;
                    }

                    pass = 0;
                    fail = 0;
                }
            }
        }
 void Create()
 {
     network = new NeuralNetwork.NeuralNetwork(input.Length, layers);
     if (genetic)
     {
         learning = new GeneticLearningAlgorithm(network);
     }
     else
     {
         learning = new BackPropagationLearningAlgorithm(network);
     }
     network.randomizeAll();
     network.LearningAlg = learning;
 }
        public DQN(MathOperationManager mathManager, DQNNeuralNetworkConfiguration configuration)
        {
            if (configuration.LossFunction != LossFunctionType.BellmanError)
            {
                throw new ArgumentException("DQN only supports Bellman error. Please check the configuration passed in.");
            }

            base.CreateNeuralNetwork(mathManager, configuration);

            this.DQNConfiguration = configuration;
            this.gradientStepCount = 0;

            var nnStore = this.CreateNeuralNetworkStore();
            this.QHat = new NeuralNetwork(this.mathManager, nnStore);
        }
Example #19
    // Use this for initialization
    void Start()
    {
        population = new Population(10, new int[] { 5, 200, 2 }, 1f);

        raycastPoint    = transform.Find("RaycastPoint");
        environments    = GameObject.Find("Environment");
        driveController = GetComponent <AI_DriveController>();

        startPosition = transform.position;
        startRotation = transform.rotation;

        currCarPos = lastCarPos = startPosition;

        currNN = population.Next();
    }
Example #20
 private static void TestNN(NeuralNetwork.NeuralNetwork neuralNetwork)
 {
     neuralNetwork.SetInputValues(new double[] { 0, 0 });
     neuralNetwork.PoolInputsToOutputHyperbola();
     Console.WriteLine("input 0,0. Result:{0:0.000}", neuralNetwork.Output[0]);
     neuralNetwork.SetInputValues(new double[] { 0, 1 });
     neuralNetwork.PoolInputsToOutputHyperbola();
     Console.WriteLine("input 0,1. Result:{0:0.000}", neuralNetwork.Output[0]);
     neuralNetwork.SetInputValues(new double[] { 1, 0 });
     neuralNetwork.PoolInputsToOutputHyperbola();
     Console.WriteLine("input 1,0. Result:{0:0.000}", neuralNetwork.Output[0]);
     neuralNetwork.SetInputValues(new double[] { 1, 1 });
     neuralNetwork.PoolInputsToOutputHyperbola();
     Console.WriteLine("input 1,1. Result:{0:0.000}", neuralNetwork.Output[0]);
 }
Example #21
        static void Main(string[] args)
        {
            var rand = new Random();

            NeuralNetwork.NeuralNetwork nn = new NeuralNetwork.NeuralNetwork(10, new[] { 2, 4, 4, 1 });

            for (int cnt = 0; cnt < 1000; cnt++)
            {
                var trainingInputs  = new List <Data>();
                var trainingOutputs = new List <Data>();
                for (int i = 0; i < 10; i++)
                {
                    var a = rand.NextDouble();
                    var b = rand.NextDouble();
                    trainingInputs.Add(new Data {
                        a, b
                    });
                    trainingOutputs.Add(new Data {
                        a * b
                    });
                }
                nn.Train(trainingInputs, trainingOutputs);

                var testInputs  = new List <Data>();
                var testOutputs = new List <Data>();
                for (int i = 0; i < 1000; i++)
                {
                    var a = rand.NextDouble();
                    var b = rand.NextDouble();
                    testInputs.Add(new Data {
                        a, b
                    });
                    testOutputs.Add(new Data {
                        a * b
                    });
                }

                var accuracy = nn.GetAccuracy(testInputs, testOutputs);
                Console.WriteLine(accuracy);
            }

            var res = nn.Run(new Data {
                0.5, 0.5
            });

            Console.Read();
        }
Example #22
        public PilotC(NeuralNetwork.NeuralNetwork nn)
        {
            if (nn == null)
            {
                this.nn = CreateNn();
            }
            else
            {
                this.nn = nn;


                inNextCheckpointDistance0 = (InputNeuron)nn.InputLayer.Neurons[0];
                inNextCheckpointDistance1 = (InputNeuron)nn.InputLayer.Neurons[1];
                inNextCheckpointDistance2 = (InputNeuron)nn.InputLayer.Neurons[2];
                inNextCheckpointDistance3 = (InputNeuron)nn.InputLayer.Neurons[3];

                inNextCheckpointAngle0 = (InputNeuron)nn.InputLayer.Neurons[4];
                inNextCheckpointAngle1 = (InputNeuron)nn.InputLayer.Neurons[5];
                inNextCheckpointAngle2 = (InputNeuron)nn.InputLayer.Neurons[6];
                inNextCheckpointAngle3 = (InputNeuron)nn.InputLayer.Neurons[7];
                inNextCheckpointAngle4 = (InputNeuron)nn.InputLayer.Neurons[8];
                inNextCheckpointAngle5 = (InputNeuron)nn.InputLayer.Neurons[9];

                inNextNextCheckpointAngle0 = (InputNeuron)nn.InputLayer.Neurons[10];
                inNextNextCheckpointAngle1 = (InputNeuron)nn.InputLayer.Neurons[11];
                inNextNextCheckpointAngle2 = (InputNeuron)nn.InputLayer.Neurons[12];
                inNextNextCheckpointAngle3 = (InputNeuron)nn.InputLayer.Neurons[13];
                inNextNextCheckpointAngle4 = (InputNeuron)nn.InputLayer.Neurons[14];
                inNextNextCheckpointAngle5 = (InputNeuron)nn.InputLayer.Neurons[15];
                //inNextCheckpointDistance = (InputNeuron)nn.InputLayer.Neurons[1];

                outHeading0 = (OutputNeuron)nn.OutputLayer.Neurons[0];
                outHeading1 = (OutputNeuron)nn.OutputLayer.Neurons[1];
                outHeading2 = (OutputNeuron)nn.OutputLayer.Neurons[2];
                outHeading3 = (OutputNeuron)nn.OutputLayer.Neurons[3];
                outHeading4 = (OutputNeuron)nn.OutputLayer.Neurons[4];
                outHeading5 = (OutputNeuron)nn.OutputLayer.Neurons[5];

                outThrust0 = (OutputNeuron)nn.OutputLayer.Neurons[6];
                outThrust1 = (OutputNeuron)nn.OutputLayer.Neurons[7];
                outThrust2 = (OutputNeuron)nn.OutputLayer.Neurons[8];
                outThrust3 = (OutputNeuron)nn.OutputLayer.Neurons[9];
                outThrust4 = (OutputNeuron)nn.OutputLayer.Neurons[10];
                outThrust5 = (OutputNeuron)nn.OutputLayer.Neurons[11];
                outThrust6 = (OutputNeuron)nn.OutputLayer.Neurons[12];
            }
        }
 /// <summary>
 /// Creates a new neural network.
 /// </summary>
 /// <param name="filename">Name of the file to save/load</param>
 /// <param name="inputs">Matrix of min-max input values (used to internally convert inputs between 0 and 1)</param>
 /// <param name="outputs">Number of outputs</param>
 public AI_DeepLearningNetwork(string filename, float[,] inputs, float[,] outputs)
 {
     this.Filename     = filename;
     this.InputLimits  = inputs;
     this.OutputLimits = outputs;
     int[] number_neurons = new int[3];
     number_neurons[0] = inputs.Length;
     number_neurons[1] = Mathf.Max(inputs.Length, outputs.Length);
     number_neurons[2] = outputs.Length;
     try { this.Network = NeuralNetwork.NeuralNetwork.load(filename); } catch { this.Network = null; }
     if ((this.Network == null) || (this.Network.N_Inputs != inputs.Length) || (this.Network.N_Outputs != outputs.Length))
     {
         Debug.Log("[AI] Creating network \"" + filename + "\"...");
         this.Network = new NeuralNetwork.NeuralNetwork(inputs.Length, number_neurons);
         this.Reset();
     }
 }
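
The constructor above only documents that InputLimits is used to rescale raw inputs into the 0..1 range; the conversion itself is not shown. A minimal sketch of such a min-max rescaling, assuming UnityEngine's Mathf is in scope (the helper name and the clamping behaviour are assumptions, not part of the original class):

 // Hypothetical helper: maps a raw value into [0, 1] given its min-max limits.
 static float NormalizeInput(float value, float min, float max)
 {
     if (Mathf.Approximately(min, max))
     {
         return 0f; // degenerate range; arbitrary fallback
     }
     return Mathf.Clamp01((value - min) / (max - min));
 }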
Example #24
 public Form1()
 {
     InitializeComponent();
     PerceptronInputs = new List <Input>();
     myDrawing        = new MyDrawing();
     // Transfer function 0 == binary, 1 == sigmoid
     _neuralNetwork = new NeuralNetwork.NeuralNetwork(2, 2, 1);
     _perceptron    = new Perceptron(ParseData("AND.txt"));
     //_perceptron.PerceptronNeuron.TrainUntil(_perceptron.TrainingSet, 0.2);
     myDrawing.DrawPerceptron(_perceptron, perceptronPictureBox);
     InitControls();
     refreshNetwork();
     errorChart.Series.Clear();
     //Hopfield
     _hopfield = new Hopfield(5, 7, 50);
     _hopImage = new List <double>();
 }
Example #25
        public NeuralForm(NeuralNetwork.NeuralNetwork n)
        {
            var bigger = 0;

            foreach (var layer in n.Layers)
            {
                if (layer.NeuronCount > bigger)
                {
                    bigger = layer.NeuronCount;
                }
            }


            img = new Bitmap(size * 5 * n.Layers.Count * 3, (size * 3) * (bigger) + size);

            InitializeComponent();
        }
        private static void DNN(IEnumerable<BatchInputWrapper> trainData, IEnumerable<BatchInputWrapper> cvData)
        {
            using (MathOperationManager mathManager = new MathOperationManager(MathType.GPU))
            {
                var hiddenLayers = new List<int>();
                hiddenLayers.Add(100);
                hiddenLayers.Add(100);
                NeuralNetworkConfiguration config = new NeuralNetworkConfiguration(784, hiddenLayers, 10);
                config.Epochs = 100;
                config.StepSize = (float)1.5;
                //config.Activation = NeuronActivationType.ReLu;

                using (NeuralNetwork dnn = new NeuralNetwork(mathManager, config))
                {
                    dnn.MiniBatchStochasticGradientDescent(trainData, cvData);
                }
            }
        }
Example #27
        static void Main(string[] args)
        {
            var nn = new NeuralNetwork.NeuralNetwork(Equations.Sigmoid, 1, 2, 4, 8, 16, 32, 1);

            //set weights
            foreach (var layer in nn.layers)
            {
                foreach (var neuron in layer.Neurons)
                {
                    neuron.Bias = 0;
                    for (int i = 0; i < neuron.Weights.Length; i++)
                    {
                        neuron.Weights[i] = 1;
                    }
                }
            }

            Console.WriteLine(nn.Compute(new double[] { 1 })[0]);
        }
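
With every bias set to 0 and every weight set to 1, each neuron simply applies the activation to the sum of the previous layer's outputs. Assuming Equations.Sigmoid is the standard logistic function, those sums grow from layer to layer and saturate, so the printed value should be very close to 1.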
        public void BuildFromDNA_Should_Set_All_Neurons_Id_According_To_Its_Place_In_Layers()
        {
            NeuralNetwork.NeuralNetwork nn = new NeuralNetwork.NeuralNetwork();
            int    inputCount  = 5;
            int    outputCount = 10;
            string DNA         = $"{inputCount}||{outputCount}";

            nn.BuildFromDNA(DNA);

            nn.InputLayer.Id.Should().NotBe(nn.OutputLayer.Id);
            foreach (Neuron n in nn.InputLayer.Neurons)
            {
                n.Id.Item1.Should().Be(nn.InputLayer.Id);
                nn.InputLayer.Neurons.Should().ContainSingle(neuron => neuron.Id.Item2 == n.Id.Item2);
            }
            foreach (Neuron n in nn.OutputLayer.Neurons)
            {
                n.Id.Item1.Should().Be(nn.OutputLayer.Id);
                nn.OutputLayer.Neurons.Should().ContainSingle(neuron => neuron.Id.Item2 == n.Id.Item2);
            }
        }
        public void BuildFromDNA_Should_Create_Next_Layers_Dendrites_With_Correct_SourceIds()
        {
            NeuralNetwork.NeuralNetwork nn = new NeuralNetwork.NeuralNetwork();
            int    inputCount            = 2;
            int    outputCount           = 2;
            int    depth                 = 1;
            int    hiddenNeuronsPerLayer = 2;
            string DNA = $"{inputCount}||{outputCount}||{depth}||{hiddenNeuronsPerLayer}";

            nn.BuildFromDNA(DNA);

            nn.HiddenLayers[0].Neurons[0].Dendrites.Should().HaveCount(2);
            nn.HiddenLayers[0].Neurons[0].Dendrites[0].SourceNeuronId.Item1.Should().Be(0);
            nn.HiddenLayers[0].Neurons[0].Dendrites[0].SourceNeuronId.Item2.Should().Be(0);

            nn.HiddenLayers[0].Neurons[0].Dendrites[1].SourceNeuronId.Item1.Should().Be(0);
            nn.HiddenLayers[0].Neurons[0].Dendrites[1].SourceNeuronId.Item2.Should().Be(1);


            nn.HiddenLayers[0].Neurons[1].Dendrites.Should().HaveCount(2);
            nn.HiddenLayers[0].Neurons[1].Dendrites[0].SourceNeuronId.Item1.Should().Be(0);
            nn.HiddenLayers[0].Neurons[1].Dendrites[0].SourceNeuronId.Item2.Should().Be(0);

            nn.HiddenLayers[0].Neurons[1].Dendrites[1].SourceNeuronId.Item1.Should().Be(0);
            nn.HiddenLayers[0].Neurons[1].Dendrites[1].SourceNeuronId.Item2.Should().Be(1);

            nn.OutputLayer.Neurons[0].Dendrites.Should().HaveCount(2);
            nn.OutputLayer.Neurons[0].Dendrites[0].SourceNeuronId.Item1.Should().Be(1);
            nn.OutputLayer.Neurons[0].Dendrites[0].SourceNeuronId.Item2.Should().Be(0);

            nn.OutputLayer.Neurons[0].Dendrites[1].SourceNeuronId.Item1.Should().Be(1);
            nn.OutputLayer.Neurons[0].Dendrites[1].SourceNeuronId.Item2.Should().Be(1);

            nn.OutputLayer.Neurons[1].Dendrites.Should().HaveCount(2);
            nn.OutputLayer.Neurons[1].Dendrites[0].SourceNeuronId.Item1.Should().Be(1);
            nn.OutputLayer.Neurons[1].Dendrites[0].SourceNeuronId.Item2.Should().Be(0);

            nn.OutputLayer.Neurons[1].Dendrites[1].SourceNeuronId.Item1.Should().Be(1);
            nn.OutputLayer.Neurons[1].Dendrites[1].SourceNeuronId.Item2.Should().Be(1);
        }
 void Start()
 {
     NeuralNetwork.NeuralNetwork nn = new NeuralNetwork.NeuralNetwork(1, new int[] { 5, 5, 2 });
     int numberOfData = 100;
     float[][] input = new float[numberOfData][];
     float[][] output = new float[numberOfData][];
     for (int a = 0; a < numberOfData; ++a)
     {
         float progress = 1f / (numberOfData - 1) * a;
         input[a] = new float[] { progress };
         output[a] = new float[] { progress, 1f - 2 * Mathf.Abs(0.5f - progress) };
     }
     nn.randomizeAll();
     nn.LearningAlg = new NeuralNetwork.GeneticLearningAlgorithm(nn);
     nn.LearningAlg.Learn(input, output);
     for (int a = 0; a <= 10; ++a)
     {
         float progress = 1f / (10) * a;
         float[] result = nn.Output(new float[] { progress });
         Debug.Log("Compute(" + a + "):\t\t" + result[0] + ",\t\t" + result[1] + "\t\t| err: " + Mathf.Abs(progress - result[0]) + ",\t\t" + Mathf.Abs((1f - 2 * Mathf.Abs(0.5f - progress) - result[1])));
     }
 }
Example #31
        private void FillWeightsWithGenom(NeuralNetwork.NeuralNetwork nn, Genom genom)
        {
            int genIndex = 0;

            foreach (var inputNeuron in nn.InputLayer.Neurons.Cast <InputNeuron>())
            {
                inputNeuron.Bias = genom.Gens[genIndex];
                genIndex++;
            }

            foreach (var hiddenLayer in nn.HiddenLayers)
            {
                foreach (var hiddenNeuron in hiddenLayer.Neurons.Cast <HiddenNeuron>())
                {
                    hiddenNeuron.Bias = genom.Gens[genIndex];
                    genIndex++;

                    foreach (var axon in hiddenNeuron.Inputs)
                    {
                        axon.Weight = genom.Gens[genIndex];
                        genIndex++;
                    }
                }
            }

            foreach (var outputNeuron in nn.OutputLayer.Neurons.Cast <OutputNeuron>())
            {
                outputNeuron.Bias = genom.Gens[genIndex];
                genIndex++;

                foreach (var axon in outputNeuron.Inputs)
                {
                    axon.Weight = genom.Gens[genIndex];
                    genIndex++;
                }
            }
        }
Example #32
        private void FillGenomWithWeights(Genom genom, NeuralNetwork.NeuralNetwork nn)
        {
            int genIndex = 0;

            foreach (var inputNeuron in nn.InputLayer.Neurons.Cast <InputNeuron>())
            {
                genom.Gens[genIndex] = inputNeuron.Bias;
                genIndex++;
            }

            foreach (var hiddenLayer in nn.HiddenLayers)
            {
                foreach (var hiddenNeuron in hiddenLayer.Neurons.Cast <HiddenNeuron>())
                {
                    genom.Gens[genIndex] = hiddenNeuron.Bias;
                    genIndex++;

                    foreach (var axon in hiddenNeuron.Inputs)
                    {
                        genom.Gens[genIndex] = axon.Weight;
                        genIndex++;
                    }
                }
            }

            foreach (var outputNeuron in nn.OutputLayer.Neurons.Cast <OutputNeuron>())
            {
                genom.Gens[genIndex] = outputNeuron.Bias;
                genIndex++;

                foreach (var axon in outputNeuron.Inputs)
                {
                    genom.Gens[genIndex] = axon.Weight;
                    genIndex++;
                }
            }
        }
        public void BuildFromDNA_Should_Set_Input_Dendrites_Weights_To_One()
        {
            NeuralNetwork.NeuralNetwork nn      = new NeuralNetwork.NeuralNetwork();
            int           inputCount            = 2;
            int           outputCount           = 2;
            int           depth                 = 1;
            int           hiddenNeuronsPerLayer = 2;
            List <double> weights               = new List <double>
            {
                1, 2,
                3, 4,

                5, 6,
                7, 8,
            };
            string DNA = $"{inputCount}||{outputCount}||{depth}||{hiddenNeuronsPerLayer}||{string.Join("||", weights)}";

            nn.BuildFromDNA(DNA);

            foreach (Dendrite d in nn.InputLayer.Neurons.SelectMany(n => n.Dendrites))
            {
                d.Weight.Should().Be(1);
            }
        }
        public void BuildFromDNA_Should_Create_InputLayer_Neurons_With_One_Dendrite_Each()
        {
            NeuralNetwork.NeuralNetwork nn      = new NeuralNetwork.NeuralNetwork();
            int           inputCount            = 2;
            int           outputCount           = 2;
            int           depth                 = 1;
            int           hiddenNeuronsPerLayer = 2;
            List <double> weights               = new List <double>
            {
                1, 2,
                3, 4,

                5, 6,
                7, 8,
            };
            string DNA = $"{inputCount}||{outputCount}||{depth}||{hiddenNeuronsPerLayer}||{string.Join("||", weights)}";

            nn.BuildFromDNA(DNA);

            foreach (Neuron n in nn.InputLayer.Neurons)
            {
                n.Dendrites.Should().HaveCount(1);
            }
        }
 public void LoadNetwork()
 {
     try
     {
         neuralNetwork = NeuralNetwork.NeuralNetwork.load(networkDataFile);
     }
     catch (System.Exception)
     {
         Debug.LogWarning("Cannot open file: " + networkDataFile);
     }
 }
 void Awake()
 {
     neuralNetwork = new NeuralNetwork.NeuralNetwork(inputSize, new int[] { 42, 24, 6 });
     LoadLearnData();
     LoadNetwork();
 }
		/// <summary>
		/// Learning algorithm constructor
		/// </summary>
		/// <param name="n">The neural network to train</param>
		public LearningAlgorithm(NeuralNetwork n) 
		{
			nn = n;
		}
			/// <summary>
			/// Build a new Genetic NeuralNetwork from the Neural Network given as parameter
			/// </summary>
			/// <param name="n">The neural network model</param>
			public GeneticNeuralNetwork(NeuralNetwork n) 
			{
				nn = n;
				int size = 0;
				for(int i=0; i<nn.N_Layers; i++)
					size += (nn[i].N_Inputs+1) * nn[i].N_Neurons;
				genes = new float[size];
			}
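
The (nn[i].N_Inputs + 1) term presumably reserves one gene per neuron for its bias in addition to one gene per input weight.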
		/// <summary>
		/// Build a new BackPropagation learning algorithm instance
		/// with alpha = 0.5 and gamma = 0.3
		/// </summary>
		/// <param name="nn">The neural network to train</param>
		public BackPropagationLearningAlgorithm(NeuralNetwork nn) : base(nn) 
		{
		}
Example #40
        static void XORTest()
        {
            var data = new List<List<double>>
            {
                new List<double> { 0, 0 },
                new List<double> { 0, 1 },
                new List<double> { 1, 0 },
                new List<double> { 1, 1 }
            };

            int[] labels =
            {
                0,
                1,
                1,
                0
            };

            var tests = new List<List<double>>
            {
                new List<double> { 0, 0 },
                new List<double> { 0, 1 },
                new List<double> { 1, 0 },
                new List<double> { 1, 1 }
            };

            var neuralNetwork = new NeuralNetwork<int>(data, labels);

            foreach (var test in tests)
            {
                Console.WriteLine(string.Join(" ", test) + " is " + neuralNetwork.Predict(test));
            }

            Console.Read();
        }
		/// <summary>
		/// GeneticLearningAlgorithm constructor
		/// </summary>
		/// <param name="nn">The neural network to train</param>
		public GeneticLearningAlgorithm(NeuralNetwork nn) : base(nn) 
		{
			population = new ArrayList();
			for(int i=0; i<POPULATION_SIZE; i++)
				population.Add(Muted_NeuralNetwork);
		}
Example #42
        static void ColorTest()
        {
            var data = new List<List<double>>
            {
                new List<double> { 255, 255, 255 },
                new List<double> { 255, 245, 245 },
                new List<double> { 245, 242, 250 },

                new List<double> { 255, 0, 0 },
                new List<double> { 240, 4, 3 },
                new List<double> { 250, 10, 11 },

                new List<double> { 0, 255, 0 },
                new List<double> { 12, 243, 10 },
                new List<double> { 4, 250, 3 },

                new List<double> { 0, 0, 255 },
                new List<double> { 12, 10, 235 },
                new List<double> { 8, 11, 240 },

                new List<double> { 255, 255, 0 },
                new List<double> { 254, 245, 10 },
                new List<double> { 248, 249, 7 },

                new List<double> { 255, 0, 255 },
                new List<double> { 235, 10, 240 },
                new List<double> { 241, 8, 233 },
                new List<double> { 200, 10, 240 },
                new List<double> { 160, 4, 200 },
                new List<double> { 153, 7, 160 },

                new List<double> { 0, 255, 255 },
                new List<double> { 15, 240, 241 },
                new List<double> { 7, 231, 226 },

                new List<double> { 103, 61, 35 },
                new List<double> { 145, 87, 49 },
                new List<double> { 101, 58, 31 },

                new List<double> { 123, 120, 121 },
                new List<double> { 131, 131, 132 },
                new List<double> { 120, 120, 120 },

                new List<double> { 11, 6, 13 },
                new List<double> { 3, 4, 2 },
                new List<double> { 0, 0, 0 }
            };

            string[] labels =
            {
                "White",
                "White",
                "White",

                "Red",
                "Red",
                "Red",

                "Green",
                "Green",
                "Green",

                "Blue",
                "Blue",
                "Blue",

                "Yellow",
                "Yellow",
                "Yellow",

                "Purple",
                "Purple",
                "Purple",
                "Purple",
                "Purple",
                "Purple",

                "Cyan",
                "Cyan",
                "Cyan",

                "Brown",
                "Brown",
                "Brown",

                "Gray",
                "Gray",
                "Gray",

                "Black",
                "Black",
                "Black"
            };

            var tests = new List<List<double>>
            {
                new List<double> { 250, 250, 250 },
                new List<double> { 235, 250, 246 },
                new List<double> { 255, 26, 26 },
                new List<double> { 235, 15, 92 },
                new List<double> { 68, 184, 31 },
                new List<double> { 93, 255, 94 },
                new List<double> { 35, 64, 249 },
                new List<double> { 15, 29, 202 },
                new List<double> { 249, 250, 3 },
                new List<double> { 255, 251, 40 },
                new List<double> { 245, 0, 245 },
                new List<double> { 250, 20, 250 },
                new List<double> { 0, 255, 255 },
                new List<double> { 45, 215, 223 },
                new List<double> { 123, 111, 130 },
                new List<double> { 121, 121, 121 },
                new List<double> { 8, 8, 8 },
                new List<double> { 25, 15, 38 }
            };

            var neuralNetwork = new NeuralNetwork<string>(data, labels, 500, 1, 4);

            foreach (var test in tests)
            {
                Console.WriteLine(string.Join(" ", test) + " is " + neuralNetwork.Predict(test));
            }

            string input;

            while ((input = Console.ReadLine()) != "")
            {
                var color = input.Split(' ');
                double red = double.Parse(color[0]);
                double green = double.Parse(color[1]);
                double blue = double.Parse(color[2]);
                var test = new List<double> { red, green, blue };

                Console.WriteLine(string.Join(" ", test) + " is " + neuralNetwork.Predict(test));
            }
        }