Code example #1
 /// <summary>
 /// Loads the neural network from file.
 /// (Recreates and retrains it if the settings differ. Releases the training set.)
 /// </summary>
 public MyNeuralNetwork()
 {
     StopLearning();
     m_curves = new Dictionary<string, ClassicCurve>();
     m_network = FileOptions.LoadNeuralNetwork();
     //m_curves = FileOptions.LoadTrainingSet(true);
 }
Code example #2
File: AIConfig.cs Project: jimc1664/Tankenstein
    NeuralNetwork.Synapsis[] extendArray(NeuralNetwork.Network nn, int i, int nc)
    {
        var old = nn.Synapsis[i];

        nn.Synapsis[i] = new NeuralNetwork.Synapsis[nc];
        old.CopyTo(nn.Synapsis[i], 0);
        return(nn.Synapsis[i]);
    }
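Note: the grow-and-copy pattern above can also be written with System.Array.Resize, which allocates the larger array, copies the old elements, and leaves the tail at default values. A minimal sketch, assuming Synapsis is the same writable jagged array used in extendArray (the alternative method name is hypothetical):

    NeuralNetwork.Synapsis[] extendArrayViaResize(NeuralNetwork.Network nn, int i, int nc)
    {
        // Array.Resize copies the old elements into a new array of length nc
        // and stores it back into nn.Synapsis[i].
        System.Array.Resize(ref nn.Synapsis[i], nc);
        return nn.Synapsis[i];
    }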
Code example #3
File: Main.cs Project: mihaistancu/NeuralNetworks
        private void UpdateNetwork()
        {
            var inputLayer = grid.Rows * grid.Columns;
            var hiddenLayer = Int32.Parse(hiddenLayerTextBox.Text);
            const int outputLayer = 10;

            network = new Network(new[] {inputLayer, hiddenLayer, outputLayer});
        }
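Int32.Parse above throws if the text box does not contain a valid integer. A small defensive sketch (a hypothetical helper, not part of the project) that falls back to a default layer size instead:

        // Hypothetical helper: parse a layer size from user input,
        // falling back to a default when the text is not a positive integer.
        private static int ParseLayerSize(string text, int fallback)
        {
            return int.TryParse(text, out var size) && size > 0 ? size : fallback;
        }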
Code example #4
    public void ExportGeneration()
    {
        NeuralNetwork.Network[] generationPopulation = new NeuralNetwork.Network[networkPopulation.Count];

        for (int i = 0; i < networkPopulation.Count; i++)
        {
            generationPopulation[i] = networkPopulation[i].network;
        }

        NetworkExporter.ExportGeneration(generationPopulation, generation);
    }
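The copy loop above is a simple projection; an equivalent LINQ sketch, assuming the same networkPopulation and generation fields and a using System.Linq directive:

    public void ExportGenerationLinq()
    {
        // Same behaviour as ExportGeneration, written as a projection.
        NetworkExporter.ExportGeneration(
            networkPopulation.Select(individual => individual.network).ToArray(),
            generation);
    }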
Code example #5
File: Trainer.cs Project: camander321/NeuralNetwork
    // Use this for initialization
    void Start()
    {
        foreach (Transform child in transform)
        {
            arms.Add(child);
        }
        ImageInput.width  = ImageWidth;
        ImageInput.height = ImageHeight;

        int numInputs  = ImageWidth * ImageHeight * 4; //4 values per pixel (presumably RGBA)
        int numOutputs = 3 * arms.Count;

        network = new NeuralNetwork.Network(new int[] { numInputs, 12, numOutputs });
    }
Code example #6
    // Update is called once per frame
    protected void Update()
    {
        if (network == null)
        {
            network = GetComponentInChildren <NeuralNetwork.Network>();
            return;
        }

        network.Compute();
        onSimulationStep.Invoke();
        if (!gameOver)
        {
            step++;
        }
    }
Code example #7
    NeuralNetwork.Network CreateNewNetwork()
    {
        //Create a new network instance from the prefab
        NeuralNetwork.Network newNetwork = Instantiate(neuralNetworkPrefab, transform);
        newNetwork.inputLayer = input;
        newNetwork.onOutput.AddListener(output.SetFly);
        newNetwork.InitializeConnections();

        //Add the new network to the list of networks
        NetworkIndividual newIndividual = new NetworkIndividual();

        newIndividual.network = newNetwork;
        networkPopulation.Add(newIndividual);

        return(newNetwork);
    }
Code example #8
File: Program.cs Project: mihaistancu/NeuralNetworks
        static void Main(string[] args)
        {
            var mnistDataParser = new MnistDataParser();
            var trainingRecords = mnistDataParser.Parse("d:\\train-images", "d:\\train-labels");
            var testRecords = mnistDataParser.Parse("d:\\test-images", "d:\\test-labels");

            var neuralNetwork = new Network(new[] {784, 30, 10});
            var benchmark = new Benchmark(neuralNetwork);

            for (double accuracy = 0; accuracy < .9;)
            {
                accuracy = benchmark.AccuracyFor(testRecords);
                Console.WriteLine("Success rate: {0} %", accuracy*100);

                neuralNetwork.Train(trainingRecords, .01, 10);
            }
        }
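The loop above runs until the benchmark reports at least 90 % accuracy and never terminates otherwise. A bounded variant using only the Network and Benchmark calls already shown, with an assumed cap of 100 iterations:

            for (int epoch = 0; epoch < 100; epoch++)
            {
                var accuracy = benchmark.AccuracyFor(testRecords);
                Console.WriteLine("Success rate: {0} %", accuracy * 100);
                if (accuracy >= 0.9) break;

                neuralNetwork.Train(trainingRecords, .01, 10);
            }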
Code example #9
    public IEnumerator RunSimulation(NeuralNetwork.Network neuralNetwork, float deltaTime)
    {
        step     = 0;
        gameOver = false;
        while (!gameOver)
        {
            neuralNetwork.Compute();

            onSimulationStep.Invoke();
            step++;

            if (deltaTime > 0)
            {
                yield return(new WaitForSeconds(deltaTime));
            }
        }

        yield return(null);
    }
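A minimal caller sketch for the coroutine above (the SimulationDriver class, the Simulation type, and the field names are hypothetical): it is started from a MonoBehaviour, and a positive deltaTime pauses that long between simulation steps.

    using UnityEngine;

    // Hypothetical caller, not part of the original project.
    public class SimulationDriver : MonoBehaviour
    {
        public Simulation simulation;          // assumed component exposing RunSimulation
        public NeuralNetwork.Network network;  // assumed to be initialized elsewhere

        void Start()
        {
            // Run one episode with a 20 ms pause between steps.
            StartCoroutine(simulation.RunSimulation(network, 0.02f));
        }
    }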
Code example #10
File: AIConfig.cs Project: jimc1664/Tankenstein
    public void init(NeuralNetwork.Network nn, int scannerRC)
    {
        ScanRc     = scannerRC;
        In_Neurons = In_Synapsis = 0;

        int sn0c = nn.Synapsis[StartLayer - 1].Length, sn0i = sn0c;

        forEach_Input((int startOff, int cnt) => { In_Neurons += cnt; });

        sn0c += (In_Synapsis = In_Neurons * Layers[0].Cnt);
        var synA = extendArray(nn, StartLayer - 1, sn0c);

        forEach_Input((int startOff, int cnt) => {
            for (int i = cnt; i-- > 0;)
            {
                for (int j = Layers[0].Cnt; j-- > 0;)
                {
                    synA[sn0i++] = nn.setSynapsis(0, startOff + i, StartLayer, Layers[0].Ni1 + j);
                }
            }
        });



        int snOc = nn.Synapsis[nn.Synapsis.Length - 1].Length, snOi = snOc;

        Out_Neurons = Out_Synapsis = 0;
        forEach_Output((int startOff, int cnt) => { Out_Neurons += cnt; });

        snOc += (Out_Synapsis = Out_Neurons * Layers[Layers.Count - 1].Cnt);
        var synO = extendArray(nn, nn.Synapsis.Length - 1, snOc);

        forEach_Output((int startOff, int cnt) => {
            for (int i = cnt; i-- > 0;)
            {
                for (int j = Layers[Layers.Count - 1].Cnt; j-- > 0;)
                {
                    synO[snOi++] = nn.setSynapsis(StartLayer + Layers.Count - 1, Layers[Layers.Count - 1].Ni1 + j, nn.Synapsis.Length, startOff + i);
                }
            }
        });
    }
Code example #11
        /// <summary>
        /// Creates a copy of the neural network and its curves in a new instance
        /// </summary>
        /// <param name="network"></param>
        public MyNeuralNetwork(MyNeuralNetwork network)
        {            
            m_network = new Network(network.m_network);
            m_curves = new Dictionary<string, ClassicCurve>();
            
            //necessary for a proper deep copy, otherwise problems with NnIndex (it stayed modified when Form_modify was canceled)
            foreach (ClassicCurve curve in network.Curves.Values)
                m_curves.Add(curve.ID, new ClassicCurve(curve));

            if (network.m_curveSets != null)
            {
                m_curveSets = new Dictionary<string, MyCurve>();
                foreach (MyCurve curve in network.m_curveSets.Values)
                    m_curveSets.Add(curve.ID, new MyCurve(curve));
                

                //m_curveSets = new List<MyCurve>(network.m_curveSets);
            }
        }
Code example #12
 public NetworkVisualizer(Network network, double[,,] input)
 {
     this.network = network;
     this.input   = input;
 }
Code example #13
 public Benchmark(Network network)
 {
     this.network = network;
 }
Code example #14
    public void init(AIConfig[] aic)
    {
        if (aic == null || aic.Length == 0)   //old path: no AIConfig provided
        {
            int[] layers = { Input.Length, 8, 4 };
            try {
                var r = Random.value; //lazy..
                NN = new NeuralNetwork.Network(layers, true, Random.seed);
                if (loadWeight)
                {
                    NN.Weights = LoadWeights();
                }
            } catch (System.Exception e) {
                Debug.LogError("NN err: " + e.Message);
            }
        }
        else
        {
            int mxLayer = -1;
            foreach (var c in aic)
            {
                mxLayer = Mathf.Max(mxLayer, c.Layers.Count + c.StartLayer);
            }

            int[] layers = new int[mxLayer + 1];

            layers[0]       = Input.Length;
            layers[mxLayer] = 4;


            foreach (var c in aic)
            {
                if (c.enabled)
                {
                    for (int i = c.Layers.Count; i-- > 0;)
                    {
                        c.Layers[i].Ni1           = layers[c.StartLayer + i];
                        layers[c.StartLayer + i] += c.Layers[i].Cnt;
                    }
                }
            }

            Debug.Log("lc  " + layers.Length);

            foreach (int i in layers)
            {
                Debug.Log("  " + i);
            }

            //  try {
            var r = Random.value;     //lazy..
            NN = new NeuralNetwork.Network(layers, Random.seed);


            foreach (var c in aic)
            {
                if (c.enabled)
                {
                    c.init(NN, Motor.Scanner.Count);
                }
            }

            var output = NN.Compute(Input);

            /*if(loadWeight) {
             *  NN.Weights = LoadWeights();
             * } */
            //} catch(System.Exception e) {

            //   Debug.LogError("NN err 1: " + e.Message);
            // }
        }
    }
Code example #15
 public void init(NeuralNetwork.Network nn)
 {
     //NN = nn;
     NN = new NeuralNetwork.Network(nn);
 }
Code example #16
    void MutateGene(NeuralNetwork.Network networkToMutate)
    {
        int geneCount = networkToMutate.GetGeneCount();

        for (int i = 0; i < geneToMutate; i++)
        {
            int geneToTransferId = Random.Range(0, geneCount);

            //Look in HiddenLayers
            for (int l = 0; l < networkToMutate.hiddenLayers.Length; l++)
            {
                for (int n = 0; n < networkToMutate.hiddenLayers[l].neurons.Length; n++)
                {
                    int neuronGeneCount = networkToMutate.hiddenLayers[l].neurons[n].GetGeneCount();

                    //If the gene isn't in this neuron, continue to search
                    if (geneToTransferId >= neuronGeneCount)
                    {
                        geneToTransferId -= neuronGeneCount;
                    }
                    //If it's the last one, mutate the activation value
                    else if (geneToTransferId == neuronGeneCount - 1)
                    {
                        //Use the float overload; Random.Range(-1, 1) with ints only ever returns -1 or 0
                        networkToMutate.hiddenLayers[l].neurons[n].activationValue += Random.Range(-1f, 1f);
                        geneToTransferId = -1;
                        break;
                    }
                    //Else mutate the weight
                    else
                    {
                        networkToMutate.hiddenLayers[l].neurons[n].weights[geneToTransferId] += Random.Range(-1f, 1f);
                        geneToTransferId = -1;
                        break;
                    }
                }
                if (geneToTransferId < 0)
                {
                    break;
                }
            }

            //Look in the output layer
            if (geneToTransferId >= 0)
            {
                for (int n = 0; n < networkToMutate.outputLayer.neurons.Length; n++)
                {
                    int neuronGeneCount = networkToMutate.outputLayer.neurons[n].GetGeneCount();

                    //If the gene isn't in this neuron, continue to search
                    if (geneToTransferId >= neuronGeneCount)
                    {
                        geneToTransferId -= neuronGeneCount;
                    }
                    //If it's the last one, mutate the activation value
                    else if (geneToTransferId == neuronGeneCount - 1)
                    {
                        networkToMutate.outputLayer.neurons[n].activationValue += Random.Range(-1f, 1f);
                        geneToTransferId = -1;
                        break;
                    }
                    //Else mutate the weight
                    else
                    {
                        networkToMutate.outputLayer.neurons[n].weights[geneToTransferId] += Random.Range(-1f, 1f);
                        geneToTransferId = -1;
                        break;
                    }
                }
            }
        }
    }
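The index walk in MutateGene (subtract each neuron's gene count from the flat index until it lands inside one neuron) can be summarized by a small standalone sketch; the helper below is hypothetical and only illustrates that mapping.

    // Hypothetical illustration: map a flat gene index onto (neuron, offset),
    // where geneCounts[n] is the number of genes held by neuron n.
    static void LocateGene(int geneIndex, int[] geneCounts, out int neuron, out int offset)
    {
        for (int n = 0; n < geneCounts.Length; n++)
        {
            if (geneIndex < geneCounts[n]) { neuron = n; offset = geneIndex; return; }
            geneIndex -= geneCounts[n];
        }
        neuron = -1; offset = -1; // index is past the last gene
    }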
Code example #17
File: Game1.cs Project: thedanieldude1/DanielCode
        protected override void Update(GameTime gameTime)
        {
            this.TargetElapsedTime = TimeSpan.FromMilliseconds(2f);
            
            // Allows the game to exit
            if (GamePad.GetState(PlayerIndex.One).Buttons.Back == ButtonState.Pressed)
                this.Exit();
            if (Agents.Count < 20)
            {
                if (BestBrain != null && secondBestBrain != null)
                {
                    Agent a = new Agent(new Random(), this);
                    string rep = GenomeEncoder.Encode(BestBrain);
                    //Console.WriteLine(rep.Length / 32);
                    string rep2 = GenomeEncoder.Encode(secondBestBrain);
                    string dr = GenomeEncoder.Combine(rep, rep2);
                    //Console.WriteLine(dr);
                    a.brain = GenomeEncoder.Decode(dr);
                    //Console.WriteLine(dr.Length - rep.Length);
                    Agents.Add(a);
                }
                else if (!firstSpawned)
                {
                    Agents.Add(new Agent(new Random(), this));
                }
            }
            else
            {
                if (!firstSpawned)
                {
                    firstSpawned = true;
                }
            }
            if (Keyboard.GetState(PlayerIndex.One).IsKeyDown(Keys.B))
            {
                if (!justpressedA){
                    isPaused = !isPaused;
                    if (isPaused)
                    {
                        Console.WriteLine("Current Best Fitness: " + BestFit);
                        XmlSerializer ser = new XmlSerializer(typeof(Network));
                        Console.WriteLine(BestBrain.isTraining);
                        TextWriter writer = new StreamWriter(@"Ihateyou.txt");
                        ser.Serialize(writer, BestBrain);
                        writer.Close(); //flush the serialized network to disk
                        string all = GenomeEncoder.Encode(BestBrain);
                        //Console.WriteLine(.Length/32);
                        Network x = GenomeEncoder.Decode(all);
                        //if(x.HiddenLayer1[2].TargetSynapses[0].Weight==BestBrain.HiddenLayer1[2].TargetSynapses[0].Weight&& x.Inputs[2].TargetSynapses[0].Weight == BestBrain.Inputs[2].TargetSynapses[0].Weight && x.HiddenLayer2[2].RecursiveSynapses[0].Weight == BestBrain.HiddenLayer2[2].RecursiveSynapses[0].Weight)
                       // {
                       //     Console.WriteLine("I guess they are equal I guess");
                        //}
                       // byte[] r = BitConverter.GetBytes(0.34f);
                       // BitArray a = new BitArray(r);
                        //Console.WriteLine(BitConverter.ToSingle(GenomeEncoder.ConvertToByte(a),0));
                    }
                    justpressedA = true;
                }
            }
            else
            {
                justpressedA = false;
            }
            if (Keyboard.GetState(PlayerIndex.One).IsKeyDown(Keys.A))
            {
                steps = 1;
                Console.WriteLine("B Pressed");
            }
            if (!isPaused)
            {
                for (int x = 0; x < steps; x++)
                {
                    for (int i = 0; i < Agents.Count; i++)
                    {
                        var n = Agents[i];
                        if (n.Health <= 0)
                        {
                            //if (n.Dead != true)
                            // {
                            //     Agents.Add(new Agent(new Random(), this));
                            //    n.Dead = true;
                            // }
                            //Console.WriteLine(i + "'s fitness was " + n.Fitness);
                            if (n.Fitness > BestFit) {
                                secondBestBrain = BestBrain;
                                SecondBestFit = BestFit;
                                BestFit = n.Fitness;
                                BestBrain = n.brain;
                                
                            }
                            else if(n.Fitness>SecondBestFit)
                            {
                                secondBestBrain = n.brain;
                                SecondBestFit = n.Fitness;
                                //Console.WriteLine("Found second: " + n.Fitness);
                            }
                            
                            Agents.Remove(n);
                            continue;

                        }

                        n.Tick();

                    }
                }
            }
            // TODO: Add your update logic here
            //Angle = (Angle + 1)%360;
            base.Update(gameTime);
        }
Code example #18
File: Test.cs Project: jimc1664/Tankenstein
    //todo - turn the optimiser into a proper class so it can take parameters, and tidy this up
    void geneticOptimisation(Scorer t1, Scorer t2)
    {
        NeuralNetwork.Network n1 = t1.Ctrl.NN, n2 = t2.Ctrl.NN;

        if (n1 != n2)
        {
            n2.copyTo(n1);
        }

        /*
         * for(int layer = n1.Neurons.Length; --layer > 0; ) {
         * int nl = n1.Neurons[layer].Length;
         * for(int ri = Mathf.CeilToInt((float)nl * Random.Range(0.7f, 0.9f)  ); ri-- > 0; ) {
         *    int ni = Random.Range(0, nl);  //chance of duplicate - i suspect tracking this would be more cost than worth
         *    n1.Neurons[layer][ni].bias = n2.Neurons[layer][ni].bias;
         * }
         * var s1 = n1.Synapsis[layer - 1];
         * var s2 = n2.Synapsis[layer - 1];
         * int sl = s1.Length;
         * for(int ri = Mathf.CeilToInt((float)s1.Length * Random.Range(0.7f, 0.9f) ); ri-- > 0; ) {
         *    int si = Random.Range(0, s1.Length);  //chance of duplicate - i suspect tracking this would be more cost than worth
         *    s1[si].weight = s2[si].weight;
         * }
         * } */

        int nl = n1.Neurons.Length;

        for (int ri = Mathf.CeilToInt((float)nl * Random.Range(0.0f, 0.1f)); ri-- > 0;) //todo -- this now also affects input biases (which does nothing)
        {
            int ni = Random.Range(0, nl);                                               //chance of duplicate - i suspect tracking this would be more cost than worth
            var m  = Random.Range(-1.0f, 1.0f);
            if (Random.value > 0.9f)
            {
                n1.Neurons[ni].bias = m;
            }
            else
            {
                m *= Mathf.Abs(m);
                n1.Neurons[ni].bias = n1.Neurons[ni].bias + m * 0.05f; //NeuralNetwork.ActivationMethods.HyperbolidTangent(
            }
        }
        for (int layer = n1.Synapsis.Length; layer-- > 0;)
        {
            var s1 = n1.Synapsis[layer];
            int sl = s1.Length;
            for (int ri = Mathf.CeilToInt((float)s1.Length * Random.Range(0.0f, 0.1f)); ri-- > 0;)
            {
                int si = Random.Range(0, s1.Length);  //chance of duplicate - i suspect tracking this would be more cost than worth
                var m  = Random.Range(-1.0f, 1.0f);
                if (Random.value > 0.9f)
                {
                    s1[si].weight = m;
                }
                else
                {
                    m            *= Mathf.Abs(m);
                    s1[si].weight = s1[si].weight + m * 0.05f; // NeuralNetwork.ActivationMethods.HyperbolidTangent(
                }
            }
        }
    }
Code example #19
 public static string Encode(Network input)
 {
     StringBuilder output = new StringBuilder();
     foreach(Neuron n in input.Inputs)
     {
         foreach(Synapse t in n.TargetSynapses)
         {
             BitArray x = new BitArray(BitConverter.GetBytes(t.Weight));
             foreach(bool i in x)
             {
                 output.Append(i?"1":"0");
             }
         }
         
     }
     foreach (Neuron n in input.HiddenLayer1)
     {
         foreach (Synapse t in n.TargetSynapses)
         {
             BitArray u = new BitArray(BitConverter.GetBytes(t.Weight));
             foreach (bool i in u)
             {
                 output.Append(i ? "1" : "0");
             }
         }
         foreach (Synapse t in n.RecursiveSynapses)
         {
             BitArray u = new BitArray(BitConverter.GetBytes(t.Weight));
             foreach (bool i in u)
             {
                 output.Append(i ? "1" : "0");
             }
         }
         BitArray x = new BitArray(BitConverter.GetBytes(n.Threshold));
         foreach (bool i in x)
         {
             output.Append(i ? "1" : "0");
         }
     }
     foreach (Neuron n in input.HiddenLayer2)
     {
         foreach (Synapse t in n.TargetSynapses)
         {
             BitArray g = new BitArray(BitConverter.GetBytes(t.Weight));
             foreach (bool i in g)
             {
                 output.Append(i ? "1" : "0");
             }
             
         }
         foreach (Synapse t in n.RecursiveSynapses)
         {
             BitArray x = new BitArray(BitConverter.GetBytes(t.Weight));
             foreach (bool i in x)
             {
                 output.Append(i ? "1" : "0");
             }
         }
         BitArray u = new BitArray(BitConverter.GetBytes(n.Threshold));
         foreach (bool i in u)
         {
             output.Append(i ? "1" : "0");
         }
     }
     foreach (Neuron n in input.HiddenLayer3)
     {
         foreach (Synapse t in n.TargetSynapses)
         {
              BitArray u = new BitArray(BitConverter.GetBytes(t.Weight));
             foreach (bool i in u)
             {
                 output.Append(i ? "1" : "0");
                 
             }
             //Console.WriteLine(t.Weight);
         }
         foreach (Synapse t in n.RecursiveSynapses)
         {
             BitArray u = new BitArray(BitConverter.GetBytes(t.Weight));
             foreach (bool i in u)
             {
                 output.Append(i ? "1" : "0");
             }
         }
         
         BitArray x = new BitArray(BitConverter.GetBytes(n.Threshold));
         foreach (bool i in x)
         {
             output.Append(i ? "1" : "0");
         }
         //Console.WriteLine(n.Threshold);
     }
     foreach (Neuron n in input.Outputs)
     {
         BitArray u = new BitArray(BitConverter.GetBytes(n.Threshold));
         foreach (bool i in u)
         {
             output.Append(i ? "1" : "0");
         }
         //Console.WriteLine(n.Threshold);
     }
     
     return output.ToString();
 }
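Each block above repeats the same float-to-bit-string step. A hypothetical helper (not present in the project, and assuming Weight and Threshold are 32-bit floats, as the 32-bit chunking elsewhere suggests) that Encode could call for every weight and threshold:

     // Hypothetical helper: append the 32 bits of a float to the genome string,
     // exactly as the repeated loops in Encode do.
     private static void AppendBits(StringBuilder output, float value)
     {
         BitArray bits = new BitArray(BitConverter.GetBytes(value));
         foreach (bool bit in bits)
         {
             output.Append(bit ? "1" : "0");
         }
     }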
Code example #20
    void Breed(NeuralNetwork.Network mother, NeuralNetwork.Network father, NeuralNetwork.Network child)
    {
        //For each hidden layer
        for (int l = 0; l < mother.hiddenLayers.Length; l++)
        {
            //For each neuron in the layer
            for (int n = 0; n < mother.hiddenLayers[l].neurons.Length; n++)
            {
                //For each weight
                for (int w = 0; w < mother.hiddenLayers[l].neurons[n].weights.Length; w++)
                {
                    //Take randomly the gene from the father or the mother
                    if (Random.Range(0.0f, 1.0f) > 0.5f)
                    {
                        child.hiddenLayers[l].neurons[n].weights[w] = mother.hiddenLayers[l].neurons[n].weights[w];
                    }
                    else
                    {
                        child.hiddenLayers[l].neurons[n].weights[w] = father.hiddenLayers[l].neurons[n].weights[w];
                    }
                }

                //Same for activation value of the neuron
                if (Random.Range(0.0f, 1.0f) > 0.5f)
                {
                    child.hiddenLayers[l].neurons[n].activationValue = mother.hiddenLayers[l].neurons[n].activationValue;
                }
                else
                {
                    child.hiddenLayers[l].neurons[n].activationValue = father.hiddenLayers[l].neurons[n].activationValue;
                }
            }
        }

        //Output Layer
        //For each neuron in the layer
        for (int n = 0; n < mother.outputLayer.neurons.Length; n++)
        {
            //For each weight
            for (int w = 0; w < mother.outputLayer.neurons[n].weights.Length; w++)
            {
                //Take randomly the gene from the father or the mother
                if (Random.Range(0.0f, 1.0f) > 0.5f)
                {
                    child.outputLayer.neurons[n].weights[w] = mother.outputLayer.neurons[n].weights[w];
                }
                else
                {
                    child.outputLayer.neurons[n].weights[w] = father.outputLayer.neurons[n].weights[w];
                }
            }

            //Same for activation value of the neuron
            if (Random.Range(0.0f, 1.0f) > 0.5f)
            {
                child.outputLayer.neurons[n].activationValue = mother.outputLayer.neurons[n].activationValue;
            }
            else
            {
                child.outputLayer.neurons[n].activationValue = father.outputLayer.neurons[n].activationValue;
            }
        }
    }
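The coin-flip choice between the mother's and the father's value is repeated for every weight and activation value above. A hypothetical helper expressing that single decision (assuming the gene values are floats):

    // Hypothetical helper: return one parent's gene value with equal probability.
    static float InheritGene(float motherValue, float fatherValue)
    {
        return Random.Range(0.0f, 1.0f) > 0.5f ? motherValue : fatherValue;
    }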
Code example #21
 public Network(Network network)
 {
     m_layers = new Layer[network.m_layers.Length];
     for (int i = 0; i < m_layers.Length; i++)
         m_layers[i] = new Layer(network[i]);            
 }
Code example #22
        /// <summary>
        /// Creates a default neural network according to the settings
        /// </summary>
        private void InitializeNetwork()
        {
            double[][] thresholds = new double[][] { new double[m_hiddenLayerSize], new double[m_outputSize] };
            
            for (int i = 0; i < thresholds.Length; i++)
                for (int j = 0; j < thresholds[i].Length; j++)
                    thresholds[i][j] = StaticRandom.RandomDouble() * (-1);

            m_network = new Network(m_inputSize, new int[] { m_hiddenLayerSize, m_outputSize }, thresholds,
                new IActivationFunction[] { new HyperbolicTangentFunction(), new SigmoidFunction() });
        }