Beispiel #1
0
        /// <summary>
        /// Propagates the given input values through the network and returns the output layer.
        /// </summary>
        /// <param name="inputs">Activation values for the input layer; length should match the input layer size.</param>
        /// <returns>The activations of the output layer (<see cref="Outputs"/>) after propagation.</returns>
        public float[] FeedForward(float[] inputs)
        {
            // Load the inputs into the first (input) layer.
            for (int i = 0; i < inputs.Length; i++)
            {
                this.Neurons[0][i] = inputs[i];
            }

            // Propagate layer by layer: each neuron becomes the sigmoid of the
            // weighted sum of the previous layer's activations plus its bias.
            for (int i = 1; i < this.LayerCount; i++)
            {
                for (int j = 0; j < this.Neurons[i].Length; j++)
                {
                    // BUGFIX: the bias was previously ignored (the accumulator started
                    // at 0f) even though Biases are initialized and mutated elsewhere.
                    float value = this.Biases[i - 1][j];

                    for (int k = 0; k < this.Neurons[i - 1].Length; k++)
                    {
                        value += this.Weights[i - 1][j][k] * this.Neurons[i - 1][k];
                    }

                    this.Neurons[i][j] = NeuroHelper.Sigmoid(value);
                }
            }

            // Notify subscribers that a forward pass completed.
            this.FeedForwardFinished?.Invoke(this, new FeedForwardFinishedEventArgs(this.Outputs));
            return this.Outputs;
        }
Beispiel #2
0
        /// <summary>
        /// (Re)creates the weight and bias arrays for every non-input layer, sized from
        /// <c>this.layers</c>. Existing values can be carried over via the preset
        /// parameters; any entry not covered by the presets is initialized randomly.
        /// </summary>
        /// <param name="presetWeights">Optional weights to carry over, indexed [layer-1][neuron][prevNeuron]; may be smaller than the new topology.</param>
        /// <param name="presetBiases">Optional biases to carry over, indexed [layer-1][neuron]; may be smaller than the new topology.</param>
        private void InitWeightsAndBiases(float[][][] presetWeights = null, float[][] presetBiases = null)
        {
            List <float[][]> weights = new List <float[][]>();
            List <float[]>   biases  = new List <float[]>();

            for (int i = 1; i < this.layers.Length; i++)
            {
                List <float[]> layerWeights = new List <float[]>();
                float[]        layerBiases  = new float[this.layers[i]];

                for (int j = 0; j < this.layers[i]; j++)
                {
                    float[] neuronWeights = new float[this.layers[i - 1]];

                    // BUGFIX: the element checks must be strict ('> j' / '> k');
                    // the old '>= j' allowed j == Length and then the indexer
                    // threw an IndexOutOfRangeException. ('Length >= i' is correct
                    // because the accessed index is i - 1.)
                    if (presetBiases != null && presetBiases.Length >= i && presetBiases[i - 1].Length > j)
                    {
                        layerBiases[j] = presetBiases[i - 1][j];
                    }
                    else
                    {
                        layerBiases[j] = NeuroHelper.RandomNext();
                    }

                    for (int k = 0; k < neuronWeights.Length; k++)
                    {
                        if (presetWeights != null && presetWeights.Length >= i && presetWeights[i - 1].Length > j && presetWeights[i - 1][j].Length > k)
                        {
                            neuronWeights[k] = presetWeights[i - 1][j][k];
                        }
                        else
                        {
                            neuronWeights[k] = NeuroHelper.RandomNext();
                        }
                    }
                    layerWeights.Add(neuronWeights);
                }
                biases.Add(layerBiases);
                weights.Add(layerWeights.ToArray());
            }

            this.Weights = weights.ToArray();
            this.Biases  = biases.ToArray();
        }
Beispiel #3
0
        /// <summary>
        /// Randomly mutates the network in place. Per non-input layer it may
        /// (a) add a neuron to that layer and rebuild weights/biases,
        /// (b) nudge one random weight, and/or (c) nudge one random bias,
        /// each gated by the corresponding <see cref="NeuroHelper"/> chance.
        /// </summary>
        public void Mutate()
        {
            for (int i = 1; i < this.LayerCount; i++)
            {
                if (NeuroHelper.RandomNext(0f, 1f) <= NeuroHelper.LayerMutationChance)
                {
                    // Grow layer i by one neuron, then rebuild the neuron arrays to
                    // match the new topology.
                    // BUGFIX: the old code allocated EVERY layer with 'layers[l] + 1'
                    // elements even though only layer i grew, desynchronizing Neurons
                    // from this.layers and the weight matrices.
                    this.layers[i]++;

                    List <float[]> neurons = new List <float[]>();
                    for (int l = 0; l < this.layers.Length; l++)
                    {
                        // new float[n] is already zero-initialized.
                        neurons.Add(new float[this.layers[l]]);
                    }
                    this.Neurons = neurons.ToArray();

                    // Carry the existing weights/biases over; new slots are randomized.
                    this.InitWeightsAndBiases(this.Weights, this.Biases);
                }

                if (NeuroHelper.RandomNext(0f, 1f) <= NeuroHelper.NeuronWeightMutationChance)
                {
                    int neuronIndex = NeuroHelper.RandomNext(0, this.Neurons[i].Length);
                    int weightIndex = NeuroHelper.RandomNext(0, this.Weights[i - 1][neuronIndex].Length);

                    // BUGFIX: the old guard 'weightIndex < this.Weights.Length' compared
                    // a weight index against the NUMBER OF LAYERS and wrongly skipped
                    // valid mutations; weightIndex is already drawn from the valid range.
                    this.Weights[i - 1][neuronIndex][weightIndex] += NeuroHelper.RandomNext(-NeuroHelper.NeuronWeightMutationDefaultValue, NeuroHelper.NeuronWeightMutationDefaultValue);
                }

                if (NeuroHelper.RandomNext(0f, 1f) <= NeuroHelper.NeuronBiasMutationChance)
                {
                    int neuronIndex = NeuroHelper.RandomNext(0, this.Neurons[i].Length);

                    this.Biases[i - 1][neuronIndex] += NeuroHelper.RandomNext(-NeuroHelper.NeuronBiasMutationDefaultValue, NeuroHelper.NeuronBiasMutationDefaultValue);
                }
            }
        }
Beispiel #4
0
        /// <summary>
        /// Probabilistically removes low-scoring networks from the population in place.
        /// Networks in the worse half of the ranking are removed with a higher chance
        /// than those in the better half; the population is never reduced below one.
        /// </summary>
        /// <param name="scoredNeuroNets">Population mapped to its fitness score; modified in place.</param>
        public static void Selection(Dictionary <NeuroNet, int> scoredNeuroNets)
        {
            // BUGFIX: materialize the ranking ONCE. The original kept a deferred LINQ
            // query over the dictionary and re-enumerated it (Count(), ElementAt) after
            // Remove(), which re-sorted the shrinking dictionary each time and shifted
            // indices mid-loop.
            List <KeyValuePair <NeuroNet, int> > sortedBrains = scoredNeuroNets.OrderBy(x => x.Value).ToList();
            int count = sortedBrains.Count;

            for (int i = 0; i < count; i++)
            {
                // Keep at least one network alive (removal is only allowed while >= 2 remain).
                if (scoredNeuroNets.Count < 2)
                {
                    break;
                }

                if (i <= count / 2)
                {
                    // Worse half: removal chance starts high and decreases with rank.
                    if (NeuroHelper.RandomNext(0, count * 2) < (count * 2 - 2) - i * 2)
                    {
                        scoredNeuroNets.Remove(sortedBrains[i].Key);
                    }
                }
                else
                {
                    // Better half: lower removal chance, also decreasing with rank.
                    if (NeuroHelper.RandomNext(0, count * 2) < (count * 2) - i * 2)
                    {
                        scoredNeuroNets.Remove(sortedBrains[i].Key);
                    }
                }
            }
        }
Beispiel #5
0
        /// <summary>
        /// Creates a child network whose weights and biases are inherited entry-by-entry
        /// from either parent. Where both parents provide a value, the parent is chosen
        /// by a 50/50 coin flip; where only one parent's arrays are long enough to cover
        /// an index, that parent's value is used.
        /// </summary>
        /// <param name="mother">First parent network.</param>
        /// <param name="father">Second parent network.</param>
        /// <returns>The newly created child network.</returns>
        public static NeuroNet Crossover(NeuroNet mother, NeuroNet father)
        {
            // NOTE(review): InitChild presumably sizes the child's arrays from the
            // parents' topologies — confirm; the bounds logic below assumes every
            // child index is covered by at least one parent.
            NeuroNet child = InitChild(mother, father);

            for (int i = 0; i < child.Weights.Length; i++)
            {
                for (int j = 0; j < child.Weights[i].Length; j++)
                {
                    for (int k = 0; k < child.Weights[i][j].Length; k++)
                    {
                        // Take the mother's weight unless k is out of the mother's
                        // range while the father has more weights (then the father is
                        // forced); take it without a coin flip when k is out of the
                        // father's range while the mother has more (mother forced);
                        // otherwise flip a coin. Note the RandomNext call only happens
                        // when the earlier clauses short-circuit to it, so the RNG
                        // advances a data-dependent number of times.
                        if (!(k >= mother.Weights[i][j].Length && mother.Weights[i][j].Length < father.Weights[i][j].Length) &&
                            ((k >= father.Weights[i][j].Length && mother.Weights[i][j].Length > father.Weights[i][j].Length) || NeuroHelper.RandomNext(0, 100) > 50))
                        {
                            child.Weights[i][j][k] = mother.Weights[i][j][k];
                        }
                        else
                        {
                            child.Weights[i][j][k] = father.Weights[i][j][k];
                        }
                    }

                    // Same selection scheme for the bias of neuron j: forced parent
                    // when only one covers index j, otherwise a 50/50 coin flip.
                    if (!(j >= mother.Biases[i].Length && mother.Biases[i].Length < father.Biases[i].Length) &&
                        ((j >= father.Biases[i].Length && mother.Biases[i].Length > father.Biases[i].Length) || NeuroHelper.RandomNext(0, 100) > 50))
                    {
                        child.Biases[i][j] = mother.Biases[i][j];
                    }
                    else
                    {
                        child.Biases[i][j] = father.Biases[i][j];
                    }
                }
            }

            return(child);
        }