Example #1
 public Connection(Neuron n, double _weight)
 {
     sourceNeuron  = n;
     parentNetwork = n.parentNetwork;
     weight        = _weight;
 }
Example #2
 /// <summary>
 /// Removes a neuron from the hidden layer specified by the absolute layer number in <paramref name="layer"/>.
 /// </summary>
 /// <param name="n">The neuron to remove.</param>
 /// <param name="layer">Absolute layer number.</param>
 public void RemoveHiddenNeuron(Neuron n, int layer)
 {
     layers.ElementAt(layer).Remove(n);
 }
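
A minimal usage sketch, assuming layers is a List<List<Neuron>> indexed by absolute layer number (0 being the input layer) and assuming parameterless Network and Neuron constructors; these names are illustrative, not confirmed by the source. AddHiddenNeuron(Neuron, int) is shown in Example #7 below.

 Network net    = new Network();
 Neuron  hidden = new Neuron();
 net.AddHiddenNeuron(hidden, 1);    // add to absolute layer 1, the first hidden layer
 net.RemoveHiddenNeuron(hidden, 1); // remove it again from the same absolute layer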
Example #3
 public void AddConnection(Neuron n, double w)
 {
     AddConnection(new Connection(n, w));
 }
Example #4
 public void RemoveInputNeuron(Neuron n)
 {
     layers.ElementAt(0).Remove(n);
 }
Example #5
 public void RemoveOutputNeuron(Neuron n)
 {
     layers.ElementAt(layers.Count - 1).Remove(n);
 }
Example #6
 public void AddOutputNeuron(Neuron n)
 {
     n.parentNetwork = this;
     n.name          = "Output_Neuron" + layers.ElementAt(layers.Count - 1).Count.ToString();
     layers.ElementAt(layers.Count - 1).Add(n);
 }
Example #7
 public void AddHiddenNeuron(Neuron n, int layer)
 {
     n.parentNetwork = this;
     n.name          = "Hidden_Neuron_" + layer.ToString() + "_" + layers.ElementAt(layer).Count.ToString();
     layers.ElementAt(layer).Add(n);
 }
Example #8
 public void RemoveOutputNeuron(Neuron n)
 {
     outputLayer.Remove(n);
 }
Example #9
 public void RemoveHiddenNeuron(Neuron n)
 {
     hiddenLayer.Remove(n);
 }
Example #10
 public void RemoveInputNeuron(Neuron n)
 {
     inputLayer.Remove(n);
 }
Example #11
 public void AddHiddenNeuron(Neuron n)
 {
     n.parentNetwork = this;
     n.name          = "Hidden_Neuron" + hiddenLayer.Count.ToString();
     hiddenLayer.Add(n);
 }
Example #12
 public void AddOutputNeuron(Neuron n)
 {
     n.parentNetwork = this;
     n.name          = "Output_Neuron" + outputLayer.Count.ToString();
     outputLayer.Add(n);
 }
Example #13
 // Setup functions
 public void AddInputNeuron(Neuron n)
 {
     n.parentNetwork = this;
     n.name          = "Input_Neuron" + inputLayer.Count.ToString();
     inputLayer.Add(n);
 }
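
A minimal sketch wiring a tiny 2-1-1 network with the fixed three-layer variant of the setup functions (Examples #9 through #13) and the AddConnection overload from Example #3. The Network and Neuron constructors are assumptions; only the Add*/Remove* calls come from the examples above.

 Network net = new Network();

 Neuron in0 = new Neuron();
 Neuron in1 = new Neuron();
 net.AddInputNeuron(in0);          // named "Input_Neuron0"
 net.AddInputNeuron(in1);          // named "Input_Neuron1"

 Neuron hidden = new Neuron();
 net.AddHiddenNeuron(hidden);      // named "Hidden_Neuron0"
 hidden.AddConnection(in0, 0.5);   // incoming connection with an initial weight
 hidden.AddConnection(in1, -0.3);

 Neuron output = new Neuron();
 net.AddOutputNeuron(output);      // named "Output_Neuron0"
 output.AddConnection(hidden, 1.0);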
Example #14
        public static void AdjustWeights(ANN i_ANN, float[] i_DesiredOutputs)
        {
            if (i_ANN == null || i_DesiredOutputs == null || i_DesiredOutputs.Length != i_ANN.ANNOutputCount)
            {
                return;
            }

            // Init error gradients map (layer x neuron).

            List<List<float>> layerErrorGradients = new List<List<float>>();

            CommonFunctionLibrary.InitListNewElements<List<float>>(layerErrorGradients, i_ANN.layerCount);

            // Iterate over all layers, starting from the output layer: this is backward propagation.

            for (int layerIndex = i_ANN.layerCount - 1; layerIndex >= 0; --layerIndex)
            {
                // Get layer and its error gradients entry.

                Layer layer       = i_ANN.GetLayer(layerIndex);
                bool  isLastLayer = (layerIndex == i_ANN.layerCount - 1);
                Layer nextLayer   = (isLastLayer) ? null : i_ANN.GetLayer(layerIndex + 1);

                List<float> neuronErrorGradients          = layerErrorGradients[layerIndex];
                List<float> nextLayerNeuronErrorGradients = (isLastLayer) ? null : layerErrorGradients[layerIndex + 1];

                // Iterate over the layer's neurons.

                for (int neuronIndex = 0; neuronIndex < layer.neuronCount; ++neuronIndex)
                {
                    Neuron          neuron = layer.GetNeuron(neuronIndex);
                    NeuronExecution lastNeuronExecution = neuron.lastNeuronExecution;

                    // Compute current error gradient.

                    float errorGradient = 0f;

                    if (isLastLayer)
                    {
                        // For the last layer, the error is simply the desired output minus the actual output.

                        float error = i_DesiredOutputs[neuronIndex] - lastNeuronExecution.output;
                        errorGradient = lastNeuronExecution.output * (1f - lastNeuronExecution.output) * error; // This is the delta rule (https://en.wikipedia.org/wiki/Delta_rule).
                    }
                    else
                    {
                        // If this is not the final layer, use a weighted error gradient based on the next layer's error gradients.

                        errorGradient = lastNeuronExecution.output * (1f - lastNeuronExecution.output);

                        float nextLayerErrorGradientSum = 0f;
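                        // Note: because layers are processed from the output backward, the next
                        // layer's weights have already been updated when this sum is computed;
                        // textbook backpropagation takes this sum over the pre-update weights.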
                        for (int nextLayerNeuronIndex = 0; nextLayerNeuronIndex < nextLayer.neuronCount; ++nextLayerNeuronIndex)
                        {
                            Neuron nextLayerNeuron        = nextLayer.GetNeuron(nextLayerNeuronIndex);
                            float  nextLayerErrorGradient = nextLayerNeuronErrorGradients[nextLayerNeuronIndex];
                            nextLayerErrorGradientSum += nextLayerErrorGradient * nextLayerNeuron.GetWeight(neuronIndex);
                        }

                        errorGradient *= nextLayerErrorGradientSum;
                    }

                    neuronErrorGradients.Add(errorGradient);

                    // Iterate over weights and adjust them.

                    for (int weightIndex = 0; weightIndex < neuron.inputCount; ++weightIndex)
                    {
                        float weight = neuron.GetWeight(weightIndex);

                        if (isLastLayer)
                        {
                            // If this is the last layer, update the weight as on a simple perceptron.

                            float error = i_DesiredOutputs[neuronIndex] - lastNeuronExecution.output;
                            weight = weight + i_ANN.alpha * lastNeuronExecution.GetInput(weightIndex) * error;
                        }
                        else
                        {
                            // If this is not the final layer, use the error gradient as the error term.

                            weight = weight + i_ANN.alpha * lastNeuronExecution.GetInput(weightIndex) * errorGradient;
                        }

                        neuron.SetWeight(weightIndex, weight);
                    }

                    // Adjust the bias using the same learning rate and error gradient.

                    neuron.bias = neuron.bias + i_ANN.alpha * errorGradient;
                }
            }
        }
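
For reference, the updates above follow the delta rule cited in the code, where the o(1 - o) factor is the derivative of the logistic (sigmoid) activation the implementation assumes. For an output neuron j with output o_j and target t_j, and a hidden neuron j feeding downstream neurons k:

 \delta_j = o_j \, (1 - o_j) \, (t_j - o_j)               % output layer
 \delta_j = o_j \, (1 - o_j) \sum_k \delta_k \, w_{kj}    % hidden layers
 w_{ij} \leftarrow w_{ij} + \alpha \, x_i \, \delta_j     % weight update, alpha = learning rate
 b_j \leftarrow b_j + \alpha \, \delta_j                  % bias update

Note that for its output-layer weights this code applies the raw error (t_j - o_j) perceptron-style rather than \delta_j, while the bias and all hidden-layer weights use \delta_j as above.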