Example No. 1
0
        // Sum-of-derivatives-of-weights: accumulates this neuron's outgoing
        // weight to each neuron in the next layer, scaled by that neuron's
        // error gradient. Used when back-propagating into hidden layers.
        float SumDOW(NeuronLayer nextLayer)
        {
            float total = 0.0f;
            int downstreamCount = nextLayer.Neurons.Count;

            for (int n = 0; n < downstreamCount; n++)
            {
                total += Weights[n] * nextLayer.Neurons[n].Gradient;
            }

            return total;
        }
Example No. 2
0
        /// <summary>
        /// Trains the network on one sample: computes the RMS error against
        /// <paramref name="targetValues"/>, calculates output- and hidden-layer
        /// gradients, then updates every connection weight.
        /// </summary>
        /// <param name="targetValues">Expected output, one value per output neuron.</param>
        /// <param name="learningRate">Gradient-descent step scale.</param>
        /// <param name="momentum">Fraction of the previous weight delta to carry over.</param>
        public void BackPropagate(float[] targetValues, float learningRate = 0.3f, float momentum = 0.5f)
        {
            if (NeuronLayers.Count == 0)
            {
                Debug.LogError("BackPropagate failed because no layers exist in the neural network");
                return;
            }

            NeuronLayer outputLayer = NeuronLayers[NeuronLayers.Count - 1];

            if (targetValues.Length != outputLayer.Neurons.Count)
            {
                // FIX: was Debug.Log — a size mismatch aborts training, so log it
                // at error severity like the empty-network case above.
                Debug.LogError("BackPropagate passed " + targetValues.Length.ToString() + " elements but needed " + outputLayer.Neurons.Count.ToString());
                return;
            }

            // Root-mean-square error over the output layer.
            error = 0.0f;

            for (int i = 0; i < outputLayer.Neurons.Count; i++)
            {
                float delta = targetValues[i] - outputLayer.Neurons[i].Output;
                error += delta * delta;
            }

            error /= outputLayer.Neurons.Count;
            error  = Mathf.Sqrt(error);

            // Gradients for the output layer come directly from the targets.
            for (int i = 0; i < outputLayer.Neurons.Count; i++)
            {
                outputLayer.Neurons[i].CalculateOutputGradients(targetValues[i]);
            }

            // Hidden-layer gradients, propagated backwards (input layer at
            // index 0 needs no gradient, hence i > 0).
            for (int i = NeuronLayers.Count - 2; i > 0; i--)
            {
                NeuronLayer hiddenLayer = NeuronLayers[i];
                NeuronLayer nextLayer   = NeuronLayers[i + 1];

                for (int j = 0; j < hiddenLayer.Neurons.Count; j++)
                {
                    hiddenLayer.Neurons[j].CalculateHiddenGradients(nextLayer);
                }
            }

            // Weight update pass, from output layer back to the first hidden layer.
            for (int i = NeuronLayers.Count - 1; i > 0; i--)
            {
                NeuronLayer layer         = NeuronLayers[i];
                NeuronLayer previousLayer = NeuronLayers[i - 1];

                for (int j = 0; j < layer.Neurons.Count; j++)
                {
                    // FIX: forward the caller's hyperparameters — previously the
                    // learningRate/momentum arguments were silently ignored and
                    // UpdateWeights always ran with its own defaults.
                    layer.Neurons[j].UpdateWeights(previousLayer, learningRate, momentum);
                }
            }
        }
Example No. 3
0
        /// <summary>
        /// Computes this neuron's activation from the previous layer: the
        /// weighted sum of each source neuron's output (using that neuron's
        /// weight toward this neuron's index) plus the layer bias, passed
        /// through the transfer function.
        /// </summary>
        public void FeedForward(NeuronLayer inputLayer)
        {
            float activation = 0.0f;

            for (int i = 0; i < inputLayer.Neurons.Count; i++)
            {
                Neuron source = inputLayer.Neurons[i];
                activation += source.Weights[index] * source.Output;
            }

            // Bias is added after the neuron sum, matching the summation order
            // of the rest of the network.
            activation += inputLayer.Bias;

            Output = Transfer(activation);
        }
Example No. 4
0
        /// <summary>
        /// Adjusts every incoming weight to this neuron: a gradient-descent
        /// step scaled by the source neuron's output, plus a momentum term
        /// carried over from the previous delta.
        /// </summary>
        /// <param name="previousLayer">Layer feeding into this neuron.</param>
        /// <param name="learningRate">Gradient-descent step scale.</param>
        /// <param name="momentum">Fraction of the previous delta to reapply.</param>
        public void UpdateWeights(NeuronLayer previousLayer, float learningRate = 0.3f, float momentum = 0.5f)
        {
            foreach (Neuron source in previousLayer.Neurons)
            {
                float previousDelta = source.DeltaWeights[index];
                float delta         = learningRate * source.Output * Gradient + momentum * previousDelta;

                source.DeltaWeights[index] = delta;
                source.Weights[index]     += delta;
            }
        }
Example No. 5
0
        /// <summary>
        /// Hidden-layer gradient: the summed downstream (weight * gradient)
        /// contributions, scaled by the transfer function's derivative at
        /// this neuron's output.
        /// </summary>
        public void CalculateHiddenGradients(NeuronLayer nextLayer)
        {
            Gradient = SumDOW(nextLayer) * TransferDerivative(Output);
        }