Example #1
        /// <summary>
        /// Applies the learning-rate, momentum and weight-decay updates to every layer.
        /// </summary>
        /// <param name="Inputs">Enumerator over the network inputs; raw doubles for the first
        /// layer, after which each processed layer becomes the input of the next one.</param>
        private void UpdateWeights(IEnumerator Inputs)
        {
            for (int i = 0; i < neuronLayers.Count; i++)
            {
                NeuronLayer processingLayer = neuronLayers[i];
                double      entry;
                int         j = 0;
                while (Inputs.MoveNext())
                {
                    // The first layer receives plain doubles; deeper layers receive
                    // Neuron objects and use their activations as inputs.
                    entry = Inputs.Current is Neuron inputNeuron ? inputNeuron.activation : (double)Inputs.Current;
                    foreach (Neuron neuron in processingLayer)
                    {
                        neuron.weights[j] += _learningRate * neuron.delta * entry; // gradient step
                        neuron.weights[j] += _momentum * neuron.prev_delta;        // momentum term
                        neuron.weights[j] -= _decay * neuron.weights[j];           // weight decay

                        neuron.bias          += _learningRate * neuron.bias_delta * 1; // bias input is a constant 1
                        neuron.bias          += _momentum * neuron.bias_prevdelta;
                        neuron.bias          -= _decay * neuron.bias;
                        neuron.bias_prevdelta = _learningRate * neuron.bias_delta * 1; // remembered for the next momentum step
                    }
                    j++;
                }
                // The neurons of the layer just updated become the inputs of the next layer.
                Inputs = processingLayer.GetEnumerator();
            }
        }
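The Neuron and NeuronLayer types are not shown on this page. Below is a minimal sketch of what the three examples appear to assume; the field names are taken from the code above, while the constructors and the List<Neuron> base are assumptions.

using System.Collections.Generic;

public class Neuron
{
    public double[] weights;        // one weight per input of this neuron
    public double   bias;
    public double   activation;     // output of the activation function
    public double   delta;          // error term computed by BackProp
    public double   prev_delta;     // previous weight update, read by the momentum term
    public double   bias_delta;
    public double   bias_prevdelta; // previous bias update, read by the momentum term

    public Neuron(int inputCount)
    {
        weights = new double[inputCount];
    }
}

// Assumed to be a plain list of neurons: the examples index it, enumerate it
// with foreach, and pass its enumerator around as a non-generic IEnumerator.
public class NeuronLayer : List<Neuron>
{
    public NeuronLayer(int neuronCount, int inputCount)
    {
        for (int i = 0; i < neuronCount; i++)
        {
            Add(new Neuron(inputCount));
        }
    }
}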
Example #2
        /// <summary>
        /// Backward propagation: computes the error delta of every neuron, starting
        /// from the output layer and walking back towards the input layer.
        /// </summary>
        /// <param name="ExpectedResult">Expected output of the neural network</param>
        private void BackProp(double[] ExpectedResult)
        {
            // Output layer: delta = f'(a) * (expected - actual), where
            // (1 + a) * (1 - a) == 1 - a^2 is the tanh derivative expressed via the activation.
            for (int i = 0; i < ExpectedResult.Length; i++)
            {
                Neuron currNeuron = neuronLayers.Last()[i];
                currNeuron.delta = (1 + currNeuron.activation) * (1 - currNeuron.activation) * (ExpectedResult[i] - currNeuron.activation);
            }

            // Start at Count - 2 because the output (last) layer was handled above.
            for (int i = neuronLayers.Count - 2; i >= 0; i--)
            {
                NeuronLayer processingLayer = neuronLayers[i];
                Neuron      processingNeuron;
                NeuronLayer backLayer = neuronLayers[i + 1]; // the layer this one feeds into; its deltas are already known

                for (int j = 0; j < processingLayer.Count; j++)
                {
                    processingNeuron = processingLayer[j];

                    // Sum the deltas of the next layer, weighted by the connections from neuron j.
                    double total = 0;
                    foreach (Neuron backLayerNeuron in backLayer)
                    {
                        total += backLayerNeuron.delta * backLayerNeuron.weights[j];
                    }
                    processingNeuron.delta      = (1 + processingNeuron.activation) * (1 - processingNeuron.activation) * total;
                    processingNeuron.bias_delta = (1 + processingNeuron.activation) * (1 - processingNeuron.activation) * 1; // bias input is a constant 1
                }
            }
        }
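The (1 + activation) * (1 - activation) factor in both deltas is the derivative of tanh written in terms of the activation itself: if a = tanh(x), then dtanh/dx = 1 - a^2 = (1 + a)(1 - a). A self-contained illustration; Tanh and TanhDerivative are hypothetical helpers, not part of the code above.

using System;

public static class ActivationSketch
{
    public static double Tanh(double x) => Math.Tanh(x);

    // Derivative of tanh expressed through the already-computed activation a = tanh(x):
    // 1 - a * a, which is exactly the (1 + a) * (1 - a) factor used by BackProp.
    public static double TanhDerivative(double a) => (1 + a) * (1 - a);
}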
Example #3
        /// <summary>
        /// Sum of squared errors between the output layer activations and the expected result.
        /// </summary>
        /// <param name="ExpectedResult">Expected output of the neural network</param>
        public double ErorrCost(double[] ExpectedResult)
        {
            double      errorCostValue = 0;
            NeuronLayer outputLayer    = neuronLayers.Last();

            for (int i = 0; i < ExpectedResult.Length; i++)
            {
                errorCostValue += Math.Pow(outputLayer[i].activation - ExpectedResult[i], 2);
            }
            return errorCostValue;
        }
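ErorrCost returns the plain sum of squared errors over the output layer, with no 1/2 factor and no averaging over outputs. The same value can be computed in a single expression; the sketch below is a self-contained equivalent and not part of the original class.

using System;
using System.Linq;

public static class CostSketch
{
    // Sum of squared differences between output activations and expected values,
    // matching what ErorrCost accumulates in its loop.
    public static double SumSquaredError(double[] activations, double[] expected)
        => activations.Zip(expected, (a, y) => Math.Pow(a - y, 2)).Sum();
}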