Example #1
0
        /// <summary>
        /// Back-propagation step for the hidden-to-output weights: accumulates the network's
        /// total squared error and, via the chain rule, fills m_hoWeightChanges with the
        /// learning-rate-scaled gradient for every hidden->output weight (bias included).
        /// </summary>
        /// <param name="idealOutput">Target value for each output neuron.</param>
        /// <param name="learningRate">Scale factor applied to each computed weight delta.</param>
        private void CalculateHiddenToOutputWeightChanges(Double[] idealOutput, Double learningRate)
        {
            // Reset the accumulated network error before summing per-neuron errors.
            m_totalOutputError = 0;

            for (Int32 o = 0; o < m_numOutput; o++)
            {
                // Squared error for this output neuron, folded into the network total.
                Double squaredError = BackpropFunctions.SquaredErrorFunction(idealOutput[o], m_outputLayerOutput[o]);
                m_outputSquaredErrors[o] = squaredError;
                m_totalOutputError += squaredError;

                // dE/dOut: derivative of the squared error with respect to this neuron's output.
                Double dErrorOverOutput = m_outputLayerOutput[o] - idealOutput[o];
                m_outputPartialDerivTotalErrorOverOutputNeuron[o] = dErrorOverOutput;

                // dOut/dNet: derivative of the activation (tanh) evaluated at the neuron's output.
                Double dOutputOverNet = ActivationFunctions.TanhDerivative(m_outputLayerOutput[o]);
                m_outputPartialDerivOutputNeuronOverTotalInput[o] = dOutputOverNet;

                for (Int32 h = 0; h < m_numHidden + BiasNeuronCount; h++)
                {
                    // dNet/dW: the net input's sensitivity to this weight is simply
                    // the output of the hidden neuron the weight comes from.
                    Double dNetOverWeight = m_hiddenLayerOutput[h];
                    m_outputPartialDerivTotalInputOverWeight[h, o] = dNetOverWeight;

                    // Chain rule: dE/dW = dE/dOut * dOut/dNet * dNet/dW, scaled by the learning rate.
                    m_hoWeightChanges[h, o] = dErrorOverOutput * dOutputOverNet * dNetOverWeight * learningRate;
                }
            }
        }
Example #2
0
        /// <summary>
        /// Back-propagation step for the input-to-hidden weights: fills m_ihWeightChanges
        /// with the learning-rate-scaled gradient for every input->hidden weight (bias included),
        /// reusing the output-layer partial derivatives computed by
        /// CalculateHiddenToOutputWeightChanges.
        /// </summary>
        /// <param name="learningRate">Scale factor applied to each computed weight delta.</param>
        private void CalculateInputToHiddenWeightChanges(Double learningRate)
        {
            // We need the partial derivative of the total error with respect to each weight:
            //   dE/dHiddenOut  (sum over output neurons of their back-propagated error terms) *
            //   dHiddenOut/dNet (derivative of the hidden activation function) *
            //   dNet/dW        (the input value feeding that weight).
            for (Int32 hiddenNeuronIndex = 0; hiddenNeuronIndex < m_numHidden; hiddenNeuronIndex++)
            {
                // Both of these terms depend only on the hidden neuron, so compute them once
                // per hidden neuron instead of once per weight (previously recomputed inside
                // the input loop, making this O(inputs * outputs) per hidden neuron).
                Double errorOverHiddenOutput = 0.0;
                for (Int32 outputNeuronIndex = 0; outputNeuronIndex < m_numOutput; outputNeuronIndex++)
                {
                    errorOverHiddenOutput += m_outputPartialDerivTotalErrorOverOutputNeuron[outputNeuronIndex] *
                                             m_outputPartialDerivOutputNeuronOverTotalInput[outputNeuronIndex] *
                                             m_ho[hiddenNeuronIndex, outputNeuronIndex];
                }

                Double hiddenOutputOverNetInput = ActivationFunctions.TanhDerivative(m_hiddenLayerOutput[hiddenNeuronIndex]);

                for (Int32 inputNeuronIndex = 0; inputNeuronIndex < m_numInput + BiasNeuronCount; inputNeuronIndex++)
                {
                    m_hiddenPartialDerivTotalErrorOverHiddenOutput[inputNeuronIndex, hiddenNeuronIndex] = errorOverHiddenOutput;
                    m_hiddenPartialDerivHiddenOutputOverNetInput[inputNeuronIndex, hiddenNeuronIndex] = hiddenOutputOverNetInput;

                    // dNet/dW is the input feeding the weight.
                    // NOTE(review): when inputNeuronIndex == m_numInput this reads the bias slot,
                    // but ComputeOutput only fills m_lastInput[0..m_numInput-1]; m_lastInput[m_numInput]
                    // must be initialized to 1.0 elsewhere (e.g. the constructor) for the bias
                    // gradient to be correct — confirm.
                    Double netInputOverWeight = m_lastInput[inputNeuronIndex];
                    m_hiddenPartialDerivNetInputOverWeight[inputNeuronIndex, hiddenNeuronIndex] = netInputOverWeight;

                    m_ihWeightChanges[inputNeuronIndex, hiddenNeuronIndex] = learningRate *
                                                                             errorOverHiddenOutput *
                                                                             hiddenOutputOverNetInput *
                                                                             netInputOverWeight;
                }
            }
        }
Example #3
0
        /// <summary>
        /// Given the input (should be one entry for each input neuron, excluding the bias, whose input is always 1), calculate and store
        /// the output of the hidden and output layers.
        /// </summary>
        /// <param name="input">An array of values corresponding to the input for the input layer.</param>
        /// <summary>
        /// Given the input (should be one entry for each input neuron, excluding the bias, whose input is always 1), calculate and store
        /// the output of the hidden and output layers.
        /// </summary>
        /// <param name="input">An array of values corresponding to the input for the input layer.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="input"/> is null.</exception>
        /// <exception cref="ArgumentException">Thrown when <paramref name="input"/> does not contain exactly one value per input neuron.</exception>
        public void ComputeOutput(Double[] input)
        {
            // Throw the specific argument-exception types rather than a bare Exception.
            // Both derive from Exception, so existing catch blocks keep working.
            if (input == null)
            {
                throw new ArgumentNullException("input");
            }
            if (input.Length != m_numInput)
            {
                throw new ArgumentException("The input needs to be " + m_numInput.ToString() + " in length.", "input");
            }

            // Remember the input so the backprop pass can compute dNet/dW for input->hidden weights.
            for (Int32 i = 0; i < m_numInput; i++)
            {
                m_lastInput[i] = input[i];
            }

            // Step1: Compute output of Hidden Layer
            for (Int32 i = 0; i < m_numHidden; i++)
            {
                m_hiddenLayerOutput[i] = 0; // Initialize
                for (Int32 j = 0; j < m_numInput + BiasNeuronCount; j++)
                {
                    if (j == m_numInput) // Bias neuron: its input is always 1.0
                    {
                        m_hiddenLayerOutput[i] += (m_ih[j, i] * 1.0);
                    }
                    else
                    {
                        m_hiddenLayerOutput[i] += (m_ih[j, i] * input[j]);
                    }
                }
                // Now that you summed weights * inputs, squash
                m_hiddenLayerOutput[i] = ActivationFunctions.Tanh(m_hiddenLayerOutput[i]);
            }

            // Step2: Compute output of Output Layer
            for (Int32 i = 0; i < m_numOutput; i++)
            {
                m_outputLayerOutput[i] = 0; // Initialize
                for (Int32 j = 0; j < m_numHidden + BiasNeuronCount; j++)
                {
                    // Use m_numHidden here (not the NumHidden property) so the bias check
                    // is guaranteed consistent with the loop bound above.
                    if (j == m_numHidden) // Bias neuron: its input is always 1.0
                    {
                        m_outputLayerOutput[i] += (m_ho[j, i] * 1.0);
                    }
                    else
                    {
                        m_outputLayerOutput[i] += (m_ho[j, i] * m_hiddenLayerOutput[j]);
                    }
                }
                // Now that you summed weights * inputs, squash
                m_outputLayerOutput[i] = ActivationFunctions.Tanh(m_outputLayerOutput[i]);
            }
        }