Example #1
        public void Calculate(double[] inputVector, int iCount,
                              double[] outputVector /* =NULL */, int oCount /* =0 */,
                              NNNeuronOutputsList pNeuronOutputs /* =NULL */)
        {
            var lit = m_Layers.First();

            // first layer is the input layer: directly set outputs of all of its neurons
            // to the input vector
            if (m_Layers.Count > 1)
            {
                int count = 0;
                if (iCount != lit.m_Neurons.Count)
                {
                    return;
                }
                foreach (var nit in lit.m_Neurons)
                {
                    if (count < iCount)
                    {
                        nit.output = inputVector[count];
                        count++;
                    }
                }
            }
            // calculate the outputs of the remaining layers
            for (int i = 1; i < m_Layers.Count; i++)
            {
                m_Layers[i].Calculate();
            }

            // load up output vector with results

            if (outputVector != null)
            {
                lit = m_Layers[m_Layers.Count - 1];

                for (int ii = 0; ii < oCount; ii++)
                {
                    outputVector[ii] = lit.m_Neurons[ii].output;
                }
            }

            // load up neuron output values with results
            if (pNeuronOutputs != null)
            {
                // clear any results from a previous call (re-use of the list is expected)
                pNeuronOutputs.Clear();
                // reserve room for one output list per layer
                pNeuronOutputs.Capacity = m_Layers.Count;
                foreach (NNLayer nnlit in m_Layers)
                {
                    var layerOut = new NNNeuronOutputs(nnlit.m_Neurons.Count);
                    for (int ii = 0; ii < nnlit.m_Neurons.Count; ++ii)
                    {
                        layerOut.Add(nnlit.m_Neurons[ii].output);
                    }
                    pNeuronOutputs.Add(layerOut);
                }
            }
        }
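For context, here is a minimal, hypothetical driver for a single forward pass. Only Calculate() and its parameters come from the listing above; the network instance name (neuralNet) and the 29x29-input / 10-class sizes are illustrative assumptions and may differ in the actual project.

    // Hypothetical usage sketch (not part of the original source):
    // run one forward pass and pick the highest-scoring output neuron.
    double[] input  = new double[841];   // assumed 29x29 image, flattened
    double[] output = new double[10];    // assumed 10 output classes

    neuralNet.Calculate(input, input.Length, output, output.Length, null);

    int best = 0;
    for (int i = 1; i < output.Length; i++)
    {
        if (output[i] > output[best]) { best = i; }
    }
    Console.WriteLine("predicted class: " + best);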
Example #2
        /// <summary>
        /// Thread-safe wrapper around the neural net's Calculate() function.
        /// </summary>
        /// <param name="inputVector">input values fed to the first (input) layer</param>
        /// <param name="count">number of elements in inputVector</param>
        /// <param name="outputVector">receives the outputs of the last layer (may be null)</param>
        /// <param name="oCount">number of elements to copy into outputVector</param>
        /// <param name="pNeuronOutputs">optionally receives the output of every neuron in every layer</param>
        /// <param name="bDistort">if true, a distortion map is generated and applied to the input first</param>
        public void CalculateNeuralNet(double[] inputVector, int count,
                                       double[] outputVector /* =NULL */, int oCount /* =0 */,
                                       NNNeuronOutputsList pNeuronOutputs /* =NULL */,
                                       bool bDistort /* =FALSE */)
        {
            // wrapper function for the neural net's Calculate() function, needed because the NN is a protected member.
            // Waits on the neural net mutex so as to restrict access to one thread at a time; the mutex is
            // released in a finally block so it is freed even if Calculate() throws.
            m_Mutexs[0].WaitOne();
            try
            {
                if (bDistort)
                {
                    GenerateDistortionMap(1.0);
                    ApplyDistortionMap(inputVector);
                }

                _NN.Calculate(inputVector, count, outputVector, oCount, pNeuronOutputs);
            }
            finally
            {
                m_Mutexs[0].ReleaseMutex();
            }
        }
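A minimal sketch of how this wrapper might be called from several worker threads; the owning object name (trainer), the thread count, and the vector sizes are assumptions for illustration. The point is that the internal mutex serializes access to _NN.Calculate(), so concurrent callers do not need their own locking.

    // Hypothetical concurrent usage (not from the original source).
    // Requires: using System.Threading;
    var threads = new Thread[4];
    for (int t = 0; t < threads.Length; t++)
    {
        threads[t] = new Thread(() =>
        {
            double[] input  = new double[841];  // assumed input size
            double[] output = new double[10];   // assumed output size
            trainer.CalculateNeuralNet(input, input.Length, output, output.Length, null, false);
        });
        threads[t].Start();
    }
    foreach (var t in threads) { t.Join(); }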
Example #3
        public void Backpropagate(double[] actualOutput, double[] desiredOutput, int count, NNNeuronOutputsList pMemorizedNeuronOutputs)
        {
            // backpropagates through the neural net

            if (m_Layers.Count < 2)        // there must be at least two layers in the net
            {
                return;
            }
            if ((actualOutput == null) || (desiredOutput == null) || (count >= 256))
            {
                return;
            }


            // check if it's time for a weight sanity check

            m_cBackprops++;

            if ((m_cBackprops % 10000) == 0)
            {
                // every 10000 backprops

                PeriodicWeightSanityCheck();
            }


            // proceed from the last layer to the first, iteratively
            // We calculate the last layer separately, and first, since it provides the needed derivative
            // (i.e., dErr_wrt_dXnm1) for the previous layers

            // nomenclature:
            //
            // Err is output error of the entire neural net
            // Xn is the output vector on the n-th layer
            // Xnm1 is the output vector of the previous layer
            // Wn is the vector of weights of the n-th layer
            // Yn is the activation value of the n-th layer, i.e., the weighted sum of inputs BEFORE the squashing function is applied
            // F is the squashing function: Xn = F(Yn)
            // F' is the derivative of the squashing function
            //   Conveniently, for F = tanh, then F'(Yn) = 1 - Xn^2, i.e., the derivative can be calculated from the output, without knowledge of the input

            int iSize           = m_Layers.Count;
            var dErr_wrt_dXlast = new DErrorsList(m_Layers[m_Layers.Count - 1].m_Neurons.Count);
            var differentials   = new List <DErrorsList>(iSize);

            int ii;

            // start the process by calculating dErr_wrt_dXn for the last layer.
            // for the standard MSE Err function (i.e., 0.5*sumof( (actual-target)^2 )), this differential is simply
            // the difference between the actual and the target

            for (ii = 0; ii < m_Layers[m_Layers.Count - 1].m_Neurons.Count; ++ii)
            {
                dErr_wrt_dXlast.Add(actualOutput[ii] - desiredOutput[ii]);
            }


            // store Xlast and reserve memory for the remaining vectors stored in differentials



            for (ii = 0; ii < iSize - 1; ii++)
            {
                var differential = new DErrorsList(m_Layers[ii].m_Neurons.Count);
                for (int kk = 0; kk < m_Layers[ii].m_Neurons.Count; kk++)
                {
                    differential.Add(0.0);
                }
                differentials.Add(differential);
            }
            differentials.Add(dErr_wrt_dXlast);          // last one
            // now iterate through all layers including the last but excluding the first, and ask each of
            // them to backpropagate error and adjust their weights, and to return the differential
            // dErr_wrt_dXnm1 for use as the input value of dErr_wrt_dXn for the next iterated layer

            bool bMemorized = (pMemorizedNeuronOutputs != null);

            for (int jj = iSize - 1; jj > 0; jj--)
            {
                if (bMemorized)
                {
                    m_Layers[jj].Backpropagate(differentials[jj], differentials[jj - 1],
                                               pMemorizedNeuronOutputs[jj], pMemorizedNeuronOutputs[jj - 1], m_etaLearningRate);
                }
                else
                {
                    m_Layers[jj].Backpropagate(differentials[jj], differentials[jj - 1],
                                               null, null, m_etaLearningRate);
                }
            }


            differentials.Clear();
        }
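To show how the two entry points fit together, a minimal sketch of one training step follows. It assumes the Calculate() method from Example #1 on the same network object, a 10-class output, and tanh-style +1/-1 targets; all of these are illustrative assumptions, not part of the original source.

    // Hypothetical single training step (not from the original source):
    // forward pass, build the desired output vector, then backpropagate.
    double[] input         = new double[841];   // assumed input size
    double[] actualOutput  = new double[10];    // assumed 10 output classes
    double[] desiredOutput = new double[10];
    int label = 3;                               // assumed ground-truth class

    var memorizedOutputs = new NNNeuronOutputsList();  // filled by Calculate, reused by Backpropagate

    neuralNet.Calculate(input, input.Length, actualOutput, actualOutput.Length, memorizedOutputs);

    for (int i = 0; i < desiredOutput.Length; i++)
    {
        desiredOutput[i] = (i == label) ? 1.0 : -1.0;   // tanh-style targets; an assumption
    }

    neuralNet.Backpropagate(actualOutput, desiredOutput, actualOutput.Length, memorizedOutputs);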