Ejemplo n.º 1
0
 /// <summary>
 /// Forward propagation: pushes each layer's outputs through to the next
 /// layer. The output layer gets the dedicated output activation; every
 /// other layer uses the general activation.
 /// </summary>
 private void Propagate()
 {
     int lastIndex = _allLayer.Count - 1;
     for (int layer = 0; layer < lastIndex; ++layer)
     {
         var target = _allLayer[layer + 1];
         ENeuralAct act;
         if (target.LayerType == ENeuralLayerType.Output)
         {
             act = _outputNeuralAct;
         }
         else
         {
             act = _generalNeuralAct;
         }
         _allLayer[layer].Propagate(act, target);
     }
 }
Ejemplo n.º 2
0
        /// <summary>
        /// Forward propagation: for every neuron of <paramref name="nextLayer"/>,
        /// computes the weighted sum of this layer's outputs plus a bias term,
        /// applies the requested activation function, and stores the result in
        /// that neuron's <c>Output</c>.
        /// </summary>
        /// <param name="neuralACT">Activation function to apply to each weighted sum.</param>
        /// <param name="nextLayer">Layer receiving the propagated values.</param>
        /// <exception cref="NotImplementedException">
        /// Thrown for an unrecognized <see cref="ENeuralAct"/> value
        /// (kept consistent with <c>BackPropagate</c>; was previously a bare
        /// <see cref="Exception"/>, which CA2201 flags).
        /// </exception>
        public void Propagate(ENeuralAct neuralACT, NeuralLayer nextLayer)
        {
            int numNeurons = nextLayer.Neurons.Count;

            for (int i = 0; i < numNeurons; ++i)
            {
                var target = nextLayer.Neurons[i];
                float value = 0.0f;

                int numWeights = Neurons.Count;
                for (int j = 0; j < numWeights; ++j)
                {
                    // Sum the (weights * inputs); the inputs are the outputs
                    // of this (the propagating) layer.
                    value += target.Weights[j] * Neurons[j].Output;
                }

                // Add in the bias, modeled as an extra weight whose input is
                // always -1.
                value += target.Weights[numWeights] * -1.0f;

                // Run the activation function and store the result.
                switch (neuralACT)
                {
                case ENeuralAct.Step:
                    target.Output = ActStep(value);
                    break;

                case ENeuralAct.Tanh:
                    target.Output = ActTanh(value);
                    break;

                case ENeuralAct.Logistic:
                    target.Output = ActLogistic(value);
                    break;

                case ENeuralAct.BipolarSigmoid:
                    target.Output = ActBipolarSigmoid(value);
                    break;

                case ENeuralAct.Linear:
                    // Identity activation: the raw weighted sum is the output.
                    target.Output = value;
                    break;

                default:
                    throw new NotImplementedException();
                }
            }

            //if you wanted to run the Softmax activation function, you
            //would do it here, since it needs all the output values
            //if you pushed all the outputs into a vector, you could...
            //outputs = ActSoftmax(outputs);
            //and then put the outputs back into the correct slots
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Back propagation: accumulates the weighted error contributed by this
        /// layer's neurons, scales it by the derivative of the activation
        /// (evaluated at the stored output), and writes it into each neuron of
        /// <paramref name="nextLayer"/>.
        /// NOTE(review): despite its name, <paramref name="nextLayer"/> appears
        /// to be the preceding layer in forward order — this layer's
        /// <c>Weights[i]</c> pair with <paramref name="nextLayer"/>'s neuron
        /// <c>i</c>, matching the weight layout used by <c>Propagate</c>.
        /// Confirm against the caller.
        /// </summary>
        /// <param name="neuralACT">Activation whose derivative scales the error.</param>
        /// <param name="nextLayer">Layer that receives the propagated error.</param>
        /// <exception cref="NotImplementedException">
        /// Thrown for <see cref="ENeuralAct.Step"/> (no usable derivative) or
        /// any unrecognized activation value.
        /// </exception>
        public void BackPropagate(ENeuralAct neuralACT, NeuralLayer nextLayer)
        {
            int numNeurons = nextLayer.Neurons.Count;

            for (int i = 0; i < numNeurons; ++i)
            {
                var target = nextLayer.Neurons[i];
                float outputVal = target.Output;

                // Sum of (weight from nextLayer neuron i) * (this layer's errors).
                float error = 0;
                for (int j = 0; j < Neurons.Count; ++j)
                {
                    error += Neurons[j].Weights[i] * Neurons[j].Error;
                }

                switch (neuralACT)
                {
                case ENeuralAct.Tanh:
                    target.Error = DerTanh(outputVal) * error;
                    break;

                case ENeuralAct.Logistic:
                    target.Error = DerLogistic(outputVal) * error;
                    break;

                case ENeuralAct.BipolarSigmoid:
                    target.Error = DerBipolarSigmoid(outputVal) * error;
                    break;

                case ENeuralAct.Linear:
                    // BUG FIX: the derivative of the identity activation
                    // f(x) = x is 1, so the error passes through unscaled.
                    // The previous code multiplied by outputVal, which is the
                    // derivative of x^2/2, not of x.
                    target.Error = error;
                    break;

                default:
                    throw new NotImplementedException();
                }
            }
        }