Example #1
        // method to compute a forward pass through the network
        // input values are assumed to be scaled between 0 and 1
        public double[] ForwardPass(double[] input)
        {
            // add bias to the input
            double[] inputWithBias = LA.Append(input, 1.0);

            // multiply input by first weight matrix
            double[] hiddenLayerInput = LA.MatByVec(WeightMat1, inputWithBias);

            // take sigmoid of terms
            double[] hiddenLayerOutputNoBias = NNMath.VectorSigmoid(hiddenLayerInput);

            // add bias term
            double[] hiddenLayerOutput = LA.Append(hiddenLayerOutputNoBias, 1.0);

            // multiply by second weight matrix
            double[] outputLayerInput = LA.MatByVec(WeightMat2, hiddenLayerOutput);

            // take sigmoid of terms
            double[] networkOutput = NNMath.VectorSigmoid(outputLayerInput);

            // return network output
            return networkOutput;
        }
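
The helper classes referenced above (LA and NNMath) are not shown on this page. The sketch below is only a guess at what they might look like, inferred from how Append, MatByVec, and VectorSigmoid are called; the real implementations in the source project may differ.

        // Hypothetical sketches of the helper routines used above, inferred
        // purely from their call sites; the project's real LA and NNMath
        // classes may be implemented differently.
        public static class LA
        {
            // return a copy of vec with one extra element (the bias term) appended
            public static double[] Append(double[] vec, double value)
            {
                double[] result = new double[vec.Length + 1];
                System.Array.Copy(vec, result, vec.Length);
                result[vec.Length] = value;
                return result;
            }

            // multiply a (rows x cols) matrix by a vector of length cols
            public static double[] MatByVec(double[,] mat, double[] vec)
            {
                int rows = mat.GetLength(0);
                int cols = mat.GetLength(1);
                double[] result = new double[rows];
                for (int i = 0; i < rows; i++)
                {
                    double sum = 0.0;
                    for (int j = 0; j < cols; j++)
                    {
                        sum += mat[i, j] * vec[j];
                    }
                    result[i] = sum;
                }
                return result;
            }
        }

        public static class NNMath
        {
            // apply the logistic sigmoid element-wise
            public static double[] VectorSigmoid(double[] vec)
            {
                double[] result = new double[vec.Length];
                for (int i = 0; i < vec.Length; i++)
                {
                    result[i] = 1.0 / (1.0 + System.Math.Exp(-vec[i]));
                }
                return result;
            }
        }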
Example #2
        // method for backpropagation
        // x is the network input, and target is the desired network output
        public void Backprop(double[] x, double[] target)
        {
            // add bias to the input
            double[] inputWithBias = LA.Append(x, 1.0);

            // multiply biased input by first weight matrix
            double[] hiddenLayerInput = LA.MatByVec(NeuralNet.WeightMat1, inputWithBias);

            // take sigmoid of terms
            double[] hiddenLayerOutput = NNMath.VectorSigmoid(hiddenLayerInput);

            // add bias term
            double[] h = LA.Append(hiddenLayerOutput, 1.0);

            // multiply by second weight matrix
            double[] outputLayerInput = LA.MatByVec(NeuralNet.WeightMat2, h);

            // take sigmoid of terms
            double[] output = NNMath.VectorSigmoid(outputLayerInput);

            // find error in network output
            int dim = target.GetLength(0);

            double[] e = new double[dim];
            for (int i = 0; i < dim; i++)
            {
                e[i] = target[i] - output[i];
            }

            // find output layer deltas
            double[] outputDeltas = new double[dim];
            for (int i = 0; i < dim; i++)
            {
                outputDeltas[i] = e[i] * output[i] * (1 - output[i]);
            }

            // update second weight matrix
            for (int i = 0; i < NeuralNet.WeightMat2.GetLength(0); i++)
            {
                for (int j = 0; j < NeuralNet.WeightMat2.GetLength(1); j++)
                {
                    NeuralNet.WeightMat2[i, j] += Eta * outputDeltas[i] * h[j] + MomentumMat2[i, j] * Momentum;
                    MomentumMat2[i, j]          = Eta * outputDeltas[i] * h[j];
                }
            }

            // find hidden layer deltas
            double[] hiddenDeltas = new double[NumHidden];

            for (int j = 0; j < NumHidden; j++)
            {
                // sum the error signal propagated back through the second weight matrix
                double backpropagatedError = 0.0;
                for (int k = 0; k < NumOutputs; k++)
                {
                    backpropagatedError += NeuralNet.WeightMat2[k, j] * outputDeltas[k];
                }

                // multiply by the sigmoid derivative of the hidden activation
                hiddenDeltas[j] = backpropagatedError * h[j] * (1 - h[j]);
            }

            // update first weight matrix
            for (int i = 0; i < NeuralNet.WeightMat1.GetLength(0); i++)
            {
                for (int j = 0; j < NeuralNet.WeightMat1.GetLength(1); j++)
                {
                    NeuralNet.WeightMat1[i, j] += Eta * hiddenDeltas[i] * inputWithBias[j] + MomentumMat1[i, j] * Momentum;
                    MomentumMat1[i, j]          = Eta * hiddenDeltas[i] * inputWithBias[j];
                }
            }
        }
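
Neither snippet shows how these methods are driven during training. The following sketch is a hypothetical training loop, assuming both ForwardPass and Backprop are exposed by a single NeuralNet-style object (in the original project they may belong to different classes); the names net, trainingInputs, trainingTargets, and epochs are illustrative only.

        // Hypothetical training loop; "net", "trainingInputs", "trainingTargets"
        // and "epochs" are illustrative names, not part of the original source.
        public static void Train(NeuralNet net,
                                 double[][] trainingInputs,
                                 double[][] trainingTargets,
                                 int epochs)
        {
            for (int epoch = 0; epoch < epochs; epoch++)
            {
                double sumSquaredError = 0.0;

                for (int n = 0; n < trainingInputs.Length; n++)
                {
                    // one backpropagation step per training pair (online / stochastic updates)
                    net.Backprop(trainingInputs[n], trainingTargets[n]);

                    // accumulate the squared error of the current forward pass
                    double[] output = net.ForwardPass(trainingInputs[n]);
                    for (int i = 0; i < output.Length; i++)
                    {
                        double e = trainingTargets[n][i] - output[i];
                        sumSquaredError += e * e;
                    }
                }

                System.Console.WriteLine("epoch " + epoch + ": SSE = " + sumSquaredError);
            }
        }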