Code example #1
        // Backward propagation: takes the target matrix (on the output layer) or the error
        // handed back by the next layer, optionally updates this layer's weights,
        // and returns the error to pass to the previous layer.
        public Matrix Backwards(Matrix yin, Boolean end = false, Boolean learn = true)
        {
            Matrix y = yin;

            #if ADD_BIAS
            // Drop the bias column from the incoming error; the target fed to the
            // output layer carries no bias column, so it is left untouched.
            if (!end)
            {
                y = Matrix.RemoveBias(yin);
            }
            #endif
            if (y.Rows != batchsize)
            {
                throw new Exception("Batch-size is inconsistent.");
            }
            if (y.Columns != nodes.Columns)
            {
                Debug.Log("Y: " + y.Columns + " nodes: " + nodes.Columns);
                throw new Exception("Inconsistent number of inputs for backward.");
            }

            Matrix err;
            Matrix delta;

            // On the output layer the error is computed against the target;
            // on hidden layers y already holds the error propagated back by the next layer.
            if (end)
            {
                err = y - nodes;  // Output layer: error = target - prediction.
            }
            else
            {
                err = y;          // Hidden layer: back-propagated error from the layer above.
            }
            delta = Matrix.HadamardProd(err, Matrix.Transform(func.deriv, nodes)); // Scale by the activation derivative to get this layer's delta.

            Matrix weightsT = Matrix.Transpose(weights); // Transpose of the weights used in the forward pass, taken before the update.

            if (learn)
            {
                weights = weights + (Matrix.Transpose(x) * delta); // Accumulate the update from this layer's input and delta.
            }

            return delta * weightsT; // Error to pass to the previous layer.
        }
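As a rough usage sketch (the `layers` array, `target` matrix, and the driver loop below are assumptions for illustration, not part of the original code), the error could be threaded back through a stack of these layers by calling the output layer with `end: true` and feeding each earlier layer the matrix returned by the layer after it:

        // Hypothetical driver loop: back-propagate from the output layer to the first layer.
        // Assumes "layers" is an array of this layer type whose forward pass has already run.
        Matrix grad = layers[layers.Length - 1].Backwards(target, end: true);
        for (int i = layers.Length - 2; i >= 0; i--)
        {
            grad = layers[i].Backwards(grad); // Each call returns the error for the layer below.
        }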
Code example #2
        // Backward propagation for a network with an input layer, a list of hidden layers, and one output layer.
        private void BackwardPropagation()
        {
            int last = hidLayer.results.Count - 1; // Index of the last hidden layer.

            errorOutput = targetOutput - outputResult;
            // Output layer: compute its delta and update the hidden-to-output weights.
            Matrix deltaLayer      = Matrix.HadamardProd(Matrix.Transform(SigmoidPrime, outputSum), errorOutput);
            Matrix layerChanges    = Matrix.Transpose(hidLayer.results[last]) * deltaLayer * learningRate;
            Matrix previousWeights = hiddenOutputWeights; // Keep the pre-update weights for propagating the delta backwards.

            hiddenOutputWeights += layerChanges;

            // Walk back through the hidden layers: propagate the delta through the
            // pre-update weights of the layer above, then update the weights feeding that layer.
            for (int i = 0; i < last; i++)
            {
                deltaLayer      = Matrix.HadamardProd(deltaLayer * Matrix.Transpose(previousWeights), Matrix.Transform(SigmoidPrime, hidLayer.sums[last - i]));
                layerChanges    = Matrix.Transpose(hidLayer.results[last - 1 - i]) * deltaLayer * learningRate; // Input to wieghts[last - 1 - i] is the activation of hidden layer last - 1 - i.
                previousWeights = hidLayer.wieghts[last - 1 - i];
                hidLayer.wieghts[last - 1 - i] += layerChanges;
            }

            // First hidden layer: one more propagation step, then update the input-to-hidden weights.
            deltaLayer          = Matrix.HadamardProd(deltaLayer * Matrix.Transpose(previousWeights), Matrix.Transform(SigmoidPrime, hidLayer.sums[0]));
            layerChanges        = Matrix.Transpose(inputLayer) * deltaLayer * learningRate;
            inputHiddenWeights += layerChanges;
        }
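The second example calls a `SigmoidPrime` helper that is not shown. A minimal sketch, assuming `Matrix.Transform` applies a `Func<double, double>` element-wise and that `outputSum` / `hidLayer.sums` hold pre-activation values (so the derivative is taken with respect to the raw sum `z`), might look like this:

        // Hedged sketch of the missing activation helpers; not taken from the original source.
        private static double Sigmoid(double z)
        {
            return 1.0 / (1.0 + Math.Exp(-z)); // requires using System;
        }

        private static double SigmoidPrime(double z)
        {
            double s = Sigmoid(z);
            return s * (1.0 - s); // dσ/dz = σ(z) * (1 - σ(z))
        }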