Example #1: PropagateLayer — forward propagation. Each unit of the upper layer takes a weighted sum of this layer's outputs and passes it through a logistic sigmoid.
        public void PropagateLayer(BNet net, BLayer upper)
        {
            // Forward pass: compute each unit of the upper layer from this layer's outputs.
            for (int i = 1; i <= upper.Units; i++)
            {
                // Weighted sum over this layer's outputs; j = 0 is the bias unit (Output[0] == 1).
                double sum = 0;
                for (int j = 0; j <= Units; j++)
                {
                    sum += upper.Weights[i][j] * Output[j];
                }

                // Logistic sigmoid activation with steepness net.Gain.
                upper.Output[i] = 1 / (1 + Math.Exp(-net.Gain * sum));
            }
        }
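PropagateLayer refers to fields of BNet and BLayer that are not shown in this excerpt. A minimal sketch of what those declarations could look like follows, assuming 1-indexed unit arrays in which Output[0] is a bias unit fixed at 1; the exact shapes and names are assumptions made for illustration only.

    public class BNet
    {
        public double Gain;         // sigmoid steepness used by both methods
    }

    public class BLayer
    {
        public int Units;           // number of units, excluding the bias
        public double[] Output;     // Output[0] = 1 (bias), Output[1..Units] = activations
        public double[] Error;      // Error[1..Units], filled in by BackpropagateLayer (Example #2)
        public double[][] Weights;  // Weights[i][j]: weight from lower-layer unit j to unit i

        // PropagateLayer and BackpropagateLayer from the examples are members of this class.
    }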
Example #2: BackpropagateLayer — backpropagation. Each unit of the lower layer accumulates the errors of the units above it, weighted by the connecting weights, and scales the result by the sigmoid derivative.
        public void BackpropagateLayer(BNet net, BLayer lower)
        {
            // Backward pass: propagate this layer's error terms down to the lower layer.
            for (int i = 1; i <= lower.Units; i++)
            {
                double Out = lower.Output[i];

                // Accumulate the error flowing back through the weights into lower unit i.
                double Err = 0;
                for (int j = 1; j <= Units; j++)
                {
                    Err += Weights[j][i] * Error[j];
                }

                // Scale by the derivative of the sigmoid: Gain * Out * (1 - Out).
                lower.Error[i] = net.Gain * Out * (1 - Out) * Err;
            }
        }
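Assuming the network keeps its layers in an array ordered from input to output (a hypothetical Layers field on BNet, not part of the excerpt above), the forward and backward passes of one training step could be driven roughly like this:

    // Hypothetical driver, assuming net.Layers is ordered input -> output.
    // Forward pass: each layer pushes its outputs up to the next one.
    for (int l = 0; l < net.Layers.Length - 1; l++)
        net.Layers[l].PropagateLayer(net, net.Layers[l + 1]);

    // ...compute Error on the output layer from the target values...

    // Backward pass: each layer pushes its error terms down to the layer below.
    for (int l = net.Layers.Length - 1; l > 0; l--)
        net.Layers[l].BackpropagateLayer(net, net.Layers[l - 1]);

The weight update itself is not shown in these examples; the two methods only compute the activations and the per-unit error terms that such an update would use.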