Code example #1
        public void PropagateLayer(BNet net, BLayer upper)
        {
            double sum;

            // Forward pass: compute the activation of every unit in the upper layer.
            // Unit indices start at 1; index 0 presumably holds the bias entry.
            for (int i = 1; i <= upper.Units; i++)
            {
                // Weighted sum over this layer's outputs, including the bias weight at j = 0.
                sum = 0;
                for (int j = 0; j <= Units; j++)
                {
                    sum += upper.Weights[i][j] * Output[j];
                }
                // Sigmoid activation with steepness net.Gain.
                upper.Output[i] = 1 / (1 + Math.Exp(-net.Gain * sum));
            }
        }
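For reference, each pass of the outer loop evaluates the usual gain-scaled sigmoid. Writing g for net.Gain, w_{ij} for upper.Weights[i][j], and o_j for this layer's Output[j] (with j = 0 assumed to be the bias entry), the update is

    o_i^{\text{upper}} = \frac{1}{1 + \exp\!\left(-g \sum_{j=0}^{\text{Units}} w_{ij}\, o_j\right)}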
Code example #2
        public void BackpropagateLayer(BNet net, BLayer lower)
        {
            double Out, Err;

            // Backward pass: propagate the error terms of this layer down to the lower layer.
            for (int i = 1; i <= lower.Units; i++)
            {
                Out = lower.Output[i];

                // Accumulate the error flowing back through every weight that connects
                // lower unit i to a unit j of this layer.
                Err = 0;
                for (int j = 1; j <= Units; j++)
                {
                    Err += Weights[j][i] * Error[j];
                }

                // Scale by the derivative of the gain-scaled sigmoid: g * o * (1 - o).
                lower.Error[i] = net.Gain * Out * (1 - Out) * Err;
            }
        }
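In equation form, with o_i = lower.Output[i], w_{ji} = Weights[j][i], and \delta_j = Error[j] of this (upper) layer, the loop computes

    \delta_i^{\text{lower}} = g\, o_i\,(1 - o_i) \sum_{j=1}^{\text{Units}} w_{ji}\, \delta_j

where the factor g\, o_i\,(1 - o_i) is the derivative of the sigmoid used in PropagateLayer.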
Code example #3
        public BNet(int[] neurons, double gain, double eta, double alpha /*, int inputsCount, int layersCount*/) : base(neurons[0], neurons.Length)
        {
            this.inputsCount = neurons[0];      // units in the input layer
            this.layersCount = neurons.Length;  // total number of layers, input layer included

            this.Gain  = gain;   // sigmoid steepness
            this.Eta   = eta;    // learning rate
            this.Alpha = alpha;  // momentum term
            this.Neu   = neurons;

            // The input layer has no preceding layer; every other layer is connected
            // to the layer directly below it.
            layers[0] = new BLayer(neurons[0], neurons[0], -1, true);
            for (int l = 1; l < layersCount; l++)
            {
                layers[l] = new BLayer(neurons[l], neurons[l], neurons[l - 1], false);
            }

            in_layer  = (BLayer)layers[0];
            out_layer = (BLayer)layers[layersCount - 1];
        }
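A minimal usage sketch, assuming only the constructor signature shown above; the layer sizes and the gain / eta (learning rate) / alpha (momentum) values are illustrative, not taken from the original article:

            // Hypothetical example: a 2-3-1 network (2 inputs, 3 hidden units, 1 output)
            // with gain 1.0, learning rate 0.25 and momentum 0.9.
            BNet net = new BNet(new int[] { 2, 3, 1 }, 1.0, 0.25, 0.9);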