public void BackPropagation(TrainTuple train)
        {
            // Gradient accumulators with the same shapes as the network's
            // biases and weights (zero-initialized).
            var nablaB = Biases.Select(it => Vector <double> .Build.Dense(it.Count, 0)).ToList();
            var nablaW = Weights.Select(it =>
                                        Matrix <double> .Build.Dense(it.RowCount, it.ColumnCount, 0)).ToList();

            // Forward pass: record every weighted input (z) and every layer
            // activation so the backward pass can reuse them.
            var activation = Vector <double> .Build.DenseOfEnumerable(train.Input.Select(it => (double)it));

            var activations = new List <Vector <double> > {
                activation
            };
            var zs = new List <Vector <double> >();

            var weightsWithBiases = Biases.Zip(Weights, (vector, matrix) => (vector, matrix));

            foreach (var(bias, weights) in weightsWithBiases)
            {
                var z = weights.TransposeThisAndMultiply(activation) + bias;
                zs.Add(z);
                // FIX: propagate the *current* layer's output to the next
                // iteration — the original never reassigned `activation`, so
                // every layer was fed the raw network input.
                activation = z.Map(Sigmoid);
                activations.Add(activation);
            }

            var expected = Vector <double> .Build.DenseOfEnumerable(train.Output.Select(it => (double)it));

            // Output-layer error: delta = dC/da ⊙ sigma'(z).
            // FIX: must be the element-wise (Hadamard) product; Math.NET's
            // `Vector * Vector` operator computes the dot product (a scalar).
            var delta = CostDerivative(activations.Last(), expected)
                        .PointwiseMultiply(zs.Last().Map(SigmoidPrime));

            // FIX: `^1` is the last element; `^0` (as in the commented-out
            // original) is one past the end and would throw.
            nablaB[^1] = delta;
        }
        // Example #2 (second BackPropagation overload, from the same source)
        public Vector <double> BackPropagation(TrainTuple train, double speed)
        {
            var activations = new List <Vector <double> >
            {
                Vector <double> .Build.DenseOfEnumerable(train.Input.Select(it => (double)it))
            };

            var weightedSums = new List <Vector <double> >();

            var biasesWithWeights = Biases.Zip(Weights, (vector, matrix) => (vector, matrix));

            foreach (var(biases, weights) in biasesWithWeights)
            {
                var weightedSum = weights * activations[^ 0] + biases;