Beispiel #1
0
 private static void Update()
 {
     // Average the accumulated gradients over the mini-batch, then take one
     // gradient-descent step. Weights get an L2 weight-decay term; biases do not.
     double invBatchSize = 1.0 / batchSize;
     for (int layer = 0; layer <= L; layer++)
     {
         deltaW[layer] = MatrixMath.Multiply(deltaW[layer], invBatchSize);
         deltab[layer] = MatrixMath.Multiply(deltab[layer], invBatchSize);

         // Effective weight gradient: deltaW + weightDecay * W
         var decayedGradient = MatrixMath.Add(deltaW[layer], MatrixMath.Multiply(W[layer], weightDecay));
         W[layer] = MatrixMath.Subtract(W[layer], MatrixMath.Multiply(decayedGradient, learningRate));
         b[layer] = MatrixMath.Subtract(b[layer], MatrixMath.Multiply(deltab[layer], learningRate));
     }
 }
Beispiel #2
0
        /// <summary>
        /// Backpropagates the error for one training sample and stores the resulting
        /// per-layer gradients in the accumulator slot <paramref name="k"/>.
        /// </summary>
        /// <param name="k">Index of the current sample within the mini-batch accumulators.</param>
        private static void Backward(int k)
        {
            // Error at the output-layer neurons: e[L+1] = A[L+1] - label.
            errors[L + 1] = MatrixMath.Subtract(A[L + 1], label);

            // Error at the hidden-layer neurons, propagated backwards:
            // e[l] = (W[l]^T * e[l+1]) . F'(Z[l])
            // where * is the matrix product and . the Hadamard (element-wise) product.
            for (int layer = L; layer >= 1; layer--)
            {
                var propagated = MatrixMath.Multiply(MatrixMath.Transpose(W[layer]), errors[layer + 1]);
                var activationDerivative = MatrixMath.F(Z[layer], layerFunctions[layer], epsilonLeaky, true);
                errors[layer] = MatrixMath.HadamardProduct(propagated, activationDerivative);
            }

            // Per-sample gradients: dW[l] = e[l+1] * A[l]^T, db[l] = e[l+1].
            // Clone into the per-sample accumulators so later reuse of dW/db/errors
            // cannot alias the stored values.
            for (int layer = 0; layer <= L; layer++)
            {
                dW[layer] = MatrixMath.Multiply(errors[layer + 1], MatrixMath.Transpose(A[layer]));
                db[layer] = errors[layer + 1];
                acc_dW[k][layer] = dW[layer].Clone();
                acc_db[k][layer] = db[layer].Clone();
            }
        }