Example #1
File: Program.cs Project: Muhaeb/TestNN
 private static void Update()
 {
     for (int l = 0; l <= L; l++)
     {
         // Average the gradients accumulated over the mini-batch.
         deltaW[l] = MatrixMath.Multiply(deltaW[l], 1.0 / batchSize);
         deltab[l] = MatrixMath.Multiply(deltab[l], 1.0 / batchSize);
         // Gradient step with L2 weight decay: W -= learningRate * (deltaW + weightDecay * W).
         W[l]      = MatrixMath.Subtract(W[l], MatrixMath.Multiply(MatrixMath.Add(deltaW[l], MatrixMath.Multiply(W[l], weightDecay)), learningRate));
         // Bias step: b -= learningRate * deltab (biases are not decayed).
         b[l]      = MatrixMath.Subtract(b[l], MatrixMath.Multiply(deltab[l], learningRate));
     }
 }
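For reference, the per-layer rule Update() implements is plain mini-batch gradient descent with L2 weight decay. In symbols, with η = learningRate, λ = weightDecay, m = batchSize, and ΔW, Δb the batch-summed gradients produced by Accumulate() in Example #2:

 W^{(l)} \leftarrow W^{(l)} - \eta\left(\tfrac{1}{m}\,\Delta W^{(l)} + \lambda\, W^{(l)}\right), \qquad
 b^{(l)} \leftarrow b^{(l)} - \eta\,\tfrac{1}{m}\,\Delta b^{(l)}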
Example #2
File: Program.cs Project: Muhaeb/TestNN
 private static void Accumulate()
 {
     // Sum the per-sample gradients acc_dW[k][l] / acc_db[k][l] into the
     // batch buffers deltaW[l] / deltab[l]; Update() divides by batchSize later.
     for (int l = 0; l <= L; l++)
     {
         for (int k = 0; k < batchSize; k++)
         {
             deltaW[l] = MatrixMath.Add(deltaW[l], acc_dW[k][l]);
             deltab[l] = MatrixMath.Add(deltab[l], acc_db[k][l]);
         }
     }
 }
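Note that Accumulate() only adds onto whatever deltaW/deltab already hold, so those buffers must be reset to zero between batches. A minimal sketch of such a reset step, assuming the Matrix type zero-initializes its entries and exposes a Cols property alongside the Rows seen above (the helper name ZeroGradients and the Cols property are assumptions, not confirmed by the project):

 private static void ZeroGradients()
 {
     for (int l = 0; l <= L; l++)
     {
         // Fresh matrices of the same shape; C# numeric fields default to 0.
         deltaW[l] = new Matrix(deltaW[l].Rows, deltaW[l].Cols);
         deltab[l] = new Matrix(deltab[l].Rows, deltab[l].Cols);
     }
 }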
Example #3
File: Program.cs Project: Muhaeb/TestNN
 private static void Forward()
 {
     // Hidden layers: affine transform, then the layer's activation function.
     for (int l = 0; l < L; l++)
     {
         Z[l + 1] = MatrixMath.Add(MatrixMath.Multiply(W[l], A[l]), b[l]);
         A[l + 1] = MatrixMath.F(Z[l + 1], layerFunctions[l + 1], epsilonLeaky, false); // false means apply the function, true means apply its derivative
     }
     Z[L + 1] = MatrixMath.Add(MatrixMath.Multiply(W[L], A[L]), b[L]);                  // last layer pre-activation
     if (criterion == "MSE")                                                            // last layer activation
     {
         A[L + 1] = Z[L + 1].Clone();                                                   // identity output for a mean-squared-error criterion
     }
     else if (criterion == "SoftMax")
     {
         A[L + 1] = new Matrix(10, 1);                                                  // output size is hard-coded to 10 classes
         double Denom = MatrixMath.SumExp(Z[L + 1]);
         for (int c = 0; c < Z[L + 1].Rows; c++)
         {
             A[L + 1][c, 0] = Math.Exp(Z[L + 1][c, 0]) / Denom;                         // softmax: exp(z_c) / sum_j exp(z_j)
         }
     }
 }
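One caveat in the SoftMax branch: Math.Exp overflows for large pre-activations, and the output size is hard-coded to 10. A common, mathematically equivalent variant subtracts the maximum logit before exponentiating; the sketch below is an illustration under the same Matrix API seen above, not code from the project:

 // Numerically stable softmax over Z[L + 1]: softmax(z) = softmax(z - max(z)).
 double max = double.NegativeInfinity;
 for (int c = 0; c < Z[L + 1].Rows; c++)
     max = Math.Max(max, Z[L + 1][c, 0]);

 double denom = 0.0;
 for (int c = 0; c < Z[L + 1].Rows; c++)
     denom += Math.Exp(Z[L + 1][c, 0] - max);   // each shifted exponent is at most e^0 = 1

 A[L + 1] = new Matrix(Z[L + 1].Rows, 1);       // size taken from Z instead of a hard-coded 10
 for (int c = 0; c < Z[L + 1].Rows; c++)
     A[L + 1][c, 0] = Math.Exp(Z[L + 1][c, 0] - max) / denom;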