Code Example #1
File: Program.cs Project: Muhaeb/TestNN
        private static void ReportPerf(StreamWriter writer, List <DigitImage> data, int epoch, string whichPerformance)
        {
            // rows = true labels, columns = predicted labels; layerSizes[L + 1] is the number of output classes
            var    confusionMatrix = new Matrix(layerSizes[L + 1], layerSizes[L + 1]);
            double accuracy        = 0;

            for (int k = 0; k < data.Count; k++)
            {
                Select(data, k);                            // load sample k into the input layer
                Forward();                                  // forward pass
                var maxPred = MatrixMath.Max(A[L + 1]);     // index of the largest output activation = predicted class
                var label   = (int)data[k].label;
                confusionMatrix[label, maxPred.Item1]++;    // row = true label, column = predicted label
            }
            accuracy = MatrixMath.Trace(confusionMatrix) / data.Count; // correct predictions lie on the diagonal, so trace / sample count = accuracy
            writer.WriteLine(whichPerformance + " Performance at Epoch: " + epoch);
            writer.WriteLine(whichPerformance + " Confusion Matrix: ");
            writer.WriteLine(confusionMatrix);
            writer.WriteLine(whichPerformance + " Accuracy: " + accuracy);
        }
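
Every correct prediction lands on the diagonal of the confusion matrix, so the trace divided by the number of samples is exactly the accuracy reported above. A minimal, self-contained sketch of that bookkeeping on a plain int[,], with no dependence on the project's Matrix/MatrixMath types:

using System;

class ConfusionDemo
{
    static void Main()
    {
        // 3-class example: rows = true labels, columns = predicted labels
        int[,] confusion =
        {
            { 5, 1, 0 },
            { 0, 7, 2 },
            { 1, 0, 4 }
        };

        int correct = 0, total = 0;
        for (int r = 0; r < 3; r++)
        {
            for (int c = 0; c < 3; c++)
            {
                total += confusion[r, c];
                if (r == c) correct += confusion[r, c]; // diagonal = correctly classified samples
            }
        }

        Console.WriteLine($"accuracy = {(double)correct / total:F3}"); // 16 / 20 = 0.800
    }
}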
Code Example #2
File: Program.cs Project: Muhaeb/TestNN
        private static void Backward(int k)
        {
            errors[L + 1] = MatrixMath.Subtract(A[L + 1], label); // error at the output-layer neurons
            for (int l = L; l >= 1; l--)                          // error at hidden-layer neurons: e[l] = (W[l].Transpose * e[l+1]) . F'(Z[l]), where * is matrix multiplication and . is the Hadamard product
            {
                errors[l] = MatrixMath.HadamardProduct(MatrixMath.Multiply(MatrixMath.Transpose(W[l]), errors[l + 1]), MatrixMath.F(Z[l], layerFunctions[l], epsilonLeaky, true));
            }
            for (int l = 0; l <= L; l++)
            {
                dW[l] = MatrixMath.Multiply(errors[l + 1], MatrixMath.Transpose(A[l])); // weight gradient: outer product of the layer's error with the incoming activations
                db[l] = errors[l + 1];                                                  // bias gradient equals the layer's error
            }

            for (int l = 0; l <= L; l++)
            {
                acc_dW[k][l] = dW[l].Clone(); // store the gradients of sample k for later accumulation
                acc_db[k][l] = db[l].Clone();
                //deltaW[l] = MatrixMath.Add(deltaW[l], dW[l]);
                //deltab[l] = MatrixMath.Add(deltab[l], db[l]);
            }
        }
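
Written out, the pass above restates standard backpropagation. With y the label vector, f' the derivative of the layer's activation, and ⊙ the Hadamard product:

$$
\begin{aligned}
e^{(L+1)} &= a^{(L+1)} - y,\\
e^{(l)} &= \bigl(W^{(l)}\bigr)^{\mathsf T} e^{(l+1)} \odot f'\bigl(z^{(l)}\bigr), && l = L, \dots, 1,\\
dW^{(l)} &= e^{(l+1)} \bigl(a^{(l)}\bigr)^{\mathsf T}, \qquad db^{(l)} = e^{(l+1)}, && l = 0, \dots, L.
\end{aligned}
$$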
Code Example #3
File: Program.cs Project: Muhaeb/TestNN
 private static void Forward()
 {
     for (int l = 0; l < L; l++)
     {
         Z[l + 1] = MatrixMath.Add(MatrixMath.Multiply(W[l], A[l]), b[l]);              // pre-activation: Z[l+1] = W[l] * A[l] + b[l]
         A[l + 1] = MatrixMath.F(Z[l + 1], layerFunctions[l + 1], epsilonLeaky, false); // false means apply the activation function, true means apply its derivative
     }
     Z[L + 1] = MatrixMath.Add(MatrixMath.Multiply(W[L], A[L]), b[L]);                  //last layer pre-activation
     if (criterion == "MSE")                                                            //last layer activation
     {
         A[L + 1] = Z[L + 1].Clone();                 // identity activation at the output for the MSE criterion
     }
     else if (criterion == "SoftMax")
     {
         A[L + 1] = new Matrix(10, 1);                // 10 output classes (digits 0-9)
         double Denom = MatrixMath.SumExp(Z[L + 1]);  // sum of exp over the output pre-activations
         for (int c = 0; c < Z[L + 1].Rows; c++)
         {
             A[L + 1][c, 0] = Math.Exp(Z[L + 1][c, 0]) / Denom;
         }
     }
 }
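
The SoftMax branch exponentiates each output pre-activation and divides by MatrixMath.SumExp(Z[L + 1]). A common refinement, not used in the listing above, is to subtract the maximum pre-activation before exponentiating; the resulting probabilities are identical, but large logits no longer overflow. A self-contained sketch on a plain double[] (the names here are illustrative, not part of the project):

using System;
using System.Linq;

class SoftMaxDemo
{
    // Numerically stable softmax: exp(z - max(z)) / sum(exp(z - max(z)))
    static double[] SoftMax(double[] z)
    {
        double max = z.Max();                                       // shift so the largest exponent is 0
        double[] exp = z.Select(v => Math.Exp(v - max)).ToArray();
        double denom = exp.Sum();
        return exp.Select(v => v / denom).ToArray();
    }

    static void Main()
    {
        double[] logits = { 2.0, 1.0, 0.1 };
        Console.WriteLine(string.Join(", ", SoftMax(logits).Select(p => p.ToString("F3"))));
        // prints 0.659, 0.242, 0.099 (the entries sum to 1)
    }
}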
Code Example #4
File: Program.cs Project: Muhaeb/TestNN
 private static void ReportBatch(StreamWriter writer, int i, int j)
 {
     writer.WriteLine("=====>  Training Loss after Batch " + (j / batchSize + 1) + " on Epoch " + i + ": " + (Loss[i] /= Training.Count));
     for (int l = 0; l <= L; l++)
     {
         writer.WriteLine("\r\nEpoch " + (i + 1) + " Batch " + (j / batchSize + 1) + " Layer " + (l + 1) + " Weights Gradients Norm:" + MatrixMath.PNorm(deltaW[l], 2));
         writer.WriteLine("Epoch " + (i + 1) + " Batch " + (j / batchSize + 1) + " Layer " + (l + 1) + " Biases Gradients Norm:" + MatrixMath.PNorm(deltab[l], 2) + "\r\n");
     }
 }
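
MatrixMath.PNorm(deltaW[l], 2) is presumably the entrywise 2-norm of the gradient matrix (the Frobenius norm), which makes these per-batch reports a cheap way to spot vanishing or exploding gradients. A minimal sketch of that computation, assuming the entrywise definition:

using System;

class NormDemo
{
    // Entrywise p-norm of a matrix: (sum of |x|^p over all entries)^(1/p); p = 2 gives the Frobenius norm
    static double PNorm(double[,] m, double p)
    {
        double sum = 0;
        foreach (double x in m)                    // foreach over a 2-D array visits every entry
            sum += Math.Pow(Math.Abs(x), p);
        return Math.Pow(sum, 1.0 / p);
    }

    static void Main()
    {
        double[,] grad = { { 3.0, 0.0 }, { 0.0, 4.0 } };
        Console.WriteLine(PNorm(grad, 2));         // 5
    }
}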
Code Example #5
File: Program.cs Project: Muhaeb/TestNN
 private static void Update()
 {
     for (int l = 0; l <= L; l++)
     {
         deltaW[l] = MatrixMath.Multiply(deltaW[l], 1.0 / batchSize); // average the accumulated gradients over the mini-batch
         deltab[l] = MatrixMath.Multiply(deltab[l], 1.0 / batchSize);
         W[l]      = MatrixMath.Subtract(W[l], MatrixMath.Multiply(MatrixMath.Add(deltaW[l], MatrixMath.Multiply(W[l], weightDecay)), learningRate)); // gradient step with L2 weight decay on the weights
         b[l]      = MatrixMath.Subtract(b[l], MatrixMath.Multiply(deltab[l], learningRate));                                                         // plain gradient step for the biases
     }
 }
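
Assuming deltaW[l] and deltab[l] hold the gradients summed over the current mini-batch, this is plain mini-batch gradient descent with L2 weight decay. With learning rate η = learningRate, decay λ = weightDecay and batch size m = batchSize:

$$
W^{(l)} \leftarrow W^{(l)} - \eta\left(\frac{1}{m}\,\Delta W^{(l)} + \lambda\, W^{(l)}\right),
\qquad
b^{(l)} \leftarrow b^{(l)} - \eta\,\frac{1}{m}\,\Delta b^{(l)}.
$$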