/// <summary>
/// KL-divergence-style cost: sum of Expected * ln(Expected / Actual) over all
/// cells, plus the accumulated regularization penalty over every layer's weights.
/// </summary>
/// <param name="Actual">Network output matrix.</param>
/// <param name="Expected">Target matrix; must match <paramref name="Actual"/> in size.</param>
/// <param name="data">Holds per-layer weight matrices under keys "W0", "W1", ...</param>
/// <param name="layerCount">Number of weight matrices to regularize.</param>
/// <returns>The scalar cost for this batch item (also added to <c>BatchCost</c>).</returns>
/// <exception cref="MatrixException">Thrown when the matrix dimensions differ.</exception>
public override double Forward(Matrix Actual, Matrix Expected, MatrixData data, int layerCount)
{
    if (Actual.rows != Expected.rows || Actual.cols != Expected.cols)
    {
        throw new MatrixException("Actual does not have the same size as Expected");
    }

    // BUG FIX: was `regularizationValue = ...` inside the loop, which kept only
    // the LAST layer's norm. Accumulate so every layer's penalty counts.
    double regularizationValue = 0.0;
    for (int i = 0; i < layerCount; i++)
    {
        regularizationValue += RegularizationFunction.CalculateNorm(data.Data["W" + i.ToString()]);
    }

    // Cell-wise divergence term. NOTE(review): assumes Expected[i, j] > 0 and
    // Actual[i, j] > 0 (probability-like values) — Log of 0 or a 0 denominator
    // yields NaN/Infinity; confirm upstream softmax/normalization guarantees this.
    double error = 0.0;
    for (int i = 0; i < Actual.rows; i++)
    {
        for (int j = 0; j < Actual.cols; j++)
        {
            error += Expected[i, j] * Math.Log(Expected[i, j] / Actual[i, j]);
        }
    }

    error += regularizationValue;
    BatchCost += error;
    return error;
}
/// <summary>
/// Feed-forward pass for this layer: mutates the weights in place, records the
/// weight regularization norm, then computes A = activation(W * X + B).
/// </summary>
/// <param name="inputs">Input activations for this layer (stored under "X").</param>
/// <returns>The activated output matrix (also cached under "A").</returns>
public override Matrix Forward(Matrix inputs)
{
    // Apply the mutation function element-wise to the weight matrix.
    Params["W"].InMap(MutationFunction.Mutate);

    // Record the regularization penalty of the (possibly mutated) weights.
    RegularizationValue = (float)RegularizationFunction.CalculateNorm(Params["W"]);

    // Cache the input, then run the affine transform followed by the activation.
    Params["X"] = inputs;
    Matrix preActivation = Params["W"] * Params["X"] + Params["B"];
    Params["Z"] = preActivation;
    Params["A"] = ActivationFunction.Forward(preActivation);
    return Params["A"];
}
/// <summary>
/// Gradient of an exponential-style cost: computes the scalar value
/// Tao * exp(sum((Actual - Expected)^2) / Tao) and returns the element-wise
/// residual (Actual - Expected) scaled by that scalar.
/// </summary>
/// <param name="Actual">Network output matrix.</param>
/// <param name="Expected">Target matrix; must match <paramref name="Actual"/> in size.</param>
/// <param name="data">Holds per-layer weight matrices under keys "W0", "W1", ...</param>
/// <param name="layerCount">Number of weight matrices to regularize.</param>
/// <returns>Gradient matrix, same shape as <paramref name="Actual"/>.</returns>
/// <exception cref="MatrixException">Thrown when the matrix dimensions differ.</exception>
public override Matrix Backward(Matrix Actual, Matrix Expected, MatrixData data, int layerCount)
{
    if (Actual.rows != Expected.rows || Actual.cols != Expected.cols)
    {
        throw new MatrixException("Actual Matrix does not have the same size as The Expected Matrix");
    }

    // BUG FIX: was `regularizationValue = ...`, which kept only the last layer's
    // norm. Accumulate across layers for consistency with the forward pass.
    // NOTE(review): this value is never applied to the returned gradient —
    // confirm whether the regularization derivative is handled elsewhere.
    double regularizationValue = 0.0;
    for (int i = 0; i < layerCount; i++)
    {
        regularizationValue += RegularizationFunction.CalculateNorm(data.Data["W" + i.ToString()]);
    }

    // Scalar cost factor: Tao * exp(SSE / Tao).
    double error = 0.0;
    for (int i = 0; i < Actual.rows; i++)
    {
        for (int j = 0; j < Actual.cols; j++)
        {
            error += Math.Pow(Actual[i, j] - Expected[i, j], 2);
        }
    }
    error /= Tao;
    error = Math.Exp(error);
    error *= Tao;

    // Element-wise gradient: residual scaled by the scalar cost factor.
    Matrix gradMatrix = Actual.Duplicate();
    for (int i = 0; i < Actual.rows; i++)
    {
        for (int j = 0; j < Actual.cols; j++)
        {
            gradMatrix[i, j] = (Actual[i, j] - Expected[i, j]) * error;
        }
    }

    return gradMatrix;
}