/// <summary>
/// Computes the matrix product of <paramref name="a"/> (m x n) and
/// <paramref name="b"/> (n x q) as a raw 2-D array of shape m x q.
/// </summary>
/// <param name="a">Left operand.</param>
/// <param name="b">Right operand; its row count must equal a's column count.</param>
/// <returns>A newly allocated m x q array containing the product.</returns>
/// <exception cref="System.ArgumentException">Thrown when the inner dimensions differ.</exception>
public static double[,] Multiply(Matrixs a, Matrixs b)
{
    int m = a.Rows();
    int n = a.Cols();
    int p = b.Rows();
    int q = b.Cols();

    if (n != p)
    {
        // Fixed: the paramName used to be the literal "matrix", which is not
        // a parameter of this method; report the offending argument instead.
        throw new System.ArgumentException("Matrices dimensions do not match for multiply", nameof(b));
    }

    // Hoist the backing arrays out of the triple loop instead of calling
    // Data() once per element access.
    double[,] left = a.Data();
    double[,] right = b.Data();
    double[,] c = new double[m, q];

    for (int i = 0; i < m; i++)
    {
        for (int j = 0; j < q; j++)
        {
            double sum = 0;
            for (int k = 0; k < n; k++)
            {
                sum += left[i, k] * right[k, j];
            }
            c[i, j] = sum;
        }
    }

    return c;
}
/// <summary>
/// Returns a new matrix holding the element-wise difference a - b.
/// When shapes differ but <paramref name="b"/> is 1x1, its single value is
/// subtracted from every element of <paramref name="a"/> (scalar broadcast).
/// </summary>
/// <param name="a">Minuend; the result always takes a's shape.</param>
/// <param name="b">Subtrahend; same shape as a, or 1x1 for broadcast.</param>
/// <returns>A newly allocated matrix; neither input is modified.</returns>
/// <exception cref="System.ArgumentException">Thrown when the shapes are incompatible.</exception>
public static Matrixs Subtract(Matrixs a, Matrixs b)
{
    // The result always takes a's shape. (The old code sized it
    // a.Rows() x b.Cols(), which only worked because equal shapes were
    // enforced before any element was written.)
    var result = new Matrixs(a.Rows(), a.Cols());

    if (SubtractableDimensions(a, b))
    {
        // Same shape: plain element-wise subtraction.
        for (var i = 0; i < a.Rows(); i++)
        {
            for (var j = 0; j < a.Cols(); j++)
            {
                result.data[i, j] = a.data[i, j] - b.data[i, j];
            }
        }
        return result;
    }

    if (b.Rows() == 1 && b.Cols() == 1)
    {
        // Scalar broadcast: subtract b's single entry from every element of a.
        // This branch existed before but was unreachable, because the
        // equal-shape guard ran first and threw for any 1x1 b that did not
        // match a's shape.
        for (var i = 0; i < a.Rows(); i++)
        {
            for (var j = 0; j < a.Cols(); j++)
            {
                result.data[i, j] = a.data[i, j] - b.data[0, 0];
            }
        }
        return result;
    }

    // Fixed the "substraction" typo in the message.
    throw new System.ArgumentException("Matrices dimensions do not match for subtraction");
}
/// <summary>
/// Builds a column vector (n x 1 matrix) whose rows are the entries of
/// <paramref name="array"/> in order.
/// </summary>
/// <param name="array">Source values; one row is created per element.</param>
/// <returns>A newly allocated n x 1 matrix.</returns>
public static Matrixs FromArray(int [] array)
{
    var column = new Matrixs(array.Length, 1);
    for (var row = 0; row < array.Length; row++)
    {
        column.Data()[row, 0] = array[row];
    }
    return column;
}
/// <summary>
/// Adds <paramref name="n"/> into this matrix element-wise, in place.
/// NOTE(review): no dimension check is performed here, unlike the other
/// Add overloads — an undersized n will throw an IndexOutOfRangeException;
/// consider validating shapes. TODO confirm intended contract.
/// </summary>
/// <param name="n">Matrix whose elements are added to this one.</param>
public void Add(Matrixs n)
{
    var other = n.Data();
    for (var row = 0; row < Rows(); row++)
    {
        for (var col = 0; col < Cols(); col++)
        {
            matrix[row, col] += other[row, col];
        }
    }
}
/// <summary>
/// True when <paramref name="a"/> and <paramref name="b"/> have identical
/// row and column counts (i.e. element-wise operations are well-defined).
/// </summary>
private static bool SubtractableDimensions(Matrixs a, Matrixs b)
    => a.Rows() == b.Rows() && a.Cols() == b.Cols();
/// <summary>
/// Flattens <paramref name="matrix"/> into a 1-D array in row-major order.
/// </summary>
/// <param name="matrix">Matrix to flatten.</param>
/// <returns>A new array of length Rows() * Cols() containing every element.</returns>
public static double[] ToArray(Matrixs matrix)
{
    int rows = matrix.Rows();
    int cols = matrix.Cols();

    // Bug fix: the old code sized the array Rows() + Cols() and wrote only
    // arr[i] inside the inner loop, so each row kept just its last column
    // and the tail of the array stayed zero. Size it rows * cols and write
    // every element at its row-major position instead.
    double[] arr = new double[rows * cols];
    double[,] data = matrix.Data();

    for (int i = 0; i < rows; i++)
    {
        for (int j = 0; j < cols; j++)
        {
            arr[i * cols + j] = data[i, j];
        }
    }
    return arr;
}
/// <summary>
/// Returns a new matrix that is the transpose of <paramref name="matrix"/>
/// (result[j, i] receives matrix[i, j]); the input is not modified.
/// </summary>
public static Matrixs Transpose(Matrixs matrix)
{
    var rows = matrix.Rows();
    var cols = matrix.Cols();
    var result = new Matrixs(cols, rows);

    for (var r = 0; r < rows; r++)
    {
        for (var c = 0; c < cols; c++)
        {
            // += kept from the original; presumably a fresh Matrixs is
            // zero-initialized, making this equivalent to assignment — confirm.
            result.data[c, r] += matrix.data[r, c];
        }
    }
    return result;
}
/// <summary>
/// Writes <paramref name="a"/> to the console, one row per line, with a
/// trailing space after each value.
/// </summary>
public static void DisplayMatrix(Matrixs a)
{
    for (int row = 0; row < a.Rows(); row++)
    {
        for (int col = 0; col < a.Cols(); col++)
        {
            Console.Write(a.Data()[row, col] + " ");
        }
        Console.WriteLine();
    }
}
/// <summary>
/// Applies <paramref name="f"/> to every element of <paramref name="matrix"/>
/// and returns the results in a new matrix of the same shape; the input is
/// not modified.
/// </summary>
/// <param name="matrix">Source matrix.</param>
/// <param name="f">Function applied to each element.</param>
public static Matrixs Map(Matrixs matrix, Func <double, double> f)
{
    var rows = matrix.Rows();
    var cols = matrix.Cols();
    var mapped = new Matrixs(rows, cols);

    for (int r = 0; r < rows; r++)
    {
        for (int c = 0; c < cols; c++)
        {
            mapped.data[r, c] = f(matrix.data[r, c]);
        }
    }
    return mapped;
}
/// <summary>
/// Adds <paramref name="n"/> into this matrix element-wise, in place.
/// </summary>
/// <param name="n">Matrix to add; must have the same dimensions as this one.</param>
/// <exception cref="System.ArgumentException">Thrown when the dimensions differ.</exception>
public void Add(Matrixs n)
{
    // Guard clause instead of nesting the whole loop in an if-block.
    if (!SubtractableDimensions(data, n.data))
    {
        // Fixed: the message previously said "substraction" even though
        // this method performs addition.
        throw new System.ArgumentException("Matrices dimensions do not match for addition");
    }

    for (var i = 0; i < Rows(); i++)
    {
        for (var j = 0; j < Cols(); j++)
        {
            data[i, j] += n.data[i, j];
        }
    }
}
/// <summary>
/// Adds <paramref name="b"/> into <paramref name="a"/> element-wise and
/// returns <paramref name="a"/>.
/// WARNING: unlike Subtract, this MUTATES its first argument in place rather
/// than allocating a new matrix; callers holding a reference to a will
/// observe the change. Preserved for backward compatibility.
/// </summary>
/// <exception cref="System.ArgumentException">Thrown when the dimensions differ.</exception>
public static Matrixs Add(Matrixs a, Matrixs b)
{
    if (!SubtractableDimensions(a, b))
    {
        // Fixed: the message previously said "substraction" even though
        // this method performs addition.
        throw new System.ArgumentException("Matrices dimensions do not match for addition");
    }

    for (var i = 0; i < a.Rows(); i++)
    {
        for (var j = 0; j < a.Cols(); j++)
        {
            a.Data()[i, j] += b.Data()[i, j];
        }
    }
    return a;
}
/// <summary>
/// Creates a fully-connected network with one hidden layer: randomized
/// weight matrices (hidden x input and output x hidden), randomized
/// per-layer bias column vectors, and a learning rate of 0.1.
/// Randomize() calls are kept in the original order so any seeded RNG
/// sequence is preserved.
/// </summary>
/// <param name="input_nodes">Number of input neurons.</param>
/// <param name="hidden_nodes">Number of hidden neurons.</param>
/// <param name="output_nodes">Number of output neurons.</param>
public NeuralNetwork(int input_nodes, int hidden_nodes, int output_nodes)
{
    this.input_nodes = input_nodes;
    this.hidden_nodes = hidden_nodes;
    this.output_nodes = output_nodes;

    // Weight matrices map each layer's activations to the next layer.
    weights_ih = new Matrixs(this.hidden_nodes, this.input_nodes);
    weights_ho = new Matrixs(this.output_nodes, this.hidden_nodes);
    weights_ih.Randomize();
    weights_ho.Randomize();

    // One bias column vector per non-input layer.
    bias_h = new Matrixs(this.hidden_nodes, 1);
    bias_o = new Matrixs(this.output_nodes, 1);
    bias_h.Randomize();
    bias_o.Randomize();

    learningRate = 0.1;
}
/// <summary>
/// Multiplies two matrices. When a.Cols() == b.Rows() the true matrix
/// product (m x q) is returned; otherwise, when the two shapes are
/// identical, the element-wise (Hadamard) product is returned.
/// For same-shaped square matrices both conditions hold and the first
/// branch wins, so a true matrix product is computed — this matches the
/// original branch order.
/// </summary>
/// <returns>A newly allocated result matrix; neither input is modified.</returns>
/// <exception cref="System.ArgumentException">Thrown when neither shape rule applies.</exception>
public static Matrixs Multiply(Matrixs a, Matrixs b)
{
    int rowsA = a.Rows();
    int colsA = a.Cols();
    int rowsB = b.Rows();
    int colsB = b.Cols();

    if (colsA == rowsB)
    {
        // Standard product: c[i,j] = sum over k of a[i,k] * b[k,j].
        var product = new Matrixs(rowsA, colsB);
        for (var i = 0; i < rowsA; i++)
        {
            for (var j = 0; j < colsB; j++)
            {
                double sum = 0;
                for (var k = 0; k < colsA; k++)
                {
                    sum += a.data[i, k] * b.data[k, j];
                }
                product.data[i, j] = sum;
            }
        }
        return product;
    }

    if (rowsA == rowsB && colsA == colsB)
    {
        // Element-wise (Hadamard) product for same-shaped matrices.
        var product = new Matrixs(rowsA, colsB);
        for (var i = 0; i < rowsA; i++)
        {
            for (var j = 0; j < colsB; j++)
            {
                product.data[i, j] = a.data[i, j] * b.data[i, j];
            }
        }
        return product;
    }

    throw new System.ArgumentException("Matrices dimensions do not match for multiply", "matrix");
}
/// <summary>
/// Runs one forward pass: input -> hidden (sigmoid) -> output (sigmoid),
/// prints the first output value, and returns the output layer's raw data.
/// NOTE(review): the instance Add/Map calls appear to mutate the matrix in
/// place (their return values are ignored) — confirm against the Matrixs class.
/// </summary>
/// <param name="inputArray">Input activations, one per input node.</param>
/// <returns>The output matrix's backing 2-D array.</returns>
public double[,] FeedForward(double[] inputArray)
{
    var inputVector = Matrixs.FromArray(inputArray);

    // Hidden layer: weights_ih * inputs + bias_h, then the activation function.
    var hiddenLayer = Matrixs.Multiply(weights_ih, inputVector);
    hiddenLayer.Add(bias_h);
    hiddenLayer.Map(Sigmoid);

    // Output layer: weights_ho * hidden + bias_o, then the activation function.
    var output = Matrixs.Multiply(weights_ho, hiddenLayer);
    output.Add(bias_o);
    output.Map(Sigmoid);

    Console.WriteLine("Results: " + output.data[0, 0]);

    return output.data;
}
/// <summary>
/// Prints <paramref name="a"/> by delegating to DisplayMatrixLogic with the
/// matrix's backing 2-D array.
/// </summary>
public static void DisplayMatrix(Matrixs a) => DisplayMatrixLogic(a.data);
/// <summary>
/// Performs one stochastic-gradient-descent training step: a forward pass
/// through hidden and output layers, then backpropagation that adjusts
/// weights_ho / bias_o and weights_ih / bias_h in place.
/// </summary>
/// <param name="inputArray">Input activations, one per input node.</param>
/// <param name="targetsArray">Expected outputs, one per output node.</param>
/// <returns>The output matrix's backing 2-D array from the forward pass.</returns>
public double[,] Train(double[] inputArray, double[] targetsArray)
{
    // NOTE(review): FromArray is declared for int[] elsewhere in this file but
    // is called here with double[] — presumably a double[] overload exists; confirm.
    var inputs = Matrixs.FromArray(inputArray);
    var targets = Matrixs.FromArray(targetsArray);

    // Forward pass, hidden layer: weights_ih * inputs + bias_h, then sigmoid.
    // (Instance Add/Map return values are ignored, so they presumably mutate
    // in place — confirm against the Matrixs class.)
    var hidden = Matrixs.Multiply(weights_ih, inputs);
    hidden.Add(bias_h);
    hidden.Map(Sigmoid);

    // Forward pass, output layer: weights_ho * hidden + bias_o, then sigmoid.
    var outputs = Matrixs.Multiply(weights_ho, hidden);
    outputs.Add(bias_o);
    outputs.Map(Sigmoid);

    // Output error: targets - outputs.
    var outputErrors = Matrixs.Subtract(targets, outputs);

    // Output-layer gradient: dsigmoid(outputs), scaled element-wise by the
    // errors and then by the learning rate (instance Multiply overloads).
    var gradients = Matrixs.Map(outputs, DSigmoid);
    gradients.Multiply(outputErrors.data); // element-wise multiplication
    gradients.Multiply(learningRate);      // scalar scaling

    // Hidden->output weight deltas: gradients * hidden^T.
    var hidden_T = Matrixs.Transpose(hidden);
    var weights_ho_deltas = Matrixs.Multiply(gradients, hidden_T);

    // Apply the deltas to the output-layer weights and bias.
    weights_ho.Add(weights_ho_deltas);
    bias_o.Add(gradients);

    // Hidden-layer error: weights_ho^T * outputErrors.
    // NOTE(review): weights_ho was already updated above; textbook backprop
    // uses the pre-update weights here — confirm this ordering is intentional.
    var weights_ho_t = Matrixs.Transpose(weights_ho);
    var hidden_errors = Matrixs.Multiply(weights_ho_t, outputErrors);

    // Hidden-layer gradient: dsigmoid(hidden), scaled by hidden errors and
    // the learning rate.
    var hidden_gradient = Matrixs.Map(hidden, DSigmoid);
    hidden_gradient.Multiply(hidden_errors.data);
    hidden_gradient.Multiply(learningRate);

    // Input->hidden weight deltas: hidden_gradient * inputs^T.
    var inputs_T = Matrixs.Transpose(inputs);
    var weight_ih_deltas = Matrixs.Multiply(hidden_gradient, inputs_T);

    // Apply the deltas to the input-layer weights and bias.
    weights_ih.Add(weight_ih_deltas);
    bias_h.Add(hidden_gradient);

    return(outputs.data);
}