/// <summary>
/// Repeatedly retrains a single dense layer on freshly randomized inputs against a
/// fixed target vector, printing how many steps each retraining needed to converge.
/// </summary>
public void TrainingLayer()
{
    double[][] inputs = new double[][] { new double[] { 0, 1, 0, 1 } };
    double[][] outputs = new double[][] { new double[] { 0, 1, 0 } };

    // One dense layer sized to the sample: 4 inputs -> 3 outputs, ReLU activation,
    // distance (squared-error style) loss.
    var layer = new DenseLayer(inputs[0].Length, outputs[0].Length, new Relu(), new Distance());

    double[] sample = inputs[0];
    double[] target = outputs[0];

    for (int iter = 0; iter < 20; iter++)
    {
        // Randomize the input vector for this iteration (target stays fixed).
        for (int j = 0; j < sample.Length; j++)
        {
            sample[j] = rnd.NextDouble();
        }

        // Train until the error drops below the target or the step budget runs out.
        double errorTarget = 0.0001;
        double error = double.MaxValue;
        int step = 0;
        while (step < 1000 && error > errorTarget)
        {
            error = layer.Train(sample, target, 0.05);
            step++;
        }

        Console.WriteLine($"Iteration: {iter} Step: {step} Error: {error}");
    }
}
/// <summary>
/// Trains a 1x1 dense layer with an identity activation to fit the line y = ax + b
/// (a = 1, b = -2) from a few sample points, then reads back the learned
/// weight (slope) and bias (intercept).
/// </summary>
public void LinearRegressionTest()
{
    // Target line: y = ax + b
    double a = 1, b = -2;
    int count = 20;
    double[] input = new double[count];
    double[] expectedOutput = new double[count];

    // NOTE(review): index 0 is deliberately left unfilled; samples start at x = 1.
    for (int i = 1; i < count; i++)
    {
        input[i] = i;
        expectedOutput[i] = a * i + b;
    }

    // Single neuron, identity activation, distance loss — i.e. plain linear regression.
    var layer = new DenseLayer(1, 1, new IdentityActivation(), new Distance());
    layer.Initialize();
    // Deterministic starting point: bias 0, weight 2 (true values are -2 and 1).
    layer.Biases[0] = 0;
    layer.Weights[0, 0] = 2;

    int epoch = 0;
    double error = 100;
    while (++epoch < 10000 && error > 0.01)
    {
        // Train on samples 1..3 in order. `error` is overwritten each call, so the
        // loop's exit condition only inspects the error of the last sample (x = 3),
        // matching the original behavior.
        for (int i = 1; i <= 3; i++)
        {
            error = layer.Train(new double[] { input[i] }, new double[] { expectedOutput[i] }, 0.01);
        }
    }

    // Learned parameters; after convergence these should approach b = -2 and a = 1.
    double bias = layer.Biases[0];
    double coef = layer.Weights[0, 0];
}