public void DenseLayer_Backward()
{
    // Regression test: backward pass of a dense layer (no activation)
    // must reproduce the recorded golden gradient for a fixed seed.
    const int fanIn = 5;
    const int batchSize = 2;
    const int neuronCount = 3;

    // Arrange: deterministic seed so weights and inputs are reproducible.
    var random = new Random(232);
    var sut = new DenseLayer(neuronCount, Activation.Undefined);
    sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
    sut.Forward(input);

    // Act: propagate a random output gradient back through the layer.
    var delta = Matrix<float>.Build.Random(batchSize, neuronCount, random.Next());
    var actual = sut.Backward(delta);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    // Assert: gradient w.r.t. the input matches the recorded values.
    var expected = Matrix<float>.Build.Dense(batchSize, fanIn, new float[]
    {
        0.001748383f, -0.2615477f, -0.6422306f, -0.01443626f, 0.4605991f,
        -0.7384186f, -0.6931117f, 0.1083627f, -0.6230267f, -1.20742f,
    });
    MatrixAsserts.AreEqual(expected, actual);
}
public void DenseLayer_CopyLayerForPredictionModel()
{
    // Arrange: a deterministically initialized layer to copy from.
    const int batchSize = 1;
    const int neuronCount = 5;
    var random = new Random(232);
    var sut = new DenseLayer(neuronCount, Activation.Undefined);
    sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

    // Act: copy the layer into a fresh prediction-model layer list.
    var layers = new List<ILayer>();
    sut.CopyLayerForPredictionModel(layers);

    // Assert: exactly one layer was produced and it preserves geometry,
    // learned parameters, and the shape of the output activations.
    var copy = (DenseLayer)layers.Single();
    Assert.AreEqual(sut.Width, copy.Width);
    Assert.AreEqual(sut.Height, copy.Height);
    Assert.AreEqual(sut.Depth, copy.Depth);

    MatrixAsserts.AreEqual(sut.Weights, copy.Weights);
    MatrixAsserts.AreEqual(sut.Bias, copy.Bias);

    Assert.AreEqual(sut.OutputActivations.RowCount, copy.OutputActivations.RowCount);
    Assert.AreEqual(sut.OutputActivations.ColumnCount, copy.OutputActivations.ColumnCount);
}
public void DenseLayer_MultipleBackwardsPasses()
{
    // Backward must be repeatable: for a fixed forward pass and a fixed
    // delta, every call returns the same gradient (no hidden state drift).
    const int fanIn = 5;
    const int batchSize = 2;
    const int neuronCount = 3;

    var random = new Random(232);
    var sut = new DenseLayer(neuronCount, Activation.Undefined);
    sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
    sut.Forward(input);

    // Capture the gradient from the first backward pass as the baseline.
    var delta = Matrix<float>.Build.Dense(batchSize, neuronCount, 1.0f);
    var expected = Matrix<float>.Build.Dense(batchSize, fanIn);
    sut.Backward(delta).CopyTo(expected);

    // Every subsequent pass must reproduce the baseline exactly.
    for (int pass = 0; pass < 20; pass++)
    {
        var actual = sut.Backward(delta);
        Assert.AreEqual(expected, actual);
    }
}
public void EvaluateTest()
{
    // Smoke test: evaluating a 6-input / 3-output dense layer on random
    // input must complete without throwing.
    // NOTE(review): no assertion on the result — presumably intentional as a
    // smoke test, but consider asserting on Evaluate's output; verify its
    // return type against the DenseLayer API.
    var sut = new DenseLayer(6, 3, new IdentityActivation(), new Distance());
    sut.Initialize();

    var features = NNArray.Random(6);
    sut.Evaluate(features);
}
public void DenseLayer_Forward()
{
    // Regression test: forward pass of a dense layer (no activation)
    // must reproduce the recorded golden output for a fixed seed.
    const int fanIn = 5;
    const int batchSize = 2;
    const int neuronCount = 3;

    // Arrange: deterministic seed so weights and inputs are reproducible.
    var random = new Random(232);
    var sut = new DenseLayer(neuronCount, Activation.Undefined);
    sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

    // Act.
    var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
    var actual = sut.Forward(input);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    // Assert: activations match the recorded values.
    var expected = Matrix<float>.Build.Dense(batchSize, neuronCount, new float[]
    {
        0.9898463f, 0.4394523f, 0.4259368f,
        -1.051275f, -0.5012454f, 0.08094172f,
    });
    MatrixAsserts.AreEqual(expected, actual);
}
public void LinearRegressionTest()
{
    // Train a single-neuron dense layer to fit y = a*x + b and verify the
    // learned weight/bias approximate the true slope and intercept.
    //
    // Fixes over the original:
    //  - removed the dead, empty nested for-loops at the end (they did nothing);
    //  - the original computed `bias` and `coef` but never asserted on them,
    //    so the test could never fail — assertions added below.
    const double a = 1, b = -2;
    const int count = 20;

    var input = new double[count];
    var expectedOutput = new double[count];
    for (int i = 1; i < count; i++)
    {
        input[i] = i;
        expectedOutput[i] = a * i + b;
    }

    var layer = new DenseLayer(1, 1, new IdentityActivation(), new Distance());
    layer.Initialize();

    // Deterministic starting point so the test does not depend on random init.
    layer.Biases[0] = 0;
    layer.Weights[0, 0] = 2;

    // Train on three sample points until the reported error is small enough,
    // capped at 10000 epochs. Note the loop condition only sees the error of
    // the LAST Train call in each epoch.
    int epoch = 0;
    double error = 100;
    while (++epoch < 10000 && error > 0.01)
    {
        error = layer.Train(new double[] { input[1] }, new double[] { expectedOutput[1] }, 0.01);
        error = layer.Train(new double[] { input[2] }, new double[] { expectedOutput[2] }, 0.01);
        error = layer.Train(new double[] { input[3] }, new double[] { expectedOutput[3] }, 0.01);
    }

    double bias = layer.Biases[0];
    double coef = layer.Weights[0, 0];
    Assert.AreEqual(a, coef, 0.1, "Learned weight should approximate slope a.");
    Assert.AreEqual(b, bias, 0.1, "Learned bias should approximate intercept b.");
}