public void DenseLayer_Backward()
{
    const int fanIn = 5;
    const int batchSize = 2;
    const int neuronCount = 3;
    var random = new Random(232);

    var sut = new DenseLayer(neuronCount, Activation.Undefined);
    sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
    sut.Forward(input);

    var delta = Matrix<float>.Build.Random(batchSize, neuronCount, random.Next());
    var actual = sut.Backward(delta);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    var expected = Matrix<float>.Build.Dense(batchSize, fanIn, new float[] { 0.001748383f, -0.2615477f, -0.6422306f, -0.01443626f, 0.4605991f, -0.7384186f, -0.6931117f, 0.1083627f, -0.6230267f, -1.20742f });

    MatrixAsserts.AreEqual(expected, actual);
}
public void MaxPool2DLayer_Forward_2()
{
    const int inputWidth = 4;
    const int inputHeight = 4;
    const int inputDepth = 1;
    const int fanIn = inputWidth * inputHeight * inputDepth;
    const int fanOut = 2 * 2 * 1;
    const int batchSize = 1;

    // Create layer
    const int width = 2;
    const int height = 2;
    var sut = new MaxPool2DLayer(width, height);
    sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, Initialization.GlorotUniform, new Random(232));

    var inputData = new float[] { 3, 0, 0, 6, 0, 2, 3, 0, 0, 8, 10, 0, 4, 6, 0, 7 };
    var input = Matrix<float>.Build.Dense(batchSize, fanIn, inputData);
    Trace.WriteLine(input.ToString());

    var actual = sut.Forward(input);

    var expected = Matrix<float>.Build.Dense(batchSize, fanOut, new float[] { 3, 6, 8, 10 });
    MatrixAsserts.AreEqual(expected, actual);
}
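// The expected values above can be checked by hand. Interpreted row-major (which is consistent with the
// expected output), the single-channel input forms the 4x4 grid
//   3  0  0  6
//   0  2  3  0
//   0  8 10  0
//   4  6  0  7
// and the non-overlapping 2x2 pooling windows reduce to max{3,0,0,2}=3, max{0,6,3,0}=6, max{0,8,4,6}=8
// and max{10,0,0,7}=10, i.e. { 3, 6, 8, 10 }.
// A minimal reference sketch of that reduction, useful only for hand-checking such cases; NaiveMaxPool2x2 is a
// hypothetical helper, not part of the library API, and assumes a single-channel, row-major image pooled with a
// stride-2, 2x2 window:
private static float[] NaiveMaxPool2x2(float[] image, int width, int height)
{
    var output = new float[(width / 2) * (height / 2)];
    for (int row = 0; row < height; row += 2)
    {
        for (int col = 0; col < width; col += 2)
        {
            // Take the maximum over the 2x2 window anchored at (row, col).
            var max = float.MinValue;
            for (int r = row; r < row + 2; r++)
            {
                for (int c = col; c < col + 2; c++)
                {
                    max = Math.Max(max, image[r * width + c]);
                }
            }
            output[(row / 2) * (width / 2) + (col / 2)] = max;
        }
    }
    return output;
}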
public void DenseLayer_CopyLayerForPredictionModel()
{
    var batchSize = 1;
    var random = new Random(232);
    var neuronCount = 5;

    var sut = new DenseLayer(neuronCount, Activation.Undefined);
    sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

    var layers = new List<ILayer>();
    sut.CopyLayerForPredictionModel(layers);
    var actual = (DenseLayer)layers.Single();

    Assert.AreEqual(sut.Width, actual.Width);
    Assert.AreEqual(sut.Height, actual.Height);
    Assert.AreEqual(sut.Depth, actual.Depth);

    MatrixAsserts.AreEqual(sut.Weights, actual.Weights);
    MatrixAsserts.AreEqual(sut.Bias, actual.Bias);

    Assert.AreEqual(sut.OutputActivations.RowCount, actual.OutputActivations.RowCount);
    Assert.AreEqual(sut.OutputActivations.ColumnCount, actual.OutputActivations.ColumnCount);
}
public void MaxPool2DLayer_Backward() { const int inputWidth = 20; const int inputHeight = 20; const int inputDepth = 2; const int fanIn = inputWidth * inputHeight * inputDepth; const int fanOut = 10 * 10 * 2; const int batchSize = 1; var random = new Random(32); // Create layer const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, Initialization.GlorotUniform, random); var input = Matrix <float> .Build.Random(batchSize, fanIn, random.Next()); sut.Forward(input); var delta = Matrix <float> .Build.Random(batchSize, fanOut, random.Next()); var actual = sut.Backward(delta); Trace.WriteLine(actual.ToString()); Trace.WriteLine(string.Join(",", actual.ToColumnMajorArray())); var expected = Matrix <float> .Build.Dense(batchSize, fanIn, new float[] { 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0.8085009f, 0f, 0f, 0f, -0.01437248f, 0f, 0f, 1.041877f, 0f, 0f, 0f, 0f, 0f, 0.4979768f, 0f, -0.5938089f, 0f, -0.9181094f, 0f, -1.900491f, 0f, 0f, 0f, -0.9150107f, 0f, 0f, 0f, 0f, 0f, -0.4453017f, -0.4299661f, 0f, 0f, 0f, 0f, 0.9363991f, 0f, 0f, -0.4949541f, 0f, 0f, 0f, 0.1292399f, 0f, 0f, 0f, -0.9616904f, 0f, 0f, 0f, 0f, 0f, -1.287248f, 0f, 0f, 0f, 0.2155272f, 0f, 0f, 0f, -1.007965f, 0f, 0f, 0f, 0f, 1.076965f, 0f, 0f, -1.401237f, 0f, -1.244568f, 0f, 0f, 0f, 0f, -0.478288f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -0.6055867f, -0.1095726f, 0f, 0.3003371f, 0f, 0f, 0f, 0f, 0f, -0.3044865f, 0f, 0f, 0f, 0f, 0.8818393f, 0f, -0.4136987f, 0.4168211f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -0.1715285f, 0f, 0.3923124f, 0.2809646f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -0.5905057f, 0f, -0.09473711f, 0f, 0.3884106f, 0f, 0f, -0.4212746f, 0f, 0f, 0f, 0f, -0.9300717f, 0f, -1.464727f, -0.1085227f, 0f, 0f, 1.515902f, 0f, 0f, 0f, 0f, 0f, 0f, 1.3771f, 0f, 0f, 0f, 0f, 0f, 0f, 0.1722498f, 0f, 0f, 0.7326968f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -0.256293f, 0f, 0.005195214f, 0f, 0f, -1.809731f, 0f, 0f, 0f, 0.5915539f, 0f, 0f, -0.4030921f, 0f, -0.8363255f, -0.2891991f, 0f, -0.9076035f, 0f, 0f, 0f, 0f, 1.067826f, 1.14113f, 0f, 0f, -1.372615f, 0f, 0f, 0.02314425f, 0f, 0f, 0f, 0f, -0.5714836f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -1.125379f, 0f, 0f, 0f, 0f, 1.118231f, 0f, 0f, 0.2472252f, 0f, -0.7428527f, 0f, -1.040836f, 0f, 0f, 0f, 0.06274998f, 0f, 0.6431293f, 0f, -0.3932301f, 0f, 0f, -1.111612f, 0f, 0.7901574f, 0f, 1.980336f, 0f, 0f, 0f, 0f, 0.5354787f, 0f, 0f, 0.7546993f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -2.181903f, 0f, 0.08855283f, 0f, 0f, 0f, 0f, 0f, 1.346654f, 0f, 0f, 0.4436988f, 0f, -2.124305f, 0f, 0f, 0f, 0f, 1.103836f, 0f, -0.2725285f, 0.1360921f, 0f, 1.000088f, 0f, 0.932502f, 0f, 0f, 0f, 0.1138889f, 0f, 0f, 0f, 0f, 0f, -0.1304505f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -0.4008962f, 0f, 0f, 0f, -1.161332f, 0f, 0f, 0.3786051f, 0.2474999f, 0f, 0f, 0f, 0f, 0.885915f, 0f, 0f, -0.2077033f, 0f, 0f, 0f, 0f, 0f, -1.774759f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -0.3795716f, 0f, 0f, -1.311509f, 0f, 0f, 0f, 0f, -1.585828f, 0.5753101f, 0f, 0f, 0f, 0f, 0f, -0.978768f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -0.1397521f, 0f, 0f, 0f, 2.956711f, 0f, 0f, 0f, 0f, -1.591264f, 0f, 0.5886439f, 0f, 0f, -1.348895f, 0f, 0f, 2.115214f, 0f, -0.2732723f, 0f, 0f, 0f, -0.3678305f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -1.048669f, 1.061424f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -1.046116f, 0f, 0f, -0.4922836f, 0f, -1.362494f, 0f, 1.456946f, -0.2943774f, 0f, 0f, 0f, 0f, 0f, -0.6920757f, 0f, 0f, -0.8034356f, 0.8028942f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0.7852993f, 0f, 0f, 
0f, 0.4411084f, 0f, 0.438746f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0.03683103f, 0f, 0f, 0.671003f, 0f, 0f, 0f, 0.6490055f, 0f, 0f, 0f, 0f, -0.4582749f, 0f, 0f, 0.1131398f, -1.270652f, 0f, -2.803502f, 0f, 0f, 0f, 0f, 0.4446304f, 0f, 0.3837125f, -0.6822397f, 0f, 0f, 0f, 0.090445f, 0f, -2.116256f, 0f, 0f, -1.008349f, 0f, 0f, 0f, 0f, -1.282366f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -0.3974761f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -0.6379933f, 0f, 0.3958001f, 1.004088f, 0f, 0f, 0f, 0f, 0f, 0f, -3.557195f, 0f, 0f, 0f, 0f, 0f, 0f, -1.416259f, 0f, 0.8337035f, 0f, 0f, 0f, 0f, 0f, 1.234198f, 0f, 0f, 1.57467f, 0f, 0f, -1.000447f, 0f, -0.2661186f, 0f, 1.048688f, 0f, 0f, 0f, 0f, 0f, 0f, 1.26955f, 0f, 0f, 0f, 0f, -1.462413f, 0f, 0f, 0.9360681f, 0.6391365f, 0f, 0f, 0f, 0f, 0f, 0f, -0.03548741f, 0f, 0.1278973f, 0f, -0.4136214f, 0f, 0.9968569f, -0.07145377f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 1.252816f, 0f, -0.9959089f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -2.005155f, 0f, 0f, -0.8804028f, 0f, 0f, 0f, 0f, 0f, 1.159981f, 0f, 0f, 0f, 0.8770596f, 0f, 0f, 0.3886716f, 0.5398855f, 0f, 0f, 0f, 1.165788f, 0f, 0f, 0f, 0f, -0.4803754f, -0.02129833f, 0f, 0f, 0f, 1.804181f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0.3445181f, 0f, 0.702647f, 0f, 0.9873983f, 0f, 2.234645f, 0f, 0f, -0.9068208f, 0f, 0f, 0f, 0f, -0.5695083f, 0f, 0f, -0.1133842f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 2.310154f, 0f, 0f, -0.01837711f, 0f, 0f, 0f, 0f, -1.367691f, 0f, 0f, 0f, 2.204792f, 0f, 0f, -0.168677f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 1.706425f, -0.627474f, 0f, 0f, 0.01406403f, 0f, 0f, -0.9384971f, 0f, 0f, 0f, 0f, -0.7298944f, 0f, -0.03289218f, 0f, -0.7163599f, 0.9871746f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0.9926041f, 0f, 0f, 0f, 0f, 1.05477f, 0f, -0.1432948f, 0f, 0f, 0f, 0f, -0.8373516f, 0f, 0f, 0f, -0.02648427f, 0f, 0f, 0f, 0f, 1.125633f, 0f, 0f, -0.1470989f, 0f, 0f, 0f, 0f, 0.7238355f, 0f, 0f, 0f, -1.240024f, 0f, 0f, 0f, 0f, 1.452529f, -0.2726488f, 0f, 0f, -0.5126494f, 0f, -0.6268897f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -0.8481783f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, 0f, -1.281046f, 0f, 0f, -1.109109f, 0.5653794f, 0f, 0f, -0.7675006f, 0f, -0.6390485f, 0f, 0f, -1.11143f, 0f }); MatrixAsserts.AreEqual(expected, actual); }
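// Note on the expected matrix above: the backward pass of max pooling routes each incoming gradient value to
// the position that produced the maximum in its pooling window during Forward, and every other input position
// receives zero. That is why the expected 20x20x2 delta consists mostly of 0f entries, with one non-zero value
// per 2x2 pooling window taken from the incoming 10x10x2 delta.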
public void SquaredErrorRegressionLayer_Backward()
{
    var batchSize = 1;
    var width = 28;
    var height = 28;
    var depth = 3;
    var numberOfClasses = 10;
    var random = new Random(232);

    var sut = new SquaredErrorRegressionLayer(numberOfClasses);
    sut.Initialize(width, height, depth, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, numberOfClasses, random.Next());
    sut.Forward(input);

    var delta = Matrix<float>.Build.Random(batchSize, numberOfClasses, random.Next());
    var actual = sut.Backward(delta);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    var expected = Matrix<float>.Build.Dense(batchSize, numberOfClasses, new float[] { -0.3353941f, 0.0191406f, -0.9314069f, 1.202553f, 1.69809f, -1.126425f, 1.06249f, 0.06901796f, -1.057676f, -0.5987452f });

    MatrixAsserts.AreEqual(expected, actual);
}
public void MaxPool2DLayer_Forward() { const int inputWidth = 20; const int inputHeight = 20; const int inputDepth = 2; const int fanIn = inputWidth * inputHeight * inputDepth; const int fanOut = 10 * 10 * 2; const int batchSize = 1; var random = new Random(32); // Create layer const int width = 2; const int height = 2; var sut = new MaxPool2DLayer(width, height); sut.Initialize(inputWidth, inputHeight, inputDepth, batchSize, Initialization.GlorotUniform, random); var input = Matrix <float> .Build.Random(batchSize, fanIn, random.Next()); var actual = sut.Forward(input); Trace.WriteLine(actual.ToString()); Trace.WriteLine(string.Join(",", actual.ToColumnMajorArray())); var expected = Matrix <float> .Build.Dense(batchSize, fanOut, new float[] { 1.737387f, 0.8429711f, 1.403809f, 0.7437097f, 2.154061f, 1.737234f, 0.9562361f, 0.1203717f, 1.253413f, 0.7093143f, 1.521145f, 1.494283f, 1.988454f, 2.392222f, 1.740699f, 0.7398372f, 0.9477737f, 1.73438f, 1.367192f, 0.7603366f, 1.583967f, 0.3866753f, 1.930486f, 1.501988f, -0.2519213f, 0.9616809f, 0.8507621f, 0.5222243f, 0.528969f, 1.083474f, 0.5110471f, 1.111015f, 0.4116569f, 1.012139f, 1.541237f, 1.286736f, 0.8889436f, 0.6083445f, 1.407371f, 1.033507f, 0.2372739f, 1.175704f, 0.3457479f, 0.3563888f, 0.4308063f, 2.15408f, 1.019059f, 1.69062f, 0.5580661f, 0.9991792f, 0.8225476f, 0.1575469f, 1.119048f, -0.03910597f, 1.736111f, 0.7009985f, 0.1849347f, 1.268318f, 1.533113f, 0.891203f, 0.7703907f, 0.7964001f, 2.104593f, 3.125018f, 0.4306072f, 1.297616f, 0.8612604f, 1.569523f, 1.496838f, 0.7015814f, 0.7657425f, 0.8277726f, 0.3020416f, 1.502974f, 0.9276986f, 0.9929756f, 0.9644111f, 0.7933079f, 0.9039445f, 0.4037939f, 0.6111527f, -0.02752473f, 0.8821087f, 1.149586f, 0.2484843f, 0.8898949f, 1.909704f, 1.046652f, 1.395888f, 1.341396f, 3.130147f, 1.424874f, -0.1669227f, 1.688097f, 1.319619f, 0.08981451f, 1.955076f, 1.188523f, 0.9187648f, 1.701037f, 1.126729f, 0.6088547f, 1.249962f, 1.904854f, 1.216359f, 0.8841643f, 0.9773607f, 0.5250032f, 2.041504f, 1.75729f, 0.2925639f, 1.233287f, 0.6095849f, 0.9424562f, 1.445586f, 0.5931875f, 1.458192f, 0.4289872f, 0.5092088f, 1.496163f, 1.205378f, 1.003089f, -0.5055257f, 0.9426916f, 1.97264f, 1.179588f, 1.628175f, 2.082574f, 0.478283f, 0.6607473f, 1.860639f, 1.452383f, 2.17662f, 1.086674f, 2.466586f, 0.1421053f, 1.238979f, 0.8957624f, 0.6944376f, 1.249352f, 0.7237418f, 3.043795f, 1.631333f, 0.7378432f, 0.6678889f, 1.090085f, 1.857423f, 1.000153f, 1.650252f, 1.500757f, 2.024655f, 0.9628305f, 0.8909977f, -0.7175303f, 2.396366f, 1.028608f, 0.7338257f, 0.9764791f, 0.5674692f, 1.814738f, 0.7745261f, 0.5802411f, 0.142071f, 0.9685609f, 0.05501625f, 1.262817f, 0.9647988f, 1.111344f, -0.2743198f, 1.031065f, 0.8540451f, 0.633197f, 0.8172408f, -0.6463516f, 0.6572174f, 0.5348259f, 0.4829673f, 0.7212811f, 0.9138665f, 1.560033f, 1.193395f, 0.6193073f, 0.4542928f, 2.111476f, 0.7224295f, 0.2179742f, 0.3198487f, 1.163711f, 1.428939f, 1.220046f, 0.1001558f, 0.7708471f, 1.356724f, 0.3361169f, -0.3378747f, 1.28403f, 0.6157113f, 1.262698f, 1.797522f, 1.135491f }); MatrixAsserts.AreEqual(expected, actual); }
public void BatchNormalizationLayer_CopyLayerForPredictionModel()
{
    var batchSize = 1;

    var sut = new BatchNormalizationLayer();
    sut.Initialize(3, 3, 1, batchSize, Initialization.GlorotUniform, new Random(232));

    var layers = new List<ILayer>();
    sut.CopyLayerForPredictionModel(layers);
    var actual = (BatchNormalizationLayer)layers.Single();

    Assert.AreEqual(sut.Width, actual.Width);
    Assert.AreEqual(sut.Height, actual.Height);
    Assert.AreEqual(sut.Depth, actual.Depth);

    MatrixAsserts.AreEqual(sut.Scale, actual.Scale);
    MatrixAsserts.AreEqual(sut.Bias, actual.Bias);
    MatrixAsserts.AreEqual(sut.MovingAverageMeans, actual.MovingAverageMeans);
    MatrixAsserts.AreEqual(sut.MovingAverageVariance, actual.MovingAverageVariance);

    Assert.IsNull(actual.BatchColumnMeans);
    Assert.IsNull(actual.BatchcolumnVars);

    Assert.AreEqual(sut.OutputActivations.RowCount, actual.OutputActivations.RowCount);
    Assert.AreEqual(sut.OutputActivations.ColumnCount, actual.OutputActivations.ColumnCount);
}
public void BatchNormalizationLayer_Backward()
{
    const int fanIn = 4;
    const int batchSize = 2;
    var random = new Random(232);

    var sut = new BatchNormalizationLayer();
    sut.Initialize(1, 1, fanIn, batchSize, Initialization.GlorotUniform, random);

    var data = new float[] { 0, 1, -1, 1, 0.5f, 1.5f, -10, 10 };
    var input = Matrix<float>.Build.Dense(batchSize, fanIn, data);
    Trace.WriteLine(input.ToString());

    sut.Forward(input);

    var delta = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
    var actual = sut.Backward(delta);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    var expected = Matrix<float>.Build.Dense(batchSize, fanIn, new float[] { -2.600517E-06f, 2.615418E-06f, -1.349278E-06f, 1.349278E-06f, 1.158319E-06f, -1.150868E-06f, -5.639333E-10f, -9.261829E-10f });

    MatrixAsserts.AreEqual(expected, actual);
}
public void InputLayer_Backward()
{
    var batchSize = 1;
    var width = 28;
    var height = 28;
    var depth = 3;
    var random = new Random(232);
    var fanIn = width * height * depth;

    var sut = new InputLayer(height, width, depth);
    sut.Initialize(1, 1, 1, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
    sut.Forward(input);

    var delta = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
    var actual = sut.Backward(delta);

    var expected = delta;
    MatrixAsserts.AreEqual(expected, actual);
}
public void SoftMaxLayer_Backward()
{
    var batchSize = 1;
    var width = 28;
    var height = 28;
    var depth = 3;
    var numberOfClasses = 10;
    var random = new Random(232);

    var sut = new SoftMaxLayer(numberOfClasses);
    sut.Initialize(width, height, depth, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, numberOfClasses, random.Next());
    sut.Forward(input);

    var delta = Matrix<float>.Build.Random(batchSize, numberOfClasses, random.Next());
    var actual = sut.Backward(delta);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    var expected = Matrix<float>.Build.Dense(batchSize, numberOfClasses, new float[] { -0.3891016f, -0.6150756f, 0.0618184f, -0.2334358f, 1.544145f, -1.01483f, 0.6160479f, 0.3225261f, -1.007966f, -0.1111263f });

    MatrixAsserts.AreEqual(expected, actual);
}
public void SvmLayer_Backward()
{
    var batchSize = 1;
    var width = 28;
    var height = 28;
    var depth = 3;
    var numberOfClasses = 10;
    var random = new Random(232);

    var sut = new SvmLayer(numberOfClasses);
    sut.Initialize(width, height, depth, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, numberOfClasses, random.Next());
    sut.Forward(input);

    var delta = Matrix<float>.Build.Random(batchSize, numberOfClasses, random.Next());
    var actual = sut.Backward(delta);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    var expected = Matrix<float>.Build.Dense(batchSize, numberOfClasses, new float[] { 1f, 1f, 1f, 1f, 1f, -9f, 1f, 1f, 1f, 1f });

    MatrixAsserts.AreEqual(expected, actual);
}
public void BatchNormalizationLayer_Forward_SpatialInput()
{
    var batchSize = 2;

    var filterHeight = 2;
    var filterWidth = 2;
    var filterDepth = 2;

    var stride = 1;
    var padding = 0;

    var inputWidth = 3;
    var inputHeight = 3;
    var inputDepth = 3;

    var filterGridWidth = ConvUtils.GetFilterGridLength(inputWidth, filterWidth, stride, padding, BorderMode.Valid);
    var filterGridHeight = ConvUtils.GetFilterGridLength(inputHeight, filterHeight, stride, padding, BorderMode.Valid);
    var k = filterDepth;

    var input = new float[] { 111, 121, 112, 122, 113, 123, 114, 124, 211, 221, 212, 222, 213, 223, 214, 224 };
    var convInput = Matrix<float>.Build.Dense(2, 8, input);
    var rowWiseInput = Matrix<float>.Build.Dense(batchSize, k * filterGridWidth * filterGridHeight);
    Trace.WriteLine(convInput);

    ConvUtils.ReshapeConvolutionsToRowMajor(convInput, inputDepth, inputHeight, inputWidth, filterHeight, filterWidth, padding, padding, stride, stride, BorderMode.Valid, rowWiseInput);
    Trace.WriteLine(rowWiseInput);

    var sut = new BatchNormalizationLayer();
    sut.Initialize(filterGridWidth, filterGridHeight, filterDepth, batchSize, Initialization.GlorotUniform, new Random(232));

    var actual = sut.Forward(rowWiseInput);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));
    Trace.WriteLine(actual);

    var expected = Matrix<float>.Build.Dense(batchSize, k * filterGridWidth * filterGridHeight, new float[] { -1.0297426f, 0.9697576f, -1.00974762f, 0.9897526f, -0.9897526f, 1.00974762f, -0.9697576f, 1.0297426f, -1.0297426f, 0.9697576f, -1.00974762f, 0.9897526f, -0.9897526f, 1.00974762f, -0.9697576f, 1.0297426f });

    MatrixAsserts.AreEqual(expected, actual);
}
public void DropoutLayer_Forward()
{
    const int fanIn = 5;
    var batchSize = 1;
    var random = new Random(232);

    var sut = new DropoutLayer(0.5);
    sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
    var actual = sut.Forward(input);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    var expected = Matrix<float>.Build.Dense(batchSize, fanIn, new float[] { 0.9177308f, 1.495695f, -0.07688076f, 0f, -2.932818f });

    MatrixAsserts.AreEqual(expected, actual);
}
public void Conv2DLayer_Forward() { var fanIn = 3 * 3 * 3; var batchSize = 10; var random = new Random(232); var sut = new Conv2DLayer(2, 2, 20); sut.Initialize(3, 3, 3, batchSize, Initialization.GlorotUniform, random); var input = Matrix <float> .Build.Random(batchSize, fanIn, random.Next()); var actual = sut.Forward(input); Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray())); var expected = Matrix <float> .Build.Dense(batchSize, 80, new float[] { 0.2733753f, 0.3008634f, -0.1208906f, -0.1560806f, 0.4720983f, 0.1964838f, -1.681711f, 0.6287307f, -0.4829925f, -0.4534813f, -1.078468f, -0.1144511f, 0.4659638f, -0.120925f, 0.7934993f, -0.5943723f, -0.7378365f, 0.7652064f, -0.3221518f, -0.2317966f, 0.4495413f, -0.6096761f, 0.6835338f, -0.02226869f, 0.1754346f, -0.04827331f, -0.3343604f, -0.2334335f, 1.108038f, 0.4382075f, 0.2382888f, -0.9064946f, 1.027427f, 0.2528045f, -0.3248654f, -0.2197235f, -0.5500216f, -0.008644295f, 0.2127879f, -0.6656225f, 0.6765251f, 0.007438065f, -0.4837107f, 0.2957363f, -0.1912733f, -0.3805051f, -0.5109532f, -0.7348921f, 0.4601201f, -0.624974f, 0.3109825f, 0.1147998f, -0.1680198f, -0.008044515f, -0.164971f, -0.6899808f, -0.6280832f, 0.408918f, 0.3556627f, 0.7962465f, 0.2898788f, 0.7069184f, -0.6540206f, -0.01752163f, 0.3804853f, -0.04523373f, -1.262877f, 0.1155187f, -0.7645626f, 0.03567174f, -0.124552f, -0.6192967f, -0.4397115f, -0.4685149f, 0.6875528f, 0.3149778f, -0.3675161f, -0.1243756f, -0.6544789f, 1.413235f, 0.678455f, -0.3529964f, -0.4362595f, 0.1791825f, 0.7699821f, -0.5731639f, 0.6010831f, -0.6816397f, 0.8686972f, -1.059315f, -0.2285467f, 0.6755584f, 0.02892968f, 0.319068f, -0.2741399f, -0.1204105f, 0.4589621f, 0.1053525f, 0.736594f, 1.035853f, -0.1306116f, -0.4781589f, 0.2467091f, 0.2892699f, 0.2607583f, -0.1237879f, -0.2832279f, -0.09045184f, -1.161442f, -0.5337377f, -0.3150382f, 0.5083862f, -0.5373044f, 0.7076963f, 0.1481661f, 0.06053496f, -0.0820718f, -0.4352356f, -0.7798166f, 0.6119208f, -0.390282f, -0.7013406f, 0.6147138f, 0.5545185f, -0.4910378f, -0.154389f, -0.3489988f, -0.201867f, 0.772742f, 0.1752536f, 0.3944739f, -0.02384969f, 0.5776925f, 0.9032916f, 0.001158709f, -0.4522734f, -1.26658f, -0.5188892f, -0.0251877f, -0.9274058f, 0.489825f, 0.5262223f, -0.6365691f, 0.4380886f, 0.6616753f, 0.4103517f, 0.5301507f, -0.01217228f, 0.5111409f, 0.1953704f, 0.4073849f, -0.4284102f, -0.130823f, 0.2850339f, 0.7797916f, -0.1639231f, -0.571148f, -0.5122335f, -0.2674513f, 0.06782193f, -0.4610763f, -0.02144742f, 0.1393434f, 0.4351491f, 0.2663933f, 0.2588765f, -0.1313208f, 0.2938686f, 0.002653508f, -0.04700393f, 0.4338523f, -0.2977601f, 0.03504527f, -0.2516565f, -0.03552889f, -0.09390955f, 0.427268f, -0.5191386f, -0.0001365815f, -1.076891f, 0.4125989f, -0.01114923f, 0.5955145f, -0.4187932f, -0.1972622f, 0.5425565f, -0.7445877f, -0.1055266f, 0.7589374f, 0.2915639f, -0.178664f, 0.07911102f, 0.3971482f, 0.4474507f, -0.1177068f, -0.4356861f, -0.786858f, 0.8444468f, -0.05902442f, -0.5578558f, -0.5873198f, -0.8609674f, 0.9937873f, 0.5947806f, -0.1130174f, -0.9119666f, -0.7534832f, -0.2386101f, -0.968057f, -0.6817409f, -0.2436372f, 0.1135369f, 0.09189325f, 0.9541081f, 0.2145254f, -0.5165401f, -0.4706681f, -0.2046867f, -0.5416023f, -0.369747f, 0.2705314f, -0.4124502f, 0.238324f, 0.252971f, 0.5524589f, -0.06847146f, -0.03442992f, -0.5651279f, 0.5809578f, -0.4265114f, 0.2077411f, -0.6539571f, 0.5804203f, 0.1167613f, 0.2217207f, -0.08436611f, -1.275673f, -0.4572622f, -0.5256473f, 0.09231411f, 0.3386289f, 0.3888669f, -0.2211431f, 0.5697915f, 0.5708532f, 0.05645339f, 
0.8498666f, -0.4057152f, 0.6115354f, -0.2973527f, 0.3131525f, 0.5053126f, -0.6289777f, -0.3799287f, 0.3264677f, 0.1143538f, 0.5493373f, 0.1070386f, 0.4644426f, 0.3337833f, -0.3994666f, -0.1651549f, 0.02612653f, -0.01993187f, -0.2241323f, 0.2200804f, -0.8829409f, 0.2463858f, -0.5501977f, 0.08142836f, -0.5127226f, 0.4473693f, -0.2990816f, 0.3495808f, -0.1440298f, -0.0273583f, -0.1411249f, 0.1708718f, -0.2012203f, 0.07939593f, 0.7013875f, 0.7025376f, -0.5690992f, -0.6124382f, 0.2390755f, 0.4121238f, -0.3513868f, 0.1976611f, 0.1762613f, 0.1122302f, -1.010151f, -0.09799499f, 0.4842454f, -0.3821876f, 0.07314306f, 0.2689668f, -0.988689f, 0.6737171f, -0.456857f, 0.194019f, 0.2339447f, 0.8238593f, 0.09300315f, 1.044965f, 0.4861881f, -0.5582125f, 0.7182382f, -0.7722781f, 0.5042021f, 0.2387189f, 0.4986543f, -0.5273175f, -0.5619303f, 0.1610949f, -0.04875291f, 0.1139044f, 0.1551733f, -0.492222f, 0.8790537f, 0.02714369f, 0.4946728f, 0.1123615f, 0.01452664f, -0.4977535f, -0.4515235f, 0.5322075f, -0.2322227f, 0.1451528f, 0.2376375f, -0.02630551f, 0.2646737f, 0.3438676f, 0.3597788f, 0.2236509f, -0.1736124f, 0.1709878f, 0.04503556f, -0.09635679f, 0.6436066f, 0.2090198f, 0.02711023f, 0.2326303f, -0.1065404f, -0.9509786f, -0.2325765f, 0.1544221f, -0.2751956f, 0.7042146f, -0.4068132f, 0.2899525f, 0.0265791f, -0.1967387f, 0.388041f, -0.3834721f, -0.2047732f, 0.0569266f, 0.1351604f, -0.176996f, -0.3894806f, 0.07141749f, -0.3190223f, -0.05900418f, 0.1840743f, -0.3201828f, 0.1980505f, 0.1234963f, 0.5146663f, 0.3697202f, -0.8143802f, -0.0254974f, -0.07480367f, -0.1801966f, 0.2365112f, -0.05512327f, -0.5439774f, 0.4769226f, 0.7698731f, -0.6004848f, -0.085341f, -0.4696675f, 0.04082832f, 0.2239037f, 1.051346f, -0.1383498f, -0.08468492f, -0.1305461f, 0.2989672f, -0.5451998f, 0.3863785f, -0.3071401f, 0.2417691f, 0.4946384f, 0.02267917f, 0.3600539f, -0.3763207f, -0.242952f, -0.2714513f, 0.2369299f, 0.3603672f, -0.3762105f, 0.5859138f, 0.6706463f, -0.6052171f, -0.313265f, -0.2931445f, 0.6946701f, 0.2810324f, -0.102202f, 1.04278f, 0.4558828f, -0.06973435f, -0.03692506f, 0.2253729f, -0.4615842f, -0.2146481f, 0.5937063f, -0.6808646f, 0.2386301f, -0.2036744f, -0.2111247f, 0.2900581f, 1.195907f, -0.5436904f, 0.6489549f, 0.2145444f, -0.05049655f, 0.2477607f, -0.3573639f, 0.1641245f, 0.5994262f, 0.03177933f, -0.5710316f, -0.867107f, -0.07084197f, 0.03151764f, 0.1106393f, 0.164741f, 0.04772172f, 0.5116789f, 0.1214601f, -0.3613172f, -0.3909013f, -0.5997667f, -0.03554616f, 0.4363603f, -0.4605936f, 0.3015168f, 0.110538f, -0.3156358f, -0.1543308f, -0.3778076f, -0.4743398f, -0.1077887f, -0.3962058f, 0.184989f, 0.005257594f, 0.6292952f, 0.1839698f, -0.4257355f, 0.3768158f, -0.08535398f, -1.513304f, 0.2245298f, 0.5928617f, -0.1009406f, -0.1073892f, 0.7404778f, -0.6179999f, -0.01395382f, 0.003107572f, -0.4178863f, 0.0313232f, -0.4618185f, -0.2109826f, -0.4754956f, 0.0788032f, 0.9449018f, 0.8781525f, -0.04766324f, -0.2295351f, 0.9772053f, 1.271364f, -1.215225f, -0.8306733f, -0.04292276f, 1.424835f, 0.3584382f, 0.2593933f, 1.937356f, 0.7413654f, -0.1201806f, 0.075078f, 0.1119515f, -1.098532f, -0.2850582f, 1.082945f, 0.2333543f, 0.5195405f, 0.207578f, -0.08953384f, 0.1799434f, 0.7266961f, -0.4678856f, -0.1348108f, -0.5986385f, 0.181979f, -0.1095482f, 0.4006518f, -0.1867385f, 0.8727071f, -0.5241783f, -0.1144547f, -0.4108657f, 0.1747183f, -0.4905121f, -0.0001584921f, 0.7755764f, 0.5845628f, 0.5368918f, -0.6079502f, 0.09012465f, 0.3649598f, -0.03109173f, 0.3783087f, 0.4622228f, -0.07802889f, 0.6273559f, -0.1378677f, -0.09573829f, -0.2007989f, 
0.06580716f, 0.3839642f, -0.6950099f, -0.2748342f, 0.08863469f, -0.0743758f, 0.7926782f, 0.1025299f, 0.4518517f, 0.1853864f, -0.490073f, -0.01760045f, 0.5570593f, -0.4397906f, -0.3394293f, 0.06867854f, -1.089897f, 0.3865048f, -0.3960118f, -0.3399395f, -0.2350589f, 0.7610424f, 0.3074104f, 0.596993f, -0.04108877f, -0.1553082f, -0.2749327f, -0.006232774f, 0.138156f, -0.1149734f, 0.8955715f, 0.2906163f, 0.03776437f, -0.5945804f, -0.3893454f, 0.2783214f, -0.6949809f, 0.1917576f, -0.2701006f, -0.2823438f, -0.5596132f, 0.706767f, 0.3314847f, 0.5291311f, 0.2038559f, -0.4041356f, -0.5790283f, 0.5882117f, 0.8476645f, 0.7575201f, -0.294534f, 0.3177429f, 0.238133f, -0.9230122f, 0.06450139f, -0.2196629f, -0.5004506f, 0.9085382f, -0.4829172f, -0.008421199f, 0.5133038f, -0.2931312f, 0.8758441f, -0.1051285f, 0.01972609f, 0.1020652f, 0.1205391f, -1.134144f, -0.007559349f, 0.1871528f, 0.441355f, 0.2072804f, -0.4828084f, -0.08265133f, -0.02514699f, 0.5436181f, 1.79499f, -0.1148504f, 0.898729f, 0.2599192f, 0.2291017f, 0.5416129f, 0.02948276f, -0.1215168f, 0.06194645f, 0.7421392f, 0.362562f, -0.223209f, 0.6972225f, 0.2365351f, -0.574497f, -0.06849629f, -0.01214015f, 0.248292f, -0.07149886f, 0.209188f, 0.6402278f, 0.1205149f, -0.6282058f, 0.4738845f, -0.1879504f, 0.3639579f, -1.01549f, 0.1074693f, -0.5022891f, 0.02837497f, 1.051501f, 0.06117239f, 0.1959306f, -0.2242651f, -0.3499743f, 0.3864119f, 0.0682165f, 0.2080228f, 0.1447198f, 0.01294322f, -1.581684f, 0.3502727f, -0.5141318f, 0.1298154f, -0.2960167f, -0.7033171f, -0.1374653f, -0.5088665f, 0.3003783f, -0.5662355f, -0.5703801f, 0.4422415f, -0.8586085f, -0.725457f, 0.6167778f, 0.3036367f, 0.05629534f, 0.04369768f, -0.005468868f, 0.0003515407f, -0.7950032f, -0.2773561f, 1.185359f, 0.1597688f, 0.09181309f, -0.6036278f, 0.775687f, 0.02075633f, 0.2728359f, -0.1604523f, -0.9048207f, 0.437628f, 0.3938437f, -0.2422096f, -0.3879563f, -0.3941109f, 0.3870144f, -0.2119341f, 0.3225484f, -0.4521671f, -1.637517f, 0.1391674f, -0.07919937f, 0.1182778f, -0.2813607f, -0.7380996f, 0.5564383f, 0.2574452f, -0.1306375f, -0.5941698f, -0.9490689f, 0.06534845f, -0.7421415f, -0.04068993f, 0.637957f, 0.06663199f, -0.5876228f, 0.650187f, 0.3602454f, -0.3823963f, 1.010726f, -0.2592644f, 0.5051932f, -0.5329336f, 0.5161338f, 0.1395347f, 0.2141521f, -0.01009031f, 0.8129689f, 0.09339745f, -0.2384717f, -0.5503597f, -0.05905038f, 0.3159091f, 0.2486276f, 0.6999242f, 0.4618215f, 0.5996231f, 0.381897f, 0.06791286f, -0.2097166f, -0.5086439f, -0.282001f, -0.413008f, 0.5155523f, 0.2459697f, -0.6758924f, -0.4687051f, 0.2302322f, 0.2223771f, 0.0220779f, 0.0308342f, -0.364092f, 0.006730601f, 0.1143817f, 0.9411471f, -0.2274899f, 0.0431641f, -0.04262143f, -0.07550471f, -1.397982f, -0.2910011f, 0.1307101f, 0.01577637f, -0.2902971f, -0.2635747f, -0.05162496f, -0.2382931f, 0.01894168f, 0.1162009f, -1.529246f, 0.03433805f, -0.2557615f, 0.6106033f, 0.1005622f, 0.5197102f, -0.1460773f, -0.1066797f, -0.8218927f, -0.1438112f, 0.06848451f, -0.5823177f, 0.09372513f, 0.2519848f, 0.04468141f, 0.07306632f, -0.9734757f, -0.4108731f, -0.5826051f, 0.6833557f, 0.4655134f, 0.502941f, -0.7466941f, 0.1482414f, 0.01104161f, 0.8753684f, -0.7230965f, -0.1677399f, -0.3691514f, -0.2796988f, -1.07599f, -0.06921625f, -0.4079558f, -0.2510803f, -0.6646357f, -0.6539558f, 0.06996688f, -0.4298745f, 0.01871285f, 0.3098095f, -0.5346949f, 0.2757194f, 0.03924966f, 0.3699921f }); MatrixAsserts.AreEqual(expected, actual); }
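// Shape note for Conv2DLayer_Forward above: assuming the usual valid-convolution arithmetic used elsewhere in
// these tests (ConvUtils.GetFilterGridLength with BorderMode.Valid, stride 1, no padding), a 2x2 filter over a
// 3x3 input gives a filter grid of (3 - 2) / 1 + 1 = 2 in each dimension, so the 20 filters produce
// 2 * 2 * 20 = 80 output columns per sample, which is why the expected matrix is built as batchSize x 80.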
public void DenseLayer_Forward()
{
    const int fanIn = 5;
    const int batchSize = 2;
    const int neuronCount = 3;
    var random = new Random(232);

    var sut = new DenseLayer(neuronCount, Activation.Undefined);
    sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
    var actual = sut.Forward(input);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    var expected = Matrix<float>.Build.Dense(batchSize, neuronCount, new float[] { 0.9898463f, 0.4394523f, 0.4259368f, -1.051275f, -0.5012454f, 0.08094172f });

    MatrixAsserts.AreEqual(expected, actual);
}
public void SoftMaxLayer_Forward()
{
    var batchSize = 1;
    var width = 28;
    var height = 28;
    var depth = 3;
    var numberOfClasses = 10;
    var random = new Random(232);

    var sut = new SoftMaxLayer(numberOfClasses);
    sut.Initialize(width, height, depth, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, numberOfClasses, random.Next());
    var actual = sut.Forward(input);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    var expected = Matrix<float>.Build.Dense(batchSize, numberOfClasses, new float[] { 0.06976377f, 0.1327717f, 0.02337802f, 0.3784489f, 0.0777365f, 0.05847027f, 0.1072708f, 0.0503228f, 0.0624512f, 0.03938601f });

    MatrixAsserts.AreEqual(expected, actual);
}
public void BatchNormalizationLayer_Forward()
{
    const int fanIn = 4;
    const int batchSize = 2;

    var sut = new BatchNormalizationLayer();
    sut.Initialize(1, 1, fanIn, batchSize, Initialization.GlorotUniform, new Random(232));

    var data = new float[] { 0, 1, -1, 1, 0.5f, 1.5f, -10, 10 };
    var input = Matrix<float>.Build.Dense(batchSize, fanIn, data);
    Trace.WriteLine(input.ToString());

    var actual = sut.Forward(input);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));
    Trace.WriteLine(actual);

    var expected = Matrix<float>.Build.Dense(batchSize, fanIn, new float[] { -0.999998f, 0.999998f, -0.9999995f, 0.9999995f, -0.999998f, 0.999998f, -1, 1 });

    MatrixAsserts.AreEqual(expected, actual);
}
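// Sanity check on the expected values above: Matrix<float>.Build.Dense fills column-major, so the four feature
// columns are {0, 1}, {-1, 1}, {0.5, 1.5} and {-10, 10}. With only two samples per column, batch normalization
// maps a pair (x1, x2) to roughly +/-1: the mean is (x1 + x2) / 2, the deviations are +/-d with d = |x1 - x2| / 2,
// the column variance is d * d, and the normalized output is +/-d / sqrt(d * d + eps), assuming the scale starts
// at 1 and the bias at 0. The wider the pair, the closer the result gets to exactly +/-1, which matches the
// expected +/-0.999998 for d = 0.5, +/-0.9999995 for d = 1 and +/-1 for d = 10. The exact eps is an
// implementation detail of the layer; the figures above are consistent with a small eps on the order of 1e-6.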
public void SquaredErrorRegressionLayer_Forward()
{
    var batchSize = 1;
    var width = 28;
    var height = 28;
    var depth = 3;
    var numberOfClasses = 10;
    var random = new Random(232);

    var sut = new SquaredErrorRegressionLayer(numberOfClasses);
    sut.Initialize(width, height, depth, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, numberOfClasses, random.Next());
    var actual = sut.Forward(input);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    var expected = Matrix<float>.Build.Dense(batchSize, numberOfClasses, new float[] { 0.1234713f, 0.7669879f, -0.9698473f, 1.814438f, 0.2316814f, -0.05312517f, 0.5537131f, -0.2031853f, 0.01274186f, -0.4482329f });

    MatrixAsserts.AreEqual(expected, actual);
}
public void Conv2DLayer_Backward() { var fanIn = 3 * 3 * 3; var batchSize = 10; var random = new Random(232); var sut = new Conv2DLayer(2, 2, 2); sut.Initialize(3, 3, 3, batchSize, Initialization.GlorotUniform, random); var input = Matrix <float> .Build.Random(batchSize, fanIn, random.Next()); sut.Forward(input); var delta = Matrix <float> .Build.Random(batchSize, 8, random.Next()); var actual = sut.Backward(delta); Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray())); var expected = Matrix <float> .Build.Dense(batchSize, 27, new float[] { -0.3012112f, -0.431109f, 0.3067777f, 1.202193f, -0.2970281f, -0.04250076f, 0.2061959f, 0.7989445f, -0.5996055f, 0.2316911f, 0.8747584f, 0.51476f, 0.7046955f, 0.7860816f, 0.8916292f, 0.3832603f, -0.8700395f, -0.5354695f, -0.3685025f, -0.1342522f, 0.1518587f, 0.2221017f, 0.1806079f, 0.07497367f, 0.3097255f, 0.1634672f, -0.4491678f, -0.17365f, -0.144638f, 0.004295252f, -0.3601557f, -1.119732f, -0.1934451f, -0.2176901f, 0.2700419f, 0.089331f, 0.1821172f, 0.883155f, -0.849492f, -0.1244191f, -0.2039348f, -0.1751226f, -0.06979793f, -1.079664f, -0.03928173f, 0.218465f, 1.629254f, -0.4939195f, 0.2245779f, -0.7562671f, -0.8274944f, -0.5467064f, -0.3680224f, -0.1386559f, -1.087083f, -0.2628459f, 1.822653f, 0.701782f, 0.4428222f, -0.1040708f, -0.7712241f, -0.3224123f, -0.01733552f, 0.86768f, 0.3000976f, -0.3464163f, 0.4350502f, 0.2027335f, 0.117341f, 0.1954426f, -0.5851873f, -0.05708569f, -0.4324316f, 0.2123868f, -0.002792224f, -0.9684533f, -0.6718895f, -0.7736996f, 0.03639168f, 0.6314194f, 0.8996651f, -0.3380031f, -0.5858002f, -0.3516838f, 0.3391975f, -0.6496764f, -1.470267f, -0.3591874f, -0.00506255f, 0.3587183f, -0.3065151f, -0.3236075f, -0.3906966f, -0.09693986f, 0.009184331f, -0.259648f, 0.4111665f, 0.6034111f, -0.1156556f, 0.2007988f, 0.6419388f, 0.08893928f, -0.5510148f, -1.088815f, 0.1288811f, -0.3150855f, 0.8156078f, -0.6915295f, 0.9525194f, -0.3977204f, -0.8499367f, -0.6417554f, -0.5841292f, -0.4173682f, -1.056341f, -0.4105635f, 1.05644f, 0.6464114f, 0.3108801f, 0.1191875f, -1.091015f, -0.6754146f, -0.09798362f, 1.521804f, 0.0538983f, -0.5595286f, 0.8281931f, 1.05391f, -0.2553593f, 0.5020249f, 0.5202242f, 0.2157923f, -0.4918408f, 0.170707f, 0.4082467f, -1.021097f, -0.533677f, -0.9184983f, 0.2424989f, 0.4848338f, 1.166928f, -0.3471279f, -0.6809464f, -0.09819638f, 0.1225361f, -0.7392287f, -0.520788f, 0.02109938f, 0.4813665f, 0.2430263f, -0.4380659f, -0.5662301f, 0.09159709f, 0.3489724f, 0.3382439f, 0.003150672f, 0.09243999f, 0.1749734f, -0.2740337f, -0.09017494f, -0.9974357f, -0.339394f, 0.07069169f, 0.1736448f, 0.01214228f, -0.2352773f, 0.009375662f, -0.4198866f, -0.4020877f, 0.1003972f, 0.1117148f, -0.2725339f, -0.1976277f, -0.2717598f, 0.1088151f, -0.272986f, -0.7224701f, -0.3619833f, -0.2441979f, 0.1247422f, 0.02018096f, 0.09719498f, -0.4377319f, -0.8641849f, 0.2047512f, -0.1255917f, 0.1056862f, -0.1779335f, 0.3336785f, -0.03628005f, -0.7774371f, -1.062242f, -0.5129448f, 0.5529103f, -0.9545653f, -0.615674f, 1.543631f, 1.37671f, -0.1174148f, 0.2789351f, 0.8581713f, 0.1639292f, 0.2462995f, 0.4195996f, 0.5214866f, 0.008561775f, 0.1065625f, -0.3906671f, 0.08749793f, -0.2356352f, -0.6500566f, 0.1207049f, -0.4108496f, 0.6503633f, -0.06599725f, -0.6515968f, 0.745948f, 0.4625394f, 0.4467969f, 0.5328804f, 0.1164707f, -1.545646f, -0.7757972f, 0.4053627f, 0.4717687f, -1.017592f, -0.0884065f, 0.6148586f, -0.3979636f, 0.3205289f, -0.06230295f, -0.4828503f, 0.1028745f, -0.02724302f, -0.008226641f, -0.2221806f, 0.4184201f, -0.675901f, -0.3388602f, 
-0.2884192f, -0.483782f, -0.1539764f, -0.02365633f, 0.562417f, 0.1671077f, -0.2424937f, 0.2923921f, 0.1196359f, 0.1165333f, 0.1479492f, -0.6982891f, -0.7252806f, -0.1735715f, 0.3851384f, 0.4009265f, -0.5441284f, -0.460577f, -0.2921039f, -0.3540158f, 0.242506f, -0.2498714f, -0.4901754f, -0.1536585f, -0.4801114f, 0.07474235f, -0.3067195f, -0.9593061f, -0.7042174f, -0.615593f, 0.1035262f }); MatrixAsserts.AreEqual(expected, actual); }
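// Shape note for Conv2DLayer_Backward above: the layer has 2 filters over the same 2x2 filter grid, so the
// incoming delta has 2 * 2 * 2 = 8 columns, while the delta returned by Backward is mapped back to the input
// shape and therefore has fanIn = 3 * 3 * 3 = 27 columns, matching the 3x3x3 input used in the forward pass.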
public void Conv2DLayer_CopyLayerForPredictionModel()
{
    var batchSize = 1;

    var sut = new Conv2DLayer(2, 2, 3, 4, 5, 6);
    sut.Initialize(3, 3, 1, batchSize, Initialization.GlorotUniform, new Random(232));

    var layers = new List<ILayer>();
    sut.CopyLayerForPredictionModel(layers);
    var actual = (Conv2DLayer)layers.Single();

    Assert.AreEqual(sut.Width, actual.Width);
    Assert.AreEqual(sut.Height, actual.Height);
    Assert.AreEqual(sut.Depth, actual.Depth);

    Assert.AreEqual(sut.InputWidth, actual.InputWidth);
    Assert.AreEqual(sut.InputHeight, actual.InputHeight);
    Assert.AreEqual(sut.InputDepth, actual.InputDepth);

    MatrixAsserts.AreEqual(sut.Weights, actual.Weights);
    MatrixAsserts.AreEqual(sut.Bias, actual.Bias);

    Assert.AreEqual(sut.Im2Cols.RowCount, actual.Im2Cols.RowCount);
    Assert.AreEqual(sut.Im2Cols.ColumnCount, actual.Im2Cols.ColumnCount);

    Assert.AreEqual(sut.BorderMode, actual.BorderMode);

    Assert.AreEqual(sut.Conv.RowCount, actual.Conv.RowCount);
    Assert.AreEqual(sut.Conv.ColumnCount, actual.Conv.ColumnCount);

    Assert.AreEqual(sut.OutputActivations.RowCount, actual.OutputActivations.RowCount);
    Assert.AreEqual(sut.OutputActivations.ColumnCount, actual.OutputActivations.ColumnCount);
}
public void DropoutLayer_Backward()
{
    const int fanIn = 5;
    var batchSize = 1;
    var random = new Random(232);

    var sut = new DropoutLayer(0.5);
    sut.Initialize(5, 1, 1, batchSize, Initialization.GlorotUniform, random);

    var input = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
    sut.Forward(input);

    var delta = Matrix<float>.Build.Random(batchSize, fanIn, random.Next());
    var actual = sut.Backward(delta);
    Trace.WriteLine(string.Join(", ", actual.ToColumnMajorArray()));

    var expected = Matrix<float>.Build.Dense(batchSize, fanIn, new float[] { -1.676851f, -1.938897f, -1.108109f, 0f, -0.4058239f });

    MatrixAsserts.AreEqual(expected, actual);
}
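// Note on the two dropout tests above: as is standard for dropout, the mask sampled in Forward is reused when
// Backward scales the incoming delta, so the unit dropped in DropoutLayer_Forward (the fourth column of the
// expected output is 0f) also has its gradient zeroed in DropoutLayer_Backward (the fourth column of the
// expected delta is 0f as well). Both tests seed the generator with 232, which is consistent with the same
// mask being drawn in each.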