public void AveragePoolingLayer_Forward()
{
    // 1x1x3x3 bottom tensor, every element set to the constant 2.0.
    var bottom = new Tensor(1, 1, 3, 3);
    var filler = new ConstantFiller(2.0d);
    filler.Fill(bottom);

    // Kernel 3, stride 1, padding 1 preserves the 3x3 spatial size.
    var layer = new AveragePoolingLayer(3, 1, 1);
    layer.Setup(bottom, top);

    Assert.Equal(1, top.Num);
    Assert.Equal(1, top.Channels);
    Assert.Equal(3, top.Height);
    Assert.Equal(3, top.Width);

    layer.Forward(bottom, top);

    using (var topCpu = top.OnCpu())
    {
        var topData = topCpu.Data;

        // With padding, the window always divides by the full 3x3 size:
        // corners see 4 real samples (4*2/9 = 8/9), edges 6 (6*2/9 = 4/3),
        // and the center all 9 (9*2/9 = 2).
        var expected = new[]
        {
            8.0d / 9, 4.0d / 3, 8.0d / 9,
            4.0d / 3, 2.0d,     4.0d / 3,
            8.0d / 9, 4.0d / 3, 8.0d / 9,
        };
        for (int i = 0; i < expected.Length; i++)
        {
            AssertInRange(expected[i], topData[i]);
        }
    }
}
public void AveragePoolingLayer_BackwardGradient(Size kernel, Size stride, Size padding)
{
    // Start from a known constant state before the numeric gradient check.
    new ConstantFiller(2.0d).Fill(bottom);

    var layer = new AveragePoolingLayer(kernel, stride, padding);

    // Compare analytic gradients against finite differences for every
    // element (step 1e-2, tolerance 1e-2).
    var checker = new GradientChecker(1e-2f, 1e-2f);
    checker.CheckExhaustive(layer, bottom, top);
}
public void AveragePoolingLayer_BackwardGradient(Size kernel, Size stride, Size padding)
{
    // Fill the (externally provided) bottom tensor with a constant so the
    // gradient check runs from a deterministic starting point.
    var constantTwo = new ConstantFiller(2.0d);
    constantTwo.Fill(bottom);

    var gradientChecker = new GradientChecker(1e-2f, 1e-2f);
    var poolingLayer = new AveragePoolingLayer(kernel, stride, padding);

    // Exhaustively verify backward() against numeric differentiation.
    gradientChecker.CheckExhaustive(poolingLayer, bottom, top);
}
public void Filler_Constant()
{
    // A 2x3x4x5 blob filled by ConstantFiller(10.0) must hold exactly
    // 10.0 at every position.
    var blob = new Tensor(2, 3, 4, 5);
    var config = new ConstantFillerConfiguration(10.0d);
    var filler = new ConstantFiller(config);
    filler.Fill(blob);

    using (var blobCpu = blob.OnCpu())
    {
        int count = blobCpu.Count;
        var data = blobCpu.Data;
        for (int i = 0; i < count; i++)
        {
            // Fixed: xUnit's Assert.Equal is (expected, actual); the
            // original had the arguments swapped, which produces a
            // misleading failure message.
            Assert.Equal(10.0d, data[i]);
        }
    }
}
public void Filler_Constant()
{
    // Verify that ConstantFiller writes its configured value (10.0)
    // into every element of a 2x3x4x5 tensor.
    var blob = new Tensor(2, 3, 4, 5);
    var config = new ConstantFillerConfiguration(10.0d);
    var filler = new ConstantFiller(config);
    filler.Fill(blob);

    using (var blobCpu = blob.OnCpu())
    {
        int count = blobCpu.Count;
        var data = blobCpu.Data;
        for (int i = 0; i < count; i++)
        {
            // Fixed argument order: Assert.Equal takes (expected, actual).
            Assert.Equal(10.0d, data[i]);
        }
    }
}
public void DataContextSerializationTest()
{
    // Round-trip a filled DataContext through the Serializator and check
    // that the deserialized copy matches the original.
    DataContext dcOrig = new DataContext();
    string filename = "dataContextTest.txt";
    try
    {
        // Write phase: fill the context and serialize it to disk.
        {
            Serializator ser = new Serializator();
            ConstantFiller filler = new ConstantFiller();
            filler.Fill(dcOrig);
            ser.Add(dcOrig);
            ser.SetFilename(filename);
            ser.Write();
        }

        // Read phase: deserialize into a fresh instance and compare.
        {
            Serializator ser = new Serializator();
            ser.SetFilename(filename);
            ser.Read();
            DataContext dcNew = (DataContext)ser.GetNext();
            Assert.AreEqual(dcOrig.bookDictionary[1].ToString(), dcNew.bookDictionary[1].ToString());
            Assert.AreEqual(dcOrig.clientList.Count, dcNew.clientList.Count);
        }
    }
    finally
    {
        // Fixed: the original left the scratch file in the working
        // directory; clean it up whether or not the assertions passed.
        if (System.IO.File.Exists(filename))
        {
            System.IO.File.Delete(filename);
        }
    }
}
public void MaxPoolingLayer_BackwardsRectangularWithSquareKernel(int topLayer)
{
    Contract.Requires(topLayer > 0);

    const int num = 2;
    const int channels = 2;

    // Bottom: 2x2 channels of a 3x5 map, every element equal to 2:
    // [2 2 2 2 2]
    // [2 2 2 2 2]
    // [2 2 2 2 2]
    var bottom = new Tensor(num, channels, 3, 5);
    new ConstantFiller(2).Fill(bottom);

    var topList = new Tensor[topLayer];
    for (int i = 0; i < topList.Length; i++)
    {
        topList[i] = new Tensor();
    }

    // 2x2 kernel, stride 1, no padding -> each top map is 2x4 (8 values).
    var layer = new MaxPoolingLayer(2, 1, 0);
    layer.Setup(new TensorCollection { bottom }, topList);
    layer.Forward(new TensorCollection { bottom }, topList);

    // Seed the top gradient with ones on the first row of each 2x4 map:
    // [1 1 1 1]
    // [0 0 0 0]
    using (var topCpu = topList[0].OnCpu())
    {
        var topDiff = topCpu.Diff;
        for (int mapStart = 0; mapStart < 8 * num * channels; mapStart += 8)
        {
            for (int col = 0; col < 4; col++)
            {
                topDiff[mapStart + col] = 1;
            }
        }
    }

    layer.Backward(topList, new[] { true }, new TensorCollection { bottom });

    // Expected accumulated bottom gradient for each 3x5 map:
    // [1 2 2 2 1]
    // [1 2 2 2 1]
    // [0 0 0 0 0]
    using (var bottomCpu = bottom.OnCpu())
    {
        var bottomDiff = bottomCpu.Diff;
        var expected = new[]
        {
            1, 2, 2, 2, 1,
            1, 2, 2, 2, 1,
            0, 0, 0, 0, 0,
        };
        for (int mapStart = 0; mapStart < 15 * num * channels; mapStart += 15)
        {
            for (int j = 0; j < expected.Length; j++)
            {
                Assert.Equal(expected[j], bottomDiff[mapStart + j]);
            }
        }
    }
}
public void MaxPoolingLayer_BackwardsRectangularWithSquareKernel(int topLayer)
{
    Contract.Requires(topLayer > 0);

    const int num = 2;
    const int channels = 2;
    int mapCount = num * channels;

    // Bottom: each of the 2x2 maps is 3x5, filled with the constant 2:
    // [2 2 2 2 2]
    // [2 2 2 2 2]
    // [2 2 2 2 2]
    var bottom = new Tensor(num, channels, 3, 5);
    var constantFiller = new ConstantFiller(2);
    constantFiller.Fill(bottom);

    var topList = new Tensor[topLayer];
    for (int t = 0; t < topLayer; t++)
    {
        topList[t] = new Tensor();
    }

    // Square 2x2 kernel, stride 1, no padding over a rectangular input;
    // each resulting top map is 2x4.
    var layer = new MaxPoolingLayer(2, 1, 0);
    layer.Setup(new TensorCollection { bottom }, topList);
    layer.Forward(new TensorCollection { bottom }, topList);

    // Backward input: top-gradient ones on the first row of every map:
    // [1 1 1 1]
    // [0 0 0 0]
    using (var topCpu = topList[0].OnCpu())
    {
        var topDiff = topCpu.Diff;
        for (int map = 0; map < mapCount; map++)
        {
            int offset = map * 8;
            topDiff[offset + 0] = 1;
            topDiff[offset + 1] = 1;
            topDiff[offset + 2] = 1;
            topDiff[offset + 3] = 1;
        }
    }

    layer.Backward(topList, new[] { true }, new TensorCollection { bottom });

    // Expected bottom gradient for every 3x5 map:
    // [1 2 2 2 1]
    // [1 2 2 2 1]
    // [0 0 0 0 0]
    using (var bottomCpu = bottom.OnCpu())
    {
        var bottomDiff = bottomCpu.Diff;
        for (int map = 0; map < mapCount; map++)
        {
            int offset = map * 15;
            Assert.Equal(1, bottomDiff[offset + 0]);
            Assert.Equal(2, bottomDiff[offset + 1]);
            Assert.Equal(2, bottomDiff[offset + 2]);
            Assert.Equal(2, bottomDiff[offset + 3]);
            Assert.Equal(1, bottomDiff[offset + 4]);
            Assert.Equal(1, bottomDiff[offset + 5]);
            Assert.Equal(2, bottomDiff[offset + 6]);
            Assert.Equal(2, bottomDiff[offset + 7]);
            Assert.Equal(2, bottomDiff[offset + 8]);
            Assert.Equal(1, bottomDiff[offset + 9]);
            Assert.Equal(0, bottomDiff[offset + 10]);
            Assert.Equal(0, bottomDiff[offset + 11]);
            Assert.Equal(0, bottomDiff[offset + 12]);
            Assert.Equal(0, bottomDiff[offset + 13]);
            Assert.Equal(0, bottomDiff[offset + 14]);
        }
    }
}