public void Forward()
{
    // Geometry of the input volume: 4x4 spatial, 4 channels, batch of 4.
    const int inputWidth = 4;
    const int inputHeight = 4;
    const int inputDepth = 4;
    const int inputBatchSize = 4;

    // Pooling layer with a 2x2 kernel.
    var layer = new PoolLayer <double>(2, 2);
    layer.Init(inputWidth, inputHeight, inputDepth);

    // Sequential values make the pooled results predictable.
    var raw = new double[inputWidth * inputHeight * inputDepth * inputBatchSize];
    for (var i = 0; i < raw.Length; i++)
    {
        raw[i] = i;
    }

    var input = BuilderInstance.Volume.From(raw, new Shape(inputWidth, inputHeight, inputDepth, inputBatchSize));
    layer.DoForward(input);

    // 2x2 pooling halves the spatial dimensions; depth and batch are untouched.
    var outputShape = layer.OutputActivation.Shape;
    Assert.AreEqual(2, outputShape.Dimensions[0]);
    Assert.AreEqual(2, outputShape.Dimensions[1]);
    Assert.AreEqual(4, outputShape.Dimensions[2]);
    Assert.AreEqual(4, outputShape.Dimensions[3]);

    // Spot-check pooled values across channel and batch boundaries.
    Assert.AreEqual(5.0, layer.OutputActivation.Get(0, 0, 0, 0));
    Assert.AreEqual(21.0, layer.OutputActivation.Get(0, 0, 1, 0));
    Assert.AreEqual(85.0, layer.OutputActivation.Get(0, 0, 1, 1));
}
public void PoolLayerSerialization()
{
    var layer = new PoolLayer(3, 3) { Pad = 1, Stride = 2 };
    layer.Init(28, 24, 1);

    // The data dictionary must record the input geometry passed to Init.
    var stored = layer.GetData();
    Assert.AreEqual(28, stored["InputWidth"]);
    Assert.AreEqual(24, stored["InputHeight"]);
    Assert.AreEqual(1, stored["InputDepth"]);

    // Round-trip through FromData and compare every public property.
    var restored = LayerBase <double> .FromData(stored) as PoolLayer;
    Assert.IsNotNull(restored);

    Assert.AreEqual(28, restored.InputWidth);
    Assert.AreEqual(24, restored.InputHeight);
    Assert.AreEqual(1, restored.InputDepth);
    Assert.AreEqual(layer.OutputWidth, restored.OutputWidth);
    Assert.AreEqual(layer.OutputHeight, restored.OutputHeight);
    Assert.AreEqual(layer.OutputDepth, restored.OutputDepth);
    Assert.AreEqual(layer.Width, restored.Width);
    Assert.AreEqual(layer.Height, restored.Height);
    Assert.AreEqual(layer.Pad, restored.Pad);
    Assert.AreEqual(layer.Stride, restored.Stride);
}
/// <summary>
/// Fluent builder: creates a <see cref="PoolLayer{T}"/> with the given kernel
/// size and attaches it to <paramref name="layer"/> as its parent.
/// </summary>
/// <param name="layer">The layer the new pooling layer is connected under.</param>
/// <param name="width">Pooling kernel width.</param>
/// <param name="height">Pooling kernel height.</param>
/// <returns>The newly created pooling layer, for further chaining.</returns>
public static PoolLayer<T> Pool<T>(this LayerBase<T> layer, int width, int height)
    where T : struct, IEquatable<T>, IFormattable
{
    var pooling = new PoolLayer<T>(width, height);
    pooling.AcceptParent(layer);
    return pooling;
}
public void ComputeTwiceGradientShouldYieldTheSameResult()
{
    const int inputWidth = 20;
    const int inputHeight = 20;
    const int inputDepth = 2;
    const int width = 2;
    const int height = 2;

    // Layer under test: pooling with a 2x2 kernel and stride 2.
    var layer = new PoolLayer <double>(width, height) { Stride = 2 };
    layer.Init(inputWidth, inputHeight, inputDepth);

    // Forward pass over random input (training mode).
    var input = BuilderInstance <double> .Volume.Random(new Shape(inputWidth, inputHeight, inputDepth));
    var output = layer.DoForward(input, true);

    // All-ones gradient flowing back from the output.
    var ones = new double[output.Shape.TotalLength].Populate(1.0);
    var outputGradient = BuilderInstance <double> .Volume.From(ones, output.Shape);

    // Backward must be idempotent: two passes with the same output gradient
    // yield identical input gradients.
    layer.Backward(outputGradient);
    var firstPass = layer.InputActivationGradients.Clone().ToArray();

    layer.Backward(outputGradient);
    var secondPass = layer.InputActivationGradients.Clone().ToArray();

    Assert.IsTrue(firstPass.SequenceEqual(secondPass));
}
public void SerializationTest()
{
    // Create a PoolLayer with non-default geometry so every field must round-trip.
    var layer = new PoolLayer(2, 4) { Pad = 5, Stride = 3 };
    layer.Init(10, 10, 3);

    PoolLayer deserialized;
    using (var ms = new MemoryStream())
    {
        // NOTE(review): BinaryFormatter is obsolete (removed in .NET 9). This test
        // deliberately exercises the legacy [Serializable] contract; it must migrate
        // together with the production serialization mechanism.
        IFormatter formatter = new BinaryFormatter();

        // Serialize
        formatter.Serialize(ms, layer);

        // Deserialize
        ms.Position = 0;
        deserialized = formatter.Deserialize(ms) as PoolLayer;
    }

    // FIX: guard the `as` cast — previously a type mismatch produced a
    // NullReferenceException instead of a clear assertion failure.
    Assert.IsNotNull(deserialized);

    Assert.AreEqual(layer.InputDepth, deserialized.InputDepth);
    Assert.AreEqual(layer.InputHeight, deserialized.InputHeight);
    Assert.AreEqual(layer.InputWidth, deserialized.InputWidth);
    Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);
    Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
    Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);
    Assert.AreEqual(layer.Height, deserialized.Height);
    Assert.AreEqual(layer.Width, deserialized.Width);
    Assert.AreEqual(layer.Pad, deserialized.Pad);
    Assert.AreEqual(layer.Stride, deserialized.Stride);
}
/// <summary>
/// Fluent builder: creates a <see cref="PoolLayer{T}"/> with the given kernel
/// size and connects <paramref name="layer"/> to it.
/// </summary>
/// <param name="layer">Upstream layer to connect to the new pooling layer.</param>
/// <param name="width">Pooling kernel width.</param>
/// <param name="height">Pooling kernel height.</param>
/// <returns>The newly created pooling layer, for further chaining.</returns>
public static PoolLayer<T> Pool<T>(this LayerBase<T> layer, int width, int height)
    where T : struct, IEquatable<T>, IFormattable
{
    var pooling = new PoolLayer<T>(width, height);
    layer.ConnectTo(pooling);
    return pooling;
}
/// <summary>
/// Fluent builder: creates a pooling layer with the given kernel size and
/// connects <paramref name="layer"/> to it.
/// </summary>
/// <returns>The newly created pooling layer, for further chaining.</returns>
public static PoolLayer Pool(this LayerBase layer, int width, int height)
{
    var pooling = new PoolLayer(width, height);
    layer.ConnectTo(pooling);
    return pooling;
}
public void GradientWrtInputCheck()
{
    // Input volume geometry.
    const int inputWidth = 20;
    const int inputHeight = 20;
    const int inputDepth = 2;

    // Pooling kernel geometry.
    const int width = 2;
    const int height = 2;

    var layer = new PoolLayer(width, height) { Stride = 2 };

    // Numerical gradient check against the analytic backward pass (epsilon 1e-6).
    GradientCheckTools.GradientCheck(layer, inputWidth, inputHeight, inputDepth, 1e-6);
}
public void GradientWrtInputCheck()
{
    // Input volume geometry.
    const int inputWidth = 20;
    const int inputHeight = 20;
    const int inputDepth = 2;

    // Pooling kernel geometry and batch size.
    const int width = 2;
    const int height = 2;
    const int batchSize = 3;

    var layer = new PoolLayer <double>(width, height) { Stride = 2 };

    // Batched numerical gradient check against the analytic backward pass (epsilon 1e-6).
    GradientCheckTools.GradientCheck(layer, inputWidth, inputHeight, inputDepth, batchSize, 1e-6);
}
public void EvenPool()
{
    var factory = Defaults.RawFactory;

    // Mean pool over a 3x4x4 input with a 1x2x2 kernel and matching stride
    // (non-overlapping windows).
    var meanPoolLayer = new PoolLayer()
    {
        Factory = factory,
        InputShape = new int[] { 3, 4, 4 },
        KernelShape = new int[] { 1, 2, 2 },
        Stride = new int[] { 1, 2, 2 }
    };
    meanPoolLayer.Prepare();

    // Sequential values 0..47 so every window mean is easy to compute by hand.
    var data = new double[1, 48];
    for (int i = 0; i < 48; i++)
    {
        data[0, i] = i;
    }

    var m = factory.GetEncryptedMatrix(Matrix <double> .Build.DenseOfArray(data), EMatrixFormat.ColumnMajor, 1);
    Utils.ProcessInEnv(env =>
    {
        var t = meanPoolLayer.Apply(m);
        var res = t.Decrypt(env);

        // 4x4 spatial pooled by 2x2 -> 2x2 per channel, 3 channels = 12 values.
        Assert.AreEqual(12, res.ColumnCount);
        Assert.AreEqual(1, res.RowCount);

        var expected = new double[] { 2.5, 4.5, 10.5, 12.5, 18.5, 20.5, 26.5, 28.5, 34.5, 36.5, 42.5, 44.5 };
        for (int i = 0; i < 12; i++)
        {
            // FIX: compare doubles with a tolerance instead of exact equality —
            // the values round-trip through encrypted fixed-point arithmetic.
            Assert.AreEqual(expected[i], res[0, i], 1e-9);
        }

        t.Dispose();
        m.Dispose();
    }, factory);

    meanPoolLayer.Dispose();
}
public void CryptoNetsPoolLayer()
{
    OperationsCount.Reset();

    int batchSize = 8192;
    var factory = new EncryptedSealBfvFactory(new ulong[] { 549764251649 /*, 549764284417*/ }, (ulong)batchSize);
    int weightscale = 32;

    // Dense layer expressed as a PoolLayer: 5*13*13 inputs fully connected to 100 maps.
    var denseLayer3 = new PoolLayer()
    {
        Source = new DummyLayer(),
        InputShape = new int[] { 5 * 13 * 13 },
        KernelShape = new int[] { 5 * 13 * 13 },
        Stride = new int[] { 1000 },
        MapCount = new int[] { 100 },
        Weights = CryptoNets.CryptoNets.Transpose(Weights.Weights_1, 5 * 13 * 13, 100),
        Bias = Weights.Biases_2,
        WeightsScale = weightscale * weightscale,
        Factory = factory
    };
    denseLayer3.Prepare();

    var input = Matrix <double> .Build.Dense(8192, 5 * 13 * 13);
    var m = factory.GetEncryptedMatrix(input, EMatrixFormat.ColumnMajor, 1);

    // FIX: use a monotonic Stopwatch instead of DateTime.Now subtraction —
    // wall-clock time is subject to clock adjustments and is not meant for
    // measuring elapsed intervals.
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    var z = denseLayer3.Apply(m);
    stopwatch.Stop();

    var time = stopwatch.Elapsed.TotalMilliseconds;
    Console.WriteLine("time {0}", time);
    OperationsCount.Print();

    z.Dispose();
    m.Dispose();
    denseLayer3.Dispose();
}
/// <summary>Fluent setter for the pooling stride; returns the same layer for chaining.</summary>
public static PoolLayer<T> Stride<T>(this PoolLayer<T> layer, int stride)
    where T : struct, IEquatable<T>, IFormattable
{
    layer.Stride = stride;
    return layer;
}
/// <summary>Fluent setter for the pooling padding; returns the same layer for chaining.</summary>
public static PoolLayer<T> Pad<T>(this PoolLayer<T> layer, int pad)
    where T : struct, IEquatable<T>, IFormattable
{
    layer.Pad = pad;
    return layer;
}
public void JsonNetSerializerTest()
{
    // Build a small net: input -> conv -> pool -> fully-connected -> softmax.
    var net = new Net();
    net.AddLayer(new InputLayer(5, 5, 3));
    var conv = new ConvLayer(2, 2, 16);
    net.AddLayer(conv);
    var pool = new PoolLayer(2, 2);
    net.AddLayer(pool);
    var fullycon = new FullyConnLayer(3);
    net.AddLayer(fullycon);
    net.AddLayer(new SoftmaxLayer(3));

    // Serialize to json
    var json = net.ToJSON();

    // Deserialize from json
    Net deserialized = SerializationExtensions.FromJSON(json);

    // Make sure deserialized is identical to serialized
    Assert.IsNotNull(deserialized.Layers);
    Assert.AreEqual(net.Layers.Count, deserialized.Layers.Count);

    // FIX: assert on the DESERIALIZED net — the original checked
    // net.Layers[0], which says nothing about the round-trip.
    Assert.IsTrue(deserialized.Layers[0] is InputLayer);

    var deserializedConv = deserialized.Layers[1] as ConvLayer;
    Assert.NotNull(deserializedConv);
    Assert.NotNull(deserializedConv.Filters);
    Assert.AreEqual(16, deserializedConv.Filters.Count);

    // Filter weights and gradients must survive the round-trip exactly.
    for (int i = 0; i < deserializedConv.Filters.Count; i++)
    {
        for (int k = 0; k < deserializedConv.Filters[i].Length; k++)
        {
            Assert.AreEqual(conv.Filters[i].Get(k), deserializedConv.Filters[i].Get(k));
            Assert.AreEqual(conv.Filters[i].GetGradient(k), deserializedConv.Filters[i].GetGradient(k));
        }
    }

    var deserializedPool = deserialized.Layers[2] as PoolLayer;
    Assert.NotNull(deserializedPool);
    Assert.AreEqual(2, deserializedPool.Height);
    Assert.AreEqual(2, deserializedPool.Width);
    Assert.AreEqual(2, deserializedPool.OutputHeight);
    Assert.AreEqual(2, deserializedPool.OutputWidth);
    Assert.AreEqual(0, deserializedPool.Pad);
    Assert.AreEqual(2, deserializedPool.Stride);

    var deserializedFullyCon = deserialized.Layers[3] as FullyConnLayer;
    Assert.NotNull(deserializedFullyCon);
    Assert.NotNull(deserializedFullyCon.Filters);
    Assert.AreEqual(3, deserializedFullyCon.Filters.Count);

    for (int i = 0; i < deserializedFullyCon.Filters.Count; i++)
    {
        for (int k = 0; k < deserializedFullyCon.Filters[i].Length; k++)
        {
            Assert.AreEqual(fullycon.Filters[i].Get(k), deserializedFullyCon.Filters[i].Get(k));
            Assert.AreEqual(fullycon.Filters[i].GetGradient(k), deserializedFullyCon.Filters[i].GetGradient(k));
        }
    }

    Assert.IsTrue(deserialized.Layers[4] is SoftmaxLayer);
    Assert.AreEqual(3, ((SoftmaxLayer)deserialized.Layers[4]).ClassCount);
}
/// <summary>Fluent setter for the pooling stride; returns the same layer for chaining.</summary>
public static PoolLayer Stride(this PoolLayer layer, int stride)
{
    layer.Stride = stride;
    return layer;
}
/// <summary>Fluent setter for the pooling padding; returns the same layer for chaining.</summary>
public static PoolLayer Pad(this PoolLayer layer, int pad)
{
    layer.Pad = pad;
    return layer;
}
// Runs encrypted MNIST inference end-to-end and reports classification accuracy.
// Pipeline: read batch -> encrypt -> (timed) conv -> activation -> dense ->
// activation -> dense -> stop timer. Layers named *DenseLayer* are PoolLayers
// configured as fully-connected layers via MapCount/Weights.
static void Main(string[] args)
{
    string fileName = "MNIST-28x28-test.txt";
    int batchSize = 8192;
    int numberOfRecords = 10000;

    // SEAL BFV factory over a three-prime plaintext-modulus chain.
    var Factory = new EncryptedSealBfvFactory(new ulong[] { 2148728833, 2148794369, 2149810177 }, 16384, DecompositionBitCount: 60, GaloisDecompositionBitCount: 60, SmallModulusCount: 7);
    int weightscale = 32;

    // Reads sparse-format MNIST records, normalized to [0,1) and scaled by 16.
    var ReaderLayer = new BatchReader { FileName = fileName, SparseFormat = true, MaxSlots = batchSize, NormalizationFactor = 1.0 / 256.0, Scale = 16.0 };
    var EncryptedLayer = new EncryptLayer() { Source = ReaderLayer, Factory = Factory };
    var StartTimingLayer = new TimingLayer() { Source = EncryptedLayer, StartCounters = new string[] { "Batch-Time" } };

    // 5x5 convolution (expressed via PoolLayer), stride 2, 5 output maps.
    var ConvLayer1 = new PoolLayer() { Source = StartTimingLayer, InputShape = new int[] { 28, 28 }, KernelShape = new int[] { 5, 5 }, Upperpadding = new int[] { 1, 1 }, Stride = new int[] { 2, 2 }, MapCount = new int[] { 5, 1 }, WeightsScale = weightscale, Weights = Weights.Weights_0 };

    //var ActivationLayer2 = new SquareActivation() { Source = ConvLayer1 };
    var ActivationLayer2 = new AppxReLUActivation() { Source = ConvLayer1 };

    // Fully-connected: 5*13*13 inputs -> 100 outputs.
    var DenseLayer3 = new PoolLayer() { Source = ActivationLayer2, InputShape = new int[] { 5 * 13 * 13 }, KernelShape = new int[] { 5 * 13 * 13 }, Stride = new int[] { 1000 }, MapCount = new int[] { 100 }, Weights = Transpose(Weights.Weights_1, 5 * 13 * 13, 100), Bias = Weights.Biases_2, WeightsScale = weightscale * weightscale };

    //var ActivationLayer4 = new SquareActivation() { Source = DenseLayer3 };
    var ActivationLayer4 = new AppxReLUActivation() { Source = DenseLayer3 };

    // Final fully-connected: 100 inputs -> 10 class scores.
    var DenseLayer5 = new PoolLayer() { Source = ActivationLayer4, InputShape = new int[] { 100 }, KernelShape = new int[] { 100 }, Stride = new int[] { 1000 }, MapCount = new int[] { 10 }, Weights = Weights.Weights_3, Bias = Weights.Biases_3, WeightsScale = weightscale };
    var StopTimingLayer = new TimingLayer() { Source = DenseLayer5, StopCounters = new string[] { "Batch-Time" } };
    var network = StopTimingLayer;

    OperationsCount.Reset();
    Console.WriteLine("Preparing");
    network.PrepareNetwork();
    OperationsCount.Print();
    OperationsCount.Reset();

    // Walk the layer chain from sink to source, enabling verbose logging.
    for (var p = (INetwork)network; p != null; p = p.Source)
    {
        if (p is BaseLayer b)
        {
            b.Verbose = true;
        }
    }

    int count = 0;
    int errs = 0;
    while (count < numberOfRecords)
    {
        // Pull the next encrypted batch through the whole network, then decrypt
        // and score it inside the crypto environment.
        using (var m = network.GetNext())
            Utils.ProcessInEnv(env =>
            {
                var decrypted = m.Decrypt(env);
                for (int i = 0; i < decrypted.RowCount; i++)
                {
                    // argmax over the row of class scores.
                    int pred = 0;
                    for (int j = 1; j < decrypted.ColumnCount; j++)
                    {
                        if (decrypted[i, j] > decrypted[i, pred])
                        {
                            pred = j;
                        }
                    }
                    if (pred != ReaderLayer.Labels[i])
                    {
                        errs++;
                    }
                    count++;
                    // Progress line every 100 records.
                    if (count % 100 == 0)
                    {
                        Console.WriteLine("errs {0}/{1} accuracy {2:0.000}% prediction {3} label {4}", errs, count, 100 - (100.0 * errs / (count)), pred, ReaderLayer.Labels[i]);
                    }
                }
                Console.WriteLine("Batch size {0} {1}", batchSize, TimingLayer.GetStats());
            }, Factory);
    }

    // Final accuracy summary over all processed records.
    Console.WriteLine("errs {0}/{1} accuracy {2:0.000}%", errs, count, 100 - (100.0 * errs / (count)));
    network.DisposeNetwork();
}