public void ComputeTwiceGradientShouldYieldTheSameResult()
{
    // Calling Backward twice with the same output gradient must be
    // deterministic: the input gradients from both passes should match.
    const int width = 10;
    const int height = 10;
    const int depth = 5;

    var layer = new FullyConnLayer<double>(5) { BiasPref = 0.1 };
    layer.Init(width, height, depth);

    // Forward pass on a random input volume (training mode).
    var x = BuilderInstance<double>.Volume.Random(new Shape(width, height, depth));
    var result = layer.DoForward(x, true);

    // All-ones gradient w.r.t. the layer output.
    var outputGradient = BuilderInstance<double>.Volume.From(
        new double[result.Shape.TotalLength].Populate(1.0), result.Shape);

    layer.Backward(outputGradient);
    var firstPass = ((Volume<double>)layer.InputActivationGradients.Clone()).ToArray();

    layer.Backward(outputGradient);
    var secondPass = ((Volume<double>)layer.InputActivationGradients.Clone()).ToArray();

    Assert.IsTrue(firstPass.SequenceEqual(secondPass));
}
public void GradientWrtParametersCheck(int inputWidth, int inputHeight, int inputDepth, int neuronCount)
{
    // Numerically verify the gradients w.r.t. the layer's parameters
    // for the given input geometry and neuron count.
    GradientCheckTools.GradienWrtParameterstCheck(
        inputWidth, inputHeight, inputDepth, new FullyConnLayer(neuronCount));
}
/// <summary>
/// Fluent helper: appends a fully connected layer with
/// <paramref name="neuronCount"/> neurons after <paramref name="layer"/>.
/// </summary>
/// <returns>The newly created fully connected layer, for chaining.</returns>
public static FullyConnLayer<T> FullyConn<T>(this LayerBase<T> layer, int neuronCount)
    where T : struct, IEquatable<T>, IFormattable
{
    var next = new FullyConnLayer<T>(neuronCount);
    next.AcceptParent(layer);
    return next;
}
public void FullyConnLayerSerialization()
{
    // Round-trip a FullyConnLayer through GetData/FromData and verify
    // every persisted property survives unchanged.
    var layer = new FullyConnLayer(10) { BiasPref = 0.5 };
    layer.Init(28, 24, 1);

    var data = layer.GetData();
    Assert.AreEqual(28, data["InputWidth"]);
    Assert.AreEqual(24, data["InputHeight"]);
    Assert.AreEqual(1, data["InputDepth"]);

    var restored = LayerBase<double>.FromData(data) as FullyConnLayer;
    Assert.IsNotNull(restored);

    Assert.AreEqual(28, restored.InputWidth);
    Assert.AreEqual(24, restored.InputHeight);
    Assert.AreEqual(1, restored.InputDepth);
    Assert.AreEqual(layer.NeuronCount, restored.NeuronCount);

    // Weights and biases must match element by element, not just in shape.
    Assert.AreEqual(layer.Filters.Shape, restored.Filters.Shape);
    Assert.IsTrue(layer.Filters.ToArray().SequenceEqual(restored.Filters.ToArray()));
    Assert.AreEqual(layer.Bias.Shape, restored.Bias.Shape);
    Assert.IsTrue(layer.Bias.ToArray().SequenceEqual(restored.Bias.ToArray()));
    Assert.AreEqual(layer.BiasPref, restored.BiasPref);
}
/// <summary>
/// Fluent helper: connects a new fully connected layer with
/// <paramref name="neuronCount"/> neurons after <paramref name="layer"/>.
/// </summary>
/// <returns>The newly created fully connected layer, for chaining.</returns>
public static FullyConnLayer FullyConn(this LayerBase layer, int neuronCount)
{
    var next = new FullyConnLayer(neuronCount);
    layer.ConnectTo(next);
    return next;
}
/// <summary>
/// Fluent helper: connects a new fully connected layer with
/// <paramref name="neuronCount"/> neurons after <paramref name="layer"/>.
/// </summary>
/// <returns>The newly created fully connected layer, for chaining.</returns>
public static FullyConnLayer<T> FullyConn<T>(this LayerBase<T> layer, int neuronCount)
    where T : struct, IEquatable<T>, IFormattable
{
    var next = new FullyConnLayer<T>(neuronCount);
    layer.ConnectTo(next);
    return next;
}
// Builds a small CIFAR-10-style conv net:
// 32x32x3 input -> 3 x (conv + relu + 2x2 maxpool) -> FC(10) -> softmax loss.
private static Network CreateNewNetwork()
{
    Network net = new Network();

    // 32x32 RGB input.
    InputLayer il = new InputLayer();
    il.OutputWidth = 32;
    il.OutputHeight = 32;
    il.OutputDepth = 3;
    net.Layers.Add(il);

    // NOTE(review): ConvLayer arguments appear to be positional
    // (filters, filterW, filterH, inputDepth, inputW, inputH, stride, pad, ...)
    // — confirm against the ConvLayer constructor.
    ConvLayer conv = new ConvLayer(16, 5, 5, 3, 32, 32, 1, 2, 0, 1, 0.1);
    net.Layers.Add(conv);
    ReluLayer rl = new ReluLayer(conv.OutputDepth, conv.OutputWidth, conv.OutputHeight);
    net.Layers.Add(rl);
    MaxPoolLayer pl = new MaxPoolLayer(2, 2, rl.OutputDepth, rl.OutputWidth, rl.OutputHeight, 2, 0, 0);
    net.Layers.Add(pl);

    // Second conv block, fed by the previous pooling layer's output geometry.
    ConvLayer conv2 = new ConvLayer(20, 5, 5, pl.OutputDepth, pl.OutputWidth, pl.OutputHeight, 1, 2, 0, 1, 0.1);
    net.Layers.Add(conv2);
    ReluLayer rl2 = new ReluLayer(conv2.OutputDepth, conv2.OutputWidth, conv2.OutputHeight);
    net.Layers.Add(rl2);
    MaxPoolLayer pl2 = new MaxPoolLayer(2, 2, rl2.OutputDepth, rl2.OutputWidth, rl2.OutputHeight, 2, 0, 0);
    net.Layers.Add(pl2);

    // Third conv block.
    ConvLayer conv3 = new ConvLayer(20, 5, 5, pl2.OutputDepth, pl2.OutputWidth, pl2.OutputHeight, 1, 2, 0, 1, 0.1);
    net.Layers.Add(conv3);
    ReluLayer rl3 = new ReluLayer(conv3.OutputDepth, conv3.OutputWidth, conv3.OutputHeight);
    net.Layers.Add(rl3);
    MaxPoolLayer pl3 = new MaxPoolLayer(2, 2, rl3.OutputDepth, rl3.OutputWidth, rl3.OutputHeight, 2, 0, 0);
    net.Layers.Add(pl3);

    // 10-way classification head.
    FullyConnLayer fc = new FullyConnLayer(10, pl3.OutputDepth, pl3.OutputWidth, pl3.OutputHeight, 0, 1, 0);
    net.Layers.Add(fc);
    SoftmaxLayer sl = new SoftmaxLayer(fc.OutputDepth, fc.OutputWidth, fc.OutputHeight);
    net.LossLayer = sl;
    return(net);
}
// Builds the Q-network for the RL demo agent: a 1x1xN state vector
// -> conv + relu + maxpool -> FC(50) -> ReLU -> FC(50) -> ReLU -> FC(5) -> regression loss.
private static Network CreateNewNetwork()
{
    var num_inputs = 27; // 9 eyes, each sees 3 numbers (wall, green, red thing proximity)
    var num_actions = 5; // 5 possible angles agent can turn
    var temporal_window = 1; // amount of temporal memory. 0 = agent lives in-the-moment :)
    var network_size = num_inputs * temporal_window + num_actions * temporal_window + num_inputs;

    Network net = new Network();

    // State is fed as a 1x1 volume whose depth is the full temporal window.
    InputLayer il = new InputLayer();
    il.OutputWidth = 1;
    il.OutputHeight = 1;
    il.OutputDepth = network_size;
    net.Layers.Add(il);

    // NOTE(review): a 5x5 conv + 2x2 maxpool over a 1x1 spatial input is
    // unusual — confirm this layer actually contributes, or whether it was
    // copied from an image network.
    ConvLayer conv = new ConvLayer(16, 5, 5, il.OutputDepth, il.OutputWidth, il.OutputHeight, 1, 2, 0, 1, 0.1);
    net.Layers.Add(conv);
    ReluLayer rlv = new ReluLayer(conv.OutputDepth, conv.OutputWidth, conv.OutputHeight);
    net.Layers.Add(rlv);
    MaxPoolLayer pl = new MaxPoolLayer(2, 2, rlv.OutputDepth, rlv.OutputWidth, rlv.OutputHeight, 2, 0, 0);
    net.Layers.Add(pl);

    // Two 50-unit hidden layers with ReLU activations.
    FullyConnLayer fc = new FullyConnLayer(50, pl.OutputDepth, pl.OutputWidth, pl.OutputHeight, 0, 1, 0);
    net.Layers.Add(fc);
    ReluLayer rl = new ReluLayer(fc.OutputDepth, fc.OutputWidth, fc.OutputHeight);
    net.Layers.Add(rl);
    FullyConnLayer fc2 = new FullyConnLayer(50, rl.OutputDepth, rl.OutputWidth, rl.OutputHeight, 0, 1, 0);
    net.Layers.Add(fc2);
    ReluLayer rl2 = new ReluLayer(fc2.OutputDepth, fc2.OutputWidth, fc2.OutputHeight);
    net.Layers.Add(rl2);

    // One output per action; regression head (Q-values, not probabilities).
    FullyConnLayer fc8 = new FullyConnLayer(5, rl2.OutputDepth, rl2.OutputWidth, rl2.OutputHeight, 0, 1, 0);
    net.Layers.Add(fc8);
    RegressionLayer sl = new RegressionLayer(fc8.OutputDepth, fc8.OutputWidth, fc8.OutputHeight);
    net.LossLayer = sl;
    return(net);
}
/// <summary>
/// Round-trips a small net through binary serialization and verifies the
/// deserialized copy matches the original layer by layer.
/// </summary>
public void BinaryNetSerializerTest()
{
    var net = new Net();
    net.AddLayer(new InputLayer(5, 5, 3));
    var conv = new ConvLayer(2, 2, 16);
    net.AddLayer(conv);
    var fullycon = new FullyConnLayer(3);
    net.AddLayer(fullycon);
    net.AddLayer(new SoftmaxLayer(3));

    // Serialize (binary)
    using (var ms = new MemoryStream())
    {
        net.SaveBinary(ms);
        ms.Position = 0;

        // Deserialize (binary)
        Net deserialized = SerializationExtensions.LoadBinary(ms) as Net;

        // Make sure deserialized is identical to serialized.
        Assert.IsNotNull(deserialized.Layers);
        Assert.AreEqual(net.Layers.Count, deserialized.Layers.Count);

        // BUG FIX: the original indexed net.Layers here, so the filter checks
        // compared the source net against itself and were vacuous. All reads
        // below must come from the DESERIALIZED net.
        Assert.IsTrue(deserialized.Layers[0] is InputLayer);

        var deserializedConv = deserialized.Layers[1] as ConvLayer;
        Assert.NotNull(deserializedConv);
        Assert.NotNull(deserializedConv.Filters);
        Assert.AreEqual(16, deserializedConv.Filters.Count);
        for (int i = 0; i < deserializedConv.Filters.Count; i++)
        {
            for (int k = 0; k < deserializedConv.Filters[i].Length; k++)
            {
                // Both weights and accumulated gradients must survive the round trip.
                Assert.AreEqual(conv.Filters[i].Get(k), deserializedConv.Filters[i].Get(k));
                Assert.AreEqual(conv.Filters[i].GetGradient(k), deserializedConv.Filters[i].GetGradient(k));
            }
        }

        var deserializedFullyCon = deserialized.Layers[2] as FullyConnLayer;
        Assert.NotNull(deserializedFullyCon);
        Assert.NotNull(deserializedFullyCon.Filters);
        Assert.AreEqual(3, deserializedFullyCon.Filters.Count);
        for (int i = 0; i < deserializedFullyCon.Filters.Count; i++)
        {
            for (int k = 0; k < deserializedFullyCon.Filters[i].Length; k++)
            {
                Assert.AreEqual(fullycon.Filters[i].Get(k), deserializedFullyCon.Filters[i].Get(k));
                Assert.AreEqual(fullycon.Filters[i].GetGradient(k), deserializedFullyCon.Filters[i].GetGradient(k));
            }
        }

        Assert.IsTrue(deserialized.Layers[3] is SoftmaxLayer);
        Assert.AreEqual(3, ((SoftmaxLayer)deserialized.Layers[3]).ClassCount);
    }
}
/// <summary>
/// Round-trips a small net through JSON serialization and verifies the
/// deserialized copy matches the original layer by layer.
/// </summary>
public void JsonNetSerializerTest()
{
    var net = new Net();
    net.AddLayer(new InputLayer(5, 5, 3));
    var conv = new ConvLayer(2, 2, 16);
    net.AddLayer(conv);
    var fullycon = new FullyConnLayer(3);
    net.AddLayer(fullycon);
    net.AddLayer(new SoftmaxLayer(3));

    // Serialize to json
    var json = net.ToJSON();

    // Deserialize from json
    Net deserialized = SerializationExtensions.FromJSON(json);

    // Make sure deserialized is identical to serialized.
    Assert.IsNotNull(deserialized.Layers);
    Assert.AreEqual(net.Layers.Count, deserialized.Layers.Count);

    // BUG FIX: the original indexed net.Layers for every check below, so it
    // compared the source net against itself and the assertions were vacuous.
    // All reads must come from the DESERIALIZED net.
    Assert.IsTrue(deserialized.Layers[0] is InputLayer);

    var deserializedConv = deserialized.Layers[1] as ConvLayer;
    Assert.NotNull(deserializedConv);
    Assert.NotNull(deserializedConv.Filters);
    Assert.AreEqual(16, deserializedConv.Filters.Count);
    for (int i = 0; i < deserializedConv.Filters.Count; i++)
    {
        for (int k = 0; k < deserializedConv.Filters[i].Length; k++)
        {
            // Both weights and accumulated gradients must survive the round trip.
            Assert.AreEqual(conv.Filters[i].Get(k), deserializedConv.Filters[i].Get(k));
            Assert.AreEqual(conv.Filters[i].GetGradient(k), deserializedConv.Filters[i].GetGradient(k));
        }
    }

    var deserializedFullyCon = deserialized.Layers[2] as FullyConnLayer;
    Assert.NotNull(deserializedFullyCon);
    Assert.NotNull(deserializedFullyCon.Filters);
    Assert.AreEqual(3, deserializedFullyCon.Filters.Count);
    for (int i = 0; i < deserializedFullyCon.Filters.Count; i++)
    {
        for (int k = 0; k < deserializedFullyCon.Filters[i].Length; k++)
        {
            Assert.AreEqual(fullycon.Filters[i].Get(k), deserializedFullyCon.Filters[i].Get(k));
            Assert.AreEqual(fullycon.Filters[i].GetGradient(k), deserializedFullyCon.Filters[i].GetGradient(k));
        }
    }

    Assert.IsTrue(deserialized.Layers[3] is SoftmaxLayer);
    Assert.AreEqual(3, ((SoftmaxLayer)deserialized.Layers[3]).ClassCount);
}
public void GradientWrtInputCheck2()
{
    // Numerically check the gradient w.r.t. the input on a larger volume.
    const int width = 20;
    const int height = 20;
    const int depth = 2;
    const int neurons = 20;

    GradientCheckTools.GradientCheck(new FullyConnLayer(neurons), width, height, depth);
}
public void GradientWrtParametersCheck()
{
    // Numerically check the gradients w.r.t. the layer's parameters
    // on a minimal 2x2x2 input with 2 neurons.
    const int width = 2;
    const int height = 2;
    const int depth = 2;
    const int neurons = 2;

    var layer = new FullyConnLayer(neurons);
    layer.Init(width, height, depth);

    GradientCheckTools.GradienWrtParameterstCheck(width, height, depth, layer);
}
public void GradientWrtInputCheck()
{
    // Numerically check the gradient w.r.t. the input, with batching.
    const int width = 15;
    const int height = 15;
    const int depth = 2;
    const int batchSize = 3;

    var layer = new FullyConnLayer<double>(2) { BiasPref = 0.1 };

    GradientCheckTools.GradientCheck(layer, width, height, depth, batchSize);
}
public void GradientWrtParametersCheck()
{
    // Numerically check the gradients w.r.t. the parameters, with batching.
    const int width = 2;
    const int height = 2;
    const int depth = 2;
    const int batchSize = 3;

    var layer = new FullyConnLayer<double>(2) { BiasPref = 0.1 };

    GradientCheckTools.GradienWrtParameterstCheck(width, height, depth, batchSize, layer);
}
public void BiasPrefUpdateWhenAddingReluLayer()
{
    // Appending a ReLU right after a dot-product layer (fully connected or
    // conv) is expected to retroactively set that layer's BiasPref to 0.1.
    var net = new Net();
    net.AddLayer(new InputLayer(10, 10, 3));

    var fullyConn = new FullyConnLayer(5);
    net.AddLayer(fullyConn);
    net.AddLayer(new ReluLayer());

    var conv = new ConvLayer(5, 5, 3);
    net.AddLayer(conv);
    net.AddLayer(new ReluLayer());

    Assert.AreEqual(0.1, fullyConn.BiasPref);
    Assert.AreEqual(0.1, conv.BiasPref);
}
// Builds a small regression MLP:
// 2 inputs -> FC(50) -> ReLU -> FC(50) -> ReLU -> FC(3) -> regression loss.
private static Network CreateNewNetwork()
{
    var net = new Network();

    // 1x1 spatial input whose depth carries the 2 input features.
    var input = new InputLayer();
    input.OutputWidth = 1;
    input.OutputHeight = 1;
    input.OutputDepth = 2;
    net.Layers.Add(input);

    var hidden1 = new FullyConnLayer(50, input.OutputDepth, input.OutputWidth, input.OutputHeight, 0, 1, 0);
    net.Layers.Add(hidden1);
    var act1 = new ReluLayer(hidden1.OutputDepth, hidden1.OutputWidth, hidden1.OutputHeight);
    net.Layers.Add(act1);

    var hidden2 = new FullyConnLayer(50, act1.OutputDepth, act1.OutputWidth, act1.OutputHeight, 0, 1, 0);
    net.Layers.Add(hidden2);
    var act2 = new ReluLayer(hidden2.OutputDepth, hidden2.OutputWidth, hidden2.OutputHeight);
    net.Layers.Add(act2);

    // 3 regression outputs.
    var output = new FullyConnLayer(3, act2.OutputDepth, act2.OutputWidth, act2.OutputHeight, 0, 1, 0);
    net.Layers.Add(output);
    net.LossLayer = new RegressionLayer(output.OutputDepth, output.OutputWidth, output.OutputHeight);

    return net;
}
public void Forward()
{
    const int width = 2;
    const int height = 2;
    const int depth = 2;
    const int batchSize = 2;

    var layer = new FullyConnLayer<double>(2) { BiasPref = 0.1 };
    layer.Init(width, height, depth);

    // The filter shape must flatten the 2x2x2 input into 8 weights per neuron.
    Assert.AreEqual(1, layer.Filters.Shape.Dimensions[0]);
    Assert.AreEqual(1, layer.Filters.Shape.Dimensions[1]);
    Assert.AreEqual(8, layer.Filters.Shape.Dimensions[2]);
    Assert.AreEqual(2, layer.Filters.Shape.Dimensions[3]);

    // Deterministic weights: neuron 0 gets i, neuron 1 gets 2i.
    for (var i = 0; i < 8; i++)
    {
        layer.Filters.Set(0, 0, i, 0, i);
        layer.Filters.Set(0, 0, i, 1, i * 2.0);
    }

    var input = BuilderInstance.Volume.From(
        new[]
        {
            1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0,
            9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0
        },
        new Shape(width, height, depth, batchSize));

    layer.DoForward(input);
}
// Round-trips a FullyConnLayer through BinaryFormatter and verifies every
// property, filter weight and bias survives unchanged.
// NOTE(review): BinaryFormatter is obsolete and removed in .NET 9 — consider
// migrating this test to the layer's own GetData/FromData or JSON round trip.
public void SerializationTest()
{
    // Create a FullyConnLayer
    var layer = new FullyConnLayer(20)
    {
        BiasPref = 0.1,
    };
    layer.Init(10, 10, 3);

    // Fill weights and biases with deterministic, index-derived values so a
    // corrupted round trip is guaranteed to be detected below.
    foreach (var filter in layer.Filters)
    {
        for (int i = 0; i < filter.Length; i++)
        {
            filter.Set(i, i);
        }
    }

    for (int i = 0; i < layer.Biases.Length; i++)
    {
        layer.Biases.Set(i, i);
    }

    FullyConnLayer deserialized;
    using (var ms = new MemoryStream())
    {
        // Serialize
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(ms, layer);

        // Deserialize
        ms.Position = 0;
        deserialized = formatter.Deserialize(ms) as FullyConnLayer;
    }

    // Scalar properties.
    Assert.AreEqual(layer.BiasPref, deserialized.BiasPref);
    Assert.AreEqual(layer.Filters.Count, deserialized.Filters.Count);
    Assert.AreEqual(layer.InputDepth, deserialized.InputDepth);
    Assert.AreEqual(layer.InputHeight, deserialized.InputHeight);
    Assert.AreEqual(layer.InputWidth, deserialized.InputWidth);
    Assert.AreEqual(layer.L1DecayMul, deserialized.L1DecayMul);
    Assert.AreEqual(layer.L2DecayMul, deserialized.L2DecayMul);
    Assert.AreEqual(layer.NeuronCount, deserialized.NeuronCount);
    Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);
    Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
    Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);

    // Element-wise weight comparison.
    for (int j = 0; j < layer.Filters.Count; j++)
    {
        var filter = layer.Filters[j];
        var deserializedFilter = deserialized.Filters[j];

        for (int i = 0; i < filter.Length; i++)
        {
            Assert.AreEqual(filter.Get(i), deserializedFilter.Get(i));
        }
    }

    // Element-wise bias comparison.
    for (int i = 0; i < layer.Biases.Length; i++)
    {
        Assert.AreEqual(layer.Biases.Get(i), deserialized.Biases.Get(i));
    }
}
// Builds the same conv net twice — once with the "Flow" (graph) API and once
// with the "Core" API — seeds both with identical weights, then verifies the
// forward outputs stay numerically equal across 10 train/forward iterations.
public void CompareCoreVsFlow()
{
    var inputWidth = 28;
    var inputHeigth = 28;
    var inputDepth = 3;
    var batchSize = 20;

    #region Flow network

    var netFlow = new Net<T>();
    netFlow.AddLayer(new InputLayer<T>());
    var convLayerFlow1 = new ConvLayer<T>(5, 5, 8)
    {
        BiasPref = (T)Convert.ChangeType(0.1, typeof(T)),
        Stride = 1,
        Pad = 2
    };
    netFlow.AddLayer(convLayerFlow1);
    netFlow.AddLayer(new ReluLayer<T>());
    netFlow.AddLayer(new PoolLayer<T>(2, 2) { Stride = 2 });
    var fullyConnLayerFlow = new FullyConnLayer<T>(10);
    netFlow.AddLayer(fullyConnLayerFlow);
    netFlow.AddLayer(new SoftmaxLayer<T>());

    var trainerFlow = new SgdTrainer<T>(netFlow, (T)Convert.ChangeType(0.01f, typeof(T)))
    {
        BatchSize = batchSize
    };

    #endregion

    #region Core network

    // Same architecture and hyper-parameters as the flow net above.
    var netCore = new Core.Net<T>();
    netCore.AddLayer(new Core.Layers.InputLayer<T>(inputWidth, inputHeigth, inputDepth));
    var convLayerCore1 = new Core.Layers.ConvLayer<T>(5, 5, 8)
    {
        BiasPref = (T)Convert.ChangeType(0.1, typeof(T)),
        Stride = 1,
        Pad = 2
    };
    netCore.AddLayer(convLayerCore1);
    netCore.AddLayer(new Core.Layers.ReluLayer<T>());
    netCore.AddLayer(new Core.Layers.PoolLayer<T>(2, 2) { Stride = 2 });
    var fullyConnLayerCore = new Core.Layers.FullyConnLayer<T>(10);
    netCore.AddLayer(fullyConnLayerCore);
    netCore.AddLayer(new Core.Layers.SoftmaxLayer<T>(10));

    var trainerCore = new Core.Training.SgdTrainer<T>(netCore)
    {
        LearningRate = (T)Convert.ChangeType(0.01f, typeof(T)),
        BatchSize = batchSize
    };

    #endregion

    // Same weights: copy the core net's (randomly initialized) filters into
    // the flow net's graph variables so both start from identical parameters.
    var convfilterCore1 = netFlow.Session.GetVariableByName(netFlow.Op, (convLayerFlow1.Filter as IPersistable<T>).Name);
    convfilterCore1.Result = BuilderInstance<T>.Volume.SameAs(convLayerCore1.Filters.ToArray(), convLayerCore1.Filters.Shape);

    var fullyfilterCore = netFlow.Session.GetVariableByName(netFlow.Op, (fullyConnLayerFlow.Filter as IPersistable<T>).Name);
    fullyfilterCore.Result = BuilderInstance<T>.Volume.SameAs(fullyConnLayerCore.Filters.ToArray(), fullyConnLayerCore.Filters.Shape);

    // Create input
    var xStorage = new double[inputWidth * inputHeigth * inputDepth * batchSize].Populate(1.0);
    var x = NewVolume(xStorage, Volume.Shape.From(inputWidth, inputHeigth, inputDepth, batchSize));

    // Create output: one-hot labels cycling through the 10 classes.
    var yStorage = new double[10 * batchSize];
    var y = NewVolume(yStorage, Volume.Shape.From(1, 1, 10, batchSize));
    for (var i = 0; i < batchSize; i++)
    {
        y.Set(0, 0, i % 10, i, Ops<T>.One);
    }

    for (var k = 0; k < 10; k++)
    {
        // Fresh constant input each iteration (value varies with k so the
        // comparison exercises different activations).
        xStorage = new double[inputWidth * inputHeigth * inputDepth * batchSize].Populate(1.0 + k);
        x = NewVolume(xStorage, Volume.Shape.From(inputWidth, inputHeigth, inputDepth, batchSize));

        var flowResult = netFlow.Forward(x);
        var coreResult = netCore.Forward(x);

        // Diagnostic: print the difference of the output sums.
        var sum1 = BuilderInstance<T>.Volume.SameAs(new Shape(1));
        flowResult.DoSum(sum1);
        var sum2 = BuilderInstance<T>.Volume.SameAs(new Shape(1));
        coreResult.DoSum(sum2);
        var diff = Ops<T>.Subtract(sum1.Get(0), sum2.Get(0));
        Console.WriteLine(diff);

        // Both implementations must agree element-wise within 1e-6.
        AssertNumber.AreSequenceEqual(flowResult.ToArray(), coreResult.ToArray(), 1e-6);

        // Train both nets one step so the next iteration compares post-update weights.
        trainerCore.Train(x, y);
        trainerFlow.Train(x, y);
    }
}
// Builds a VGG-16 classifier for RGB images of the given size:
// 5 conv blocks (64, 128, 256, 512, 512 filters, each 3x3 stride-1 pad-1,
// every block ending in a 2x2/2 max-pool), then FC(4096) -> dropout ->
// FC(4096) -> dropout -> FC(LabelsCount) -> softmax loss.
public static Network CreateVGG16Network(int imageWidth, int imageHeight, int LabelsCount)
{
    Network net = new Network();

    // RGB input.
    InputLayer il = new InputLayer();
    il.OutputWidth = imageWidth;
    il.OutputHeight = imageHeight;
    il.OutputDepth = 3;
    net.Layers.Add(il);

    // Block 1: 2 x conv(64) + pool.
    ConvLayer conv1_1 = new ConvLayer(64, 3, 3, 3, imageWidth, imageHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv1_1);
    ReluLayer rl1 = new ReluLayer(conv1_1.OutputDepth, conv1_1.OutputWidth, conv1_1.OutputHeight);
    net.Layers.Add(rl1);
    ConvLayer conv1_2 = new ConvLayer(64, 3, 3, rl1.OutputDepth, rl1.OutputWidth, rl1.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv1_2);
    ReluLayer rl2 = new ReluLayer(conv1_2.OutputDepth, conv1_2.OutputWidth, conv1_2.OutputHeight);
    net.Layers.Add(rl2);
    MaxPoolLayer pl1 = new MaxPoolLayer(2, 2, rl2.OutputDepth, rl2.OutputWidth, rl2.OutputHeight, 2, 0);
    net.Layers.Add(pl1);

    // Block 2: 2 x conv(128) + pool.
    ConvLayer conv2_1 = new ConvLayer(128, 3, 3, pl1.OutputDepth, pl1.OutputWidth, pl1.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv2_1);
    ReluLayer rl3 = new ReluLayer(conv2_1.OutputDepth, conv2_1.OutputWidth, conv2_1.OutputHeight);
    net.Layers.Add(rl3);
    ConvLayer conv2_2 = new ConvLayer(128, 3, 3, rl3.OutputDepth, rl3.OutputWidth, rl3.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv2_2);
    ReluLayer rl4 = new ReluLayer(conv2_2.OutputDepth, conv2_2.OutputWidth, conv2_2.OutputHeight);
    net.Layers.Add(rl4);
    MaxPoolLayer pl2 = new MaxPoolLayer(2, 2, rl4.OutputDepth, rl4.OutputWidth, rl4.OutputHeight, 2, 0);
    net.Layers.Add(pl2);

    // Block 3: 3 x conv(256) + pool.
    ConvLayer conv3_1 = new ConvLayer(256, 3, 3, pl2.OutputDepth, pl2.OutputWidth, pl2.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv3_1);
    ReluLayer rl5 = new ReluLayer(conv3_1.OutputDepth, conv3_1.OutputWidth, conv3_1.OutputHeight);
    net.Layers.Add(rl5);
    ConvLayer conv3_2 = new ConvLayer(256, 3, 3, rl5.OutputDepth, rl5.OutputWidth, rl5.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv3_2);
    ReluLayer rl6 = new ReluLayer(conv3_2.OutputDepth, conv3_2.OutputWidth, conv3_2.OutputHeight);
    net.Layers.Add(rl6);
    ConvLayer conv3_3 = new ConvLayer(256, 3, 3, rl6.OutputDepth, rl6.OutputWidth, rl6.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv3_3);
    ReluLayer rl7 = new ReluLayer(conv3_3.OutputDepth, conv3_3.OutputWidth, conv3_3.OutputHeight);
    net.Layers.Add(rl7);
    MaxPoolLayer pl3 = new MaxPoolLayer(2, 2, rl7.OutputDepth, rl7.OutputWidth, rl7.OutputHeight, 2, 0);
    net.Layers.Add(pl3);

    // Block 4: 3 x conv(512) + pool.
    ConvLayer conv4_1 = new ConvLayer(512, 3, 3, pl3.OutputDepth, pl3.OutputWidth, pl3.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv4_1);
    ReluLayer rl8 = new ReluLayer(conv4_1.OutputDepth, conv4_1.OutputWidth, conv4_1.OutputHeight);
    net.Layers.Add(rl8);
    ConvLayer conv4_2 = new ConvLayer(512, 3, 3, rl8.OutputDepth, rl8.OutputWidth, rl8.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv4_2);
    ReluLayer rl9 = new ReluLayer(conv4_2.OutputDepth, conv4_2.OutputWidth, conv4_2.OutputHeight);
    net.Layers.Add(rl9);
    ConvLayer conv4_3 = new ConvLayer(512, 3, 3, rl9.OutputDepth, rl9.OutputWidth, rl9.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv4_3);
    ReluLayer rl10 = new ReluLayer(conv4_3.OutputDepth, conv4_3.OutputWidth, conv4_3.OutputHeight);
    net.Layers.Add(rl10);
    MaxPoolLayer pl4 = new MaxPoolLayer(2, 2, rl10.OutputDepth, rl10.OutputWidth, rl10.OutputHeight, 2, 0);
    net.Layers.Add(pl4);

    // Block 5: 3 x conv(512) + pool.
    ConvLayer conv5_1 = new ConvLayer(512, 3, 3, pl4.OutputDepth, pl4.OutputWidth, pl4.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv5_1);
    ReluLayer rl11 = new ReluLayer(conv5_1.OutputDepth, conv5_1.OutputWidth, conv5_1.OutputHeight);
    net.Layers.Add(rl11);
    ConvLayer conv5_2 = new ConvLayer(512, 3, 3, rl11.OutputDepth, rl11.OutputWidth, rl11.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv5_2);
    ReluLayer rl12 = new ReluLayer(conv5_2.OutputDepth, conv5_2.OutputWidth, conv5_2.OutputHeight);
    net.Layers.Add(rl12);
    ConvLayer conv5_3 = new ConvLayer(512, 3, 3, rl12.OutputDepth, rl12.OutputWidth, rl12.OutputHeight, 1, 1, 0, 1, 0.1f);
    net.Layers.Add(conv5_3);
    ReluLayer rl13 = new ReluLayer(conv5_3.OutputDepth, conv5_3.OutputWidth, conv5_3.OutputHeight);
    net.Layers.Add(rl13);
    MaxPoolLayer pl5 = new MaxPoolLayer(2, 2, rl13.OutputDepth, rl13.OutputWidth, rl13.OutputHeight, 2, 0);
    net.Layers.Add(pl5);

    // Classifier head: FC(4096) -> ReLU -> dropout(0.5), twice, then the
    // final FC sized to the label count feeding a softmax loss.
    FullyConnLayer fc = new FullyConnLayer(4096, pl5.OutputDepth, pl5.OutputWidth, pl5.OutputHeight, 0, 1, 0);
    net.Layers.Add(fc);
    ReluLayer rl14 = new ReluLayer(fc.OutputDepth, fc.OutputWidth, fc.OutputHeight);
    net.Layers.Add(rl14);
    DropoutLayer d = new DropoutLayer(rl14.OutputDepth, rl14.OutputWidth, rl14.OutputHeight, 0.5f);
    net.Layers.Add(d);
    FullyConnLayer fc2 = new FullyConnLayer(4096, d.OutputDepth, d.OutputWidth, d.OutputHeight, 0, 1, 0);
    net.Layers.Add(fc2);
    ReluLayer rl15 = new ReluLayer(fc2.OutputDepth, fc2.OutputWidth, fc2.OutputHeight);
    net.Layers.Add(rl15);
    DropoutLayer d2 = new DropoutLayer(rl15.OutputDepth, rl15.OutputWidth, rl15.OutputHeight, 0.5f);
    net.Layers.Add(d2);
    FullyConnLayer fc3 = new FullyConnLayer(LabelsCount, d2.OutputDepth, d2.OutputWidth, d2.OutputHeight, 0, 1, 0);
    net.Layers.Add(fc3);
    SoftmaxLayer sl = new SoftmaxLayer(fc3.OutputDepth, fc3.OutputWidth, fc3.OutputHeight);
    net.LossLayer = sl;
    return(net);
}