/// <summary>
/// Verifies that ReLuLayer.Execute clamps negative activations to zero
/// and passes non-negative activations through unchanged.
/// </summary>
public void Test_ReLu_Execute()
{
    relu = new ReLuLayer();

    // 2x3 single-channel input mixing positive and negative values.
    Data2D data = new Data2D(2, 3, 1, 1);
    data[0, 0, 0, 0] = 4;
    data[0, 1, 0, 0] = 2;
    data[0, 2, 0, 0] = -2;
    data[1, 0, 0, 0] = 3;
    data[1, 1, 0, 0] = -1;
    data[1, 2, 0, 0] = -3;

    relu.SetInput(data);
    relu.Execute();

    Data2D output = relu.GetOutput() as Data2D;

    // Assert.AreEqual(expected, actual, delta): expected value goes FIRST so
    // failure messages report "expected X, actual Y" correctly (the original
    // passed them in reverse order).
    Assert.AreEqual(4.0, output[0, 0, 0, 0], 0.00000001);
    Assert.AreEqual(2.0, output[0, 1, 0, 0], 0.00000001);
    Assert.AreEqual(0.0, output[0, 2, 0, 0], 0.00000001);
    Assert.AreEqual(3.0, output[1, 0, 0, 0], 0.00000001);
    Assert.AreEqual(0.0, output[1, 1, 0, 0], 0.00000001);
    Assert.AreEqual(0.0, output[1, 2, 0, 0], 0.00000001);
}
/// <summary>
/// Converts a persisted <see cref="CnnLayer"/> entity into an executable
/// <see cref="IConvLayer"/>. Convolutional weights are stored as a single
/// ';'-separated string, flattened kernel-major:
/// [kernel i][feature map j][row a][col b].
/// </summary>
/// <param name="layer">Persisted layer description to convert.</param>
/// <returns>The runtime layer instance.</returns>
/// <exception cref="ArgumentOutOfRangeException">Unknown layer type.</exception>
public static IConvLayer ToConvLayer(this CnnLayer layer)
{
    switch (layer.LayerType)
    {
        case (byte)LayerType.CovolutionalLayer:
            var convLayer = new ConvolutionalLayer
            {
                Kernels = new List<double[][][]>(),
                KernelPadding = 0,
                KernelStride = 1,
                KernelSize = layer.KernelHeight
            };

            var weights = layer.Weights.Weights.Split(';');

            // Number of scalar weights in one kernel (all feature maps).
            int kernelVolume = layer.FeatureMapsCountIn * layer.KernelHeight * layer.KernelWidth;

            for (int i = 0; i < layer.KernelsCount; ++i)
            {
                var kernels = new double[layer.FeatureMapsCountIn][][];
                for (int j = 0; j < layer.FeatureMapsCountIn; ++j)
                {
                    kernels[j] = new double[layer.KernelHeight][];
                    for (int a = 0; a < layer.KernelHeight; ++a)
                    {
                        kernels[j][a] = new double[layer.KernelWidth];
                        for (int b = 0; b < layer.KernelWidth; ++b)
                        {
                            // BUG FIX: the flat index previously omitted the
                            // per-kernel offset (i * kernelVolume), so every
                            // kernel was filled with the first kernel's weights.
                            // NOTE(review): double.Parse uses the current culture;
                            // if weights are serialized with '.' separators,
                            // CultureInfo.InvariantCulture should be passed —
                            // confirm against the serialization side.
                            kernels[j][a][b] = double.Parse(
                                weights[i * kernelVolume
                                        + j * layer.KernelHeight * layer.KernelWidth
                                        + a * layer.KernelWidth
                                        + b]);
                        }
                    }
                }
                convLayer.Kernels.Add(kernels);
            }
            return convLayer;

        case (byte)LayerType.PoolingLayer:
            return new PollingLayer(layer.KernelHeight, 0, 1);

        case (byte)LayerType.ReluLayer:
            return new ReLuLayer();

        default:
            // Specific exception type instead of bare Exception; still caught
            // by any caller catching Exception.
            throw new ArgumentOutOfRangeException(
                nameof(layer), "Unsupported layer type: " + layer.LayerType);
    }
}
/// <summary>
/// Factory for runtime layers. Builds a convolutional, pooling, or ReLU layer
/// depending on <paramref name="layerType"/>.
/// </summary>
/// <param name="neuronsCount">Neuron count forwarded to kernel initialization.</param>
/// <param name="inputMapsCount">Number of input feature maps per kernel.</param>
/// <param name="kernelsCount">Number of kernels for a convolutional layer.</param>
/// <param name="layerType">Discriminator matching <see cref="LayerType"/>.</param>
/// <param name="lr">Learning rate for a convolutional layer.</param>
/// <param name="prevNeuronsCount">Unused; kept for interface compatibility.</param>
/// <returns>The newly created layer.</returns>
/// <exception cref="ArgumentOutOfRangeException">Unknown layer type.</exception>
public static IConvLayer Create(int neuronsCount = 0, int inputMapsCount = 0, int kernelsCount = 0, byte layerType = 0, double lr = 0, int prevNeuronsCount = 0)
{
    switch (layerType)
    {
        case (byte)LayerType.CovolutionalLayer:
            var convLayer = new ConvolutionalLayer
            {
                Kernels = new List<double[][][]>(),
                LearningRate = lr,
                KernelPadding = 0,
                KernelStride = 1,
                KernelSize = 3
            };

            for (int i = 0; i < kernelsCount; ++i)
            {
                // BUG FIX: allocate a fresh jagged array for EACH kernel.
                // Previously a single array was created outside this loop, so
                // every entry of Kernels aliased the same array and each
                // iteration overwrote the previous kernel's maps.
                var kernels = new double[inputMapsCount][][];
                for (int j = 0; j < inputMapsCount; ++j)
                {
                    kernels[j] = CreateKernel(3, neuronsCount);
                }
                convLayer.Kernels.Add(kernels);
            }
            return convLayer;

        case (byte)LayerType.PoolingLayer:
            return new PollingLayer(2, 0, 1);

        case (byte)LayerType.ReluLayer:
            return new ReLuLayer();

        default:
            // Specific exception type instead of bare Exception; still caught
            // by any caller catching Exception.
            throw new ArgumentOutOfRangeException(
                nameof(layerType), "Unsupported layer type: " + layerType);
    }
}
/// <summary>
/// Deserializes a ConvNetJS-style JSON network description into a
/// <see cref="ConvolutionalNeuralNetwork"/>. Layers are read in order;
/// each layer's input dimensions are chained from the previous layer's output.
/// </summary>
/// <param name="filename">Path of the JSON file to read.</param>
/// <returns>The reconstructed network, with its input layer set.</returns>
/// <exception cref="ArgumentException">An unrecognized layer_type value.</exception>
public static ConvolutionalNeuralNetwork Parse(string filename)
{
    var root = JObject.Parse(File.ReadAllText(filename));
    var layerTokens = root.GetValue("layers");

    var layers = new List<CNNLayer>();

    foreach (var token in layerTokens.ToArray())
    {
        var layerType = token["layer_type"].ToObject<string>();
        CNNLayer layer;

        // Instantiate the concrete layer and populate type-specific fields.
        switch (layerType)
        {
            case "input":
                layer = new InputLayer();
                break;
            case "conv":
                layer = new ConvolutionalLayer
                {
                    FilterSize = token["sx"].ToObject<int>(),
                    Stride = token["stride"].ToObject<int>(),
                    L1Decay = token["l1_decay_mul"].ToObject<double>(),
                    L2Decay = token["l2_decay_mul"].ToObject<double>(),
                    Pad = token["pad"].ToObject<int>(),
                    Biases = Volume.ParseJSON(token["biases"]),
                    Kernels = Volume.ArrayParseJSON(token["filters"])
                };
                break;
            case "relu":
                layer = new ReLuLayer();
                break;
            case "pool":
                layer = new SubsamplingLayer
                {
                    FilterSize = token["sx"].ToObject<int>(),
                    Stride = token["stride"].ToObject<int>(),
                    Pad = token["pad"].ToObject<int>(),
                };
                break;
            case "fc":
                layer = new FullyConnectedLayer
                {
                    L1Decay = token["l1_decay_mul"].ToObject<double>(),
                    L2Decay = token["l2_decay_mul"].ToObject<double>(),
                    NeuronsCount = token["out_depth"].ToObject<int>(),
                    Biases = Volume.ParseJSON(token["biases"]),
                    Weights = Volume.ArrayParseJSON(token["filters"])
                };
                break;
            case "softmax":
                layer = new SoftmaxLayer();
                break;
            default:
                throw new ArgumentException("Unknown layer type");
        }

        // Chain dimensions: this layer's input is the previous layer's output.
        if (layers.Count != 0)
        {
            var previous = layers[layers.Count - 1];
            layer.InSize = previous.OutSize;
            layer.InDepth = previous.OutDepth;
        }

        layer.OutDepth = token["out_depth"].ToObject<int>();
        layer.OutSize = new Size(
            token["out_sx"].ToObject<int>(),
            token["out_sy"].ToObject<int>());

        // Size the per-type scratch buffers now that dimensions are known.
        if (layerType == "fc")
        {
            ((FullyConnectedLayer)layer)._inputsCount =
                layer.InSize.Width * layer.InSize.Height * layer.InDepth;
        }
        else if (layerType == "pool")
        {
            int cells = layer.OutSize.Width * layer.OutSize.Height * layer.OutDepth;
            var pool = (SubsamplingLayer)layer;
            pool._oldX = new int[cells];
            pool._oldY = new int[cells];
        }
        else if (layerType == "softmax")
        {
            ((SoftmaxLayer)layer)._es = new double[layer.OutDepth];
        }

        layer.OutVolume = new Volume(layer.OutSize.Width, layer.OutSize.Height, layer.OutDepth, 0);
        layers.Add(layer);
    }

    var network = new ConvolutionalNeuralNetwork();
    network.Layers = layers;
    network.InputLayer = (InputLayer)layers.First();
    return network;
}