/// <summary>
/// Exhaustive 2D MaxUnpooling gradient test: for each configuration, runs a
/// forward MaxPooling pass, back-propagates a random gradient through
/// MaxUnpooling, and compares the result against the CPU <c>Reference</c>
/// implementation. Inputs are deterministic (fixed seed 1234) so runs are
/// reproducible.
/// </summary>
public void ExecuteTest() {
    float max_err = 0;

    Random rd = new Random(1234);

    foreach (int batch in new int[] { 1, 2 }) {
        foreach (int channels in new int[] { 1, 2, 3, 4, 5, 6, 7, 8 }) {
            foreach (int stride in new int[] { 2, 3, 4 }) {
                foreach (int inwidth in new int[] { 5, 7, 11 }) {
                    foreach (int inheight in new int[] { 5, 7, 11 }) {
                        int outwidth = inwidth / stride, outheight = inheight / stride;

                        // Random activations/gradients; continuous values make pool maxima unique with probability 1.
                        float[] xval = (new float[inwidth * inheight * channels * batch]).Select((_) => (float)rd.NextDouble()).ToArray();
                        float[] gyval = (new float[outwidth * outheight * channels * batch]).Select((_) => (float)rd.NextDouble()).ToArray();

                        Map2D x = new Map2D(channels, inwidth, inheight, batch, xval);
                        Map2D gy = new Map2D(channels, outwidth, outheight, batch, gyval);

                        Map2D gx = Reference(x, gy, stride);

                        OverflowCheckedTensor x_tensor = new OverflowCheckedTensor(Shape.Map2D(channels, inwidth, inheight, batch), xval);
                        OverflowCheckedTensor y_tensor = new OverflowCheckedTensor(Shape.Map2D(channels, outwidth, outheight, batch));
                        OverflowCheckedTensor gy_tensor = new OverflowCheckedTensor(Shape.Map2D(channels, outwidth, outheight, batch), gyval);
                        OverflowCheckedTensor gx_tensor = new OverflowCheckedTensor(Shape.Map2D(channels, inwidth, inheight, batch));

                        // Forward pooling is needed first: unpooling routes gradients to the argmax positions of y.
                        MaxPooling ope_pool = new MaxPooling(inwidth, inheight, channels, stride, batch);
                        ope_pool.Execute(x_tensor, y_tensor);

                        MaxUnpooling ope_unpool = new MaxUnpooling(inwidth, inheight, channels, stride, batch);
                        ope_unpool.Execute(gy_tensor, x_tensor, y_tensor, gx_tensor);

                        float[] gx_expect = gx.ToArray();
                        float[] gx_actual = gx_tensor.State;

                        int gx_expect_nonzero = gx_expect.Count((v) => v != 0);
                        // BUG FIX: previously counted gx_expect a second time, so the
                        // sparsity of the ACTUAL gradient was never checked.
                        int gx_actual_nonzero = gx_actual.Count((v) => v != 0);

                        // Inputs must be untouched by the operations.
                        CollectionAssert.AreEqual(xval, x_tensor.State);
                        CollectionAssert.AreEqual(gyval, gy_tensor.State);

                        // Exactly one gradient entry per pooling window may be nonzero.
                        Assert.AreEqual(y_tensor.Length, gx_expect_nonzero);
                        Assert.AreEqual(y_tensor.Length, gx_actual_nonzero);

                        AssertError.Tolerance(gx_expect, gx_actual, 1e-7f, 1e-5f, ref max_err, $"mismatch value {channels},{stride},{inwidth},{inheight},{batch}");

                        Console.WriteLine($"pass: {channels},{stride},{inwidth},{inheight},{batch}");
                    }
                }
            }
        }
    }

    Console.WriteLine($"maxerr:{max_err}");
}
/// <summary>
/// Round-trips a MaxPooling layer through XML storage and verifies that the
/// reloaded instance preserves the pool dimensions and strides.
/// </summary>
public void MaxPoolingStorageTest() {
    var original = new MaxPooling(2, 2);
    var document = new XmlDocument();

    // Serialize to an XML element, then hydrate a fresh instance from it.
    var element = XmlStorage.SaveToEl(document, original);
    var restored = XmlStorage.LoadFromNode<MaxPooling>(element);

    Assert.True(restored is MaxPooling);
    Assert.Equal(original.Rows, restored.Rows);
    Assert.Equal(original.Columns, restored.Columns);
    Assert.Equal(original.RowStride, restored.RowStride);
    Assert.Equal(original.ColumnStride, restored.ColumnStride);
}
/// <summary>
/// Benchmark driver: builds one instance of each layer type, generates shared
/// test data (a flat vector for dense/activation layers and a 3x256x256 batch
/// of 5 images for spatial layers), and times each layer via
/// <c>TestLayer</c>.
/// </summary>
public static void Run() {
    sw = new Stopwatch();

    Console.WriteLine("Generating Test Data...");
    NdArray input = new NdArray(BenchDataMaker.GetRealArray(INPUT_SIZE));
    NdArray inputImage = new NdArray(BenchDataMaker.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);
    Console.WriteLine("Generated Test Data");

    Console.WriteLine("Init Linear");
    Linear linear = new Linear(INPUT_SIZE, OUTPUT_SIZE);

    Console.WriteLine("Init Tanh");
    Tanh tanh = new Tanh();

    Console.WriteLine("Init Sigmoid");
    Sigmoid sigmoid = new Sigmoid();

    Console.WriteLine("Init ReLU");
    ReLU relu = new ReLU();

    Console.WriteLine("Init LeakyReLU");
    LeakyReLU leakyRelu = new LeakyReLU();

    Console.WriteLine("Init MaxPooling");
    MaxPooling maxPooling = new MaxPooling(2);

    Console.WriteLine("Init Convolution2D");
    Convolution2D conv2d = new Convolution2D(3, 32, 3);

    Console.WriteLine("Init Deconvolution2D");
    Deconvolution2D deconv2d = new Deconvolution2D(32, 3, 3);

    Dropout dropout = new Dropout();

    // BUG FIX: removed leftover debug output ("aaaaaaaaaaaa") and the
    // Console.ReadLine() that stalled the benchmark waiting for keyboard
    // input after the Linear layer.
    TestLayer(linear, input);
    TestLayer(tanh, input);
    TestLayer(sigmoid, input);
    TestLayer(relu, input);
    TestLayer(leakyRelu, input);
    TestLayer(maxPooling, inputImage);
    TestLayer(conv2d, inputImage);
    TestLayer(deconv2d, inputImage);
    TestLayer(dropout, input);
}
/// <summary>
/// Exhaustive 3D MaxPooling forward test: for each configuration, compares the
/// operator output against the CPU <c>Reference</c> implementation. Inputs are
/// a deterministic ramp (idx * 1e-3) so runs are reproducible.
/// </summary>
public void ExecuteTest() {
    float max_err = 0;

    foreach (int batch in new int[] { 1, 2 }) {
        foreach (int channels in new int[] { 3, 5 }) {
            foreach (int stride in new int[] { 2, 3, 4 }) {
                foreach (int inwidth in new int[] { 5, 7, 11 }) {
                    foreach (int inheight in new int[] { 5, 7, 11 }) {
                        foreach (int indepth in new int[] { 5, 7, 11 }) {
                            int outwidth = inwidth / stride, outheight = inheight / stride, outdepth = indepth / stride;

                            float[] xval = (new float[inwidth * inheight * indepth * channels * batch]).Select((_, idx) => idx * 1e-3f).ToArray();

                            Map3D x = new Map3D(channels, inwidth, inheight, indepth, batch, xval);

                            Map3D y = Reference(x, stride);

                            OverflowCheckedTensor x_tensor = new OverflowCheckedTensor(Shape.Map3D(channels, inwidth, inheight, indepth, batch), xval);
                            OverflowCheckedTensor y_tensor = new OverflowCheckedTensor(Shape.Map3D(channels, outwidth, outheight, outdepth, batch));

                            MaxPooling ope = new MaxPooling(inwidth, inheight, indepth, channels, stride, batch);

                            ope.Execute(x_tensor, y_tensor);

                            float[] y_expect = y.ToArray();
                            float[] y_actual = y_tensor.State;

                            // The input must be untouched by the operation.
                            CollectionAssert.AreEqual(xval, x_tensor.State);

                            AssertError.Tolerance(y_expect, y_actual, 1e-7f, 1e-5f, ref max_err, $"mismatch value {channels},{stride},{inwidth},{inheight},{indepth},{batch}");

                            // BUG FIX: the pass message previously omitted indepth, so the logged
                            // configuration was ambiguous and inconsistent with the mismatch message.
                            Console.WriteLine($"pass: {channels},{stride},{inwidth},{inheight},{indepth},{batch}");
                        }
                    }
                }
            }
        }
    }

    Console.WriteLine($"maxerr:{max_err}");
}
/// <summary>
/// MaxPooling 2x2 on a single 4x4 plane: the forward pass must pick the
/// maximum of each window, and the backward pass must route each incoming
/// error value to the position of that window's maximum, leaving all other
/// positions zero.
/// </summary>
public void ForwardBackwardTest() {
    var values = new double[] { 3, 2, 1, 5, 4, 2, 6, 1, 8, 2, 6, 3, 6, 3, 9, 5 };
    var source = new TensorOld(values, 1, 1, 4, 4);

    var pooling = new MaxPooling(2);
    pooling.PrepareTrain(source);

    // Forward: window maxima are 4, 6, 8, 9.
    var pooled = pooling.Forward(source);
    var expectedPooled = new TensorOld(new double[] { 4, 6, 8, 9 }, 1, 1, 2, 2);
    Assert.Equal(expectedPooled, pooled);

    // Backward: each error lands on its window's argmax, zeros elsewhere.
    var upstreamError = new TensorOld(new double[] { 0.5, 0.7, -0.3, 1.2 }, 1, 1, 2, 2);
    var propagated = pooling.Backward(upstreamError);
    var expectedGradient = new TensorOld(new double[] { 0, 0, 0, 0, 0.5, 0, 0.7, 0, -0.3, 0, 0, 0, 0, 0, 1.2, 0 }, 1, 1, 4, 4);
    Assert.Equal(expectedGradient, propagated);
}
/// <summary>
/// Measures the average wall-clock time of a 2D MaxPooling pass over a
/// 512x512x32 map (stride 2), averaged over four executions.
/// </summary>
public void SpeedTest() {
    int inwidth = 512, inheight = 512, channels = 32, stride = 2;
    int outwidth = inwidth / stride, outheight = inheight / stride;

    OverflowCheckedTensor x_tensor = new OverflowCheckedTensor(Shape.Map2D(channels, inwidth, inheight));
    OverflowCheckedTensor y_tensor = new OverflowCheckedTensor(Shape.Map2D(channels, outwidth, outheight));

    MaxPooling ope = new MaxPooling(inwidth, inheight, channels, stride);

    const int iterations = 4;

    Stopwatch sw = new Stopwatch();
    sw.Start();

    for (int i = 0; i < iterations; i++) {
        ope.Execute(x_tensor, y_tensor);
    }

    sw.Stop();

    Console.WriteLine($"{sw.ElapsedMilliseconds / iterations} msec");
}
/// <summary>
/// Entry point: trains (or reloads from CNN.xml) a small CNN on a
/// 1000-image MNIST subset, evaluates it on a 1000-image test subset, writes
/// per-epoch accuracy/loss to Log.csv, and persists the layer graph back to
/// CNN.xml.
/// </summary>
static void Main(string[] args) {
    Console.WriteLine("MNIST Test");

    // Derive the PRNG seed from a cryptographically strong source so each run differs.
    int seed;
    using (var rng = new RNGCryptoServiceProvider()) {
        var buffer = new byte[sizeof(int)];
        rng.GetBytes(buffer);
        seed = BitConverter.ToInt32(buffer, 0);
    }
    RandomProvider.SetSeed(seed);

    var assembly = Assembly.GetExecutingAssembly();
    var filename = "CNN.xml";
    // Known-type list lets DataContractSerializer round-trip the heterogeneous layer list.
    var serializer = new DataContractSerializer(typeof(IEnumerable<Layer>), new Type[] { typeof(Convolution), typeof(BatchNormalization), typeof(Activation), typeof(ReLU), typeof(MaxPooling), typeof(FullyConnected), typeof(Softmax) });
    var random = RandomProvider.GetRandom(); // NOTE(review): appears unused in this method — confirm before removing.
    var trainingList = new List<Tuple<double[], double[]>>();
    var testList = new List<Tuple<double[], double[]>>();
    var accuracyList = new List<double>();
    var lossList = new List<double>();
    var logPath = "Log.csv";

    // Network geometry: 28x28 single-channel input -> 30 feature maps (5x5 conv) -> 2x2 max pool.
    var channels = 1;
    var imageWidth = 28;
    var imageHeight = 28;
    var filters = 30;
    var filterWidth = 5;
    var filterHeight = 5;
    var poolWidth = 2;
    var poolHeight = 2;
    var activationMapWidth = Convolution.GetActivationMapLength(imageWidth, filterWidth);
    var activationMapHeight = Convolution.GetActivationMapLength(imageHeight, filterHeight);
    var outputWidth = MaxPooling.GetOutputLength(activationMapWidth, poolWidth);
    var outputHeight = MaxPooling.GetOutputLength(activationMapHeight, poolHeight);
    Model model;

    // Load the first 1000 training images from embedded resources; labels become one-hot vectors.
    using (Stream imagesStream = assembly.GetManifestResourceStream("MNISTTest.train-images.idx3-ubyte"), labelsStream = assembly.GetManifestResourceStream("MNISTTest.train-labels.idx1-ubyte")) {
        foreach (var image in MnistImage.Load(imagesStream, labelsStream).Take(1000)) {
            var t = new double[10];

            for (int i = 0; i < 10; i++) {
                if (i == image.Label) {
                    t[i] = 1.0;
                } else {
                    t[i] = 0.0;
                }
            }

            trainingList.Add(Tuple.Create<double[], double[]>(image.Normalize(), t));
        }
    }

    // Load the first 1000 test images the same way.
    using (Stream imagesStream = assembly.GetManifestResourceStream("MNISTTest.t10k-images.idx3-ubyte"), labelsStream = assembly.GetManifestResourceStream("MNISTTest.t10k-labels.idx1-ubyte")) {
        foreach (var image in MnistImage.Load(imagesStream, labelsStream).Take(1000)) {
            var t = new double[10];

            for (int i = 0; i < 10; i++) {
                if (i == image.Label) {
                    t[i] = 1.0;
                } else {
                    t[i] = 0.0;
                }
            }

            testList.Add(Tuple.Create<double[], double[]>(image.Normalize(), t));
        }
    }

    if (File.Exists(filename)) {
        // A saved model exists: reload it and skip training.
        using (XmlReader xmlReader = XmlReader.Create(filename)) {
            model = new Model((IEnumerable<Layer>)serializer.ReadObject(xmlReader), new Adam(), new SoftmaxCrossEntropy());
        }
    } else {
        // Layers are chained constructor-style:
        // Conv -> ReLU -> MaxPool -> FullyConnected -> ReLU -> Softmax(100 -> 10).
        // NOTE(review): FullyConnected is given only its input size
        // (filters * outputWidth * outputHeight) while Softmax is built as 100 -> 10;
        // presumably FullyConnected produces 100 outputs by some default — confirm.
        model = new Model(new Convolution(channels, imageWidth, imageHeight, filters, filterWidth, filterHeight, (fanIn, fanOut) => Initializers.HeNormal(fanIn), new Activation(new ReLU(), new MaxPooling(filters, activationMapWidth, activationMapHeight, poolWidth, poolHeight, new FullyConnected(filters * outputWidth * outputHeight, (fanIn, fanOut) => Initializers.HeNormal(fanIn), new Activation(new ReLU(), new Softmax(100, 10, (fanIn, fanOut) => Initializers.GlorotNormal(fanIn, fanOut))))))), new Adam(), new SoftmaxCrossEntropy());

        int epochs = 50;
        int iterations = 1;

        // After each training step, record training-set accuracy and loss.
        model.Stepped += (sender, e) => {
            double tptn = 0.0;

            trainingList.ForEach(x => {
                var vector = model.Predicate(x.Item1);
                var i = ArgMax(vector);
                var j = ArgMax(x.Item2);

                // Counted correct only when argmax matches AND the winning output rounds to the target value.
                if (i == j && Math.Round(vector[i]) == x.Item2[j]) {
                    tptn += 1.0;
                }
            });

            var accuracy = tptn / trainingList.Count;

            accuracyList.Add(accuracy);
            lossList.Add(model.Loss);

            Console.WriteLine("Epoch {0}/{1}", iterations, epochs);
            Console.WriteLine("Accuracy: {0}, Loss: {1}", accuracy, model.Loss);

            iterations++;
        };

        Console.WriteLine("Training...");

        var stopwatch = Stopwatch.StartNew();

        model.Fit(trainingList, epochs, 100); // 100: presumably the mini-batch size — confirm.

        stopwatch.Stop();

        Console.WriteLine("Done ({0}).", stopwatch.Elapsed.ToString());
    }

    // Evaluate on the held-out test subset with the same correctness criterion as above.
    double testTptn = 0.0;

    testList.ForEach(x => {
        var vector = model.Predicate(x.Item1);
        var i = ArgMax(vector);
        var j = ArgMax(x.Item2);

        if (i == j && Math.Round(vector[i]) == x.Item2[j]) {
            testTptn += 1.0;
        }
    });

    Console.WriteLine("Accuracy: {0}", testTptn / testList.Count);

    // Per-epoch metrics exist only when training ran this session (list stays empty on reload).
    if (accuracyList.Count > 0) {
        var logDictionary = new Dictionary<string, IEnumerable<double>>();

        logDictionary.Add("Accuracy", accuracyList);
        logDictionary.Add("Loss", lossList);

        ToCsv(logPath, logDictionary);

        Console.WriteLine("Saved log to {0}...", logPath);
    }

    // Persist the layer graph as indented UTF-8 XML without a BOM.
    XmlWriterSettings settings = new XmlWriterSettings();
    settings.Indent = true;
    settings.Encoding = new System.Text.UTF8Encoding(false);

    using (XmlWriter xmlWriter = XmlWriter.Create(filename, settings)) {
        serializer.WriteObject(xmlWriter, model.Layers);
        xmlWriter.Flush();
    }
}
/// <summary>
/// Micro-benchmark: for each layer type, times one Forward and one Backward
/// pass on CPU, then again on GPU when <c>SetGpuEnable(true)</c> succeeds,
/// printing elapsed time in microseconds (ticks divided by ticks-per-µs).
/// Dense/activation layers use a flat INPUT_SIZE vector; spatial layers use a
/// 3x256x256 batch of 5.
/// </summary>
public static void Run() {
    Stopwatch sw = new Stopwatch();

    NdArray inputArrayCpu = new NdArray(Initializer.GetRealArray(INPUT_SIZE));
    NdArray inputArrayGpu = new NdArray(Initializer.GetRealArray(INPUT_SIZE));

    //Linear
    Linear linear = new Linear(INPUT_SIZE, OUTPUT_SIZE);
    Console.WriteLine("◆" + linear.Name);

    sw.Restart();
    NdArray[] gradArrayCpu = linear.Forward(inputArrayCpu);
    sw.Stop();
    Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    gradArrayCpu[0].Grad = gradArrayCpu[0].Data; // use the forward Data as the Grad seed

    sw.Restart();
    linear.Backward(gradArrayCpu);
    sw.Stop();
    Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    if (linear.SetGpuEnable(true)) {
        sw.Restart();
        NdArray[] gradArrayGpu = linear.Forward(inputArrayGpu);
        sw.Stop();
        Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

        gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

        sw.Restart();
        linear.Backward(gradArrayGpu);
        sw.Stop();
        Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
    }

    //Tanh
    TanhActivation tanh = new TanhActivation();
    Console.WriteLine("\n◆" + tanh.Name);

    sw.Restart();
    gradArrayCpu = tanh.Forward(inputArrayCpu);
    sw.Stop();
    Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

    sw.Restart();
    tanh.Backward(gradArrayCpu);
    sw.Stop();
    Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    if (tanh.SetGpuEnable(true)) {
        sw.Restart();
        NdArray[] gradArrayGpu = tanh.Forward(inputArrayGpu);
        sw.Stop();
        Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

        gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

        sw.Restart();
        tanh.Backward(gradArrayGpu);
        sw.Stop();
        Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
    }

    //Sigmoid
    Sigmoid sigmoid = new Sigmoid();
    Console.WriteLine("\n◆" + sigmoid.Name);

    sw.Restart();
    gradArrayCpu = sigmoid.Forward(inputArrayCpu);
    sw.Stop();
    Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

    sw.Restart();
    sigmoid.Backward(gradArrayCpu);
    sw.Stop();
    Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    if (sigmoid.SetGpuEnable(true)) {
        sw.Restart();
        NdArray[] gradArrayGpu = sigmoid.Forward(inputArrayGpu);
        sw.Stop();
        Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

        gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

        sw.Restart();
        sigmoid.Backward(gradArrayGpu);
        sw.Stop();
        Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
    }

    //ReLU
    ReLU relu = new ReLU();
    Console.WriteLine("\n◆" + relu.Name);

    sw.Restart();
    gradArrayCpu = relu.Forward(inputArrayCpu);
    sw.Stop();
    Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

    sw.Restart();
    relu.Backward(gradArrayCpu);
    sw.Stop();
    Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    if (relu.SetGpuEnable(true)) {
        sw.Restart();
        NdArray[] gradArrayGpu = relu.Forward(inputArrayGpu);
        sw.Stop();
        Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

        gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

        sw.Restart();
        relu.Backward(gradArrayGpu);
        sw.Stop();
        Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
    }

    //LeakyReLU
    LeakyReLU leakyRelu = new LeakyReLU();
    Console.WriteLine("\n◆" + leakyRelu.Name);

    sw.Restart();
    gradArrayCpu = leakyRelu.Forward(inputArrayCpu);
    sw.Stop();
    Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

    sw.Restart();
    leakyRelu.Backward(gradArrayCpu);
    sw.Stop();
    Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    if (leakyRelu.SetGpuEnable(true)) {
        sw.Restart();
        NdArray[] gradArrayGpu = leakyRelu.Forward(inputArrayGpu);
        sw.Stop();
        Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

        gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

        sw.Restart();
        leakyRelu.Backward(gradArrayGpu);
        sw.Stop();
        Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
    }

    // Image-shaped inputs (3 channels, 256x256, batch of 5) for the spatial layers below.
    NdArray inputImageArrayGpu = new NdArray(Initializer.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);
    NdArray inputImageArrayCpu = new NdArray(Initializer.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);

    //MaxPooling
    MaxPooling maxPooling = new MaxPooling(3);
    Console.WriteLine("\n◆" + maxPooling.Name);

    sw.Restart();
    NdArray[] gradImageArrayCpu = maxPooling.Forward(inputImageArrayCpu);
    sw.Stop();
    Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

    sw.Restart();
    maxPooling.Backward(gradImageArrayCpu);
    sw.Stop();
    Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    if (maxPooling.SetGpuEnable(true)) {
        sw.Restart();
        maxPooling.Forward(inputImageArrayGpu);
        sw.Stop();
        Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

        // No GPU backward implementation: the backward pass is memory transfer only.
        Console.WriteLine("Backward[Gpu] : None");
    }

    //Conv2D
    Convolution2D conv2d = new Convolution2D(3, 3, 3);
    Console.WriteLine("\n◆" + conv2d.Name);

    sw.Restart();
    gradImageArrayCpu = conv2d.Forward(inputImageArrayCpu);
    sw.Stop();
    Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

    sw.Restart();
    conv2d.Backward(gradImageArrayCpu);
    sw.Stop();
    Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    if (conv2d.SetGpuEnable(true)) {
        sw.Restart();
        NdArray[] gradImageArrayGpu = conv2d.Forward(inputImageArrayGpu);
        sw.Stop();
        Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

        gradImageArrayGpu[0].Grad = gradImageArrayGpu[0].Data;

        sw.Restart();
        conv2d.Backward(gradImageArrayGpu);
        sw.Stop();
        Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
    }

    //Deconv2D
    Deconvolution2D deconv2d = new Deconvolution2D(3, 3, 3);
    Console.WriteLine("\n◆" + deconv2d.Name);

    sw.Restart();
    gradImageArrayCpu = deconv2d.Forward(inputImageArrayCpu);
    sw.Stop();
    Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

    sw.Restart();
    deconv2d.Backward(gradImageArrayCpu);
    sw.Stop();
    Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    if (deconv2d.SetGpuEnable(true)) {
        sw.Restart();
        NdArray[] gradImageArrayGpu = deconv2d.Forward(inputImageArrayGpu);
        sw.Stop();
        Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

        gradImageArrayGpu[0].Grad = gradImageArrayGpu[0].Data;

        sw.Restart();
        deconv2d.Backward(gradImageArrayGpu);
        sw.Stop();
        Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
    }

    //Dropout
    Dropout dropout = new Dropout();
    Console.WriteLine("\n◆" + dropout.Name);

    sw.Restart();
    gradArrayCpu = dropout.Forward(inputArrayCpu);
    sw.Stop();
    Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

    sw.Restart();
    dropout.Backward(gradArrayCpu);
    sw.Stop();
    Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

    if (dropout.SetGpuEnable(true)) {
        sw.Restart();
        NdArray[] gradArrayGpu = dropout.Forward(inputArrayGpu);
        sw.Stop();
        Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

        gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

        sw.Restart();
        dropout.Backward(gradArrayGpu);
        sw.Stop();
        Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
    }
}
public static void Run(bool verbose) { Stopwatch sw = new Stopwatch(); NdArray inputArrayCpu = new NdArray(BenchDataMaker.GetRealArray(INPUT_SIZE)); NdArray inputArrayGpu = new NdArray(BenchDataMaker.GetRealArray(INPUT_SIZE)); Ensure.Argument(inputArrayGpu).NotNull(); Ensure.Argument(inputArrayCpu).NotNull(); //Linear Linear linear = new Linear(verbose, INPUT_SIZE, OUTPUT_SIZE); if (verbose) { RILogManager.Default?.EnterMethod(linear.Name); } sw.Restart(); NdArray[] gradArrayCpu = linear.Forward(verbose, inputArrayCpu); sw.Stop(); if (verbose) { RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); } Ensure.Argument(gradArrayCpu).NotNull(); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; // Use Data as Grad sw.Restart(); linear.Backward(verbose, gradArrayCpu); sw.Stop(); if (verbose) { RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); } if (linear.SetGpuEnable(true)) { sw.Restart(); NdArray[] gradArrayGpu = linear.Forward(verbose, inputArrayGpu); sw.Stop(); if (verbose) { RILogManager.Default?.SendDebug("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); } gradArrayGpu[0].Grad = gradArrayGpu[0].Data; sw.Restart(); linear.Backward(verbose, gradArrayGpu); sw.Stop(); if (verbose) { RILogManager.Default?.SendDebug("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); } } if (verbose) { RILogManager.Default?.ExitMethod(linear.Name); } //Tanh Tanh tanh = new Tanh(); if (verbose) { RILogManager.Default?.EnterMethod(tanh.Name); } sw.Restart(); gradArrayCpu = tanh.Forward(verbose, inputArrayCpu); sw.Stop(); if (verbose) { RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); } gradArrayCpu[0].Grad = gradArrayCpu[0].Data; 
sw.Restart(); tanh.Backward(verbose, gradArrayCpu); sw.Stop(); if (verbose) { RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); } if (tanh.SetGpuEnable(true)) { HandleGPU(verbose, sw, tanh, inputArrayGpu); } if (verbose) { RILogManager.Default?.ExitMethod(tanh.Name); } //Sigmoid Sigmoid sigmoid = new Sigmoid(); if (verbose) { RILogManager.Default?.EnterMethod(sigmoid.Name); } sw.Restart(); gradArrayCpu = sigmoid.Forward(verbose, inputArrayCpu); sw.Stop(); if (verbose) { RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); } gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); sigmoid.Backward(verbose, gradArrayCpu); sw.Stop(); if (verbose) { RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); } if (sigmoid.SetGpuEnable(true)) { HandleGPU(verbose, sw, sigmoid, inputArrayGpu); } if (verbose) { RILogManager.Default?.ExitMethod(tanh.Name); } //Softmax Softmax sm = new Softmax(); RILogManager.Default?.EnterMethod(sm.Name); sw.Restart(); gradArrayCpu = sm.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); sm.Backward(verbose, gradArrayCpu); sw.Stop(); if (verbose) { RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); } if (verbose) { RILogManager.Default?.ExitMethod(sm.Name); } //Softplus Softplus sp = new Softplus(); if (verbose) { RILogManager.Default?.EnterMethod(sp.Name); } sw.Restart(); gradArrayCpu = sp.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / 
(1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); sp.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); RILogManager.Default?.ExitMethod(sp.Name); //ReLU ReLU relu = new ReLU(); RILogManager.Default?.EnterMethod(relu.Name); sw.Restart(); gradArrayCpu = relu.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); relu.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (relu.SetGpuEnable(true)) { HandleGPU(verbose, sw, relu, inputArrayGpu); } RILogManager.Default?.ExitMethod(relu.Name); //LeakyReLU LeakyReLU leakyRelu = new LeakyReLU(); RILogManager.Default?.EnterMethod(leakyRelu.Name); sw.Restart(); gradArrayCpu = leakyRelu.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); leakyRelu.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (leakyRelu.SetGpuEnable(true)) { HandleGPU(verbose, sw, leakyRelu, inputArrayGpu); } RILogManager.Default?.ExitMethod(leakyRelu.Name); //ReLuTanh ReLuTanh rth = new ReLuTanh(); RILogManager.Default?.EnterMethod(rth.Name); sw.Restart(); gradArrayCpu = rth.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); 
gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); rth.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (rth.SetGpuEnable(true)) { HandleGPU(verbose, sw, rth, inputArrayGpu); } RILogManager.Default?.ExitMethod(rth.Name); ////Swish //Swish swi = new Swish(); //RILogManager.Default?.SendDebug(swi.Name); //sw.Restart(); //gradArrayCpu = swi.Forward(inputArrayCpu); //sw.Stop(); //RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); //gradArrayCpu[0].Grad = gradArrayCpu[0].Data; //sw.Restart(); //swi.Backward(gradArrayCpu); //sw.Stop(); //RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); NdArray inputImageArrayGpu = new NdArray(BenchDataMaker.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5); NdArray inputImageArrayCpu = new NdArray(BenchDataMaker.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5); //MaxPooling MaxPooling maxPooling = new MaxPooling(3); RILogManager.Default?.EnterMethod(maxPooling.Name); sw.Restart(); NdArray[] gradImageArrayCpu = maxPooling.Forward(verbose, inputImageArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data; sw.Restart(); maxPooling.Backward(verbose, gradImageArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (maxPooling.SetGpuEnable(true)) { sw.Restart(); maxPooling.Forward(verbose, inputImageArrayGpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); 
// There is no implementation for memory transfer only RILogManager.Default?.SendDebug("Backward[Gpu] : None"); } RILogManager.Default?.ExitMethod(maxPooling.Name); //AvgPooling AveragePooling avgPooling = new AveragePooling(3); RILogManager.Default?.EnterMethod(avgPooling.Name); sw.Restart(); gradImageArrayCpu = avgPooling.Forward(verbose, inputImageArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data; sw.Restart(); avgPooling.Backward(verbose, gradImageArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); RILogManager.Default?.ExitMethod(avgPooling.Name); //Conv2D Convolution2D conv2d = new Convolution2D(verbose, 3, 3, 3); RILogManager.Default?.EnterMethod(conv2d.Name); sw.Restart(); gradImageArrayCpu = conv2d.Forward(verbose, inputImageArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data; sw.Restart(); conv2d.Backward(verbose, gradImageArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (conv2d.SetGpuEnable(true)) { HandleGPU(verbose, sw, conv2d, inputArrayGpu); } RILogManager.Default?.ExitMethod(conv2d.Name); //Deconv2D Deconvolution2D deconv2d = new Deconvolution2D(verbose, 3, 3, 3); RILogManager.Default?.EnterMethod(deconv2d.Name); sw.Restart(); gradImageArrayCpu = deconv2d.Forward(verbose, inputImageArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data; sw.Restart(); 
deconv2d.Backward(verbose, gradImageArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (deconv2d.SetGpuEnable(true)) { HandleGPU(verbose, sw, deconv2d, inputArrayGpu); } RILogManager.Default?.ExitMethod(deconv2d.Name); //Dropout Dropout dropout = new Dropout(); RILogManager.Default?.EnterMethod(dropout.Name); sw.Restart(); gradArrayCpu = dropout.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); dropout.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (dropout.SetGpuEnable(true)) { sw.Restart(); NdArray[] gradArrayGpu = dropout.Forward(verbose, inputArrayGpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayGpu[0].Grad = gradArrayGpu[0].Data; sw.Restart(); dropout.Backward(verbose, gradArrayGpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); } RILogManager.Default?.ExitMethod(dropout.Name); //ArcSinH ArcSinH a = new ArcSinH(); RILogManager.Default?.EnterMethod(a.Name); sw.Restart(); gradArrayCpu = a.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); a.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if 
(a.SetGpuEnable(true)) { HandleGPU(verbose, sw, a, inputArrayGpu); } RILogManager.Default?.ExitMethod(a.Name); //ELU ELU e = new ELU(); RILogManager.Default?.EnterMethod(e.Name); sw.Restart(); gradArrayCpu = e.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); e.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); RILogManager.Default?.ExitMethod(e.Name); //LeakyReluShifted LeakyReLUShifted lrs = new LeakyReLUShifted(); RILogManager.Default?.EnterMethod(lrs.Name); sw.Restart(); gradArrayCpu = lrs.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); lrs.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (lrs.SetGpuEnable(true)) { HandleGPU(verbose, sw, lrs, inputArrayGpu); } RILogManager.Default?.ExitMethod(lrs.Name); //Logistic LogisticFunction lf = new LogisticFunction(); RILogManager.Default?.EnterMethod(lf.Name); sw.Restart(); gradArrayCpu = lf.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); lf.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (lf.SetGpuEnable(true)) { HandleGPU(verbose, sw, lf, inputArrayGpu); } 
RILogManager.Default?.ExitMethod(lf.Name); //MaxMinusOne MaxMinusOne mmo = new MaxMinusOne(); RILogManager.Default?.EnterMethod(mmo.Name); sw.Restart(); gradArrayCpu = mmo.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); mmo.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (mmo.SetGpuEnable(true)) { HandleGPU(verbose, sw, mmo, inputArrayGpu); } RILogManager.Default?.ExitMethod(mmo.Name); //ScaledELU ScaledELU se = new ScaledELU(); RILogManager.Default?.EnterMethod(se.Name); sw.Restart(); gradArrayCpu = se.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); se.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (se.SetGpuEnable(true)) { HandleGPU(verbose, sw, se, inputArrayGpu); } RILogManager.Default?.ExitMethod(se.Name); //Sine Sine s = new Sine(); RILogManager.Default?.EnterMethod(s.Name); sw.Restart(); gradArrayCpu = s.Forward(verbose, inputArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); gradArrayCpu[0].Grad = gradArrayCpu[0].Data; sw.Restart(); s.Backward(verbose, gradArrayCpu); sw.Stop(); RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs"); if (s.SetGpuEnable(true)) { HandleGPU(verbose, sw, s, inputArrayGpu); } 
RILogManager.Default?.ExitMethod(s.Name); }
/// <summary>
/// Creates a layer of the requested <paramref name="type"/> and appends it to <c>Layers</c>.
/// </summary>
/// <param name="nCount">Node/unit count passed to the layer constructor.</param>
/// <param name="type">Kind of layer to create.</param>
/// <param name="activationSettings">Activation configuration passed to the layer constructor.</param>
/// <returns><c>true</c> when the layer was created and added.</returns>
/// <exception cref="ArgumentException">
/// Thrown when <paramref name="type"/> is <see cref="ELayerType.Invalid"/> or not a recognized value.
/// </exception>
public bool CreateLayer(int nCount, ELayerType type, ActivationSettings activationSettings)
{
    // Every layer receives its position in the network (Layers.Count at creation time)
    // as its layer index; only the concrete type differs per case, so construct here
    // and share a single Add/return below instead of repeating it in every case.
    Layer.Utility.Layer layer;
    switch (type)
    {
        case ELayerType.AveragePooling:
            layer = new AveragePooling(nCount, Layers.Count, activationSettings);
            break;
        case ELayerType.AverageUnpooling:
            layer = new AverageUnpooling(nCount, Layers.Count, activationSettings);
            break;
        case ELayerType.Convolutional:
            layer = new Convolutional(nCount, Layers.Count, activationSettings);
            break;
        case ELayerType.Deconvolutional:
            layer = new Deconvolutional(nCount, Layers.Count, activationSettings);
            break;
        case ELayerType.Dropout:
            layer = new Dropout(nCount, Layers.Count, activationSettings);
            break;
        case ELayerType.FullyConnected:
            layer = new FullyConnected(nCount, Layers.Count, activationSettings);
            break;
        case ELayerType.GatedRecurrent:
            layer = new GatedRecurrent(nCount, Layers.Count, activationSettings);
            break;
        case ELayerType.LSTM:
            layer = new LSTM(nCount, Layers.Count, activationSettings);
            break;
        case ELayerType.MaxPooling:
            layer = new MaxPooling(nCount, Layers.Count, activationSettings);
            break;
        case ELayerType.MaxUnpooling:
            layer = new MaxUnpooling(nCount, Layers.Count, activationSettings);
            break;
        case ELayerType.Recurrent:
            layer = new Recurrent(nCount, Layers.Count, activationSettings);
            break;
        case ELayerType.Invalid:
        default:
            // nameof(type) preserved as ParamName so callers can identify the bad argument.
            throw new ArgumentException("Invalid \"type\" argument.", nameof(type));
    }

    Layers.Add(layer);
    return true;
}
public void ForwardBackwardTest3()
{
    // The input is one 4x4 slice tiled across 2 batches x 3 channels (6 copies),
    // so every slice is defined once and replicated with a tiling helper.
    Func<double[], double[]> tile6 = block =>
        Enumerable.Range(0, 6).SelectMany(_ => block).ToArray();

    double[] inputSlice =
    {
        3, 2, 1, 5,
        4, 2, 6, 1,
        8, 2, 6, 3,
        6, 3, 9, 5,
    };
    var input = new TensorOld(tile6(inputSlice), 2, 3, 4, 4);

    // Window 1x3 with stride 1x3: one window per row, so the forward output keeps
    // the row-wise maximum of the first three columns of each row.
    var pooling = new MaxPooling(1, 3, 1, 3);
    pooling.PrepareTrain(input);

    var forwardActual = pooling.Forward(input);
    double[] forwardSlice = { 3, 6, 8, 9 };
    var forwardExpected = new TensorOld(tile6(forwardSlice), 2, 3, 4, 1);
    Assert.Equal(forwardExpected, forwardActual);

    // Backward routes each gradient entry to the position of its row's maximum;
    // all other positions receive zero.
    double[] errorSlice = { 0.8, 0.7, -1.5, 0.4 };
    var outputError = new TensorOld(tile6(errorSlice), 2, 3, 4, 1);

    var backwardActual = pooling.Backward(outputError);
    double[] backwardSlice =
    {
        0.8,  0, 0,   0,
        0,    0, 0.7, 0,
        -1.5, 0, 0,   0,
        0,    0, 0.4, 0,
    };
    var backwardExpected = new TensorOld(tile6(backwardSlice), 2, 3, 4, 4);
    Assert.Equal(backwardExpected, backwardActual);
}