Code Example #1
        public static IActivationFunction GetActivationFunction(ActivationFunction activationFunction)
        {
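            // Map the ActivationFunction enum value to a concrete IActivationFunction;
            // None yields null and unknown values fall back to ReLU.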
            IActivationFunction result;

            switch (activationFunction)
            {
            case ActivationFunction.ELU:
                result = new ELU();
                break;

            case ActivationFunction.ReLU:
                result = new ReLU();
                break;

            case ActivationFunction.Sigmoid:
                result = new Sigmoid();
                break;

            case ActivationFunction.None:
                result = null;
                break;

            case ActivationFunction.Swish:
                result = new Swish();
                break;

            default:
                result = new ReLU();
                break;
            }
            return(result);
        }
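A minimal usage sketch for the factory above (illustrative only; not part of the original project):

        // Hypothetical caller of GetActivationFunction, using the enum values shown above.
        IActivationFunction activation = GetActivationFunction(ActivationFunction.Swish);
        if (activation == null)
        {
            // ActivationFunction.None maps to null, so callers must handle that case.
        }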
Code Example #2
        private void btn_createNetwork_Click(object sender, RoutedEventArgs e)
        {
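            // Gather layer sizes from the UI: the input layer control, one entry per hidden-layer panel item, then the output layer control.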
            int[] layers = new int[this.wp_layers.Items.Count + 2];
            layers[0] = Convert.ToInt32((this.ucInputLayer.Content as NetwokLayerSizeControl).GetLayerSize());

            for (int i = 1; i < layers.Length - 1; i++)
            {
                layers[i] = Convert.ToInt32((this.wp_layers.Items[i - 1] as NetwokLayerSizeControl).GetLayerSize());
            }

            layers[layers.Length - 1] = Convert.ToInt32((this.ucOutputLayer.Content as NetwokLayerSizeControl).GetLayerSize());

            ActivationStrategy acti = new Sigmoid();
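            // The combo box index selects the activation: 0 = Sigmoid, 1 = TanH, 2 = ReLU (Sigmoid is the default).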

            if (this.cb_activationFunctions.SelectedIndex == 0)
            {
                acti = new Sigmoid();
            }
            else if (this.cb_activationFunctions.SelectedIndex == 1)
            {
                acti = new TanH();
            }
            else if (this.cb_activationFunctions.SelectedIndex == 2)
            {
                acti = new ReLU();
            }

            NetworkHelper.SaveNetToFile(new NeuralNetwork(layers, acti), "neuralNetwork.txt");
            MessageBox.Show("Success!");
        }
Code Example #3
        public void CalculateActivationTest(float input, float expected)
        {
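            // The ReLU output should match the expected value within a small tolerance (epsilon).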
            float epsilon = 0.0001f;

            ActivationFunction activation = new ReLU();
            float result = activation.CalculateActivation(input);

            Assert.IsTrue(Math.Abs(result - expected) < epsilon, $"Expected value <{expected}>. Actual <{result}>");
        }
Code Example #4
 public static ushort EnCodeIConduction(IConduction conduction)
 {
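     // Encode each known IConduction implementation as a distinct ushort id; unregistered types throw.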
     return(conduction switch
     {
         Sigmoind _ => 1,
         ReLU _ => 2,
         SoftReLU _ => 3,
         Straight _ => 4,
         SoftMax _ => 5,
         _ => throw new ArgumentException(
             $"This type of IConduction is not registered: {conduction}", nameof(conduction)),
     });
 }
Code Example #5
        public static void Run()
        {
            sw = new Stopwatch();

            Console.WriteLine("Generating Test Data...");
            NdArray input      = new NdArray(BenchDataMaker.GetRealArray(INPUT_SIZE));
            NdArray inputImage = new NdArray(BenchDataMaker.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);

            Console.WriteLine("Generated Test Data");

            Console.WriteLine("Init Linear");
            Linear linear = new Linear(INPUT_SIZE, OUTPUT_SIZE);

            Console.WriteLine("Init Tanh");
            Tanh tanh = new Tanh();

            Console.WriteLine("Init Sigmoid");
            Sigmoid sigmoid = new Sigmoid();

            Console.WriteLine("Init ReLU");
            ReLU relu = new ReLU();

            Console.WriteLine("Init LeakyReLU");
            LeakyReLU leakyRelu = new LeakyReLU();

            Console.WriteLine("Init MaxPooling");
            MaxPooling maxPooling = new MaxPooling(2);

            Console.WriteLine("Init Convolution2D");
            Convolution2D conv2d = new Convolution2D(3, 32, 3);

            Console.WriteLine("Init Deconvolution2D");
            Deconvolution2D deconv2d = new Deconvolution2D(32, 3, 3);

            Dropout dropout = new Dropout();

            TestLayer(linear, input);
            Console.WriteLine("aaaaaaaaaaaa");
            Console.ReadLine();

            TestLayer(tanh, input);
            TestLayer(sigmoid, input);
            TestLayer(relu, input);
            TestLayer(leakyRelu, input);

            TestLayer(maxPooling, inputImage);
            TestLayer(conv2d, inputImage);
            TestLayer(deconv2d, inputImage);

            TestLayer(dropout, input);
        }
Code Example #6
File: ReLUTests.cs Project: tonghudan/MLStudy.NET
        public void ReLUTest()
        {
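            // Forward should zero out the negative entries; Backward should zero the error at those same positions.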
            var input    = new TensorOld(new double[] { 1, 2, -3, 5, -2, 7, 4, 6, 8, -5, 4, 1 }, 3, 4);
            var expected = new TensorOld(new double[] { 1, 2, 0, 5, 0, 7, 4, 6, 8, 0, 4, 1 }, 3, 4);
            var relu     = new ReLU();

            relu.PrepareTrain(input);
            var actual = relu.Forward(input);

            Assert.Equal(expected, actual);

            var error    = TensorOld.Values(0.1, 3, 4);
            var expError = new TensorOld(new double[] { 0.1, 0.1, 0, 0.1, 0, 0.1, 0.1, 0.1, 0.1, 0, 0.1, 0.1 }, 3, 4);
            var actError = relu.Backward(error);

            Assert.Equal(expError, actError);
        }
Code Example #7
        public static List <Layer> BuildFromConfig(ModelConfig c, double mCenter = 0, double mOffset = 1, double bCenter = 0, double bOffset = 1)
        {
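            // Build 'deep' randomly initialized layers from the config, sharing a single ReLU activation instance across all of them.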
            int          deep   = c.layers.Count - 1;
            List <Layer> layers = new List <Layer>(deep);
            ReLU         layer  = new ReLU();

            if (deep <= 1)
            {
                throw new Exception("Too few layers!");
            }
            else
            {
                layers.Add(BuildRandom(c.layers[0].w, c.layers[1].w, layer, mCenter, mOffset, bCenter, bOffset));
                for (int i = 1; i < deep - 1; i++)
                {
                    layers.Add(BuildRandom(c.layers[i].w, c.layers[i + 1].w, layer, mCenter, mOffset, bCenter, bOffset));
                }
                layers.Add(BuildRandom(c.layers[deep - 1].w, c.layers[deep].w, layer, mCenter, mOffset, bCenter, bOffset));
            }
            return(layers);
        }
Code Example #8
File: Grid.cs Project: nategadzhi/OpenMined
        private Sequential CreateSequential(List <String> model)
        {
            // TODO just assumes it is all in a seq model the seq model should probably
            // be in the JSON????
            var seq = new Sequential(controller);
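            // Each JSON entry describes one layer; its "name" field selects the layer type to append to the Sequential model.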

            foreach (var l in model)
            {
                var   config = JObject.Parse(l);
                Layer layer  = null;
                switch ((string)config["name"])
                {
                case "linear":
                    layer = new Linear(controller, (int)config["input"], (int)config["output"]);
                    break;

                case "softmax":
                    layer = new Softmax(controller, (int)config["dim"]);
                    break;

                case "relu":
                    layer = new ReLU(controller);
                    break;

                case "log":
                    layer = new Log(controller);
                    break;

                case "dropout":
                    layer = new Dropout(controller, (float)config["rate"]);
                    break;
                }
                seq.AddLayer(layer);
            }

            return(seq);
        }
Code Example #9
File: SingleBenchmark.cs Project: ohisama/KelpNet
        public static void Run()
        {
            Stopwatch sw = new Stopwatch();

            NdArray inputArrayCpu = new NdArray(Initializer.GetRealArray(INPUT_SIZE));
            NdArray inputArrayGpu = new NdArray(Initializer.GetRealArray(INPUT_SIZE));
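
            // Identical inputs for the CPU and GPU paths so both are timed on the same data.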

            //Linear
            Linear linear = new Linear(INPUT_SIZE, OUTPUT_SIZE);

            Console.WriteLine("◆" + linear.Name);

            sw.Restart();
            NdArray[] gradArrayCpu = linear.Forward(inputArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data; // Use Data as Grad

            sw.Restart();
            linear.Backward(gradArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (linear.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = linear.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                linear.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            //Tanh
            TanhActivation tanh = new TanhActivation();

            Console.WriteLine("\n◆" + tanh.Name);

            sw.Restart();
            gradArrayCpu = tanh.Forward(inputArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            tanh.Backward(gradArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (tanh.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = tanh.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                tanh.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            //Sigmoid
            Sigmoid sigmoid = new Sigmoid();

            Console.WriteLine("\n◆" + sigmoid.Name);

            sw.Restart();
            gradArrayCpu = sigmoid.Forward(inputArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            sigmoid.Backward(gradArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (sigmoid.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = sigmoid.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                sigmoid.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            //ReLU
            ReLU relu = new ReLU();

            Console.WriteLine("\n◆" + relu.Name);

            sw.Restart();
            gradArrayCpu = relu.Forward(inputArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            relu.Backward(gradArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (relu.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = relu.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                relu.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            //LeakyReLU
            LeakyReLU leakyRelu = new LeakyReLU();

            Console.WriteLine("\n◆" + leakyRelu.Name);

            sw.Restart();
            gradArrayCpu = leakyRelu.Forward(inputArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            leakyRelu.Backward(gradArrayCpu);
            sw.Stop();

            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (leakyRelu.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = leakyRelu.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                leakyRelu.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            NdArray inputImageArrayGpu = new NdArray(Initializer.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);
            NdArray inputImageArrayCpu = new NdArray(Initializer.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);


            //MaxPooling
            MaxPooling2D maxPooling2D = new MaxPooling2D(3);

            Console.WriteLine("\n◆" + maxPooling2D.Name);

            sw.Restart();
            NdArray[] gradImageArrayCpu = maxPooling2D.Forward(inputImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            maxPooling2D.Backward(gradImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (maxPooling2D.SetGpuEnable(true))
            {
                sw.Restart();
                maxPooling2D.Forward(inputImageArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                // No GPU backward implementation because it is memory transfer only
                Console.WriteLine("Backward[Gpu] : None");
            }


            //Conv2D
            Convolution2D conv2d = new Convolution2D(3, 3, 3);

            Console.WriteLine("\n◆" + conv2d.Name);

            sw.Restart();
            gradImageArrayCpu = conv2d.Forward(inputImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            conv2d.Backward(gradImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (conv2d.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradImageArrayGpu = conv2d.Forward(inputImageArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradImageArrayGpu[0].Grad = gradImageArrayGpu[0].Data;

                sw.Restart();
                conv2d.Backward(gradImageArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            //Deconv2D
            Deconvolution2D deconv2d = new Deconvolution2D(3, 3, 3);

            Console.WriteLine("\n◆" + deconv2d.Name);

            sw.Restart();
            gradImageArrayCpu = deconv2d.Forward(inputImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            deconv2d.Backward(gradImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (deconv2d.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradImageArrayGpu = deconv2d.Forward(inputImageArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradImageArrayGpu[0].Grad = gradImageArrayGpu[0].Data;

                sw.Restart();
                deconv2d.Backward(gradImageArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            //Dropout
            Dropout dropout = new Dropout();

            Console.WriteLine("\n◆" + dropout.Name);

            sw.Restart();
            gradArrayCpu = dropout.Forward(inputArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            dropout.Backward(gradArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (dropout.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = dropout.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                dropout.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }
        }
Code Example #10
File: Layer.cs Project: xuan2261/XNet
        public Layer(int nCount, int index, ActivationSettings activationSettings)
        {
            NCount = nCount;

            Index = index;

            ActivationType = activationSettings.Type();
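            // Instantiate the activation implementation that matches the configured type; ELU and LReLU receive their own settings objects.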

            // Activation Setup
            switch (activationSettings.Type())
            {
            case EActivationType.Invalid:
                Activation = null;
                throw new ArgumentException("Activation Type Invalid.");

            case EActivationType.Arctan:
                Activation = new Arctan();
                break;

            case EActivationType.BinaryStep:
                Activation = new BinaryStep();
                break;

            case EActivationType.BipolarSigmoid:
                Activation = new BipolarSigmoid();
                break;

            case EActivationType.ELU:
                Activation = new ELU((ELUSettings)activationSettings);
                break;

            case EActivationType.HardSigmoid:
                Activation = new HardSigmoid();
                break;

            case EActivationType.HardTanh:
                Activation = new HardTanh();
                break;

            case EActivationType.Identity:
                Activation = new Identity();
                break;

            case EActivationType.Logit:
                Activation = new Logit();
                break;

            case EActivationType.LReLU:
                Activation = new LReLU((LReLUSettings)activationSettings);
                break;

            case EActivationType.Mish:
                Activation = new Mish();
                break;

            case EActivationType.ReLU:
                Activation = new ReLU();
                break;

            case EActivationType.SeLU:
                Activation = new SeLU();
                break;

            case EActivationType.Sigmoid:
                Activation = new Sigmoid();
                break;

            case EActivationType.Softmax:
                Activation = new Softmax();
                break;

            case EActivationType.Softplus:
                Activation = new Softplus();
                break;

            case EActivationType.Softsign:
                Activation = new Softsign();
                break;

            case EActivationType.Tanh:
                Activation = new Tanh();
                break;

            default:
                throw new ArgumentException("Activation Type Invalid.");
            }
        }
Code Example #11
 internal virtual Variable Activate(Variable x)
 {
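     // Default activation is ReLU; derived classes can override Activate to use a different function.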
     return(ReLU.ForwardStatic(x));
 }
Code Example #12
        public static void Run(bool verbose)
        {
            Stopwatch sw = new Stopwatch();
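
            // Same layer benchmark as Code Example #9, but with verbose logging via RILogManager and argument guards via Ensure.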

            NdArray inputArrayCpu = new NdArray(BenchDataMaker.GetRealArray(INPUT_SIZE));
            NdArray inputArrayGpu = new NdArray(BenchDataMaker.GetRealArray(INPUT_SIZE));

            Ensure.Argument(inputArrayGpu).NotNull();
            Ensure.Argument(inputArrayCpu).NotNull();

            //Linear
            Linear linear = new Linear(verbose, INPUT_SIZE, OUTPUT_SIZE);

            if (verbose)
            {
                RILogManager.Default?.EnterMethod(linear.Name);
            }

            sw.Restart();
            NdArray[] gradArrayCpu = linear.Forward(verbose, inputArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            Ensure.Argument(gradArrayCpu).NotNull();

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data; // Use Data as Grad

            sw.Restart();
            linear.Backward(verbose, gradArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            if (linear.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = linear.Forward(verbose, inputArrayGpu);
                sw.Stop();
                if (verbose)
                {
                    RILogManager.Default?.SendDebug("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
                }

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                linear.Backward(verbose, gradArrayGpu);
                sw.Stop();
                if (verbose)
                {
                    RILogManager.Default?.SendDebug("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
                }
            }
            if (verbose)
            {
                RILogManager.Default?.ExitMethod(linear.Name);
            }

            //Tanh
            Tanh tanh = new Tanh();

            if (verbose)
            {
                RILogManager.Default?.EnterMethod(tanh.Name);
            }

            sw.Restart();
            gradArrayCpu = tanh.Forward(verbose, inputArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            tanh.Backward(verbose, gradArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            if (tanh.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, tanh, inputArrayGpu);
            }

            if (verbose)
            {
                RILogManager.Default?.ExitMethod(tanh.Name);
            }



            //Sigmoid
            Sigmoid sigmoid = new Sigmoid();

            if (verbose)
            {
                RILogManager.Default?.EnterMethod(sigmoid.Name);
            }

            sw.Restart();
            gradArrayCpu = sigmoid.Forward(verbose, inputArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            sigmoid.Backward(verbose, gradArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            if (sigmoid.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, sigmoid, inputArrayGpu);
            }
            if (verbose)
            {
                RILogManager.Default?.ExitMethod(sigmoid.Name);
            }


            //Softmax
            Softmax sm = new Softmax();

            RILogManager.Default?.EnterMethod(sm.Name);

            sw.Restart();
            gradArrayCpu = sm.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            sm.Backward(verbose, gradArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }
            if (verbose)
            {
                RILogManager.Default?.ExitMethod(sm.Name);
            }



            //Softplus
            Softplus sp = new Softplus();

            if (verbose)
            {
                RILogManager.Default?.EnterMethod(sp.Name);
            }

            sw.Restart();
            gradArrayCpu = sp.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            sp.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            RILogManager.Default?.ExitMethod(sp.Name);


            //ReLU
            ReLU relu = new ReLU();

            RILogManager.Default?.EnterMethod(relu.Name);

            sw.Restart();
            gradArrayCpu = relu.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            relu.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (relu.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, relu, inputArrayGpu);
            }

            RILogManager.Default?.ExitMethod(relu.Name);


            //LeakyReLU
            LeakyReLU leakyRelu = new LeakyReLU();

            RILogManager.Default?.EnterMethod(leakyRelu.Name);

            sw.Restart();
            gradArrayCpu = leakyRelu.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            leakyRelu.Backward(verbose, gradArrayCpu);
            sw.Stop();

            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (leakyRelu.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, leakyRelu, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(leakyRelu.Name);


            //ReLuTanh
            ReLuTanh rth = new ReLuTanh();

            RILogManager.Default?.EnterMethod(rth.Name);

            sw.Restart();
            gradArrayCpu = rth.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            rth.Backward(verbose, gradArrayCpu);
            sw.Stop();

            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (rth.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, rth, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(rth.Name);


            ////Swish
            //Swish swi = new Swish();
            //RILogManager.Default?.SendDebug(swi.Name);

            //sw.Restart();
            //gradArrayCpu = swi.Forward(inputArrayCpu);
            //sw.Stop();
            //RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            //gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            //sw.Restart();
            //swi.Backward(gradArrayCpu);
            //sw.Stop();

            //RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");


            NdArray inputImageArrayGpu = new NdArray(BenchDataMaker.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);
            NdArray inputImageArrayCpu = new NdArray(BenchDataMaker.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);

            //MaxPooling
            MaxPooling maxPooling = new MaxPooling(3);

            RILogManager.Default?.EnterMethod(maxPooling.Name);

            sw.Restart();
            NdArray[] gradImageArrayCpu = maxPooling.Forward(verbose, inputImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            maxPooling.Backward(verbose, gradImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (maxPooling.SetGpuEnable(true))
            {
                sw.Restart();
                maxPooling.Forward(verbose, inputImageArrayGpu);
                sw.Stop();
                RILogManager.Default?.SendDebug("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                // There is no implementation for memory transfer only
                RILogManager.Default?.SendDebug("Backward[Gpu] : None");
            }
            RILogManager.Default?.ExitMethod(maxPooling.Name);


            //AvgPooling
            AveragePooling avgPooling = new AveragePooling(3);

            RILogManager.Default?.EnterMethod(avgPooling.Name);

            sw.Restart();
            gradImageArrayCpu = avgPooling.Forward(verbose, inputImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            avgPooling.Backward(verbose, gradImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            RILogManager.Default?.ExitMethod(avgPooling.Name);


            //Conv2D
            Convolution2D conv2d = new Convolution2D(verbose, 3, 3, 3);

            RILogManager.Default?.EnterMethod(conv2d.Name);

            sw.Restart();
            gradImageArrayCpu = conv2d.Forward(verbose, inputImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            conv2d.Backward(verbose, gradImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (conv2d.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, conv2d, inputImageArrayGpu);
            }

            RILogManager.Default?.ExitMethod(conv2d.Name);


            //Deconv2D
            Deconvolution2D deconv2d = new Deconvolution2D(verbose, 3, 3, 3);

            RILogManager.Default?.EnterMethod(deconv2d.Name);

            sw.Restart();
            gradImageArrayCpu = deconv2d.Forward(verbose, inputImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            deconv2d.Backward(verbose, gradImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (deconv2d.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, deconv2d, inputImageArrayGpu);
            }

            RILogManager.Default?.ExitMethod(deconv2d.Name);


            //Dropout
            Dropout dropout = new Dropout();

            RILogManager.Default?.EnterMethod(dropout.Name);

            sw.Restart();
            gradArrayCpu = dropout.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            dropout.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (dropout.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = dropout.Forward(verbose, inputArrayGpu);
                sw.Stop();
                RILogManager.Default?.SendDebug("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                dropout.Backward(verbose, gradArrayGpu);
                sw.Stop();
                RILogManager.Default?.SendDebug("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }
            RILogManager.Default?.ExitMethod(dropout.Name);

            //ArcSinH
            ArcSinH a = new ArcSinH();

            RILogManager.Default?.EnterMethod(a.Name);

            sw.Restart();
            gradArrayCpu = a.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            a.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (a.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, a, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(a.Name);

            //ELU
            ELU e = new ELU();

            RILogManager.Default?.EnterMethod(e.Name);

            sw.Restart();
            gradArrayCpu = e.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            e.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            RILogManager.Default?.ExitMethod(e.Name);

            //LeakyReluShifted
            LeakyReLUShifted lrs = new LeakyReLUShifted();

            RILogManager.Default?.EnterMethod(lrs.Name);

            sw.Restart();
            gradArrayCpu = lrs.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            lrs.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (lrs.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, lrs, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(lrs.Name);


            //Logistic
            LogisticFunction lf = new LogisticFunction();

            RILogManager.Default?.EnterMethod(lf.Name);

            sw.Restart();
            gradArrayCpu = lf.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            lf.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (lf.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, lf, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(lf.Name);


            //MaxMinusOne
            MaxMinusOne mmo = new MaxMinusOne();

            RILogManager.Default?.EnterMethod(mmo.Name);

            sw.Restart();
            gradArrayCpu = mmo.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            mmo.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (mmo.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, mmo, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(mmo.Name);


            //ScaledELU
            ScaledELU se = new ScaledELU();

            RILogManager.Default?.EnterMethod(se.Name);

            sw.Restart();
            gradArrayCpu = se.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            se.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (se.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, se, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(se.Name);


            //Sine
            Sine s = new Sine();

            RILogManager.Default?.EnterMethod(s.Name);

            sw.Restart();
            gradArrayCpu = s.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            s.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (s.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, s, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(s.Name);
        }
Code Example #13
File: SyftController.cs Project: ygambhir/OpenMined
        public void ProcessMessage(string json_message, MonoBehaviour owner, Action <string> response)
        {
            Command msgObj = JsonUtility.FromJson <Command> (json_message);
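
            // Dispatch the incoming JSON command by objectType: Optimizer, FloatTensor, IntTensor, agent, model, or controller.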

            try
            {
                switch (msgObj.objectType)
                {
                case "Optimizer":
                {
                    if (msgObj.functionCall == "create")
                    {
                        string optimizer_type = msgObj.tensorIndexParams[0];

                        // Extract parameters
                        List <int> p = new List <int>();
                        for (int i = 1; i < msgObj.tensorIndexParams.Length; i++)
                        {
                            p.Add(int.Parse(msgObj.tensorIndexParams[i]));
                        }
                        List <float> hp = new List <float>();
                        for (int i = 0; i < msgObj.hyperParams.Length; i++)
                        {
                            hp.Add(float.Parse(msgObj.hyperParams[i]));
                        }

                        Optimizer optim = null;

                        if (optimizer_type == "sgd")
                        {
                            optim = new SGD(this, p, hp[0], hp[1], hp[2]);
                        }
                        else if (optimizer_type == "rmsprop")
                        {
                            optim = new RMSProp(this, p, hp[0], hp[1], hp[2], hp[3]);
                        }
                        else if (optimizer_type == "adam")
                        {
                            optim = new Adam(this, p, hp[0], hp[1], hp[2], hp[3], hp[4]);
                        }

                        response(optim.Id.ToString());
                        return;
                    }
                    else
                    {
                        Optimizer optim = this.GetOptimizer(msgObj.objectIndex);
                        response(optim.ProcessMessage(msgObj, this));

                        return;
                    }
                }

                case "FloatTensor":
                {
                    if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
                    {
                        FloatTensor tensor = floatTensorFactory.Create(_shape: msgObj.shape, _data: msgObj.data, _shader: this.Shader);
                        response(tensor.Id.ToString());
                        return;
                    }
                    else
                    {
                        FloatTensor tensor = floatTensorFactory.Get(msgObj.objectIndex);
                        // Process message's function
                        response(tensor.ProcessMessage(msgObj, this));
                        return;
                    }
                }

                case "IntTensor":
                {
                    if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
                    {
                        int[] data = new int[msgObj.data.Length];
                        for (int i = 0; i < msgObj.data.Length; i++)
                        {
                            data[i] = (int)msgObj.data[i];
                        }
                        IntTensor tensor = intTensorFactory.Create(_shape: msgObj.shape, _data: data);
                        response(tensor.Id.ToString());
                        return;
                    }
                    else
                    {
                        IntTensor tensor = intTensorFactory.Get(msgObj.objectIndex);
                        // Process message's function
                        response(tensor.ProcessMessage(msgObj, this));
                        return;
                    }
                }

                case "agent":
                {
                    if (msgObj.functionCall == "create")
                    {
                        Layer     model     = (Layer)GetModel(int.Parse(msgObj.tensorIndexParams[0]));
                        Optimizer optimizer = optimizers[int.Parse(msgObj.tensorIndexParams[1])];
                        response(new Syft.NN.RL.Agent(this, model, optimizer).Id.ToString());
                        return;
                    }

                    //Debug.Log("Getting Model:" + msgObj.objectIndex);
                    Syft.NN.RL.Agent agent = this.GetAgent(msgObj.objectIndex);
                    response(agent.ProcessMessageLocal(msgObj, this));
                    return;
                }

                case "model":
                {
                    if (msgObj.functionCall == "create")
                    {
                        string model_type = msgObj.tensorIndexParams[0];

                        Debug.LogFormat("<color=magenta>createModel:</color> {0}", model_type);

                        if (model_type == "linear")
                        {
                            response(this.BuildLinear(msgObj.tensorIndexParams).Id.ToString());
                            return;
                        }
                        else if (model_type == "relu")
                        {
                            response(this.BuildReLU().Id.ToString());
                            return;
                        }
                        else if (model_type == "log")
                        {
                            response(this.BuildLog().Id.ToString());
                            return;
                        }
                        else if (model_type == "dropout")
                        {
                            response(this.BuildDropout(msgObj.tensorIndexParams).Id.ToString());
                            return;
                        }
                        else if (model_type == "sigmoid")
                        {
                            response(this.BuildSigmoid().Id.ToString());
                            return;
                        }
                        else if (model_type == "sequential")
                        {
                            response(this.BuildSequential().Id.ToString());
                            return;
                        }
                        else if (model_type == "softmax")
                        {
                            response(this.BuildSoftmax(msgObj.tensorIndexParams).Id.ToString());
                            return;
                        }
                        else if (model_type == "logsoftmax")
                        {
                            response(this.BuildLogSoftmax(msgObj.tensorIndexParams).Id.ToString());
                            return;
                        }
                        else if (model_type == "tanh")
                        {
                            response(new Tanh(this).Id.ToString());
                            return;
                        }
                        else if (model_type == "crossentropyloss")
                        {
                            response(new CrossEntropyLoss(this, int.Parse(msgObj.tensorIndexParams[1])).Id.ToString());
                            return;
                        }
                        else if (model_type == "categorical_crossentropy")
                        {
                            response(new CategoricalCrossEntropyLoss(this).Id.ToString());
                            return;
                        }
                        else if (model_type == "nllloss")
                        {
                            response(new NLLLoss(this).Id.ToString());
                            return;
                        }
                        else if (model_type == "mseloss")
                        {
                            response(new MSELoss(this).Id.ToString());
                            return;
                        }
                        else if (model_type == "embedding")
                        {
                            response(new Embedding(this, int.Parse(msgObj.tensorIndexParams[1]), int.Parse(msgObj.tensorIndexParams[2])).Id.ToString());
                            return;
                        }
                        else
                        {
                            Debug.LogFormat("<color=red>Model Type Not Found:</color> {0}", model_type);
                        }
                    }
                    else
                    {
                        //Debug.Log("Getting Model:" + msgObj.objectIndex);
                        Model model = this.GetModel(msgObj.objectIndex);
                        response(model.ProcessMessage(msgObj, this));
                        return;
                    }
                    response("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
                    return;
                }

                case "controller":
                {
                    if (msgObj.functionCall == "num_tensors")
                    {
                        response(floatTensorFactory.Count() + "");
                        return;
                    }
                    else if (msgObj.functionCall == "num_models")
                    {
                        response(models.Count + "");
                        return;
                    }
                    else if (msgObj.functionCall == "new_tensors_allowed")
                    {
                        Debug.LogFormat("New Tensors Allowed:{0}", msgObj.tensorIndexParams[0]);
                        if (msgObj.tensorIndexParams[0] == "True")
                        {
                            allow_new_tensors = true;
                        }
                        else if (msgObj.tensorIndexParams[0] == "False")
                        {
                            allow_new_tensors = false;
                        }
                        else
                        {
                            throw new Exception("Invalid parameter for new_tensors_allowed. Did you mean true or false?");
                        }

                        response(allow_new_tensors + "");
                        return;
                    }
                    else if (msgObj.functionCall == "load_floattensor")
                    {
                        FloatTensor tensor = floatTensorFactory.Create(filepath: msgObj.tensorIndexParams[0], _shader: this.Shader);
                        response(tensor.Id.ToString());
                        return;
                    }
                    else if (msgObj.functionCall == "set_seed")
                    {
                        Random.InitState(int.Parse(msgObj.tensorIndexParams[0]));
                        response("Random seed set!");
                        return;
                    }
                    else if (msgObj.functionCall == "concatenate")
                    {
                        List <int> tensor_ids = new List <int>();
                        for (int i = 1; i < msgObj.tensorIndexParams.Length; i++)
                        {
                            tensor_ids.Add(int.Parse(msgObj.tensorIndexParams[i]));
                        }
                        FloatTensor result = Functional.Concatenate(floatTensorFactory, tensor_ids, int.Parse(msgObj.tensorIndexParams[0]));
                        response(result.Id.ToString());
                        return;
                    }
                    else if (msgObj.functionCall == "ones")
                    {
                        int[] dims = new int[msgObj.tensorIndexParams.Length];
                        for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                        {
                            dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                        }
                        FloatTensor result = Functional.Ones(floatTensorFactory, dims);
                        response(result.Id.ToString());
                        return;
                    }
                    else if (msgObj.functionCall == "randn")
                    {
                        int[] dims = new int[msgObj.tensorIndexParams.Length];
                        for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                        {
                            dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                        }
                        FloatTensor result = Functional.Randn(floatTensorFactory, dims);
                        response(result.Id.ToString());
                        return;
                    }
                    else if (msgObj.functionCall == "random")
                    {
                        int[] dims = new int[msgObj.tensorIndexParams.Length];
                        for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                        {
                            dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                        }
                        FloatTensor result = Functional.Random(floatTensorFactory, dims);
                        response(result.Id.ToString());
                        return;
                    }
                    else if (msgObj.functionCall == "zeros")
                    {
                        int[] dims = new int[msgObj.tensorIndexParams.Length];
                        for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                        {
                            dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                        }
                        FloatTensor result = Functional.Zeros(floatTensorFactory, dims);
                        response(result.Id.ToString());
                        return;
                    }
                    else if (msgObj.functionCall == "model_from_json")
                    {
                        Debug.Log("Loading Model from JSON:");
                        var json_str = msgObj.tensorIndexParams[0];
                        var config   = JObject.Parse(json_str);

                        Sequential model;

                        if ((string)config["class_name"] == "Sequential")
                        {
                            model = this.BuildSequential();
                        }
                        else
                        {
                            response("Unity Error: SyftController.processMessage: while Loading model, Class :" + config["class_name"] + " is not implemented");
                            return;
                        }

                        for (int i = 0; i < config["config"].ToList().Count; i++)
                        {
                            var layer_desc        = config["config"][i];
                            var layer_config_desc = layer_desc["config"];

                            if ((string)layer_desc["class_name"] == "Linear")
                            {
                                int previous_output_dim;

                                if (i == 0)
                                {
                                    previous_output_dim = (int)layer_config_desc["batch_input_shape"][layer_config_desc["batch_input_shape"].ToList().Count - 1];
                                }
                                else
                                {
                                    previous_output_dim = (int)layer_config_desc["units"];
                                }

                                string[] parameters = { "linear", previous_output_dim.ToString(), layer_config_desc["units"].ToString(), "Xavier" };
                                Layer    layer      = this.BuildLinear(parameters);
                                model.AddLayer(layer);

                                string activation_name = layer_config_desc["activation"].ToString();

                                if (activation_name != "linear")
                                {
                                    Layer activation;
                                    if (activation_name == "softmax")
                                    {
                                        parameters = new string[] { activation_name, "1" };
                                        activation = this.BuildSoftmax(parameters);
                                    }
                                    else if (activation_name == "relu")
                                    {
                                        activation = this.BuildReLU();
                                    }
                                    else
                                    {
                                        response("Unity Error: SyftController.processMessage: while Loading activations, Activation :" + activation_name + " is not implemented");
                                        return;
                                    }
                                    model.AddLayer(activation);
                                }
                            }
                            else
                            {
                                response("Unity Error: SyftController.processMessage: while Loading layers, Layer :" + layer_desc["class_name"] + " is not implemented");
                                return;
                            }
                        }

                        response(model.Id.ToString());
                        return;
                    }
                    else if (msgObj.functionCall == "from_proto")
                    {
                        Debug.Log("Loading Model from ONNX:");
                        var filename = msgObj.tensorIndexParams[0];

                        // Dispose the file stream once the proto has been parsed.
                        ModelProto modelProto;
                        using (var input = File.OpenRead(filename))
                        {
                            modelProto = ModelProto.Parser.ParseFrom(input);
                        }

                        Sequential model = this.BuildSequential();
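                        // Each ONNX node is extracted as a sub-graph of the main graph and
                        // mapped to a layer by its OpType:
                        // Gemm -> Linear, Dropout -> Dropout, Relu -> ReLU, Softmax -> Softmax.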

                        foreach (NodeProto node in modelProto.Graph.Node)
                        {
                            Layer      layer;
                            GraphProto g = ONNXTools.GetSubGraphFromNodeAndMainGraph(node, modelProto.Graph);
                            if (node.OpType == "Gemm")
                            {
                                layer = new Linear(this, g);
                            }
                            else if (node.OpType == "Dropout")
                            {
                                layer = new Dropout(this, g);
                            }
                            else if (node.OpType == "Relu")
                            {
                                layer = new ReLU(this, g);
                            }
                            else if (node.OpType == "Softmax")
                            {
                                layer = new Softmax(this, g);
                            }
                            else
                            {
                                response("Unity Error: SyftController.processMessage: Layer not yet implemented for deserialization:");
                                return;
                            }
                            model.AddLayer(layer);
                        }

                        response(model.Id.ToString());
                        return;
                    }
                    else if (msgObj.functionCall == "to_proto")
                    {
                        ModelProto model    = this.ToProto(msgObj.tensorIndexParams);
                        string     filename = msgObj.tensorIndexParams[2];
                        string     type     = msgObj.tensorIndexParams[3];
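                        // If type is "json" the textual form of the ModelProto is returned;
                        // otherwise the proto is written to `filename` and the file's full
                        // path is sent back.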
                        if (type == "json")
                        {
                            response(model.ToString());
                        }
                        else
                        {
                            using (var output = File.Create(filename))
                            {
                                model.WriteTo(output);
                            }
                            response(new FileInfo(filename).FullName);
                        }
                        return;
                    }

                    response("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
                    return;
                }

                case "Grid":
                    if (msgObj.functionCall == "learn")
                    {
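                        // tensorIndexParams[0] and [1] carry the ids of the input and target
                        // tensors; grid.Run executes the configured job and its return value
                        // is sent back as the response.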
                        var inputId  = int.Parse(msgObj.tensorIndexParams[0]);
                        var targetId = int.Parse(msgObj.tensorIndexParams[1]);

                        response(this.grid.Run(inputId, targetId, msgObj.configurations, owner));
                        return;
                    }

                    if (msgObj.functionCall == "getResults")
                    {
                        this.grid.GetResults(msgObj.experimentId, response);
                        return;
                    }

                    // Like getResults, but does not block waiting for results:
                    // it returns immediately, reporting whether or not the
                    // experiment has finished.
                    if (msgObj.functionCall == "checkStatus")
                    {
                        this.grid.CheckStatus(msgObj.experimentId, response);
                        return;
                    }

                    break;

                default:
                    break;
                }
            }
            catch (Exception e)
            {
                Debug.LogFormat("<color=red>{0}</color>", e.ToString());
                response("Unity Error: " + e.ToString());
                return;
            }

            // No handler above matched this command, so return the default error.

            response("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
            return;
        }
Code Example #14
        public static void Run()
        {
            Stopwatch sw = new Stopwatch();
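            // All timings below are reported in microseconds:
            // ElapsedTicks / (Stopwatch.Frequency / (1000 * 1000)) converts ticks to μs.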

            NdArray inputArrayCpu = new NdArray(BenchDataMaker.GetRealArray(INPUT_SIZE));
            NdArray inputArrayGpu = new NdArray(BenchDataMaker.GetRealArray(INPUT_SIZE));

            NdArray[] gradArrayCpu = null;
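            // Pattern for every layer benchmarked below: time Forward/Backward on the CPU,
            // then, if SetGpuEnable(true) succeeds, repeat the measurement on the GPU.
            // The forward output's Data is copied into its Grad to drive the backward pass.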

            //Linear
            Linear linear = new Linear(INPUT_SIZE, OUTPUT_SIZE);

            Console.WriteLine("◆" + linear.Name);
            if (TestCpu)
            {
                sw.Restart();
                gradArrayCpu = linear.Forward(inputArrayCpu);
                sw.Stop();
                Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayCpu[0].Grad = gradArrayCpu[0].Data; //Use Data as Grad

                sw.Restart();
                linear.Backward(gradArrayCpu);
                sw.Stop();
                Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            if (linear.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = linear.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                linear.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            //Tanh
            Tanh tanh = new Tanh();

            Console.WriteLine("\n ◆" + tanh.Name);

            sw.Restart();
            gradArrayCpu = tanh.Forward(inputArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            tanh.Backward(gradArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (tanh.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = tanh.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                tanh.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            //Sigmoid
            Sigmoid sigmoid = new Sigmoid();

            Console.WriteLine("\n ◆" + sigmoid.Name);

            sw.Restart();
            gradArrayCpu = sigmoid.Forward(inputArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            sigmoid.Backward(gradArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (sigmoid.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = sigmoid.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                sigmoid.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            //ReLU
            ReLU relu = new ReLU();

            Console.WriteLine("\n ◆" + relu.Name);

            sw.Restart();
            gradArrayCpu = relu.Forward(inputArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            relu.Backward(gradArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (relu.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = relu.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                relu.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            //LeakyReLU
            LeakyReLU leakyRelu = new LeakyReLU();

            Console.WriteLine("\n ◆" + leakyRelu.Name);

            sw.Restart();
            gradArrayCpu = leakyRelu.Forward(inputArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            leakyRelu.Backward(gradArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (leakyRelu.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = leakyRelu.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                leakyRelu.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            NdArray inputImageArrayGpu = new NdArray(BenchDataMaker.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);
            NdArray inputImageArrayCpu = new NdArray(BenchDataMaker.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);
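            // A batch of 5 images with shape {3, 256, 256} (3-channel, 256x256) used to
            // benchmark the image-oriented layers below.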


            //MaxPooling
            MaxPooling maxPooling = new MaxPooling(3);
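            // The constructor argument is assumed to be the pooling window size (3x3).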

            Console.WriteLine("\n ◆" + maxPooling.Name);

            sw.Restart();
            NdArray[] gradImageArrayCpu = maxPooling.Forward(inputImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            maxPooling.Backward(gradImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (maxPooling.SetGpuEnable(true))
            {
                sw.Restart();
                maxPooling.Forward(inputImageArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                // No GPU backward implementation exists (it would amount to a memory transfer only).
                Console.WriteLine("Backward[Gpu] : None");
            }


            //Conv2D
            Convolution2D conv2d = new Convolution2D(3, 3, 3);
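            // The three arguments are assumed to be (input channels, output channels,
            // kernel size), matching the 3-channel input images above.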

            Console.WriteLine("\n ◆" + conv2d.Name);

            sw.Restart();
            gradImageArrayCpu = conv2d.Forward(inputImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            conv2d.Backward(gradImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (conv2d.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradImageArrayGpu = conv2d.Forward(inputImageArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradImageArrayGpu[0].Grad = gradImageArrayGpu[0].Data;

                sw.Restart();
                conv2d.Backward(gradImageArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }


            //Deconv2D
            Deconvolution2D deconv2d = new Deconvolution2D(3, 3, 3);

            Console.WriteLine("\n ◆" + deconv2d.Name);

            sw.Restart();
            gradImageArrayCpu = deconv2d.Forward(inputImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            deconv2d.Backward(gradImageArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (deconv2d.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradImageArrayGpu = deconv2d.Forward(inputImageArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradImageArrayGpu[0].Grad = gradImageArrayGpu[0].Data;

                sw.Restart();
                deconv2d.Backward(gradImageArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            //Dropout
            Dropout dropout = new Dropout();

            Console.WriteLine("\n ◆" + dropout.Name);

            sw.Restart();
            gradArrayCpu = dropout.Forward(inputArrayCpu);
            sw.Stop();
            Console.WriteLine("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            dropout.Backward(gradArrayCpu);
            sw.Stop();
            Console.WriteLine("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (dropout.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = dropout.Forward(inputArrayGpu);
                sw.Stop();
                Console.WriteLine("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                dropout.Backward(gradArrayGpu);
                sw.Stop();
                Console.WriteLine("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }
        }