Example #1
        public void MultipleInputsOutputs()
        {
            var connList = new List<WeightedDirectedConnection<double>>
            {
                new WeightedDirectedConnection<double>(0, 5, 1.0),
                new WeightedDirectedConnection<double>(1, 3, 1.0),
                new WeightedDirectedConnection<double>(2, 4, 1.0)
            };

            // Create graph.
            var digraph = WeightedAcyclicDirectedGraphBuilder<double>.Create(connList, 3, 3);

            // Create neural net
            var actFn = new LogisticFunction();
            var net   = new AcyclicNeuralNet(digraph, actFn.Fn, false);

            // Activate and test.
            net.InputVector[0] = 1.0;
            net.InputVector[1] = 2.0;
            net.InputVector[2] = 3.0;
            net.Activate();
            Assert.AreEqual(actFn.Fn(2.0), net.OutputVector[0]);
            Assert.AreEqual(actFn.Fn(3.0), net.OutputVector[1]);
            Assert.AreEqual(actFn.Fn(1.0), net.OutputVector[2]);
        }
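The assertions above compare each network output against actFn.Fn applied to the routed input. Assuming LogisticFunction.Fn is the standard sigmoid 1/(1 + e^-x) (an assumption; the snippet itself does not define it), the expected values can be reproduced with a standalone helper. Logistic below is a hypothetical local stand-in, not the library call:

using System;

static class Example1Check
{
    // Hypothetical stand-in for actFn.Fn: the standard logistic sigmoid (assumed).
    static double Logistic(double x) => 1.0 / (1.0 + Math.Exp(-x));

    static void Main()
    {
        // Connections 0->5, 1->3, 2->4 route input 0 to output index 2,
        // input 1 to output index 0, and input 2 to output index 1.
        Console.WriteLine(Logistic(2.0)); // expected OutputVector[0] ≈ 0.8808
        Console.WriteLine(Logistic(3.0)); // expected OutputVector[1] ≈ 0.9526
        Console.WriteLine(Logistic(1.0)); // expected OutputVector[2] ≈ 0.7311
    }
}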
Example #2
        public void Complex_WeightOne()
        {
            var connList = new List<WeightedDirectedConnection<double>>
            {
                new WeightedDirectedConnection<double>(0, 4, 1.0),
                new WeightedDirectedConnection<double>(1, 4, 1.0),
                new WeightedDirectedConnection<double>(1, 5, 1.0),
                new WeightedDirectedConnection<double>(3, 4, 1.0),
                new WeightedDirectedConnection<double>(4, 2, 0.9),
                new WeightedDirectedConnection<double>(5, 3, 1.0)
            };

            // Create graph.
            var digraph = WeightedAcyclicDirectedGraphBuilder<double>.Create(connList, 2, 2);

            // Create neural net
            var actFn = new LogisticFunction();
            var net   = new AcyclicNeuralNet(digraph, actFn.Fn, false);

            // Activate and test.
            net.InputVector[0] = 0.5;
            net.InputVector[1] = 0.25;
            net.Activate();

            double output1 = actFn.Fn(actFn.Fn(0.25));

            Assert.AreEqual(output1, net.OutputVector[1]);

            double output0 = actFn.Fn(actFn.Fn(output1 + 0.5 + 0.25) * 0.9);

            Assert.AreEqual(output0, net.OutputVector[0]);
        }
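The expected values in Complex_WeightOne encode a manual topological evaluation: node 5 squashes input 1, node 3 squashes node 5 (this is OutputVector[1]), node 4 sums both inputs plus node 3, and node 2 squashes 0.9 times node 4 (OutputVector[0]). A minimal sketch of the same evaluation, again using a hypothetical Logistic stand-in for actFn.Fn:

using System;

static class ComplexWeightOneWalkthrough
{
    static double Logistic(double x) => 1.0 / (1.0 + Math.Exp(-x)); // assumed sigmoid

    static void Main()
    {
        double in0 = 0.5, in1 = 0.25;

        double node5 = Logistic(in1);               // connection 1 -> 5, weight 1.0
        double node3 = Logistic(node5);             // 5 -> 3; equals OutputVector[1]
        double node4 = Logistic(in0 + in1 + node3); // 0 -> 4, 1 -> 4, 3 -> 4, all weight 1.0
        double node2 = Logistic(node4 * 0.9);       // 4 -> 2, weight 0.9; equals OutputVector[0]

        Console.WriteLine($"output1 = {node3}");
        Console.WriteLine($"output0 = {node2}");
    }
}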
Example #3
        public void SingleInput_WeightZero()
        {
            var connList = new List<WeightedDirectedConnection<double>>();

            connList.Add(new WeightedDirectedConnection<double>(0, 1, 0.0));

            // Create graph.
            var digraph = WeightedAcyclicDirectedGraphBuilder<double>.Create(connList, 1, 1);

            // Create neural net
            var actFn = new LogisticFunction();
            var net   = new AcyclicNeuralNet(digraph, actFn.Fn, false);

            // Note. The single connection weight is zero, so the input value has no effect.
            // Activate and test.
            net.InputVector[0] = 100.0;
            net.Activate();
            Assert.AreEqual(0.5, net.OutputVector[0]);

            // Activate and test.
            net.InputVector[0] = 0;
            net.Activate();
            Assert.AreEqual(0.5, net.OutputVector[0]);

            // Activate and test.
            net.InputVector[0] = -100;
            net.Activate();
            Assert.AreEqual(0.5, net.OutputVector[0]);
        }
Example #4
        public void CyclicOutput()
        {
            var connList = new List<WeightedDirectedConnection<double>>
            {
                new WeightedDirectedConnection<double>(0, 1, 1.0),
                new WeightedDirectedConnection<double>(1, 1, 1.0)
            };

            // Create graph.
            var digraph = WeightedDirectedGraphBuilder<double>.Create(connList, 1, 1);

            // Create neural net
            var actFn = new LogisticFunction();
            var net   = new CyclicNeuralNet(digraph, actFn.Fn, 1, false);

            // Activate and test.
            const double input    = 0.1;
            double       inputVal = input;

            net.InputVector[0] = inputVal;

            for (int i = 0; i < 10; i++)
            {
                net.Activate();
                double outputExpected = actFn.Fn(inputVal);
                Assert.AreEqual(outputExpected, net.OutputVector[0]);
                inputVal = input + outputExpected;
            }
        }
Example #5
        /// <summary>
        /// Create a prediction based on the learned Theta values and the supplied test item.
        /// </summary>
        /// <param name="y">Test item to generate a prediction for.</param>
        /// <returns>Predicted label: 1.0 or 0.0.</returns>
        public override double Predict(Vector y)
        {
            var tempy = PolynomialFeatures > 0 ? PreProcessing.FeatureDimensions.IncreaseDimensions(y, PolynomialFeatures) : y;

            tempy = tempy.Insert(0, 1.0);
            return(LogisticFunction.Compute((tempy * Theta).ToDouble()) >= 0.5 ? 1d : 0d);
        }
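Because the logistic function is strictly increasing with Compute(0) = 0.5, the test Compute(z) >= 0.5 is equivalent to z >= 0, so Predict effectively returns the sign of the augmented dot product tempy * Theta. A self-contained check of that equivalence (Logistic is a local stand-in, not the library's Compute):

using System;

static class ThresholdEquivalence
{
    static double Logistic(double z) => 1.0 / (1.0 + Math.Exp(-z));

    static void Main()
    {
        foreach (var z in new[] { -2.0, -0.1, 0.0, 0.1, 2.0 })
        {
            bool viaSigmoid = Logistic(z) >= 0.5; // the test Predict performs
            bool viaSign    = z >= 0.0;           // equivalent threshold on the raw score
            Console.WriteLine($"z = {z,4}: {viaSigmoid} == {viaSign}");
        }
    }
}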
Example #6
        public void TwoInputs_WeightHalf()
        {
            var connList = new List<WeightedDirectedConnection<double>>
            {
                new WeightedDirectedConnection<double>(0, 2, 0.5),
                new WeightedDirectedConnection<double>(1, 2, 0.5)
            };

            // Create graph.
            var digraph = WeightedAcyclicDirectedGraphBuilder<double>.Create(connList, 2, 1);

            // Create neural net
            var actFn = new LogisticFunction();
            var net   = new AcyclicNeuralNet(digraph, actFn.Fn, false);

            // Activate and test.
            net.InputVector[0] = 0.0;
            net.InputVector[1] = 0.0;
            net.Activate();
            Assert.AreEqual(0.5, net.OutputVector[0]);

            // Activate and test.
            net.InputVector[0] = 1.0;
            net.InputVector[1] = 2.0;
            net.Activate();
            Assert.AreEqual(actFn.Fn(1.5), net.OutputVector[0]);

            // Activate and test.
            net.InputVector[0] = 10.0;
            net.InputVector[1] = 20.0;
            net.Activate();
            Assert.AreEqual(actFn.Fn(15.0), net.OutputVector[0]);
        }
Example #7
        private static void RecurseBackpropagation(Layer layer, Dictionary<Node, double> backwardsPassDeltas, Momentum momentum)
        {
            if (!layer.PreviousLayers.Any())
            {
                // input case
                return;
            }

            var deltas = new Dictionary<Node, double>();

            for (var i = 0; i < layer.Nodes.Length; i++)
            {
                var node            = layer.Nodes[i];
                var sumDeltaWeights = backwardsPassDeltas.Keys.Sum(
                    backPassNode => backwardsPassDeltas[backPassNode] * backPassNode.Weights[node].Value
                    );
                var delta = sumDeltaWeights * LogisticFunction.ComputeDifferentialGivenOutput(node.Output);
                deltas.Add(node, delta);

                foreach (var prevNode in node.Weights.Keys)
                {
                    UpdateNodeWeight(node, prevNode, delta, momentum, i);
                }

                foreach (var prevLayer in node.BiasWeights.Keys)
                {
                    UpdateBiasNodeWeight(node, prevLayer, delta, momentum, i);
                }
            }

            for (var i = 0; i < layer.PreviousLayers.Length; i++)
            {
                RecurseBackpropagation(layer.PreviousLayers[i], deltas, momentum?.StepBackwards(i));
            }
        }
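ComputeDifferentialGivenOutput presumably relies on the standard identity σ'(x) = σ(x)(1 - σ(x)), which lets backpropagation obtain the activation derivative from the node output already stored during the forward pass instead of re-evaluating the activation. A sketch of that identity checked against a numerical derivative (names are illustrative):

using System;

static class LogisticDerivativeIdentity
{
    static double Logistic(double x) => 1.0 / (1.0 + Math.Exp(-x));

    // Derivative expressed in terms of the output o = Logistic(x).
    static double DerivativeGivenOutput(double o) => o * (1.0 - o);

    static void Main()
    {
        double x = 0.7;
        double o = Logistic(x);
        double h = 1e-6;
        double numeric = (Logistic(x + h) - Logistic(x - h)) / (2 * h); // central difference
        Console.WriteLine($"{DerivativeGivenOutput(o):F6} ~ {numeric:F6}");
    }
}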
Example #8
        public void SingleInput_WeightOne()
        {
            var connList = new List<WeightedDirectedConnection<double>>();

            connList.Add(new WeightedDirectedConnection<double>(0, 1, 1.0));

            // Create graph.
            var digraph = WeightedAcyclicDirectedGraphBuilder<double>.Create(connList, 1, 1);

            // Create neural net
            var actFn = new LogisticFunction();
            var net   = new AcyclicNeuralNet(digraph, actFn.Fn, false);

            // Activate and test.
            net.InputVector[0] = 0.0;
            net.Activate();
            Assert.AreEqual(0.5, net.OutputVector[0]);

            // Activate and test.
            net.InputVector[0] = 1.0;
            net.Activate();
            Assert.AreEqual(actFn.Fn(1), net.OutputVector[0]);

            // Activate and test.
            net.InputVector[0] = 10.0;
            net.Activate();
            Assert.AreEqual(actFn.Fn(10.0), net.OutputVector[0]);
        }
Example #9
        public void HiddenNode()
        {
            var connList = new List<WeightedDirectedConnection<double>>
            {
                new WeightedDirectedConnection<double>(0, 3, 0.5),
                new WeightedDirectedConnection<double>(1, 3, 0.5),
                new WeightedDirectedConnection<double>(3, 2, 2.0)
            };

            // Create graph.
            var digraph = WeightedAcyclicDirectedGraphBuilder<double>.Create(connList, 2, 1);

            // Create neural net
            var actFn = new LogisticFunction();
            var net   = new AcyclicNeuralNet(digraph, actFn.Fn, false);

            // Activate and test.
            net.InputVector[0] = 0.0;
            net.InputVector[1] = 0.0;
            net.Activate();
            Assert.AreEqual(actFn.Fn(1.0), net.OutputVector[0]);

            // Activate and test.
            net.InputVector[0] = 0.5;
            net.InputVector[1] = 0.25;
            net.Activate();
            Assert.AreEqual(actFn.Fn(actFn.Fn(0.375) * 2.0), net.OutputVector[0]);
        }
Example #10
        /// <summary>
        /// Computes the probability of the prediction being True.
        /// </summary>
        /// <param name="x">Feature vector to evaluate.</param>
        /// <returns>Probability that the prediction is True.</returns>
        public double PredictRaw(Vector x)
        {
            x = IncreaseDimensions(x, this.PolynomialFeatures);

            this.Preprocess(x);

            return(LogisticFunction.Compute(x.Insert(0, 1.0, false).Dot(Theta)));
        }
Example #11
        /// <summary>Converts an object into its XML representation.</summary>
        /// <param name="writer">The <see cref="T:System.Xml.XmlWriter" /> stream to which the object is
        /// serialized.</param>
        public override void WriteXml(XmlWriter writer)
        {
            writer.WriteAttributeString("LogisticFunction", LogisticFunction.GetType().Name);

            Xml.Write<Descriptor>(writer, Descriptor);
            Xml.Write<Vector>(writer, Theta);
            Xml.Write<int>(writer, PolynomialFeatures);
        }
Example #12
        public LogisticFunctionEditorForm(LogisticFunction value)
        {
            this.Value = value.Clone();
            InitializeComponent();

            this.propertyGrid1.SelectedObject = this.Value;
            m_series = this.chart1.Series[0];
            UpdateData();
        }
Example #13
        /// <summary>Projects vector into a logistic kernel space.</summary>
        /// <param name="m">Kernel Matrix.</param>
        /// <param name="x">Vector in original space.</param>
        /// <returns>Vector in logistic kernel space.</returns>
        public Vector Project(Matrix m, Vector x)
        {
            var K = Vector.Zeros(m.Rows);

            for (var i = 0; i < K.Length; i++)
            {
                var xy = m[i].Dot(x);
                K[i] = LogisticFunction.Compute(Lambda * xy);
            }

            return(K);
        }
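Project maps x into an m.Rows-dimensional feature space where component i is the logistic of Lambda times the dot product of kernel row i with x, i.e. K[i] = σ(λ·⟨m[i], x⟩). A self-contained sketch with plain arrays (Logistic and the sample data are illustrative stand-ins for the library's Matrix/Vector types):

using System;
using System.Linq;

static class LogisticKernelSketch
{
    static double Logistic(double z) => 1.0 / (1.0 + Math.Exp(-z));

    static double[] Project(double[][] m, double[] x, double lambda)
    {
        var k = new double[m.Length];
        for (var i = 0; i < k.Length; i++)
        {
            var xy = m[i].Zip(x, (a, b) => a * b).Sum(); // dot product of row i with x
            k[i] = Logistic(lambda * xy);
        }
        return k;
    }

    static void Main()
    {
        var m = new[] { new[] { 1.0, 0.0 }, new[] { 0.5, 0.5 } };
        var x = new[] { 2.0, 4.0 };
        Console.WriteLine(string.Join(", ", Project(m, x, 1.0))); // ≈ 0.8808, 0.9526
    }
}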
Example #14
        public void PopulateOutput()
        {
            var output = 0d;

            foreach (var previousNodeWeight in Weights)
            {
                output += previousNodeWeight.Key.Output * previousNodeWeight.Value.Value;
            }
            foreach (var previousLayerWeight in BiasWeights)
            {
                output += previousLayerWeight.Value.Value;
            }

            Output = LogisticFunction.ComputeOutput(output);
        }
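PopulateOutput accumulates the weighted outputs of all predecessor nodes plus one bias term per previous layer, then squashes the sum. Assuming the same shapes, the two loops collapse to a pair of LINQ sums; the tuple lists below are illustrative stand-ins for the Weights and BiasWeights dictionaries:

using System;
using System.Collections.Generic;
using System.Linq;

static class WeightedSumSketch
{
    static double Logistic(double z) => 1.0 / (1.0 + Math.Exp(-z));

    static void Main()
    {
        // Stand-ins: predecessor output paired with its connection weight, plus per-layer biases.
        var weights = new List<(double Output, double Weight)> { (0.5, 0.2), (0.8, -0.4) };
        var biases  = new List<double> { 0.1 };

        var net = weights.Sum(w => w.Output * w.Weight) + biases.Sum();
        Console.WriteLine(Logistic(net)); // the quantity PopulateOutput assigns to Output
    }
}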
Example #15
        private static void NegativeSampleInput(Layer layer, Layer inputLayer, Dictionary<Node, double> backwardsPassDeltas, int inputIndex)
        {
            var sumDeltaWeights = (double)0;

            foreach (var backPassDelta in backwardsPassDeltas)
            {
                sumDeltaWeights += backPassDelta.Value;
            }

            var inputNode = inputLayer.Nodes[inputIndex];

            foreach (var node in layer.Nodes)
            {
                var delta = sumDeltaWeights * LogisticFunction.ComputeDifferentialGivenOutput(node.Output);
                UpdateNodeWeight(node, inputNode, delta);
                UpdateBiasNodeWeight(node, inputLayer, delta);
            }
        }
Example #16
        public override object EditValue(ITypeDescriptorContext context, IServiceProvider provider, object value)
        {
            if ((context != null) && (provider != null))
            {
                LogisticFunction function = value as LogisticFunction;
                if (function != null)
                {
                    using (LogisticFunctionEditorForm form = new LogisticFunctionEditorForm(function))
                    {
                        if (form.ShowDialog() == DialogResult.OK)
                        {
                            return(form.Value);
                        }
                    }
                }
            }

            return(value);
        }
Example #17
        private static Dictionary<Node, double> NegativeSampleOutput(Layer outputLayer, double currentOutput, double targetOutput, int outputIndex, double learningRate)
        {
            var outputNode = outputLayer.Nodes[outputIndex];

            var delta = LogisticFunction.ComputeDeltaOutput(currentOutput, targetOutput) * learningRate;

            foreach (var previousNode in outputNode.Weights.Keys)
            {
                UpdateNodeWeight(outputNode, previousNode, delta);
            }
            foreach (var previousBiasLayer in outputNode.BiasWeights.Keys)
            {
                UpdateBiasNodeWeight(outputNode, previousBiasLayer, delta);
            }

            return(new Dictionary<Node, double> {
                { outputNode, delta }
            });
        }
Example #18
        /// <summary>
        /// Three-layer backpropagation class.
        /// </summary>
        /// <param name="inputWeight">Input Weight.</param>
        /// <param name="outputWeight">Output Weight.</param>
        /// <param name="hiddenLayer">Hidden layer.</param>
        /// <param name="outputLayer">Output layer.</param>
        /// <param name="hiddenLogisticFunc">Hidden layer logistic function. Default = Sigmoid.</param>
        /// <param name="outputLogisticFunc">Output layer logistic function. Default = Sigmoid.</param>
        /// <param name="lossFunc">Output Layer Loss Function. Default = MSE</param>
        /// <param name="learnRate">Learn rate. Default = 0.01</param>
        public BackPropagation(Matrix inputWeight, Matrix outputWeight, Matrix hiddenLayer, Matrix outputLayer,
                               LogisticFunctions hiddenLogisticFunc = LogisticFunctions.Sigmoid,
                               LogisticFunctions outputLogisticFunc = LogisticFunctions.Sigmoid,
                               LossFunctions lossFunc = LossFunctions.MSE, double learnRate = 0.01)
        {
            _inputWeight  = inputWeight;
            _outputWeight = outputWeight;
            _hiddenLayer  = hiddenLayer;
            _outputLayer  = outputLayer;

            HiddenLogisticFunc = hiddenLogisticFunc;
            OutputLogisticFunc = outputLogisticFunc;
            LossFunc           = lossFunc;

            _hiddenLogisticFunc = GetLogisticFunction(hiddenLogisticFunc);
            _outputLogisticFunc = GetLogisticFunction(outputLogisticFunc);
            _lossFunc           = GetLossFunction(lossFunc);

            LearnRate = learnRate;
        }
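GetLogisticFunction resolves each LogisticFunctions enum value into a callable delegate, a standard enum-to-delegate dispatch. A minimal self-contained sketch of the pattern; only Sigmoid is confirmed by the signature above, and the Tanh member and both formulas are illustrative:

using System;

static class DispatchSketch
{
    enum LogisticFunctions { Sigmoid, Tanh } // Tanh is illustrative

    static Func<double, double> GetLogisticFunction(LogisticFunctions f)
    {
        switch (f)
        {
            case LogisticFunctions.Sigmoid: return x => 1.0 / (1.0 + Math.Exp(-x));
            case LogisticFunctions.Tanh:    return x => Math.Tanh(x);
            default: throw new ArgumentOutOfRangeException(nameof(f));
        }
    }

    static void Main()
    {
        var fn = GetLogisticFunction(LogisticFunctions.Sigmoid);
        Console.WriteLine(fn(0.0)); // 0.5
    }
}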
Example #19
        private bool PopulateIndexedOutput(Layer layer, int inputIndex, double inputValue)
        {
            if (!layer.PreviousLayers.Any())
            {
                layer.Nodes[inputIndex].Output = inputValue;
                return(true);
            }

            var shouldPopulateAllOutputs = false;

            foreach (var prevLayer in layer.PreviousLayers)
            {
                var isNextToInput = PopulateIndexedOutput(prevLayer, inputIndex, inputValue);

                if (isNextToInput)
                {
                    var inputNode = prevLayer.Nodes[inputIndex];
                    foreach (var node in layer.Nodes)
                    {
                        node.Output = node.Weights[inputNode].Value * inputNode.Output + node.BiasWeights[prevLayer].Value;
                        node.Output = LogisticFunction.ComputeOutput(node.Output);
                    }
                }
                else
                {
                    // Not adjacent to the input layer, so all node outputs need repopulating. Note: this will break if there are multiple inputs.
                    shouldPopulateAllOutputs = true;
                }
            }

            if (shouldPopulateAllOutputs)
            {
                foreach (var node in layer.Nodes)
                {
                    node.PopulateOutput();
                }
            }

            return(false);
        }
Example #20
        private static Dictionary<Node, double> UpdateOutputLayer(Layer outputLayer, double[] currentOutputs, double[] targetOutputs, double learningRate, Momentum momentum)
        {
            var deltas = new Dictionary<Node, double>();

            for (var i = 0; i < outputLayer.Nodes.Length; i++)
            {
                var node  = outputLayer.Nodes[i];
                var delta = LogisticFunction.ComputeDeltaOutput(currentOutputs[i], targetOutputs[i]) * learningRate;
                deltas.Add(node, delta);
                foreach (var prevNode in node.Weights.Keys)
                {
                    UpdateNodeWeight(node, prevNode, delta, momentum, i);
                }

                foreach (var prevLayer in node.BiasWeights.Keys)
                {
                    UpdateBiasNodeWeight(node, prevLayer, delta, momentum, i);
                }
            }

            return(deltas);
        }
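ComputeDeltaOutput folds the loss derivative into a single per-node delta, which the caller scales by the learning rate. For a logistic output trained with cross-entropy the activation and loss derivatives cancel, leaving current - target; with mean squared error a σ'(·) factor remains. Which variant this library uses is an assumption here; both are sketched:

using System;

static class OutputDeltaSketch
{
    // Cross-entropy + logistic output: the derivatives cancel.
    static double DeltaCrossEntropy(double current, double target) => current - target;

    // MSE + logistic output: keeps the sigmoid-derivative factor current * (1 - current).
    static double DeltaMse(double current, double target) =>
        (current - target) * current * (1.0 - current);

    static void Main()
    {
        Console.WriteLine(DeltaCrossEntropy(0.8, 1.0)); // -0.2
        Console.WriteLine(DeltaMse(0.8, 1.0));          // -0.032
    }
}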
Example #21
        private static void RecurseNegativeSample(Layer layer, Layer previousLayer, Dictionary<Node, double> backwardsPassDeltas, int inputIndex)
        {
            if (!previousLayer.PreviousLayers.Any())
            {
                NegativeSampleInput(layer, previousLayer, backwardsPassDeltas, inputIndex);
                return;
            }

            var deltas = new Dictionary<Node, double>();

            foreach (var node in layer.Nodes)
            {
                var sumDeltaWeights = (double)0;

                foreach (var backPassNode in backwardsPassDeltas.Keys)
                {
                    sumDeltaWeights += backwardsPassDeltas[backPassNode] * backPassNode.Weights[node].Value;
                }
                var delta = sumDeltaWeights * LogisticFunction.ComputeDifferentialGivenOutput(node.Output);
                deltas.Add(node, delta);

                foreach (var prevNode in node.Weights.Keys)
                {
                    UpdateNodeWeight(node, prevNode, delta);
                }

                foreach (var prevLayer in node.BiasWeights.Keys)
                {
                    UpdateBiasNodeWeight(node, prevLayer, delta);
                }
            }

            foreach (var prevPrevLayer in previousLayer.PreviousLayers)
            {
                RecurseNegativeSample(previousLayer, prevPrevLayer, deltas, inputIndex);
            }
        }
Example #22
        public void ComplexCyclic()
        {
            var connList = new List<WeightedDirectedConnection<double>>
            {
                new WeightedDirectedConnection<double>(0, 1, -2.0),
                new WeightedDirectedConnection<double>(0, 2, 1.0),
                new WeightedDirectedConnection<double>(1, 2, 1.0),
                new WeightedDirectedConnection<double>(2, 1, 1.0)
            };

            // Create graph.
            var digraph = WeightedDirectedGraphBuilder<double>.Create(connList, 1, 1);

            // Create neural net
            var actFn = new LogisticFunction();
            var net   = new CyclicNeuralNet(digraph, actFn.Fn, 1, false);

            // Simulate network in C# and compare calculated outputs with actual network outputs.
            double[] preArr  = new double[3];
            double[] postArr = new double[3];

            postArr[0]         = 3.0;
            net.InputVector[0] = 3.0;

            for (int i = 0; i < 10; i++)
            {
                preArr[1] = postArr[0] * -2.0 + postArr[2];
                preArr[2] = postArr[0] + postArr[1];

                postArr[1] = actFn.Fn(preArr[1]);
                postArr[2] = actFn.Fn(preArr[2]);

                net.Activate();

                Assert.AreEqual(postArr[1], net.OutputVector[0]);
            }
        }
Example #23
        public void Check(bool[] a, double[] b, double expected)
        {
            var res = new LogisticFunction().CalculateScore(a, b);

            res.ShouldBe(expected, 0.01);
        }
Example #24
        public static void Run(bool verbose)
        {
            Stopwatch sw = new Stopwatch();

            NdArray inputArrayCpu = new NdArray(BenchDataMaker.GetRealArray(INPUT_SIZE));
            NdArray inputArrayGpu = new NdArray(BenchDataMaker.GetRealArray(INPUT_SIZE));

            Ensure.Argument(inputArrayGpu).NotNull();
            Ensure.Argument(inputArrayCpu).NotNull();

            //Linear
            Linear linear = new Linear(verbose, INPUT_SIZE, OUTPUT_SIZE);

            if (verbose)
            {
                RILogManager.Default?.EnterMethod(linear.Name);
            }

            sw.Restart();
            NdArray[] gradArrayCpu = linear.Forward(verbose, inputArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            Ensure.Argument(gradArrayCpu).NotNull();

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data; // Use Data as Grad

            sw.Restart();
            linear.Backward(verbose, gradArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            if (linear.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = linear.Forward(verbose, inputArrayGpu);
                sw.Stop();
                if (verbose)
                {
                    RILogManager.Default?.SendDebug("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
                }

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                linear.Backward(verbose, gradArrayGpu);
                sw.Stop();
                if (verbose)
                {
                    RILogManager.Default?.SendDebug("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
                }
            }
            if (verbose)
            {
                RILogManager.Default?.ExitMethod(linear.Name);
            }

            //Tanh
            Tanh tanh = new Tanh();

            if (verbose)
            {
                RILogManager.Default?.EnterMethod(tanh.Name);
            }

            sw.Restart();
            gradArrayCpu = tanh.Forward(verbose, inputArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            tanh.Backward(verbose, gradArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            if (tanh.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, tanh, inputArrayGpu);
            }

            if (verbose)
            {
                RILogManager.Default?.ExitMethod(tanh.Name);
            }



            //Sigmoid
            Sigmoid sigmoid = new Sigmoid();

            if (verbose)
            {
                RILogManager.Default?.EnterMethod(sigmoid.Name);
            }

            sw.Restart();
            gradArrayCpu = sigmoid.Forward(verbose, inputArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            sigmoid.Backward(verbose, gradArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }

            if (sigmoid.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, sigmoid, inputArrayGpu);
            }
            if (verbose)
            {
                RILogManager.Default?.ExitMethod(sigmoid.Name);
            }


            //Softmax
            Softmax sm = new Softmax();

            RILogManager.Default?.EnterMethod(sm.Name);

            sw.Restart();
            gradArrayCpu = sm.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            sm.Backward(verbose, gradArrayCpu);
            sw.Stop();
            if (verbose)
            {
                RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }
            if (verbose)
            {
                RILogManager.Default?.ExitMethod(sm.Name);
            }



            //Softplus
            Softplus sp = new Softplus();

            if (verbose)
            {
                RILogManager.Default?.EnterMethod(sp.Name);
            }

            sw.Restart();
            gradArrayCpu = sp.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            sp.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            RILogManager.Default?.ExitMethod(sp.Name);


            //ReLU
            ReLU relu = new ReLU();

            RILogManager.Default?.EnterMethod(relu.Name);

            sw.Restart();
            gradArrayCpu = relu.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            relu.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (relu.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, relu, inputArrayGpu);
            }

            RILogManager.Default?.ExitMethod(relu.Name);


            //LeakyReLU
            LeakyReLU leakyRelu = new LeakyReLU();

            RILogManager.Default?.EnterMethod(leakyRelu.Name);

            sw.Restart();
            gradArrayCpu = leakyRelu.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            leakyRelu.Backward(verbose, gradArrayCpu);
            sw.Stop();

            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (leakyRelu.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, leakyRelu, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(leakyRelu.Name);


            //ReLuTanh
            ReLuTanh rth = new ReLuTanh();

            RILogManager.Default?.EnterMethod(rth.Name);

            sw.Restart();
            gradArrayCpu = rth.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            rth.Backward(verbose, gradArrayCpu);
            sw.Stop();

            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (rth.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, rth, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(rth.Name);


            ////Swish
            //Swish swi = new Swish();
            //RILogManager.Default?.SendDebug(swi.Name);

            //sw.Restart();
            //gradArrayCpu = swi.Forward(inputArrayCpu);
            //sw.Stop();
            //RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            //gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            //sw.Restart();
            //swi.Backward(gradArrayCpu);
            //sw.Stop();

            //RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");


            NdArray inputImageArrayGpu = new NdArray(BenchDataMaker.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);
            NdArray inputImageArrayCpu = new NdArray(BenchDataMaker.GetRealArray(3 * 256 * 256 * 5), new[] { 3, 256, 256 }, 5);

            //MaxPooling
            MaxPooling maxPooling = new MaxPooling(3);

            RILogManager.Default?.EnterMethod(maxPooling.Name);

            sw.Restart();
            NdArray[] gradImageArrayCpu = maxPooling.Forward(verbose, inputImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            maxPooling.Backward(verbose, gradImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (maxPooling.SetGpuEnable(true))
            {
                sw.Restart();
                maxPooling.Forward(verbose, inputImageArrayGpu);
                sw.Stop();
                RILogManager.Default?.SendDebug("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                // No GPU backward implementation exists (it would amount to a memory transfer only).
                RILogManager.Default?.SendDebug("Backward[Gpu] : None");
            }
            RILogManager.Default?.ExitMethod(maxPooling.Name);


            //AvgPooling
            AveragePooling avgPooling = new AveragePooling(3);

            RILogManager.Default?.EnterMethod(avgPooling.Name);

            sw.Restart();
            gradImageArrayCpu = avgPooling.Forward(verbose, inputImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            avgPooling.Backward(verbose, gradImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            RILogManager.Default?.ExitMethod(avgPooling.Name);


            //Conv2D
            Convolution2D conv2d = new Convolution2D(verbose, 3, 3, 3);

            RILogManager.Default?.EnterMethod(conv2d.Name);

            sw.Restart();
            gradImageArrayCpu = conv2d.Forward(verbose, inputImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            conv2d.Backward(verbose, gradImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (conv2d.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, conv2d, inputImageArrayGpu);
            }

            RILogManager.Default?.ExitMethod(conv2d.Name);


            //Deconv2D
            Deconvolution2D deconv2d = new Deconvolution2D(verbose, 3, 3, 3);

            RILogManager.Default?.EnterMethod(deconv2d.Name);

            sw.Restart();
            gradImageArrayCpu = deconv2d.Forward(verbose, inputImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradImageArrayCpu[0].Grad = gradImageArrayCpu[0].Data;

            sw.Restart();
            deconv2d.Backward(verbose, gradImageArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (deconv2d.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, deconv2d, inputImageArrayGpu);
            }

            RILogManager.Default?.ExitMethod(deconv2d.Name);


            //Dropout
            Dropout dropout = new Dropout();

            RILogManager.Default?.EnterMethod(dropout.Name);

            sw.Restart();
            gradArrayCpu = dropout.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            dropout.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (dropout.SetGpuEnable(true))
            {
                sw.Restart();
                NdArray[] gradArrayGpu = dropout.Forward(verbose, inputArrayGpu);
                sw.Stop();
                RILogManager.Default?.SendDebug("Forward [Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

                gradArrayGpu[0].Grad = gradArrayGpu[0].Data;

                sw.Restart();
                dropout.Backward(verbose, gradArrayGpu);
                sw.Stop();
                RILogManager.Default?.SendDebug("Backward[Gpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            }
            RILogManager.Default?.ExitMethod(dropout.Name);

            //ArcSinH
            ArcSinH a = new ArcSinH();

            RILogManager.Default?.EnterMethod(a.Name);

            sw.Restart();
            gradArrayCpu = a.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            a.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (a.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, a, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(a.Name);

            //ELU
            ELU e = new ELU();

            RILogManager.Default?.EnterMethod(e.Name);

            sw.Restart();
            gradArrayCpu = e.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            e.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");
            RILogManager.Default?.ExitMethod(e.Name);

            //LeakyReluShifted
            LeakyReLUShifted lrs = new LeakyReLUShifted();

            RILogManager.Default?.EnterMethod(lrs.Name);

            sw.Restart();
            gradArrayCpu = lrs.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            lrs.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (lrs.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, lrs, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(lrs.Name);


            //Logistic
            LogisticFunction lf = new LogisticFunction();

            RILogManager.Default?.EnterMethod(lf.Name);

            sw.Restart();
            gradArrayCpu = lf.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            lf.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (lf.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, lf, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(lf.Name);


            //MaxMinusOne
            MaxMinusOne mmo = new MaxMinusOne();

            RILogManager.Default?.EnterMethod(mmo.Name);

            sw.Restart();
            gradArrayCpu = mmo.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            mmo.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (mmo.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, mmo, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(mmo.Name);


            //ScaledELU
            ScaledELU se = new ScaledELU();

            RILogManager.Default?.EnterMethod(se.Name);

            sw.Restart();
            gradArrayCpu = se.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            se.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (se.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, se, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(se.Name);


            //Sine
            Sine s = new Sine();

            RILogManager.Default?.EnterMethod(s.Name);

            sw.Restart();
            gradArrayCpu = s.Forward(verbose, inputArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Forward [Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            gradArrayCpu[0].Grad = gradArrayCpu[0].Data;

            sw.Restart();
            s.Backward(verbose, gradArrayCpu);
            sw.Stop();
            RILogManager.Default?.SendDebug("Backward[Cpu] : " + (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs");

            if (s.SetGpuEnable(true))
            {
                HandleGPU(verbose, sw, s, inputArrayGpu);
            }
            RILogManager.Default?.ExitMethod(s.Name);
        }
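The benchmark repeats the same ticks-to-microseconds conversion after every Restart/Stop pair. Stopwatch.Frequency is ticks per second, so dividing it by 10^6 yields ticks per microsecond; a small helper (hypothetical, not in the source) would keep the logging lines short:

using System.Diagnostics;

static class BenchTiming
{
    // Converts elapsed ticks to whole microseconds, matching the inline expression used above.
    public static string Microseconds(Stopwatch sw) =>
        (sw.ElapsedTicks / (Stopwatch.Frequency / (1000L * 1000L))).ToString("n0") + "μs";
}

// Usage (illustrative):
//   sw.Restart(); /* work */ sw.Stop();
//   RILogManager.Default?.SendDebug("Forward [Cpu] : " + BenchTiming.Microseconds(sw));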