        // Convolution layer: backward pass (im2col/col2im based).
        public override void Backward(SuperArray outputgrad)
        {
            // Resolve the padding width from the padding type.
            int pad = 0;

            if (Padding == PaddingType.Same)
            {
                pad = 1;
            }
            else if (Padding == PaddingType.Full)
            {
                pad = 2;
            }

            // Flatten the upstream gradient so each row corresponds to one filter, then
            // compute the weight gradient against the cached im2col columns (xCols).
            var dout_flat = outputgrad.Transpose(2, 0, 1).Reshape(Filters, -1);
            var dW        = Ops.Dot(dout_flat, xCols.Transpose());

            // Reshape dW back to the kernel shape and reduce the upstream gradient
            // to get the bias gradient.
            dW = dW.Reshape(Params["w"].Data.Shape);
            var db     = Ops.Sum(outputgrad, new uint[] { 0, 1, 2 }).Reshape(Filters, -1);

            // Flatten each filter into a row for the input-gradient computation.
            var W_flat = Params["w"].Data.Reshape(Filters, -1);

            // Propagate the gradient onto the im2col columns, then fold the columns
            // back into the input tensor layout with Col2Im.
            var dX_col = Ops.Dot(W_flat.Transpose(), dout_flat);

            Input.Grad = ImUtil.Col2Im(dX_col, Input.Data.Shape.Dims, Tuple.Create(KernalSize, KernalSize), pad, Strides);

            Params["w"].Grad = dW;
            if (UseBias)
            {
                Params["b"].Grad = db;
            }
        }
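
        // Illustrative sketch (not part of the layer above): the shape bookkeeping behind
        // the dW = Ops.Dot(dout_flat, xCols.Transpose()) step, assuming square kernels and
        // strides and the usual im2col layout where xCols is (C * K * K, N * H_out * W_out).
        // The helper name and its parameters are hypothetical and exist only for illustration.
        private static (int colRows, int colCols) Im2ColGradShapes(
            int n, int c, int h, int w, int kernel, int stride, int pad)
        {
            // Spatial size of the convolution output.
            int hOut = (h + 2 * pad - kernel) / stride + 1;
            int wOut = (w + 2 * pad - kernel) / stride + 1;

            // xCols has one row per (channel, ky, kx) tap and one column per output position.
            int colRows = c * kernel * kernel;   // rows of xCols
            int colCols = n * hOut * wOut;       // columns of xCols, and of dout_flat

            // dout_flat is (Filters, colCols), so Dot(dout_flat, xCols^T) is (Filters, colRows),
            // which Reshape then turns back into the kernel shape (Filters, C, K, K).
            return (colRows, colCols);
        }
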
        // Pooling layer: backward pass (im2col/col2im based).
        public override void Backward(SuperArray outputgrad)
        {
            // Zero-initialised buffer with the same shape as the cached im2col columns.
            // It is allocated but not filled below; a complete max/average pooling backward
            // scatters the upstream gradient into these columns before calling Col2Im.
            SuperArray dX_col = Ops.Constant(0, xCols.Shape);

            var (n, c, h, w) = Input.Data.GetConv2DShape();

            // Resolve the padding width from the padding type.
            int pad = 0;

            if (Padding == PaddingType.Same)
            {
                pad = 1;
            }
            else if (Padding == PaddingType.Full)
            {
                pad = 2;
            }

            // Rearrange the upstream gradient and fold it back into the input tensor
            // layout with Col2Im.
            var dout_flat = outputgrad.Transpose(2, 3, 0, 1);
            var dX        = ImUtil.Col2Im(dout_flat, Input.Data.Shape.Dims, PoolSize, pad, Strides);

            Input.Grad = dX.Reshape(n, c, h, w);
        }
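
        // Illustrative sketch (not part of the layer above): how a max-pooling backward pass
        // typically routes the upstream gradient into the column buffer before Col2Im. It uses
        // plain double[,] arrays rather than SuperArray, and the helper name, parameters and
        // maxIdx layout are assumptions made for illustration only.
        private static double[,] RouteMaxPoolGrad(double[] doutFlat, int[] maxIdx, int colRows)
        {
            // One column per pooling window: only the position that produced the forward max
            // receives the upstream gradient; every other position stays zero.
            int colCols = doutFlat.Length;
            var dXcol = new double[colRows, colCols];

            for (int j = 0; j < colCols; j++)
            {
                dXcol[maxIdx[j], j] = doutFlat[j];   // maxIdx[j] is the argmax row of column j
            }

            return dXcol;
        }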