Example No. 1
 public override NDArray Backward(NDArray input, NDArray gradOutput, ModelMode mode)
 {
     DNN.ConvolutionBackwardData(bwdDataAlgo, cd, workspace, weight, gradOutput, gradInput);
     DNN.ConvolutionBackwardFilter(bwdFilterAlgo, cd, workspace, input, gradOutput, gradWeight);
     DNN.ConvolutionBackwardBias(cd, gradOutput, gradBias);
     return(gradInput);
 }
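
The three calls above fill in the gradients a convolution layer needs: ConvolutionBackwardData produces gradInput, ConvolutionBackwardFilter produces gradWeight, and ConvolutionBackwardBias produces gradBias. For reference, the bias gradient is simply gradOutput summed over the batch and spatial dimensions. The sketch below illustrates that reduction in plain C# on a hypothetical NCHW float array; it is not the library's NDArray API.

 // Reference-only sketch (not the library API): the bias gradient of a conv
 // layer is gradOutput reduced over batch (N), height (H) and width (W),
 // leaving one value per output channel (C).
 static float[] BiasGradientReference(float[,,,] gradOutput) // layout: [N, C, H, W]
 {
     int n = gradOutput.GetLength(0), c = gradOutput.GetLength(1);
     int h = gradOutput.GetLength(2), w = gradOutput.GetLength(3);
     var gradBias = new float[c];
     for (int ni = 0; ni < n; ni++)
         for (int ci = 0; ci < c; ci++)
             for (int hi = 0; hi < h; hi++)
                 for (int wi = 0; wi < w; wi++)
                     gradBias[ci] += gradOutput[ni, ci, hi, wi];
     return gradBias;
 }
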
Example No. 2
        public Conv2Cudnn(IAllocator allocator, SeedSource seedSource, DType elementType, int batchSize, int inputWidth, int inputHeight, int nInputPlane, int nOutputPlane, ConvolutionDesc2d cd)
            : base(allocator, seedSource, elementType, batchSize, inputWidth, inputHeight, nInputPlane, nOutputPlane, cd)
        {
            // Reshape weight and bias - CuDNN expects the dimensions to be structured slightly differently
            this.weight     = ViewReplace(this.weight, nOutputPlane, nInputPlane, cd.kH, cd.kW);
            this.bias       = ViewReplace(this.bias, 1, nOutputPlane, 1, 1);
            this.gradWeight = ViewReplace(this.gradWeight, this.weight.Shape);
            this.gradBias   = ViewReplace(this.gradBias, this.bias.Shape);


            var fwdWorkspace = DNN.GetConvolutionForwardWorkspaceSize(allocator, fwdAlgo, cd,
                                                                      new TensorShape(elementType, new long[] { batchSize, nInputPlane, inputHeight, inputWidth }),
                                                                      new TensorShape(weight),
                                                                      new TensorShape(activation));

            var bwdFilterWorkspace = DNN.GetConvolutionBackwardFilterWorkspaceSize(allocator, bwdFilterAlgo, cd,
                                                                                   new TensorShape(elementType, new long[] { batchSize, nInputPlane, inputHeight, inputWidth }),
                                                                                   new TensorShape(activation),
                                                                                   new TensorShape(weight));

            var bwdFilterInputWorkspace = DNN.GetConvolutionBackwardDataWorkspaceSize(allocator, bwdDataAlgo, cd,
                                                                                      new TensorShape(weight),
                                                                                      new TensorShape(activation),
                                                                                      new TensorShape(elementType, new long[] { batchSize, nInputPlane, inputHeight, inputWidth }));

            // A single scratch buffer is shared by the forward, backward-filter and
            // backward-data algorithms, so it only needs to be as large as the
            // biggest of the three queried requirements.
            var workspaceSize = Math.Max(Math.Max(fwdWorkspace, bwdFilterWorkspace), bwdFilterInputWorkspace);

            this.workspace = (CudaStorage)allocator.Allocate(DType.UInt8, workspaceSize);
        }
Example No. 3
        static void Main(string[] args)
        {
            double[][][] d = IO.LoadAllText(@"C:\DNN\test.txt");
            double[][]   o = new DNN(@"C:\DNN\a.dnn").Compute(d[0]);

            ShowText(d[0], d[1], o);
            Console.ReadLine();
        }
Example No. 4
        public override Tensor Forward(Tensor input, ModelMode mode)
        {
            using (var input4d = As4d(input))
            using (var activation4d = As4d(activation))
            {
                DNN.SoftmaxForward(DNNSoftmaxAlgorithm.Log, DNNSoftmaxMode.Instance, input4d, activation4d);
            }

            return(activation);
        }
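
For context, DNNSoftmaxAlgorithm.Log with DNNSoftmaxMode.Instance applies a log-softmax to each instance of the batch. A numerically stable CPU reference of that transform for a single instance, written in plain C# and independent of the library's Tensor type, could look like this:

 // CPU reference for log-softmax over one instance (not the library API):
 // log_softmax(x)_i = x_i - max(x) - log(sum_j exp(x_j - max(x)))
 // Subtracting the max keeps the exponentials from overflowing.
 static double[] LogSoftmaxReference(double[] x)
 {
     double max = double.NegativeInfinity;
     foreach (var v in x) max = Math.Max(max, v);

     double sum = 0.0;
     foreach (var v in x) sum += Math.Exp(v - max);
     double logSum = Math.Log(sum);

     var y = new double[x.Length];
     for (int i = 0; i < x.Length; i++)
         y[i] = x[i] - max - logSum;
     return y;
 }
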
Example No. 5
        public override Tensor Backward(Tensor input, Tensor gradOutput, ModelMode mode)
        {
            using (var activation4d = As4d(activation))
            using (var gradInput4d = As4d(gradInput))
            using (var gradOutput4d = As4d(gradOutput))
            {
                DNN.SoftmaxBackward(DNNSoftmaxAlgorithm.Log, DNNSoftmaxMode.Instance, activation4d, gradInput4d, gradOutput4d);
            }

            return(gradInput);
        }
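
Note that the backward pass reads the cached activation (the log-probabilities produced by Forward) rather than the original input: for log-softmax, gradInput_i = gradOutput_i - exp(activation_i) * sum_j gradOutput_j, since exp of the log-probabilities recovers the softmax. A plain-C# reference of that formula for one instance, again independent of the library's Tensor type:

 // CPU reference for the log-softmax gradient of one instance (not the library API).
 // logProbs are the values produced by the forward pass; Math.Exp(logProbs[i])
 // recovers the softmax probability for element i.
 static double[] LogSoftmaxBackwardReference(double[] logProbs, double[] gradOutput)
 {
     double gradSum = 0.0;
     foreach (var g in gradOutput) gradSum += g;

     var gradInput = new double[logProbs.Length];
     for (int i = 0; i < logProbs.Length; i++)
         gradInput[i] = gradOutput[i] - Math.Exp(logProbs[i]) * gradSum;
     return gradInput;
 }
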
Example No. 6
        static void Main(string[] args)
        {
            double[][][] io = IO.LoadAllText(@"C:\DNN\train.txt");
            DNN          d  = new DNN(@"C:\DNN\a.dnn");

            new Task(() => { d.Train(io[0], io[1]); }).Start();

            while (true)
            {
                Thread.Sleep(1000 * 10);
                Console.WriteLine(d.e);
                d.Save();
            }
        }
Example No. 7
 public override NDArray Backward(NDArray input, NDArray gradOutput, ModelMode mode)
 {
     DNN.PoolingBackward(poolingDesc, input, activation, gradInput, gradOutput);
     return(gradInput);
 }
Example No. 8
 public override NDArray Forward(NDArray input, ModelMode mode)
 {
     DNN.PoolingForward(poolingDesc, input, activation);
     return(activation);
 }
Example No. 9
 public override Tensor Backward(Tensor input, Tensor gradOutput, ModelMode mode)
 {
     DNN.PoolingBackward(poolingDesc, input, activation, gradInput, gradOutput);
     return(gradInput);
 }
Example No. 10
 public override Tensor Forward(Tensor input, ModelMode mode)
 {
     DNN.PoolingForward(poolingDesc, input, activation);
     return(activation);
 }
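
Examples 7 through 10 delegate all of the pooling arithmetic to poolingDesc, which carries the window size, stride, padding and pooling mode. Purely for illustration, a plain-C# max-pooling forward over a single 2-D channel (assuming a square window, stride equal to the window size and no padding, none of which is implied by the snippets above) could look like:

 // Illustrative max-pooling forward for one 2-D channel (not the library API).
 // Window is k x k, stride equals k, no padding.
 static double[,] MaxPool2dReference(double[,] input, int k)
 {
     int outH = input.GetLength(0) / k, outW = input.GetLength(1) / k;
     var output = new double[outH, outW];
     for (int oh = 0; oh < outH; oh++)
         for (int ow = 0; ow < outW; ow++)
         {
             double best = double.NegativeInfinity;
             for (int i = 0; i < k; i++)
                 for (int j = 0; j < k; j++)
                     best = Math.Max(best, input[oh * k + i, ow * k + j]);
             output[oh, ow] = best;
         }
     return output;
 }
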
Example No. 11
 public static void ComputeImg(string s)
 {
     double[]   d = IO.LoadImg(s);
     double[][] o = new DNN(@"C:\DNN\a.dnn").Compute(new double[][] { d });
     Console.WriteLine(IO.double2int(o[0]));
 }
Example No. 12
 public override NDArray Forward(NDArray input, ModelMode mode)
 {
     DNN.ConvForward(fwdAlgo, cd, workspace, input, weight, activation);
     DNN.AddTensor(bias, activation); // dims of bias with size = 1 are automatically broadcast over other dimensions
     return(activation);
 }
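
The comment on AddTensor is the key detail: because the bias was reshaped to (1, nOutputPlane, 1, 1) in the constructor of Example No. 2, its size-1 dimensions are broadcast across the batch and spatial dimensions of the activation. In plain C# over a hypothetical NCHW float array (not the library's NDArray type), that broadcast amounts to:

 // Illustration of broadcasting a (1, C, 1, 1) bias over an (N, C, H, W)
 // activation: every spatial position of every sample gets the per-channel bias.
 static void AddBiasReference(float[,,,] activation, float[] bias) // activation: [N, C, H, W]
 {
     int n = activation.GetLength(0), c = activation.GetLength(1);
     int h = activation.GetLength(2), w = activation.GetLength(3);
     for (int ni = 0; ni < n; ni++)
         for (int ci = 0; ci < c; ci++)
             for (int hi = 0; hi < h; hi++)
                 for (int wi = 0; wi < w; wi++)
                     activation[ni, ci, hi, wi] += bias[ci];
 }
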