// Backward pass for the cuDNN 2D convolution layer.
// Computes all three gradients from gradOutput in a single pass:
//   - gradInput  (w.r.t. the layer input)   via cuDNN backward-data
//   - gradWeight (w.r.t. the filter weights) via cuDNN backward-filter
//   - gradBias   (w.r.t. the bias)           via cuDNN backward-bias
// NOTE(review): the first two calls share the same pre-allocated workspace
// buffer, so reordering or parallelizing them looks unsafe — confirm.
// `input` is consumed only by the filter-gradient step; `mode` is unused here.
public override NDArray Backward(NDArray input, NDArray gradOutput, ModelMode mode)
{
    DNN.ConvolutionBackwardData(bwdDataAlgo, cd, workspace, weight, gradOutput, gradInput);
    DNN.ConvolutionBackwardFilter(bwdFilterAlgo, cd, workspace, input, gradOutput, gradWeight);
    DNN.ConvolutionBackwardBias(cd, gradOutput, gradBias);
    return(gradInput);
}
// Constructs a cuDNN-backed 2D convolution layer. The base constructor allocates
// weight/bias/gradient tensors; this constructor then re-views them into the
// layouts cuDNN expects and allocates a single workspace buffer large enough for
// the forward, backward-filter and backward-data algorithms.
public Conv2Cudnn(IAllocator allocator, SeedSource seedSource, DType elementType, int batchSize, int inputWidth, int inputHeight, int nInputPlane, int nOutputPlane, ConvolutionDesc2d cd)
    : base(allocator, seedSource, elementType, batchSize, inputWidth, inputHeight, nInputPlane, nOutputPlane, cd)
{
    // Reshape weight and bias - CuDNN expects the dimensions to be structured slightly differently
    // weight becomes (nOutputPlane, nInputPlane, kH, kW); bias becomes (1, nOutputPlane, 1, 1)
    // so its singleton dims can broadcast during the bias add in Forward.
    this.weight = ViewReplace(this.weight, nOutputPlane, nInputPlane, cd.kH, cd.kW);
    this.bias = ViewReplace(this.bias, 1, nOutputPlane, 1, 1);
    this.gradWeight = ViewReplace(this.gradWeight, this.weight.Shape);
    this.gradBias = ViewReplace(this.gradBias, this.bias.Shape);

    // Query cuDNN for the scratch-space requirement of each algorithm.
    // The input tensor shape is NCHW: (batchSize, nInputPlane, inputHeight, inputWidth).
    var fwdWorkspace = DNN.GetConvolutionForwardWorkspaceSize(allocator, fwdAlgo, cd,
        new TensorShape(elementType, new long[] { batchSize, nInputPlane, inputHeight, inputWidth }),
        new TensorShape(weight),
        new TensorShape(activation));
    var bwdFilterWorkspace = DNN.GetConvolutionBackwardFilterWorkspaceSize(allocator, bwdFilterAlgo, cd,
        new TensorShape(elementType, new long[] { batchSize, nInputPlane, inputHeight, inputWidth }),
        new TensorShape(activation),
        new TensorShape(weight));
    var bwdFilterInputWorkspace = DNN.GetConvolutionBackwardDataWorkspaceSize(allocator, bwdDataAlgo, cd,
        new TensorShape(weight),
        new TensorShape(activation),
        new TensorShape(elementType, new long[] { batchSize, nInputPlane, inputHeight, inputWidth }));

    // One shared buffer sized for the largest of the three requirements; the
    // forward/backward passes reuse it rather than allocating per call.
    var workspaceSize = Math.Max(Math.Max(fwdWorkspace, bwdFilterWorkspace), bwdFilterInputWorkspace);
    this.workspace = (CudaStorage)allocator.Allocate(DType.UInt8, workspaceSize);
}
// Entry point: loads a data set, runs it through a trained network and shows
// the result. Paths may be overridden on the command line, defaulting to the
// original hard-coded locations so existing usage is unchanged:
//   args[0] = input data file   (default C:\DNN\test.txt)
//   args[1] = trained DNN file  (default C:\DNN\a.dnn)
static void Main(string[] args)
{
    string dataPath = args.Length > 0 ? args[0] : @"C:\DNN\test.txt";
    string netPath = args.Length > 1 ? args[1] : @"C:\DNN\a.dnn";

    // d[0] = network inputs, d[1] presumably expected outputs — TODO confirm
    // against IO.LoadAllText's layout.
    double[][][] d = IO.LoadAllText(dataPath);
    double[][] o = new DNN(netPath).Compute(d[0]);
    ShowText(d[0], d[1], o);
    Console.ReadLine(); // keep the console window open until the user presses Enter
}
// Forward pass: applies an instance-wise log-softmax to `input`, writing the
// result into the persistent `activation` tensor. cuDNN's softmax operates on
// 4d tensors, so both operands are wrapped in temporary 4d views that are
// disposed as soon as the kernel call returns. `mode` is unused here.
public override Tensor Forward(Tensor input, ModelMode mode)
{
    using (var src = As4d(input))
    {
        using (var dst = As4d(activation))
        {
            DNN.SoftmaxForward(DNNSoftmaxAlgorithm.Log, DNNSoftmaxMode.Instance, src, dst);
        }
    }
    return activation;
}
// Backward pass: computes the log-softmax input gradient into `gradInput`
// from the saved forward activation and the incoming `gradOutput`. All three
// tensors are exposed to cuDNN through short-lived 4d views, disposed right
// after the kernel call. `input` and `mode` are unused here.
public override Tensor Backward(Tensor input, Tensor gradOutput, ModelMode mode)
{
    using (var act = As4d(activation))
    {
        using (var gradIn = As4d(gradInput))
        {
            using (var gradOut = As4d(gradOutput))
            {
                DNN.SoftmaxBackward(DNNSoftmaxAlgorithm.Log, DNNSoftmaxMode.Instance, act, gradIn, gradOut);
            }
        }
    }
    return gradInput;
}
// Entry point: trains a network on a background thread while the main thread
// periodically reports progress and checkpoints the model. Paths may be
// overridden on the command line, defaulting to the original locations:
//   args[0] = training data file (default C:\DNN\train.txt)
//   args[1] = DNN model file     (default C:\DNN\a.dnn)
static void Main(string[] args)
{
    string dataPath = args.Length > 0 ? args[0] : @"C:\DNN\train.txt";
    string netPath = args.Length > 1 ? args[1] : @"C:\DNN\a.dnn";

    double[][][] io = IO.LoadAllText(dataPath);
    DNN d = new DNN(netPath);

    // Task.Run instead of new Task(...).Start(): the preferred way to schedule
    // thread-pool work. The faulted continuation surfaces training exceptions,
    // which would otherwise be silently lost as unobserved task exceptions.
    Task.Run(() => d.Train(io[0], io[1]))
        .ContinueWith(t => Console.WriteLine(t.Exception), TaskContinuationOptions.OnlyOnFaulted);

    // Report and checkpoint every 10 seconds for as long as the process runs
    // (intentionally infinite; the original program is terminated externally).
    while (true)
    {
        Thread.Sleep(1000 * 10);
        Console.WriteLine(d.e); // d.e: presumably the current training error — TODO confirm
        d.Save();
    }
}
// Backward pass of the pooling layer: fills `gradInput` from `gradOutput`
// using the pooling descriptor, the original `input` and the activation saved
// by the forward pass. `mode` is unused here.
public override NDArray Backward(NDArray input, NDArray gradOutput, ModelMode mode)
{
    DNN.PoolingBackward(poolingDesc, input, activation, gradInput, gradOutput);
    return gradInput;
}
// Forward pass of the pooling layer: pools `input` into the persistent
// `activation` tensor as configured by the pooling descriptor. `mode` is
// unused here.
public override NDArray Forward(NDArray input, ModelMode mode)
{
    DNN.PoolingForward(poolingDesc, input, activation);
    return activation;
}
// Backward pass of the pooling layer (Tensor variant): fills `gradInput`
// from `gradOutput` using the pooling descriptor, the original `input` and
// the activation saved by the forward pass. `mode` is unused here.
public override Tensor Backward(Tensor input, Tensor gradOutput, ModelMode mode)
{
    DNN.PoolingBackward(poolingDesc, input, activation, gradInput, gradOutput);
    return gradInput;
}
// Forward pass of the pooling layer (Tensor variant): pools `input` into the
// persistent `activation` tensor as configured by the pooling descriptor.
// `mode` is unused here.
public override Tensor Forward(Tensor input, ModelMode mode)
{
    DNN.PoolingForward(poolingDesc, input, activation);
    return activation;
}
// Loads the image at path `s`, runs it through a trained network and prints
// the integer-converted first output row.
// `modelPath` generalizes the previously hard-coded network location; its
// default preserves the original value, so existing callers are unaffected.
public static void ComputeImg(string s, string modelPath = @"C:\DNN\a.dnn")
{
    double[] d = IO.LoadImg(s);
    // Compute expects a batch (array of input vectors), so wrap the single image.
    double[][] o = new DNN(modelPath).Compute(new double[][] { d });
    Console.WriteLine(IO.double2int(o[0]));
}
// Forward pass for the cuDNN 2D convolution layer: convolves `input` with
// `weight` into `activation` using the selected forward algorithm and the
// shared workspace buffer, then adds the bias in place. The bias add must
// follow the convolution since ConvForward writes `activation`.
// `mode` is unused here.
public override NDArray Forward(NDArray input, ModelMode mode)
{
    DNN.ConvForward(fwdAlgo, cd, workspace, input, weight, activation);
    DNN.AddTensor(bias, activation); // dims of bias with size = 1 are automatically broadcast over other dimensions
    return(activation);
}