cudnnLRNCrossChannelBackward() private method

private cudnnLRNCrossChannelBackward(
    cudnnHandle handle,
    cudnnLRNDescriptor normDesc,
    cudnnLRNMode lrnMode,
    ref double alpha,
    cudnnTensorDescriptor srcDesc,
    ManagedCuda.BasicTypes.CUdeviceptr srcData,
    cudnnTensorDescriptor srcDiffDesc,
    ManagedCuda.BasicTypes.CUdeviceptr srcDiffData,
    cudnnTensorDescriptor destDesc,
    ManagedCuda.BasicTypes.CUdeviceptr destData,
    ref double beta,
    cudnnTensorDescriptor destDiffDesc,
    ManagedCuda.BasicTypes.CUdeviceptr destDiffData
) : cudnnStatus
Parameters
    handle          cudnnHandle
    normDesc        cudnnLRNDescriptor
    lrnMode         cudnnLRNMode
    alpha           ref double
    srcDesc         cudnnTensorDescriptor
    srcData         ManagedCuda.BasicTypes.CUdeviceptr
    srcDiffDesc     cudnnTensorDescriptor
    srcDiffData     ManagedCuda.BasicTypes.CUdeviceptr
    destDesc        cudnnTensorDescriptor
    destData        ManagedCuda.BasicTypes.CUdeviceptr
    beta            ref double
    destDiffDesc    cudnnTensorDescriptor
    destDiffData    ManagedCuda.BasicTypes.CUdeviceptr
Returns             cudnnStatus
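For orientation, here is a minimal sketch (not taken from the library documentation) of a direct call to this native entry point. It assumes the cudnnHandle, LRN descriptor, tensor descriptors, and device pointers have already been created and populated elsewhere; only the call and its status check are shown.

 double alpha = 1.0;  // take the freshly computed result as-is
 double beta = 0.0;   // overwrite, rather than blend with, prior destDiffData

 cudnnStatus res = CudaDNNNativeMethods.cudnnLRNCrossChannelBackward(
     handle, normDesc, lrnMode,
     ref alpha,
     srcDesc, srcData,            // LRN output (y) saved from the forward pass
     srcDiffDesc, srcDiffData,    // incoming gradient (dy) from the layer above
     destDesc, destData,          // LRN input (x), read-only here
     ref beta,
     destDiffDesc, destDiffData); // gradient (dx) is written to this buffer
 if (res != cudnnStatus.Success)
     throw new CudaDNNException(res);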
Example 1
 public void cudnnLRNCrossChannelBackward(
     cudnnLRNMode lrnMode,
     ref double alpha,
     cudnnTensorDescriptor srcDesc,
     CUdeviceptr srcData,
     cudnnTensorDescriptor srcDiffDesc,
     CUdeviceptr srcDiffData,
     cudnnTensorDescriptor destDesc,
     CUdeviceptr destData,
     ref double beta,
     cudnnTensorDescriptor destDiffDesc,
     CUdeviceptr destDiffData)
 {
     // Delegate to the native cuDNN entry point using the handle and LRN
     // descriptor held by this wrapper instance, then log the returned status.
     res = CudaDNNNativeMethods.cudnnLRNCrossChannelBackward(_handle, _desc, lrnMode, ref alpha, srcDesc, srcData, srcDiffDesc, srcDiffData, destDesc, destData, ref beta, destDiffDesc, destDiffData);
     Debug.WriteLine(String.Format("{0:G}, {1}: {2}", DateTime.Now, "cudnnLRNCrossChannelBackward", res));
     // Surface any non-success status as a typed exception.
     if (res != cudnnStatus.Success)
     {
         throw new CudaDNNException(res);
     }
 }
Example 2
 /// <summary>
 /// This function performs the backward LRN layer computation.
 /// </summary>
 /// <param name="lrnMode">LRN layer mode of operation. Currently only
 /// CUDNN_LRN_CROSS_CHANNEL_DIM1 is implemented. Normalization is
 /// performed along the tensor's dimA[1].</param>
 /// <param name="alpha">Pointer to scaling factors (in host memory) used to blend the layer output
 /// value with prior value in the destination tensor as follows: dstValue =
 /// alpha[0]*resultValue + beta[0]*priorDstValue. Refer to the cuDNN
 /// documentation for additional details.</param>
 /// <param name="yDesc">Tensor descriptor for the top layer's data, i.e. the LRN output
 /// computed during the forward pass. (Top layer is the later layer in the
 /// computation graph during inference.)</param>
 /// <param name="y">Pointer in device memory to the top layer's data, i.e. the LRN output
 /// computed during the forward pass.</param>
 /// <param name="dyDesc">Tensor descriptor for the top layer's cumulative loss differential
 /// data (error backpropagation).</param>
 /// <param name="dy">Pointer in device memory to the top layer's cumulative loss
 /// differential data (error backpropagation).</param>
 /// <param name="xDesc">Tensor descriptor for the bottom layer's data, i.e. the LRN input from
 /// the forward pass. (Bottom layer is the earlier layer in the computation
 /// graph during inference.) Note that these values are not modified during
 /// backpropagation.</param>
 /// <param name="x">Pointer in device memory to the bottom layer's data, i.e. the LRN input
 /// from the forward pass. Note that these values are not modified during
 /// backpropagation.</param>
 /// <param name="beta">Pointer to scaling factors (in host memory) used to blend the layer output
 /// value with prior value in the destination tensor as follows: dstValue =
 /// alpha[0]*resultValue + beta[0]*priorDstValue. Refer to the cuDNN
 /// documentation for additional details.</param>
 /// <param name="dxDesc">Tensor descriptor for the bottom layer's cumulative loss differential
 /// data (error backpropagation), written by this call.</param>
 /// <param name="dx">Pointer in device memory to the bottom layer's cumulative loss
 /// differential data (error backpropagation), written by this call.</param>
 public void cudnnLRNCrossChannelBackward(
     cudnnLRNMode lrnMode,
     ref double alpha,
     cudnnTensorDescriptor yDesc,
     CUdeviceptr y,
     cudnnTensorDescriptor dyDesc,
     CUdeviceptr dy,
     cudnnTensorDescriptor xDesc,
     CUdeviceptr x,
     ref double beta,
     cudnnTensorDescriptor dxDesc,
     CUdeviceptr dx)
 {
     res = CudaDNNNativeMethods.cudnnLRNCrossChannelBackward(_handle, _desc, lrnMode, ref alpha, yDesc, y, dyDesc, dy, xDesc, x, ref beta, dxDesc, dx);
     Debug.WriteLine(String.Format("{0:G}, {1}: {2}", DateTime.Now, "cudnnLRNCrossChannelBackward", res));
     if (res != cudnnStatus.Success)
     {
         throw new CudaDNNException(res);
     }
 }
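The documented wrapper in Example 2 might then be driven as in the hypothetical sketch below. Here lrn stands for an instance of the surrounding wrapper class, the descriptors and device buffers are assumed to be left over from the forward pass, and CrossChannelDim1 is assumed to be the managed enum member corresponding to CUDNN_LRN_CROSS_CHANNEL_DIM1.

 double alpha = 1.0;
 double beta = 0.0;   // with beta = 0, dx is overwritten instead of accumulated

 lrn.cudnnLRNCrossChannelBackward(
     cudnnLRNMode.CrossChannelDim1, // currently the only implemented mode
     ref alpha,
     yDesc, y,    // LRN output saved from the forward pass
     dyDesc, dy,  // gradient arriving from the layer above
     xDesc, x,    // LRN input from the forward pass (not modified)
     ref beta,
     dxDesc, dx); // gradient with respect to the input, written by this call

With alpha = 1 and beta = 0 the blend dstValue = alpha[0]*resultValue + beta[0]*priorDstValue reduces to a plain overwrite of dx; a nonzero beta instead accumulates the new gradient into whatever dx already holds.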