Example #1
 public void cudnnLRNCrossChannelForward(
     cudnnLRNMode lrnMode,
     double alpha,
     cudnnTensorDescriptor srcDesc,
     CUdeviceptr srcData,
     double beta,
     cudnnTensorDescriptor destDesc,
     CUdeviceptr destData)
 {
     res = CudaDNNNativeMethods.cudnnLRNCrossChannelForward(_handle, _desc, lrnMode, ref alpha, srcDesc, srcData, ref beta, destDesc, destData);
     Debug.WriteLine(String.Format("{0:G}, {1}: {2}", DateTime.Now, "cudnnLRNCrossChannelForward", res));
     if (res != cudnnStatus.Success)
     {
         throw new CudaDNNException(res);
     }
 }
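The wrapper checks the returned cudnnStatus and throws on failure, so a caller only supplies the LRN mode, the blend factors, and the tensor descriptors and device pointers. The sketch below shows how such a call might look. It is only an illustration: the lrn wrapper instance, TensorDescriptor, CudaDeviceVariable<double>, the enum members and the Desc/DevicePointer accessors are assumptions about the surrounding library, not taken from this page.
 // Illustrative usage sketch only; the helper types and members used here are assumptions.
 int n = 1, c = 8, h = 32, w = 32;                          // NCHW shape of both tensors
 var x = new CudaDeviceVariable<double>(n * c * h * w);     // assumed device buffer helper
 var y = new CudaDeviceVariable<double>(n * c * h * w);

 var xDesc = new TensorDescriptor();                        // assumed wrapper around cudnnTensorDescriptor
 var yDesc = new TensorDescriptor();
 xDesc.SetTensor4dDescriptor(cudnnTensorFormat.NCHW, cudnnDataType.Double, n, c, h, w);
 yDesc.SetTensor4dDescriptor(cudnnTensorFormat.NCHW, cudnnDataType.Double, n, c, h, w);

 // lrn is assumed to be the object exposing the method above (an LRN descriptor wrapper).
 // alpha = 1, beta = 0 overwrites y with the normalization result.
 lrn.cudnnLRNCrossChannelForward(
     cudnnLRNMode.CrossChannelDim1,
     1.0, xDesc.Desc, x.DevicePointer,
     0.0, yDesc.Desc, y.DevicePointer);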
Example #2
 /// <summary>
 /// This function performs the forward LRN layer computation.
 /// </summary>
 /// <param name="lrnMode">LRN layer mode of operation. Currently only
 /// CUDNN_LRN_CROSS_CHANNEL_DIM1 is implemented. Normalization is
 /// performed along the tensor's dimA[1].</param>
 /// <param name="alpha">Pointer to scaling factors (in host memory) used to blend the layer output
 /// value with prior value in the destination tensor as follows: dstValue =
 /// alpha[0]*resultValue + beta[0]*priorDstValue. Please refer to this section
 /// for additional details.</param>
 /// <param name="xDesc">Tensor descriptor objects for the input and output tensors.</param>
 /// <param name="x">Input tensor data pointer in device memory.</param>
 /// <param name="beta">Pointer to scaling factors (in host memory) used to blend the layer output
 /// value with prior value in the destination tensor as follows: dstValue =
 /// alpha[0]*resultValue + beta[0]*priorDstValue. Please refer to this section
 /// for additional details.</param>
 /// <param name="yDesc">Tensor descriptor objects for the input and output tensors.</param>
 /// <param name="y">Output tensor data pointer in device memory.</param>
 public void cudnnLRNCrossChannelForward(
     cudnnLRNMode lrnMode,
     float alpha,
     cudnnTensorDescriptor xDesc,
     CUdeviceptr x,
     float beta,
     cudnnTensorDescriptor yDesc,
     CUdeviceptr y)
 {
     res = CudaDNNNativeMethods.cudnnLRNCrossChannelForward(_handle, _desc, lrnMode, ref alpha, xDesc, x, ref beta, yDesc, y);
     Debug.Write("");            //Line(String.Format("{0:G}, {1}: {2}", DateTime.Now, "cudnnLRNCrossChannelForward", res));
     if (res != cudnnStatus.Success)
     {
         throw new CudaDNNException(res);
     }
 }
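As the remarks for alpha and beta state, the result is blended into the destination as dstValue = alpha*resultValue + beta*priorDstValue. In practice, alpha = 1 and beta = 0 simply overwrite y, while a non-zero beta accumulates the result into whatever y already holds. A minimal sketch of both variants, assuming lrn, lrnMode, xDesc, x, yDesc and y are already set up:
 // Assumed, pre-initialized variables: lrn (the wrapper object), lrnMode,
 // xDesc/yDesc (tensor descriptors) and x/y (CUdeviceptr device buffers).

 // Overwrite y with the LRN result: y = 1 * result + 0 * y.
 lrn.cudnnLRNCrossChannelForward(lrnMode, 1.0f, xDesc, x, 0.0f, yDesc, y);

 // Accumulate the LRN result into y: y = 1 * result + 1 * y.
 lrn.cudnnLRNCrossChannelForward(lrnMode, 1.0f, xDesc, x, 1.0f, yDesc, y);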
 public void cudnnLRNCrossChannelForward(
     cudnnLRNMode lrnMode,
     double alpha,
     cudnnTensorDescriptor srcDesc,
     CUdeviceptr srcData,
     double beta,
     cudnnTensorDescriptor destDesc,
     CUdeviceptr destData)
 {
     res = CudaDNNNativeMethods.cudnnLRNCrossChannelForward(_handle, _desc, lrnMode, ref alpha, srcDesc, srcData, ref beta, destDesc, destData);
     Debug.WriteLine(String.Format("{0:G}, {1}: {2}", DateTime.Now, "cudnnLRNCrossChannelForward", res));
     if (res != cudnnStatus.Success) throw new CudaDNNException(res);
 }
 public static extern cudnnStatus cudnnLRNCrossChannelBackward(
     cudnnHandle handle,
     cudnnLRNDescriptor normDesc,
     cudnnLRNMode lrnMode,
     ref double alpha,
     cudnnTensorDescriptor srcDesc,
     CUdeviceptr srcData,
     cudnnTensorDescriptor srcDiffDesc,
     CUdeviceptr srcDiffData,
     cudnnTensorDescriptor destDesc,
     CUdeviceptr destData,
     ref double beta,
     cudnnTensorDescriptor destDiffDesc,
     CUdeviceptr destDiffData);
 public static extern cudnnStatus cudnnLRNCrossChannelForward(
     cudnnHandle handle,
     cudnnLRNDescriptor normDesc,
     cudnnLRNMode lrnMode,
     ref float alpha,
     cudnnTensorDescriptor srcDesc,
     CUdeviceptr srcData,
     ref float beta,
     cudnnTensorDescriptor destDesc,
     CUdeviceptr destData);
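These two declarations are the raw P/Invoke entry points that the wrapper methods above forward to; in the source library they sit inside the CudaDNNNativeMethods class and presumably carry a DllImport attribute, which is omitted here. For symmetry with the forward wrapper, a backward-pass wrapper built on the cudnnLRNCrossChannelBackward signature might look like the sketch below. This is only an illustration derived from that signature, not the library's actual method; the per-parameter comments reflect cuDNN's legacy src/dest naming for backward routines and should be checked against the cuDNN documentation.
 // Illustrative sketch only, modeled on the forward wrapper above; it assumes the same
 // surrounding class with the _handle, _desc and res fields.
 public void cudnnLRNCrossChannelBackward(
     cudnnLRNMode lrnMode,
     double alpha,
     cudnnTensorDescriptor srcDesc,        // descriptor/data of the forward-pass output
     CUdeviceptr srcData,
     cudnnTensorDescriptor srcDiffDesc,    // gradient flowing in from the next layer
     CUdeviceptr srcDiffData,
     cudnnTensorDescriptor destDesc,       // descriptor/data of the forward-pass input
     CUdeviceptr destData,
     double beta,
     cudnnTensorDescriptor destDiffDesc,   // gradient to be written for the previous layer
     CUdeviceptr destDiffData)
 {
     res = CudaDNNNativeMethods.cudnnLRNCrossChannelBackward(_handle, _desc, lrnMode, ref alpha,
         srcDesc, srcData, srcDiffDesc, srcDiffData, destDesc, destData, ref beta, destDiffDesc, destDiffData);
     Debug.WriteLine(String.Format("{0:G}, {1}: {2}", DateTime.Now, "cudnnLRNCrossChannelBackward", res));
     if (res != cudnnStatus.Success) throw new CudaDNNException(res);
 }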