cudnnDivisiveNormalizationBackward() private method

private cudnnDivisiveNormalizationBackward ( cudnnHandle handle, cudnnLRNDescriptor normDesc, cudnnDivNormMode mode, ref double alpha, cudnnTensorDescriptor srcDesc, ManagedCuda.BasicTypes.CUdeviceptr srcData, ManagedCuda.BasicTypes.CUdeviceptr srcMeansData, ManagedCuda.BasicTypes.CUdeviceptr srcDiffData, ManagedCuda.BasicTypes.CUdeviceptr tempData, ManagedCuda.BasicTypes.CUdeviceptr tempData2, ref double beta, cudnnTensorDescriptor destDataDesc, ManagedCuda.BasicTypes.CUdeviceptr destDataDiff, ManagedCuda.BasicTypes.CUdeviceptr destMeansDiff ) : cudnnStatus
handle cudnnHandle
normDesc cudnnLRNDescriptor
mode cudnnDivNormMode
alpha double
srcDesc cudnnTensorDescriptor
srcData ManagedCuda.BasicTypes.CUdeviceptr
srcMeansData ManagedCuda.BasicTypes.CUdeviceptr
srcDiffData ManagedCuda.BasicTypes.CUdeviceptr
tempData ManagedCuda.BasicTypes.CUdeviceptr
tempData2 ManagedCuda.BasicTypes.CUdeviceptr
beta double
destDataDesc cudnnTensorDescriptor
destDataDiff ManagedCuda.BasicTypes.CUdeviceptr
destMeansDiff ManagedCuda.BasicTypes.CUdeviceptr
Returns cudnnStatus
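
A minimal invocation sketch of the public wrapper shown in Example #2 below. It assumes the surrounding ManagedCuda types and spellings (CudaDNNContext, a TensorDescriptor exposing its native handle as Desc, an LRNDescriptor constructed from the context, CudaDeviceVariable&lt;T&gt; device buffers, and the enum value cudnnDivNormMode.PrecomputedMeans); buffer names and the NCHW shape are illustrative, not taken from this page:

 using ManagedCuda;
 using ManagedCuda.BasicTypes;
 using ManagedCuda.CudaDNN;

 static class DivNormBackwardSketch
 {
     static void Run()
     {
         int n = 1, c = 3, h = 32, w = 32;   // illustrative NCHW shape
         int count = n * c * h * w;

         using (CudaDNNContext ctx = new CudaDNNContext())
         using (TensorDescriptor xDesc = new TensorDescriptor())
         using (LRNDescriptor lrn = new LRNDescriptor(ctx))
         using (CudaDeviceVariable<float> x      = new CudaDeviceVariable<float>(count))
         using (CudaDeviceVariable<float> means  = new CudaDeviceVariable<float>(count))
         using (CudaDeviceVariable<float> dy     = new CudaDeviceVariable<float>(count))
         using (CudaDeviceVariable<float> temp   = new CudaDeviceVariable<float>(count))
         using (CudaDeviceVariable<float> temp2  = new CudaDeviceVariable<float>(count))
         using (CudaDeviceVariable<float> dx     = new CudaDeviceVariable<float>(count))
         using (CudaDeviceVariable<float> dMeans = new CudaDeviceVariable<float>(count))
         {
             // One descriptor serves x, means, dy, temp and temp2; a second
             // descriptor (here identical in shape) serves dx and dMeans.
             xDesc.SetTensor4dDescriptor(cudnnTensorFormat.NCHW, cudnnDataType.Float, n, c, h, w);

             // alpha = 1, beta = 0: write the results instead of blending them
             // with whatever dx/dMeans previously held.
             lrn.cudnnDivisiveNormalizationBackward(
                 cudnnDivNormMode.PrecomputedMeans,
                 1.0f,
                 xDesc.Desc, x.DevicePointer, means.DevicePointer, dy.DevicePointer,
                 temp.DevicePointer, temp2.DevicePointer,
                 0.0f,
                 xDesc.Desc, dx.DevicePointer, dMeans.DevicePointer);
         }
     }
 }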
Example #1
 public void cudnnDivisiveNormalizationBackward(
     cudnnDivNormMode mode,
     double alpha,
     cudnnTensorDescriptor srcDesc,                                                          // same desc for diff, means, temp, temp2
     CUdeviceptr srcData,
     CUdeviceptr srcMeansData,                                                               // if NULL, means are assumed to be zero
     CUdeviceptr srcDiffData,
     CUdeviceptr tempData,
     CUdeviceptr tempData2,
     double betaData,
     cudnnTensorDescriptor destDataDesc,                                                     // same desc for dest, means, meansDiff
     CUdeviceptr destDataDiff,                                                               // output data differential
     CUdeviceptr destMeansDiff                                                               // output means differential, can be NULL
     )
 {
     res = CudaDNNNativeMethods.cudnnDivisiveNormalizationBackward(_handle, _desc, mode, ref alpha, srcDesc, srcData, srcMeansData, srcDiffData, tempData, tempData2, ref betaData, destDataDesc, destDataDiff, destMeansDiff);
     Debug.WriteLine(String.Format("{0:G}, {1}: {2}", DateTime.Now, "cudnnDivisiveNormalizationBackward", res));
     if (res != cudnnStatus.Success)
     {
         throw new CudaDNNException(res);
     }
 }
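
Note the ref passing of alpha and betaData: the underlying cuDNN entry point expects host pointers to the scaling factors used in the blend dstValue = alpha[0]*resultValue + beta[0]*priorDstValue, so the P/Invoke declares them as ref double and the wrapper simply forwards the caller's values.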
Example #2
 /// <summary>
 /// This function performs the backward DivisiveNormalization layer computation.
 /// </summary>
 /// <param name="mode">DivisiveNormalization layer mode of operation. Currently only
 /// CUDNN_DIVNORM_PRECOMPUTED_MEANS is implemented. Normalization
 /// is performed using the means input tensor that is expected to be
 /// precomputed by the user.</param>
 /// <param name="alpha">Pointer to scaling factors (in host memory) used to blend the layer output
 /// value with prior value in the destination tensor as follows: dstValue =
 /// alpha[0]*resultValue + beta[0]*priorDstValue. Please refer to this section
 /// for additional details.</param>
 /// <param name="xDesc">Tensor descriptor and pointers in device memory for the bottom layer's
 /// data and means. (Bottom layer is the earlier layer in the computation
 /// graph during inference). Note: the means tensor is expected to be
 /// precomputed by the user. It can also contain any valid values (not required
 /// to be actual means, and can be for instance a result of a convolution with
 /// a Gaussian kernel).</param>
 /// <param name="x">Tensor descriptor and pointers in device memory for the bottom layer's
 /// data and means. (Bottom layer is the earlier layer in the computation
 /// graph during inference). Note: the means tensor is expected to be
 /// precomputed by the user. It can also contain any valid values (not required
 /// to be actual means, and can be for instance a result of a convolution with
 /// a Gaussian kernel).</param>
 /// <param name="means">Tensor descriptor and pointers in device memory for the bottom layer's
 /// data and means. (Bottom layer is the earlier layer in the computation
 /// graph during inference). Note: the means tensor is expected to be
 /// precomputed by the user. It can also contain any valid values (not required
 /// to be actual means, and can be for instance a result of a convolution with
 /// a Gaussian kernel).</param>
 /// <param name="dy">Tensor pointer in device memory for the top layer's cumulative loss
 /// differential data (error backpropagation). (Top layer is the later layer in
 /// the computation graph during inference).</param>
 /// <param name="temp">Temporary tensors in device memory. These are used for computing
 /// intermediate values during the backward pass. These tensors do not have
 /// to be preserved from forward to backward pass. Both use srcDesc as a
 /// descriptor.</param>
 /// <param name="temp2">Temporary tensors in device memory. These are used for computing
 /// intermediate values during the backward pass. These tensors do not have
 /// to be preserved from forward to backward pass. Both use srcDesc as a
 /// descriptor.</param>
 /// <param name="beta">Pointer to scaling factors (in host memory) used to blend the layer output
 /// value with prior value in the destination tensor as follows: dstValue =
 /// alpha[0]*resultValue + beta[0]*priorDstValue. Please refer to this section
 /// for additional details.</param>
 /// <param name="dXdMeansDesc">Tensor descriptor for destDataDiff and destMeansDiff.</param>
 /// <param name="dx">Tensor pointers (in device memory) for the bottom layer's resulting
 /// differentials (data and means). Both share the same descriptor.</param>
 /// <param name="dMeans">Tensor pointers (in device memory) for the bottom layer's resulting
 /// differentials (data and means). Both share the same descriptor.</param>
 public void cudnnDivisiveNormalizationBackward(
     cudnnDivNormMode mode,
     float alpha,
     cudnnTensorDescriptor xDesc,                 // same desc for diff, means, temp, temp2
     CUdeviceptr x,
     CUdeviceptr means,                           // if NULL, means are assumed to be zero
     CUdeviceptr dy,
     CUdeviceptr temp,
     CUdeviceptr temp2,
     float beta,
     cudnnTensorDescriptor dXdMeansDesc,       // same desc for dest, means, meansDiff
     CUdeviceptr dx,                           // output data differential
     CUdeviceptr dMeans                        // output means differential, can be NULL
     )
 {
     res = CudaDNNNativeMethods.cudnnDivisiveNormalizationBackward(_handle, _desc, mode, ref alpha, xDesc, x, means, dy, temp, temp2, ref beta, dXdMeansDesc, dx, dMeans);
     Debug.Write("");//Line(String.Format("{0:G}, {1}: {2}", DateTime.Now, "cudnnDivisiveNormalizationBackward", res));
     if (res != cudnnStatus.Success)
     {
         throw new CudaDNNException(res);
     }
 }
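
As the inline comments note, means and dMeans may be NULL (means is then treated as zero, and the means differential is skipped). With ManagedCuda's CUdeviceptr value type, a zero-valued pointer plays that role; continuing the sketch above, an assumed way to spell it:

 CUdeviceptr nullPtr = new CUdeviceptr(); // zero-valued, i.e. a NULL device pointer
 lrn.cudnnDivisiveNormalizationBackward(
     cudnnDivNormMode.PrecomputedMeans,
     1.0f,
     xDesc.Desc, x.DevicePointer, nullPtr, dy.DevicePointer,   // means == NULL: treated as zero
     temp.DevicePointer, temp2.DevicePointer,
     0.0f,
     xDesc.Desc, dx.DevicePointer, nullPtr);                   // dMeans == NULL: no means differential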