Example #1
0
        //TODO this is not actually used at the moment. It probably should be.
        // excludeDim may be -1 to not exclude any dimension
        /// <summary>
        /// Collapses adjacent tensor dimensions together wherever their strides are
        /// contiguous (i.e. strideOuter == strideInner * sizeInner), and drops size-1
        /// dimensions, producing the minimal equivalent sizes/strides in
        /// <paramref name="info"/>. The dimension at <paramref name="excludeDim"/>
        /// (when not -1) is never merged with either of its neighbours.
        /// NOTE(review): this looks like a port of cuTorch's TensorInfo collapseDims —
        /// confirm against upstream if modifying.
        /// </summary>
        /// <param name="tensor">The tensor whose shape and strides are collapsed; also supplies the device buffer pointer.</param>
        /// <param name="excludeDim">Index of a dimension to keep un-collapsed, or -1 to allow all dimensions to collapse.</param>
        /// <param name="info">Receives the buffer start pointer plus the collapsed sizes and strides arrays.</param>
        /// <returns>The index of the excluded dimension within the collapsed dimensions, or -1 when <paramref name="excludeDim"/> is -1. Returns 0 for a single-element tensor.</returns>
        /// <exception cref="ArgumentException">excludeDim must equal -1 if all dims are of size 1 - excludeDim</exception>
        public static int CollapseDims(Tensor tensor, int excludeDim, out TensorInfo info)
        {
            info.buffer = CudaHelpers.GetBufferStart(tensor);
            var firstNonOneDim = GetInnermostNon1Dim(tensor.Shape, excludeDim);

            // If all dims are size 1 (ie. tensor contains 1 element)
            if (firstNonOneDim == -1)
            {
                if (excludeDim != -1)
                {
                    throw new ArgumentException("excludeDim must equal -1 if all dims are of size 1", "excludeDim");
                }

                // Degenerate case: represent the scalar as a single dim of size 1.
                info.sizes   = new long[] { 1 };
                info.strides = new long[] { 1 };
                return(0);
            }


            // Count the number of successive dimensions that can be collapsed, from
            // innermost to outermost.
            int numCollapsed = 0;

            // Skip the leading size 1 dims
            numCollapsed += tensor.DimensionCount - 1 - firstNonOneDim;

            // We perform one pass through to determine how many dimensions we
            // can collapse, before calculating the actual size of the collapsed
            // dimensions.
            // size/strideInner are the size/strides of the previous inner
            // non-collapsible dim we encounter.
            var sizeInner   = tensor.Shape[firstNonOneDim];
            var strideInner = tensor.Strides[firstNonOneDim];

            for (int i = firstNonOneDim - 1; i >= 0; --i)
            {
                var sizeOuter   = tensor.Shape[i];
                var strideOuter = tensor.Strides[i];

                // Don't collapse this dimension if we want to exclude it from
                // collapsing.
                // Since this code is attempting to collapse a subsequent
                // dimension (i) with the preceding dimension (i + 1), we can only
                // perform collapsing if the preceding dimension can be collapsed
                // (i.e., not excludeDim)
                if ((excludeDim != i) && (excludeDim != i + 1))
                {
                    // The next outermost dimension can be skipped if size 1
                    if (sizeOuter == 1)
                    {
                        ++numCollapsed;
                        continue;
                    }

                    // If the next outermost dimension is contiguous with the
                    // previous non-collapsed one, collapse it
                    if (strideOuter == strideInner * sizeInner)
                    {
                        ++numCollapsed;

                        // This is the run of collapsed dimensions' size
                        sizeInner = sizeInner * sizeOuter;
                        continue;
                    }
                }

                // Otherwise, this new outer dimension at `i` cannot be collapsed
                // because it is excluded from collapsing, or it is not contiguous
                // with the previous inner dimension.
                sizeInner   = sizeOuter;
                strideInner = strideOuter;
            }

            // This will be our new size/stride and dimension.
            // NOTE(review): newDims is assumed to be <= TSCudaContext.MaxDims;
            // there is no explicit check here — verify the caller guarantees it.
            var newSizes   = new long[TSCudaContext.MaxDims];
            var newStrides = new long[TSCudaContext.MaxDims];

            int newDims = tensor.DimensionCount - numCollapsed;

            // We return the index of the excluded dimension that is excluded
            // from being collapsed here.
            int returnDim = -1;

            // We perform a second pass through the dimensions to actually
            // calculate the size of the collapsed dimensions.
            // collapsedIndex starts at the innermost output slot and walks
            // outward (downward) each time a new non-collapsible dim is found.
            int collapsedIndex = tensor.DimensionCount - numCollapsed - 1;

            newSizes[collapsedIndex]   = tensor.Shape[firstNonOneDim];
            newStrides[collapsedIndex] = tensor.Strides[firstNonOneDim];

            if (firstNonOneDim == excludeDim)
            {
                returnDim = collapsedIndex;
            }

            for (int i = firstNonOneDim - 1; i >= 0; --i)
            {
                var sizeOuter   = tensor.Shape[i];
                var strideOuter = tensor.Strides[i];

                if ((excludeDim != i) && (excludeDim != i + 1))
                {
                    if (sizeOuter == 1)
                    {
                        // skip
                        continue;
                    }

                    if (strideOuter == newSizes[collapsedIndex] * newStrides[collapsedIndex])
                    {
                        // collapse: fold this dim into the current output slot
                        newSizes[collapsedIndex] *= sizeOuter;
                        continue;
                    }
                }

                // Otherwise, strides don't match, or dim `i` is excluded from
                // collapsing.
                --collapsedIndex;
                //assert(collapsedIndex >= 0);
                //assert(collapsedIndex < newDims);
                newSizes[collapsedIndex]   = sizeOuter;
                newStrides[collapsedIndex] = strideOuter;

                if (excludeDim == i)
                {
                    returnDim = collapsedIndex;
                }
            }

            // Trim the fixed-size scratch arrays down to the actual dim count.
            info.sizes   = newSizes.Take(newDims).ToArray();
            info.strides = newStrides.Take(newDims).ToArray();
            return(returnDim);
        }
Example #2
0
        /// <summary>
        /// Wraps the tensor's device buffer in a non-owning <c>CudaDeviceVariable</c>.
        /// </summary>
        /// <param name="tensor">The tensor whose device buffer start should be wrapped.</param>
        /// <returns>A device variable pointing at the tensor's buffer; it does not own the memory.</returns>
        private static CudaDeviceVariable <float> GetDeviceVar(Tensor tensor)
        {
            // Size is passed as 0 because callers never end up using it;
            // ownership is false so disposing the wrapper won't free the buffer.
            return new CudaDeviceVariable <float>(CudaHelpers.GetBufferStart(tensor), false, 0);
        }