/// <summary>
/// Get input as `Tensor`
/// </summary>
/// <param name="idx">input index</param>
/// <param name="batchCount">max batch count</param>
/// <param name="fromBatch">start from batch</param>
/// <returns>`Tensor`</returns>
/// <exception cref="Exception">thrown if called on raw test set (only JSON test set is supported)</exception>
public Tensor GetInputAsTensor(int idx = 0, int batchCount = -1, int fromBatch = 0)
{
    if (rawTestSet != null)
    {
        throw new Exception("GetInputAsTensor is not supported for RAW test suites");
    }

    TensorShape shape = GetInputShape(idx);
    // only single-frame, single-direction shapes are supported here
    Assert.IsTrue(shape.sequenceLength == 1 && shape.numberOfDirections == 1);

    var array = GetInputData(idx);
    var maxBatchCount = array.Length / shape.flatWidth;
    fromBatch = Math.Min(fromBatch, maxBatchCount - 1);
    if (batchCount < 0)
    {
        batchCount = maxBatchCount - fromBatch;
    }

    // pad data with 0s, if test-set doesn't have enough batches:
    // dataToUpload is zero-initialized and only `count` values are copied from the test-set
    var shapeArray = shape.ToArray();
    shapeArray[TensorShape.DataBatch] = batchCount;
    var tensorShape = new TensorShape(shapeArray);
    var managedBufferStartIndex = fromBatch * tensorShape.flatWidth;
    var count = Math.Min(batchCount, maxBatchCount - fromBatch) * tensorShape.flatWidth;
    float[] dataToUpload = new float[tensorShape.length];
    Array.Copy(array, managedBufferStartIndex, dataToUpload, 0, count);

    var data = new ArrayTensorData(tensorShape.length);
    data.Upload(dataToUpload, tensorShape, 0);

    var res = new Tensor(tensorShape, data);
    res.name = GetInputName(idx);
    // strip the TensorFlow-style ":0" suffix from the input name, if present
    res.name = res.name.EndsWith(":0") ? res.name.Remove(res.name.Length - 2) : res.name;
    return res;
}
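// Usage sketch (illustrative only): feeding the first test-set input into a worker and reading back
// the result. Assumes `testSet` is a JSON-backed TestSet instance and `worker` is an IWorker created
// for the model under test; both identifiers are placeholders for this example.
//
//   using (Tensor input = testSet.GetInputAsTensor(idx: 0, batchCount: 1))
//   {
//       worker.Execute(input);
//       Tensor output = worker.PeekOutput();
//       // ... compare `output` against the expected test-set output here
//   }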