/// <summary>
/// Reads the rows at <paramref name="rowsIndexes"/> from the in-memory backing store.
/// The lookup itself is synchronous; the result is wrapped in a completed task to
/// satisfy the asynchronous reader interface.
/// </summary>
/// <param name="firstRowIndex">Unused here; kept to match the reader interface.</param>
/// <param name="rowsIndexes">Indexes of the rows to fetch, in output order.</param>
/// <param name="operationInfo">Cancellation/progress context (not consulted — the lookup is trivial).</param>
/// <returns>A completed task holding one <see cref="IPointsRow"/> per requested index.</returns>
public Task<IPointsRow[]> ReadRowsAsync(int firstRowIndex, IList<int> rowsIndexes, AsyncOperationInfo operationInfo)
{
    var rows = new IPointsRow[rowsIndexes.Count];
    var slot = 0;
    foreach (var rowIndex in rowsIndexes)
    {
        rows[slot++] = _base[rowIndex];
    }
    return Task.FromResult(rows);
}
/// <summary>
/// Reads the rows at <paramref name="rowsIndexes"/> from the backing stream, splitting the
/// work across several parallel readers acquired from <c>_streams</c>.
/// </summary>
/// <param name="firstRowIndex">Unused here; kept to match the sibling reader signature.</param>
/// <param name="rowsIndexes">Indexes of the rows to fetch, in output order.</param>
/// <param name="isIndexesSequential">
/// True when the indexes are contiguous — then each worker seeks once and streams forward,
/// and a higher degree of parallelism is used than for random access.
/// </param>
/// <param name="operationInfo">Cancellation/progress context passed through to the threading and stream helpers.</param>
/// <returns>One <see cref="IPointsRow"/> per requested index, in request order.</returns>
async Task<IPointsRow[]> readRowsAsync(int firstRowIndex, IList<int> rowsIndexes, bool isIndexesSequential, AsyncOperationInfo operationInfo)
{
    await ThreadingUtils.ContinueAtThreadPull(operationInfo);

    int degreeOfParallelism = isIndexesSequential
        ? READER_DEGREE_OF_PARALLELISM
        : RANDOM_READER_DEGREE_OF_PARALLELISM;
    var futures = new Task[degreeOfParallelism];
    var workGroups = rowsIndexes.SplitOnGroups(degreeOfParallelism);
    var rows = new IPointsRow[rowsIndexes.Count];
    var offset = 0;
    for (int i = 0; i < degreeOfParallelism; i++)
    {
        var work = workGroups[i];
        futures[i] = readRowsTo(offset, work);
        offset += work.Length;
    }
    // BUG FIX: was Task.WaitAll(futures), which synchronously blocks a thread-pool
    // thread inside an async method and wraps failures in AggregateException.
    // Awaiting WhenAll frees the thread and propagates the first fault directly.
    await Task.WhenAll(futures);
    return rows;

    // Reads the given row indexes into rows[rowsArrayOffset..], using one reader
    // leased from the stream pool for the whole batch.
    async Task readRowsTo(int rowsArrayOffset, IList<int> indexes)
    {
        await ThreadingUtils.ContinueAtThreadPull(operationInfo);

        var reader = await _streams.AquireAsync(operationInfo);
        try
        {
            int rowSizeInBytes = _numOfPointsInsideRow * sizeof(double);
            var rowBuffer = new byte[rowSizeInBytes];
            if (isIndexesSequential && indexes.Count > 0)
            {
                // Seek once, then stream forward row after row.
                // BUG FIX: multiply in long — the int product overflows past 2 GB offsets.
                reader.BaseStream.Position = (long)indexes[0] * rowSizeInBytes;
            }
            for (int i = 0; i < indexes.Count; i++)
            {
                if (!isIndexesSequential)
                {
                    reader.BaseStream.Position = (long)indexes[i] * rowSizeInBytes;
                }
                fillBuffer(reader.BaseStream, rowBuffer);
                var row = new double[_numOfPointsInsideRow];
                for (int k = 0; k < _numOfPointsInsideRow; k++)
                {
                    row[k] = readDoubleFast(rowBuffer, k * sizeof(double)); // bulk decode; BinaryReader.ReadDouble per value was the bottleneck (~15%)
                }
                rows[rowsArrayOffset + i] = new PointsRow(row);
            }
        }
        finally
        {
            await _streams.ReleaseAsync(reader, operationInfo);
        }
    }

    // BUG FIX: Stream.Read may return fewer bytes than requested; the old code
    // ignored the return value ("hope the amount of data will be enough").
    // Loop until the buffer is full, failing loudly on a truncated stream.
    static void fillBuffer(System.IO.Stream stream, byte[] buffer)
    {
        int total = 0;
        while (total < buffer.Length)
        {
            int read = stream.Read(buffer, total, buffer.Length - total);
            if (read == 0)
            {
                throw new System.IO.EndOfStreamException("Unexpected end of stream while reading a points row");
            }
            total += read;
        }
    }
}