/// <summary>
/// Creates a part source that reads the given platform file from the very beginning.
/// </summary>
/// <param name="file">File to upload; opened for reading immediately.</param>
/// <param name="hashProvider">Running whole-file hash that each part is appended to.</param>
public FilePartSource(IPlatformFile file, IMD5HashProvider hashProvider)
{
    fileHash = hashProvider; // despite the name, this field holds the hash *provider*
    fileLength = file.Length;
    stream = file.OpenRead();

    // Fresh source: start at byte zero with no parts produced yet.
    streamPosition = 0;
    partCount = 0;
    HasMore = true;
}
/// <summary>
/// Creates a part source over an existing stream, resuming from <paramref name="offset"/>.
/// </summary>
/// <param name="fileStream">Seekable stream containing the file contents.</param>
/// <param name="hashProvider">Running whole-file hash that each part is appended to.</param>
/// <param name="bufferAllocator">Allocator used to hand out per-part buffers.</param>
/// <param name="offset">Byte position to resume reading from.</param>
public FilePartSource(Stream fileStream, IMD5HashProvider hashProvider, IBufferAllocator bufferAllocator, long offset)
{
    this.fileStream = fileStream;
    this.bufferAllocator = bufferAllocator;
    fileHash = hashProvider; // despite the name, this field holds the hash *provider*
    fileLength = fileStream.Length;

    // Resume support: position the stream at the requested offset before reading.
    streamPosition = offset;
    this.fileStream.Seek(streamPosition, SeekOrigin.Begin);

    partCount = 0;
    HasMore = true;
}
/// <summary>
/// Uploads the file in parts on the thread pool, waiting for all part workers to finish.
/// </summary>
/// <param name="file">File to upload.</param>
/// <param name="hashProvider">Running whole-file hash provider shared by all parts.</param>
/// <param name="chunkUploadUrl">Destination URL for chunk uploads.</param>
/// <returns>A task that faults with the first part's exception (or a generic
/// <see cref="UploadException"/>) if any part reports failure.</returns>
public Task Upload(IPlatformFile file, IMD5HashProvider hashProvider, string chunkUploadUrl)
{
    // Task.Run instead of Task.Factory.StartNew: same scheduling, safer defaults
    // (DenyChildAttach, default scheduler).
    return Task.Run(() =>
    {
        var workers = Dispatch(new FilePartSource(file, hashProvider), chunkUploadUrl).ToArray();
        Task.WaitAll(workers);

        // Materialize once; the original LINQ query was enumerated twice
        // (once by All, once by Select).
        var results = workers.Select(task => task.Result).ToArray();
        if (!results.All(partUploadResult => partUploadResult.IsSuccess))
        {
            // Surface the first concrete part failure; fall back to a generic error.
            throw results.Select(result => result.PartUploadException).FirstOrDefault(ex => ex != null)
                  ?? new UploadException("FilePart upload failed", -1);
        }
    });
}
/// <summary>
/// Reads exactly <paramref name="length"/> bytes from the shared file stream into a freshly
/// allocated buffer, updating both the per-chunk hash and the running whole-file hash.
/// </summary>
/// <param name="length">Exact number of bytes this part must contain.</param>
/// <returns>The filled buffer and the chunk's finalized MD5 hash string.</returns>
/// <exception cref="EndOfStreamException">The stream ended before <paramref name="length"/> bytes were read.</exception>
/// <remarks>
/// NOTE(review): not thread-safe on its own — it advances the shared <c>fileStream</c> and
/// <c>fileHash</c>; presumably the caller serializes reads. Confirm against call sites.
/// </remarks>
private async Task<ReadFileResult> ReadFile(int length)
{
    IBuffer content = bufferAllocator.Allocate(length);
    Stream dest = content.GetStream();
    IMD5HashProvider chunkHash = MD5HashProviderFactory.GetHashProvider().CreateHash();
    byte[] b = ArrayPool<byte>.Shared.Rent(Configuration.BufferSize);
    try
    {
        int read;
        int toRead = length;
        do
        {
            // Never read past the part boundary, even if the rented buffer is larger.
            read = await fileStream.ReadAsync(b, offset: 0, count: Math.Min(b.Length, toRead)).ConfigureAwait(false);
            toRead -= read;
            await dest.WriteAsync(b, offset: 0, count: read).ConfigureAwait(false);
            chunkHash.Append(b, offset: 0, size: read);
            fileHash.Append(b, offset: 0, size: read);
        } while (read > 0 && toRead > 0);
        if (toRead > 0)
        {
            // More specific than the original bare Exception; still caught by catch (Exception).
            throw new EndOfStreamException($"Expected to read {length} bytes, actual read {length - toRead} bytes");
        }
        // BUG FIX: the original rented a zero-length array here (ArrayPool.Rent(0)) and never
        // returned it to the pool. Array.Empty<byte>() is the correct no-alloc empty buffer.
        chunkHash.Finalize(Array.Empty<byte>(), 0, 0);
        return new ReadFileResult { Content = content, Hash = chunkHash.GetComputedHashAsString() };
    }
    catch
    {
        // On any failure the buffer is not handed to the caller, so we must release it.
        content.Dispose();
        throw;
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(b);
        dest.Dispose();
    }
}
/// <summary>
/// Uploads the stream in parallel chunks, optionally resuming from <paramref name="offset"/>.
/// Failures are wrapped in an <see cref="UploadException"/> carrying the resume state.
/// </summary>
/// <param name="fileStream">Source stream; its length sizes the thread count.</param>
/// <param name="fileName">Name reported to the upload endpoint.</param>
/// <param name="hashProvider">Running whole-file hash provider.</param>
/// <param name="chunkUploadUrl">Destination URL for chunk uploads.</param>
/// <param name="raw">Raw-upload flag stored for the part workers.</param>
/// <param name="offset">Byte position to resume from; 0 for a fresh upload.</param>
/// <param name="cancellationToken">Propagated to the dispatched part workers.</param>
/// <exception cref="UploadException">Any failure, wrapped with the last consecutive byte uploaded.</exception>
public async Task Upload(
    Stream fileStream,
    string fileName,
    IMD5HashProvider hashProvider,
    string chunkUploadUrl,
    bool raw,
    long offset = 0,
    CancellationToken cancellationToken = default(CancellationToken))
{
    this.raw = raw;
    // We block this after setting 'started', so make sure this statement is first
    NumberOfThreads = (int)Math.Min(NumberOfThreads, (fileStream.Length / partConfig.MinFileSizeForMultithreaded) + 1);
    started = true;
    if (offset != 0)
    {
        // Resuming: report the already-uploaded prefix as progress up front.
        updateProgress(offset);
        completedBytes.Add(0, offset);
    }
    try
    {
        var filePartSource = new FilePartSource(fileStream, hashProvider, partConfig.BufferAllocator, offset);
        var workers = await Dispatch(filePartSource, chunkUploadUrl, fileName, cancellationToken).ConfigureAwait(false);
        await Task.WhenAll(workers).ConfigureAwait(false);
        logger.Info("[Scaling Uploader] All upload parts succeeded");
    }
    catch (Exception innerException)
    {
        logger.Info("[Scaling Uploader] Upload failed. Bytes uploaded: " + LastConsecutiveByteUploaded);
        // Pattern match replaces the original 'as'-cast + null check (same behavior).
        UploadStatusCode statusCode = innerException is UploadException uploadException
            ? uploadException.StatusCode
            : UploadStatusCode.Unknown;
        // Wrap with the resume state so the caller can continue from the last good byte.
        throw new UploadException(
            "FilePart upload failed",
            statusCode,
            new ActiveUploadState(UploadSpecification, LastConsecutiveByteUploaded),
            innerException);
    }
}
/// <summary>
/// Appends the chunk's bytes to the shared whole-file hash (under the hash lock) and returns
/// the chunk's own finalized MD5 hash string.
/// </summary>
/// <param name="chunk">Seekable stream whose full length is hashed. Assumes its position
/// starts at the data to hash — TODO confirm callers always pass position 0.</param>
/// <param name="cancellationToken">Cancels the lock wait and the reads.</param>
/// <returns>The chunk's computed hash as a string.</returns>
/// <exception cref="EndOfStreamException">The stream ended before <c>chunk.Length</c> bytes were read.</exception>
public async Task<string> Append(Stream chunk, CancellationToken cancellationToken)
{
    IMD5HashProvider chunkHash = NewHash();
    byte[] buffer = new byte[bufferSize];
    int bytesRead = 0;
    await fileHashLock.WaitAsync(cancellationToken).ConfigureAwait(false);
    try
    {
        for (long position = 0; position < chunk.Length; position += bytesRead)
        {
            bytesRead = await chunk.ReadAsync(buffer, 0, buffer.Length, cancellationToken).ConfigureAwait(false);
            // BUG FIX: a zero-byte read left 'position' unchanged, so the original looped
            // forever if the stream ended (or misreported Length) before 'position' caught up.
            if (bytesRead == 0)
            {
                throw new EndOfStreamException($"Stream ended after {position} of {chunk.Length} bytes");
            }
            fileHash.Append(buffer, 0, bytesRead);
            chunkHash.Append(buffer, 0, bytesRead);
        }
    }
    finally
    {
        fileHashLock.Release();
    }
    chunkHash.Finalize(zeroBuffer, 0, 0);
    return chunkHash.GetComputedHashAsString();
}
/// <summary>
/// Wraps the running whole-file hash provider so per-chunk hashes can be appended to it.
/// </summary>
/// <param name="fileHash">The shared whole-file hash provider.</param>
public FileChunkHasher(IMD5HashProvider fileHash) => this.fileHash = fileHash;