/// <summary>
/// Entry point: uploads a local file to an Azure blob container identified by a container SAS string.
/// Usage: largefileuploader.exe &lt;local file path&gt; &lt;container SAS string&gt; [upload chunk size in KB]
/// Exits with code 1 on bad arguments.
/// </summary>
static void Main(string[] args)
{
    if (args.Length < 2)
    {
        Console.WriteLine("largefileuploader.exe <local file path> <container SAS string> <optional: upload chunk size in KB>");
        Environment.Exit(1);
    }

    LargeFileUploaderUtils.Log = Console.Out.WriteLine;

    // Optional third argument: chunk size in KB (default 2048 KB = 2 MB).
    // TryParse instead of Parse so malformed input produces a usage error
    // rather than an unhandled FormatException.
    int chunkSizeKB = 2048;
    if (args.Length > 2 && !int.TryParse(args[2], out chunkSizeKB))
    {
        Console.WriteLine("Invalid chunk size '{0}': expected an integer number of KB.", args[2]);
        Environment.Exit(1);
    }
    LargeFileUploaderUtils.NumBytesPerChunk = chunkSizeKB * 1024;

    LargeFileUploaderUtils.UploadAsyncSAS(
        inputFile: args[0],
        containerSASString: args[1],
        uploadParallelism: 2).Wait();
}
/// <summary>
/// Uploads data as a block blob in chunks, resuming a previous attempt by skipping
/// blocks that already exist (uncommitted, with the expected length) on the service,
/// then commits the full block list.
/// </summary>
/// <param name="fetchLocalData">Reads a chunk of source data: given a block index and a byte count, returns those bytes.</param>
/// <param name="blobLenth">Total length in bytes of the data to upload. (Typo of "blobLength" kept: parameter names are part of the named-argument interface.)</param>
/// <param name="blockBlob">Destination block blob.</param>
/// <param name="uploadParallelism">Number of blocks to upload concurrently.</param>
/// <returns>The absolute URI of the uploaded blob.</returns>
public static async Task<string> UploadAsync(Func<long, int, Task<byte[]>> fetchLocalData, long blobLenth, CloudBlockBlob blockBlob, uint uploadParallelism = DEFAULT_PARALLELISM)
{
    // The block service rejects individual blocks larger than 4 MB (classic limit),
    // so clamp the configured chunk size.
    const int MAXIMUM_UPLOAD_SIZE = 4 * MB;
    if (NumBytesPerChunk > MAXIMUM_UPLOAD_SIZE)
    {
        NumBytesPerChunk = MAXIMUM_UPLOAD_SIZE;
    }

    #region Which blocks exist in the file

    var allBlockInFile = Enumerable
        .Range(0, 1 + ((int)(blobLenth / NumBytesPerChunk)))
        .Select(_ => new BlockMetadata(_, blobLenth, NumBytesPerChunk))
        // The final slot is zero-length when blobLenth is an exact multiple of the chunk size.
        .Where(block => block.Length > 0)
        .ToList();
    var blockIdList = allBlockInFile.Select(_ => _.BlockId).ToList();

    #endregion

    #region Which blocks are already uploaded

    List<BlockMetadata> missingBlocks = null;
    try
    {
        // Only full-sized uncommitted blocks are trusted as "already uploaded";
        // anything else is re-uploaded.
        var existingBlocks = (await blockBlob.DownloadBlockListAsync(
                BlockListingFilter.Uncommitted,
                AccessCondition.GenerateEmptyCondition(),
                new BlobRequestOptions { },
                new OperationContext { }))
            .Where(_ => _.Length == NumBytesPerChunk)
            .ToList();

        missingBlocks = allBlockInFile.Where(blockInFile => !existingBlocks.Any(
            existingBlock => existingBlock.Name == blockInFile.BlockId &&
                             existingBlock.Length == blockInFile.Length)).ToList();
    }
    catch (StorageException)
    {
        // Blob (or its block list) does not exist yet -> everything must be uploaded.
        missingBlocks = allBlockInFile;
    }

    #endregion

    Func<BlockMetadata, Statistics, Task> uploadBlockAsync = async (block, stats) =>
    {
        byte[] blockData = await fetchLocalData(block.Index, block.Length);
        string contentHash = md5()(blockData);

        DateTime start = DateTime.UtcNow;

        await ExecuteUntilSuccessAsync(async () =>
        {
            // Fresh stream per attempt, disposed afterwards, so a retry never
            // reuses a consumed stream and the buffer wrapper is not leaked.
            using (var blockStream = new MemoryStream(blockData, writable: false))
            {
                await blockBlob.PutBlockAsync(
                    blockId: block.BlockId,
                    blockData: blockStream,
                    contentMD5: contentHash,
                    accessCondition: AccessCondition.GenerateEmptyCondition(),
                    options: new BlobRequestOptions
                    {
                        StoreBlobContentMD5 = true,
                        UseTransactionalMD5 = true
                    },
                    operationContext: new OperationContext());
            }
        }, consoleExceptionHandler);

        stats.Add(block.Length, start);
    };

    var s = new Statistics(missingBlocks.Sum(b => (long)b.Length));

    // BUG FIX: parallelism was hard-coded to 4, silently ignoring the
    // uploadParallelism parameter.
    await LargeFileUploaderUtils.ForEachAsync(
        source: missingBlocks,
        parallelUploads: (int)uploadParallelism,
        body: blockMetadata => uploadBlockAsync(blockMetadata, s));

    // Commit the block list; until this succeeds the blob content is not visible.
    await ExecuteUntilSuccessAsync(async () => { await blockBlob.PutBlockListAsync(blockIdList); }, consoleExceptionHandler);

    log("PutBlockList succeeded, finished upload to {0}", blockBlob.Uri.AbsoluteUri);

    return blockBlob.Uri.AbsoluteUri;
}