/// <summary>
/// Uploads a file asynchronously by wrapping the synchronous upload in a background task.
/// Reports coarse progress: one callback at ~10% before the upload, one at 100% after.
/// </summary>
/// <param name="fileName">Name the file is stored under.</param>
/// <param name="fileStream">Seekable stream with the file contents; rewound before upload.</param>
/// <param name="uploadProgressChanged">Optional progress callback (may be null).</param>
/// <param name="cancellationToken">Token observed once, before the upload begins.</param>
/// <returns>A task producing the synchronous upload's result, or null if cancelled early.</returns>
public System.Threading.Tasks.Task<object> UploadFileAsync(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    Func<object> uploadWork = () =>
    {
        long totalBytes = fileStream.Length;

        // Early, coarse progress notification before the blocking upload starts.
        if (uploadProgressChanged != null)
        {
            uploadProgressChanged(totalBytes / 10);
        }

        // Bail out quietly when a real token was supplied and cancellation was requested.
        if (cancellationToken != CancellationToken.None && cancellationToken.IsCancellationRequested)
        {
            return null;
        }

        object uploadResult = UploadFileSync(fileName, fileStream);

        if (uploadProgressChanged != null)
        {
            uploadProgressChanged(totalBytes);
        }

        return uploadResult;
    };

    // Rewind so the synchronous upload reads the stream from the beginning.
    fileStream.Position = 0;

    return System.Threading.Tasks.Task.Factory.StartNew(uploadWork, cancellationToken);
}
/// <summary>
/// Uploads a file to Azure Blob Storage as a block blob: the stream is split into
/// fixed-size blocks that a small pool of worker tasks pushes concurrently via
/// UploadBlocks, then the block list is committed. Returns the blob's URI.
/// </summary>
/// <param name="fileName">Logical file name; combined with settings.Path into the blob name.</param>
/// <param name="fileStream">Stream with the file contents; shared by the workers through the context.</param>
/// <param name="uploadProgressChanged">Progress callback forwarded to the worker context.</param>
/// <param name="cancellationToken">Cancels task start-up and the WaitAll; also passed to the workers.</param>
private Uri UploadFileOnBlocks(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    // Two concurrent workers; the file is targeted at roughly 50 blocks,
    // clamped by the min/max part sizes computed below.
    const int NumberOfTasks = 2;
    const int NumberOfBlocks = 50;
    var settings = this.GetUploadFileSettings();
    var blobClient = this.account.CreateCloudBlobClient();
    var container = blobClient.GetContainerReference(settings.Container);
    container.CreateIfNotExists();
    var blobName = this.GetBlobName(settings.Path, fileName);
    var blob = container.GetBlockBlobReference(blobName);
    // Block size clamped to [2^17, 2^22] bytes, i.e. 128 KB .. 4 MB.
    int MinPartSize = (int)Math.Pow(2, 17);
    int MaxPartSize = (int)Math.Pow(2, 22);
    int partSize = Math.Min(MaxPartSize, Math.Max(MinPartSize, (int)(fileStream.Length / NumberOfBlocks)));
    // Shared mutable state for the workers: stream position, next block index,
    // progress reporting and the fault flag.
    BlocksSyncContext context = new BlocksSyncContext(blob, fileStream, partSize, uploadProgressChanged, cancellationToken);
    Task[] tasks = new Task[NumberOfTasks];
    for (int i = 0; i < NumberOfTasks; i++)
    {
        Task task = System.Threading.Tasks.Task.Factory.StartNew(() => UploadBlocks(context), cancellationToken);
        // NOTE(review): this continuation task is discarded and is NOT part of the
        // WaitAll below, so context.Faulted may be set only after WaitAll has already
        // observed the failure — confirm UploadBlocks treats Faulted as best-effort.
        task.ContinueWith(t => { if (t.IsFaulted) context.Faulted = true; });
        tasks[i] = task;
    }
    Task.WaitAll(tasks, cancellationToken);
    // If the WaitAll throws, perhaps there are uncommitted blocks and the blocks have different id lengths, may be the uncommitted blocks need to be disposed first.
    // http://gauravmantri.com/2013/05/18/windows-azure-blob-storage-dealing-with-the-specified-blob-or-block-content-is-invalid-error/
    // Commit blocks 1..context.Index in order; ids must match those produced by
    // GenerateBlockId during UploadBlocks or PutBlockList rejects the list.
    var blockList = Enumerable.Range(1, context.Index).ToList<int>().ConvertAll(i => GenerateBlockId(i));
    blob.PutBlockList(blockList);
    return blob.Uri;
}
/// <summary>
/// Simulated upload used for demos: reports progress in random-sized steps with
/// short sleeps, deliberately fails every tenth call, and records the uploaded
/// file name in the shared storage list on the UI dispatcher thread.
/// </summary>
/// <returns>The file name, acting as the upload result token.</returns>
private object UploadFile(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    this.uploadFilesCount++;

    // Every tenth upload is forced to fail so retry/error UI paths can be exercised.
    if (uploadFilesCount % 10 == 0)
    {
        throw new Exception("Simulate upload failure.");
    }

    var totalBytes = fileStream.Length;
    int stepCount = this.random.Next(10, 15);
    int bytesPerStep = (int)(totalBytes / stepCount);

    for (int step = 0; step < stepCount; step++)
    {
        cancellationToken.ThrowIfCancellationRequested();
        uploadProgressChanged(step * bytesPerStep);
        Thread.Sleep(this.random.Next(80, 150));
    }

    // Final callback guarantees the bar reaches 100% regardless of rounding.
    uploadProgressChanged(totalBytes);

    lock (storage.StorageFiles)
    {
        // StorageFiles is UI-bound, so the mutation is marshalled to the dispatcher.
        Application.Current.Dispatcher.BeginInvoke(new Action(() =>
        {
            storage.StorageFiles.Add(new StorageFile(fileName));
        }));
    }

    return fileName;
}
/// <summary>
/// Initializes the shared state used by the block-upload worker tasks:
/// stores the collaborators, rewinds the stream, and resets the block index
/// and the two synchronization lockers.
/// </summary>
/// <param name="blob">Target block blob the workers upload into.</param>
/// <param name="fileStream">Source stream; rewound to position 0 here.</param>
/// <param name="blockSize">Size in bytes of each uploaded block.</param>
/// <param name="progressChanged">Callback invoked as blocks complete.</param>
/// <param name="cancellationToken">Token the workers observe.</param>
internal BlocksSyncContext(CloudBlockBlob blob, Stream fileStream, int blockSize, CloudUploadFileProgressChanged progressChanged, CancellationToken cancellationToken)
{
    this.Blob = blob;
    this.FileStream = fileStream;
    this.BlockSize = blockSize;
    this.ProgressChanged = progressChanged;
    this.CancellationToken = cancellationToken;

    // Start reading from the beginning and number blocks from zero.
    this.FileStream.Position = 0;
    this.Index = 0;

    // Two distinct lockers: one guards stream/index access, the other
    // serializes progress-callback invocations.
    this.Locker = new object();
    this.progressChangedLocker = new object();
}
/// <summary>
/// Initializes the per-upload state for an Amazon S3 multipart upload:
/// captures the collaborators, rewinds the stream, primes the part counter,
/// caches the total size, and kicks off the multipart session via Init().
/// </summary>
/// <param name="client">S3 client used for all part/complete/abort calls.</param>
/// <param name="fileName">Key the object is stored under.</param>
/// <param name="existingBucketName">Destination bucket (must already exist).</param>
/// <param name="partSize">Size in bytes of each uploaded part.</param>
/// <param name="fileStream">Source stream; rewound to position 0 here.</param>
/// <param name="uploadProgressChanged">Callback invoked as parts complete.</param>
/// <param name="cancellationToken">Token observed during part uploads.</param>
public AmazonS3Context(IAmazonS3 client, string fileName, string existingBucketName, int partSize, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    this.client = client;
    this.fileName = fileName;
    this.bucketName = existingBucketName;
    this.partSize = partSize;
    this.fileStream = fileStream;
    this.uploadProgressChanged = uploadProgressChanged;
    this.cancellationToken = cancellationToken;

    // S3 part numbers are 1-based; read the stream from the start.
    this.fileStream.Position = 0;
    this.currentPartNumber = 1;
    this.fileSize = this.fileStream.Length;

    // Opens the multipart upload session on the server.
    this.Init();
}
/// <summary>
/// Simulated upload: sleeps between a random number of progress steps and
/// returns the file name as the upload result.
/// </summary>
/// <returns>The file name, acting as the upload result token.</returns>
private object UploadFile(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    long totalBytes = fileStream.Length;
    int stepCount = this.random.Next(10, 40);
    int stepSize = (int)(totalBytes / stepCount);

    for (int step = 0; step < stepCount; step++)
    {
        cancellationToken.ThrowIfCancellationRequested();
        uploadProgressChanged(step * stepSize);
        Thread.Sleep(this.random.Next(200, 900));
    }

    // Final callback guarantees the bar reaches 100% regardless of rounding.
    uploadProgressChanged(totalBytes);
    return fileName;
}
/// <summary>
/// Simulated upload: emits a random number of evenly-sized progress updates,
/// sleeping between each, then reports completion and hands back the file name.
/// </summary>
/// <returns>The file name, acting as the upload result token.</returns>
private object UploadFile(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    long fileSize = fileStream.Length;
    int iterations = this.random.Next(10, 40);
    int bytesPerIteration = (int)(fileSize / iterations);

    for (int iteration = 0; iteration < iterations; iteration++)
    {
        cancellationToken.ThrowIfCancellationRequested();
        uploadProgressChanged(iteration * bytesPerIteration);
        Thread.Sleep(this.random.Next(200, 900));
    }

    // Report the exact total so the consumer sees 100% even after rounding.
    uploadProgressChanged(fileSize);

    return fileSize >= 0 ? fileName : fileName;
}
/// <summary>
/// Uploads a file to Amazon S3 using the multipart upload API: parts are pushed
/// sequentially until the stream is exhausted, then the upload is completed.
/// On any failure the multipart upload is aborted before rethrowing.
/// </summary>
/// <param name="fileName">Key the object is stored under.</param>
/// <param name="fileStream">Stream with the file contents.</param>
/// <param name="uploadProgressChanged">Progress callback forwarded to the context.</param>
/// <param name="cancellationToken">Token observed by the per-part uploads.</param>
/// <returns>The file name, acting as the upload result token.</returns>
private object UploadFileOnBlocks(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    int partSize = MinPartSize;
    string existingBucketName = GetUploadFileSettings().Bucket;
    var context = new AmazonS3Context(s3Client, fileName, existingBucketName, partSize, fileStream, uploadProgressChanged, cancellationToken);
    try
    {
        // Push parts one at a time until UploadNextPart reports the stream is done.
        // (Explicit empty body instead of the error-prone trailing-';' loop.)
        while (context.UploadNextPart())
        {
        }

        context.CompleteMultipartUpload();
    }
    catch // was 'catch (Exception e)' with 'e' unused — compiler warning
    {
        // Abort so S3 does not keep storing (and billing for) orphaned parts,
        // then let the original exception propagate with its stack trace intact.
        context.AbortMultipartUpload();
        throw;
    }

    return fileName;
}
/// <summary>
/// Uploads a file to Azure Blob Storage as a block blob: the stream is split into
/// fixed-size blocks uploaded concurrently by a small pool of worker tasks running
/// UploadBlocks, after which the block list is committed. Returns the blob's URI.
/// </summary>
/// <param name="fileName">Logical file name; combined with settings.Path into the blob name.</param>
/// <param name="fileStream">Stream with the file contents; shared by the workers through the context.</param>
/// <param name="uploadProgressChanged">Progress callback forwarded to the worker context.</param>
/// <param name="cancellationToken">Cancels task start-up and the WaitAll; also passed to the workers.</param>
private Uri UploadFileOnBlocks(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    // Two concurrent workers; aim for roughly 50 blocks per file, clamped by
    // the part-size bounds computed below.
    const int NumberOfTasks = 2;
    const int NumberOfBlocks = 50;
    var settings = this.GetUploadFileSettings();
    var blobClient = this.account.CreateCloudBlobClient();
    var container = blobClient.GetContainerReference(settings.Container);
    container.CreateIfNotExists();
    var blobName = this.GetBlobName(settings.Path, fileName);
    var blob = container.GetBlockBlobReference(blobName);
    // Block size clamped to [2^17, 2^22] bytes, i.e. 128 KB .. 4 MB.
    int MinPartSize = (int)Math.Pow(2, 17);
    int MaxPartSize = (int)Math.Pow(2, 22);
    int partSize = Math.Min(MaxPartSize, Math.Max(MinPartSize, (int)(fileStream.Length / NumberOfBlocks)));
    // Shared mutable state for the workers: stream position, next block index,
    // progress reporting and the fault flag.
    BlocksSyncContext context = new BlocksSyncContext(blob, fileStream, partSize, uploadProgressChanged, cancellationToken);
    Task[] tasks = new Task[NumberOfTasks];
    for (int i = 0; i < NumberOfTasks; i++)
    {
        Task task = System.Threading.Tasks.Task.Factory.StartNew(() => UploadBlocks(context), cancellationToken);
        // NOTE(review): the continuation is discarded and not awaited by WaitAll,
        // so context.Faulted may be set after WaitAll has already thrown — confirm
        // the flag is only a best-effort hint for the other worker.
        task.ContinueWith(t => { if (t.IsFaulted) { context.Faulted = true; } });
        tasks[i] = task;
    }
    Task.WaitAll(tasks, cancellationToken);
    // If the WaitAll throws, perhaps there are uncommitted blocks and the blocks have different id lengths, may be the uncommitted blocks need to be disposed first.
    // http://gauravmantri.com/2013/05/18/windows-azure-blob-storage-dealing-with-the-specified-blob-or-block-content-is-invalid-error/
    // Commit blocks 1..context.Index in order; ids must match those produced by
    // GenerateBlockId during UploadBlocks or PutBlockList rejects the list.
    var blockList = Enumerable.Range(1, context.Index).ToList <int>().ConvertAll(i => GenerateBlockId(i));
    blob.PutBlockList(blockList);
    return(blob.Uri);
}
/// <summary>
/// Uploads a file to Amazon S3 via the multipart upload API. Parts are uploaded
/// sequentially until the stream is exhausted, then the session is completed;
/// any failure aborts the multipart upload before the exception is rethrown.
/// </summary>
/// <param name="fileName">Key the object is stored under.</param>
/// <param name="fileStream">Stream with the file contents.</param>
/// <param name="uploadProgressChanged">Progress callback forwarded to the context.</param>
/// <param name="cancellationToken">Token observed by the per-part uploads.</param>
/// <returns>The file name, acting as the upload result token.</returns>
private object UploadFileOnBlocks(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    int partSize = MinPartSize;
    string existingBucketName = GetUploadFileSettings().Bucket;
    var context = new AmazonS3Context(s3Client, fileName, existingBucketName, partSize, fileStream, uploadProgressChanged, cancellationToken);
    try
    {
        // Push parts one at a time until UploadNextPart reports the stream is done.
        while (context.UploadNextPart())
        {
        }

        context.CompleteMultipartUpload();
    }
    catch // was 'catch (Exception e)' with 'e' unused — compiler warning
    {
        // Abort so S3 does not keep storing (and billing for) orphaned parts,
        // then let the original exception propagate with its stack trace intact.
        context.AbortMultipartUpload();
        throw;
    }

    return fileName;
}
/// <summary>
/// Starts the synchronous upload on a background task. Progress is reported
/// twice: a coarse ~10% callback before the upload and a final one at 100%.
/// </summary>
/// <param name="fileName">Name the file is stored under.</param>
/// <param name="fileStream">Seekable stream with the file contents; rewound before upload.</param>
/// <param name="uploadProgressChanged">Optional progress callback (may be null).</param>
/// <param name="cancellationToken">Token observed once, before the upload begins.</param>
/// <returns>A task producing the synchronous upload's result, or null if cancelled early.</returns>
public System.Threading.Tasks.Task<object> UploadFileAsync(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    // Rewind first so the worker reads the stream from the beginning.
    fileStream.Position = 0;

    Func<object> doUpload = () =>
    {
        long length = fileStream.Length;

        // Optimistic early progress so the UI shows movement immediately.
        if (uploadProgressChanged != null)
        {
            uploadProgressChanged(length / 10);
        }

        // Honor cancellation only when a real (non-default) token was provided.
        if (cancellationToken != CancellationToken.None && cancellationToken.IsCancellationRequested)
        {
            return null;
        }

        object resultGuid = UploadFileSync(fileName, fileStream);

        if (uploadProgressChanged != null)
        {
            uploadProgressChanged(length);
        }

        return resultGuid;
    };

    return System.Threading.Tasks.Task.Factory.StartNew(doUpload, cancellationToken);
}
/// <summary>
/// Starts the block-based upload on a background task and returns that task.
/// </summary>
/// <param name="fileName">Name the file is stored under.</param>
/// <param name="fileStream">Stream with the file contents.</param>
/// <param name="uploadProgressChanged">Progress callback forwarded to the block upload.</param>
/// <param name="cancellationToken">Token forwarded to both the task and the block upload.</param>
/// <returns>The task producing the block upload's result.</returns>
public Task<object> UploadFileAsync(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    Func<object> upload = () => UploadFileOnBlocks(fileName, fileStream, uploadProgressChanged, cancellationToken);
    return System.Threading.Tasks.Task.Factory.StartNew<object>(upload, cancellationToken);
}
/// <summary>
/// Uploads the stream to the demo Web API endpoint as multipart form data and
/// returns the server's response body (the saved file path string) as the result.
/// </summary>
/// <param name="fileName">File name sent in the Content-Disposition header.</param>
/// <param name="fileStream">Stream with the file contents.</param>
/// <param name="uploadProgressChanged">Unused by this provider — see the NOTE below.</param>
/// <param name="cancellationToken">Cancels the HTTP POST.</param>
/// <remarks>
/// NOTE(review): a new HttpClient is created per upload; under load prefer a
/// shared instance (or IHttpClientFactory) to avoid socket exhaustion — confirm
/// expected call volume before changing.
/// NOTE(review): uploadProgressChanged is never invoked here, so callers get no
/// progress reports from this provider — verify that is intentional.
/// </remarks>
public async Task <object> UploadFileAsync(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    using (var client = new HttpClient())
    {
        client.BaseAddress = new Uri("http://webapifortelerikdemos.azurewebsites.net/");
        using (var content = new MultipartFormDataContent())
        // The stream length doubles as the StreamContent buffer size here.
        using (var fileContent = new StreamContent(fileStream, (int)fileStream.Length))
        {
            fileContent.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment")
            {
                FileName = fileName
            };
            content.Add(fileContent);
            using (var message = await client.PostAsync("/api/Values", content, cancellationToken))
            {
                var result = await message.Content.ReadAsStringAsync();
                Debug.WriteLine($"Result - StatusCode: {message.StatusCode}, File Saved To: {result}");
                // typical output
                // Result - StatusCode: 201, File Saved To: ["D:\\home\\site\\wwwroot\\Snag1.gif"]
                return(result);
            }
        }
    }
}
/// <summary>
/// Wraps the block-based upload in a background task and hands the task
/// straight back to the caller.
/// </summary>
/// <param name="fileName">Name the file is stored under.</param>
/// <param name="fileStream">Stream with the file contents.</param>
/// <param name="uploadProgressChanged">Progress callback forwarded to the block upload.</param>
/// <param name="cancellationToken">Token forwarded to both the task and the block upload.</param>
/// <returns>The task producing the block upload's result.</returns>
public Task<object> UploadFileAsync(string fileName, Stream fileStream, CloudUploadFileProgressChanged uploadProgressChanged, CancellationToken cancellationToken)
{
    return System.Threading.Tasks.Task.Factory.StartNew<object>(
        () => UploadFileOnBlocks(fileName, fileStream, uploadProgressChanged, cancellationToken),
        cancellationToken);
}