        protected override Task SerializeToStreamAsync(Stream stream, TransportContext context)
        {
            // Copy the wrapped content stream to the transport stream in DefaultBufferSize
            // chunks, reporting progress to the attached upload-progress tracker as we go.
            return Task.Run(async () =>
            {
                var buffer = new byte[DefaultBufferSize];

                _uploadProgress?.ChangeState(UploadState.PendingUpload);

                using (_content)
                {
                    while (true)
                    {
                        var length = await _content.ReadAsync(buffer, 0, buffer.Length);
                        if (length <= 0)
                        {
                            break;
                        }

                        _uploadProgress?.UpdateUploaded(length);
                        Uploaded += length;

                        await stream.WriteAsync(buffer, 0, length);

                        _uploadProgress?.ChangeState(UploadState.Uploading);
                    }
                }

                _uploadProgress?.ChangeState(UploadState.PendingResponse);
            });
        }
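This override belongs to a custom HttpContent wrapper (the ProgressableStreamContent used in the later examples); HttpClient invokes SerializeToStreamAsync when it sends the request body, which is why the progress callbacks fire during the actual transfer rather than when the request is created. A minimal usage sketch, assuming the two-argument constructor shown in the AWS and Azure examples below and an already configured client, fileStream and url:

        // Sketch only: client, fileStream, url, UploadProgress and CancellationToken are
        // assumed to exist in the surrounding class, as they do in the examples below.
        var content = new ProgressableStreamContent(fileStream, UploadProgress)
        {
            Headers =
            {
                { "Content-Length", fileStream.Length.ToString(CultureInfo.InvariantCulture) }
            }
        };

        var response = await client.PostAsync(url, content, CancellationToken);
        UploadProgress?.ChangeState(UploadState.Done);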
Example 2
        public async Task UploadFile(string folderName, string fileName, Stream stream)
        {
            await TestConnection();

            UploadProgress?.SetTotal(stream.Length);
            UploadProgress?.ChangeState(UploadState.PendingUpload);

            var url = await CreateNestedFoldersIfNeeded(folderName);

            url += $"/{fileName}";

            var request    = CreateFtpWebRequest(url, WebRequestMethods.Ftp.UploadFile, keepAlive: true);
            var readBuffer = new byte[DefaultBufferSize];

            int count;

            // Obtain the request stream asynchronously and make sure it is disposed
            // even if the copy fails part-way through.
            using (var requestStream = await request.GetRequestStreamAsync())
            {
                while ((count = await stream.ReadAsync(readBuffer, 0, readBuffer.Length)) != 0)
                {
                    await requestStream.WriteAsync(readBuffer, 0, count);

                    UploadProgress?.UpdateUploaded(count);
                }

                await requestStream.FlushAsync();
            }

            UploadProgress?.ChangeState(UploadState.PendingResponse);
            var response = await request.GetResponseAsync();

            response.Dispose();

            UploadProgress?.ChangeState(UploadState.Done);
        }
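Every example in this listing drives the same small progress surface: SetTotal before the transfer, UpdateUploaded per chunk, ChangeState for the PendingUpload/Uploading/PendingResponse/Done transitions, and ChangeType when switching to a chunked upload. The concrete progress class is not part of this listing; the sketch below is only a hypothetical illustration of the shape implied by those calls, not the real implementation (the Regular value of UploadType is assumed):

        // Hypothetical sketch of the surface implied by the calls in these examples;
        // the real progress class is not shown in this listing.
        public enum UploadState { PendingUpload, Uploading, PendingResponse, Done }
        public enum UploadType { Regular, Chunked } // Regular is an assumed default

        public class UploadProgressSketch
        {
            public long TotalInBytes { get; private set; }
            public long UploadedInBytes { get; private set; }
            public UploadState State { get; private set; }
            public UploadType Type { get; private set; } = UploadType.Regular;

            public void SetTotal(long totalInBytes) => TotalInBytes = totalInBytes;
            public void UpdateUploaded(long count) => UploadedInBytes += count;
            public void ChangeState(UploadState state) => State = state;
            public void ChangeType(UploadType type) => Type = type;
        }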
Example 3
        private async Task<string> MultiPartUpload(string archiveDescription, Stream stream)
        {
            var streamLength = stream.Length;

            if (streamLength > MultiPartUploadLimitInBytes)
            {
                throw new InvalidOperationException(@"Can't upload more than 40TB to Amazon Glacier, " +
                                                    $"current upload size: {new Size(streamLength).HumaneSize}");
            }

            UploadProgress?.SetTotal(streamLength);
            UploadProgress?.ChangeType(UploadType.Chunked);

            // using a chunked upload we can upload up to 10,000 chunks, 4GB max each
            // we limit every chunk to a minimum of 128MB
            // constraint: the part size must be a megabyte (1024 KB)
            // multiplied by a power of 2, for example:
            // 1048576 (1MB), 2097152 (2MB), 4194304 (4MB), 8388608 (8MB), and so on.
            // the minimum allowable part size is 1MB and the maximum is 4GB (4096MB).
            var        maxLengthPerPart      = Math.Max(MinOnePartUploadSizeLimitInBytes, streamLength / 10000);
            const long maxPartLength         = 4L * 1024 * 1024 * 1024; // 4GB
            var        lengthPerPartPowerOf2 = Math.Min(GetNextPowerOf2(maxLengthPerPart), maxPartLength);

            var baseUrl  = $"{GetUrl()}/multipart-uploads";
            var uploadId = await GetUploadId(baseUrl, archiveDescription, lengthPerPartPowerOf2);

            var client = GetClient(TimeSpan.FromDays(7));

            var uploadUrl = $"{baseUrl}/{uploadId}";
            var fullStreamPayloadTreeHash = RavenAwsHelper.CalculatePayloadTreeHash(stream);

            try
            {
                while (stream.Position < streamLength)
                {
                    var length = Math.Min(lengthPerPartPowerOf2, streamLength - stream.Position);
                    await UploadPart(stream, client, uploadUrl, length, retryCount: 0);
                }

                return await CompleteMultiUpload(uploadUrl, client, streamLength, fullStreamPayloadTreeHash);
            }
            catch (Exception)
            {
                await AbortMultiUpload(uploadUrl, client);

                throw;
            }
            finally
            {
                UploadProgress?.ChangeState(UploadState.Done);
            }
        }
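Glacier only accepts part sizes that are 1 MB multiplied by a power of two (see the comment above), which is why the computed maxLengthPerPart is rounded up through GetNextPowerOf2 and then capped at 4 GB. That helper is not included in this listing; a minimal sketch of what it might look like, assuming it simply rounds a byte count up to the next power of two:

        // Hypothetical sketch; the real GetNextPowerOf2 is not part of this listing.
        private static long GetNextPowerOf2(long number)
        {
            long powerOf2 = 1;
            while (powerOf2 < number)
            {
                powerOf2 <<= 1;
            }

            return powerOf2;
        }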
Example 4
        public async Task<string> UploadArchive(Stream stream, string archiveDescription)
        {
            await TestConnection();

            if (stream.Length > MaxUploadArchiveSizeInBytes)
            {
                // for objects over 256MB
                return await MultiPartUpload(archiveDescription, stream);
            }

            var url = $"{GetUrl()}/archives";

            var now = SystemTime.UtcNow;

            var payloadHash     = RavenAwsHelper.CalculatePayloadHash(stream);
            var payloadTreeHash = RavenAwsHelper.CalculatePayloadTreeHash(stream);

            UploadProgress?.SetTotal(stream.Length);

            // stream is disposed by the HttpClient
            var content = new ProgressableStreamContent(stream, UploadProgress)
            {
                Headers =
                {
                    { "x-amz-glacier-version",     "2012-06-01"                                         },
                    { "x-amz-date",                RavenAwsHelper.ConvertToString(now)                  },
                    { "x-amz-content-sha256",      payloadHash                                          },
                    { "x-amz-sha256-tree-hash",    payloadTreeHash                                      },
                    { "x-amz-archive-description", archiveDescription                                   },
                    { "Content-Length",            stream.Length.ToString(CultureInfo.InvariantCulture) }
                }
            };

            var headers = ConvertToHeaders(content.Headers);

            var client = GetClient(TimeSpan.FromHours(24));
            var authorizationHeaderValue = CalculateAuthorizationHeaderValue(HttpMethods.Post, url, now, headers);

            client.DefaultRequestHeaders.Authorization = authorizationHeaderValue;

            var response = await client.PostAsync(url, content, CancellationToken);

            UploadProgress?.ChangeState(UploadState.Done);
            if (response.IsSuccessStatusCode == false)
            {
                throw StorageException.FromResponseMessage(response);
            }

            return ReadArchiveId(response);
        }
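ConvertToHeaders and CalculateAuthorizationHeaderValue are helpers of the surrounding client class and are not shown in this listing; the former flattens the content headers into a plain lookup that the request signer can consume. One plausible shape, purely as an assumption about code that is not included here:

        // Hypothetical sketch; the real ConvertToHeaders helper is not shown in this listing.
        private static Dictionary<string, string> ConvertToHeaders(HttpContentHeaders headers)
        {
            var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

            foreach (var header in headers)
            {
                // Multi-valued headers are joined as they would appear on the wire.
                result[header.Key] = string.Join(",", header.Value);
            }

            return result;
        }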
Example 5
        private async Task MultiPartUpload(string key, Stream stream, Dictionary<string, string> metadata)
        {
            var streamLength = stream.Length;

            if (streamLength > MultiPartUploadLimitInBytes)
            {
                throw new InvalidOperationException(@"Can't upload more than 5TB to Amazon S3, " +
                                                    $"current upload size: {new Size(streamLength).HumaneSize}");
            }

            UploadProgress?.SetTotal(streamLength);
            UploadProgress?.ChangeType(UploadType.Chunked);

            var baseUrl  = $"{GetUrl()}/{key}";
            var uploadId = await GetUploadId(baseUrl, metadata);

            var client = GetClient(TimeSpan.FromDays(7));
            var partNumbersWithEtag = new List<Tuple<int, string>>();
            var partNumber          = 0;
            var completeUploadUrl   = $"{baseUrl}?uploadId={uploadId}";

            // using a chunked upload we can upload up to 1000 chunks, 5GB max each
            // we limit every chunk to a minimum of 100MB
            var maxLengthPerPart = Math.Max(MinOnePartUploadSizeLimitInBytes, stream.Length / 1000);

            try
            {
                while (stream.Position < streamLength)
                {
                    var length = Math.Min(maxLengthPerPart, streamLength - stream.Position);
                    var url    = $"{baseUrl}?partNumber={++partNumber}&uploadId={uploadId}";

                    var etag = await UploadPart(stream, client, url, length, retryCount: 0);

                    partNumbersWithEtag.Add(new Tuple<int, string>(partNumber, etag));
                }

                await CompleteMultiUpload(completeUploadUrl, client, partNumbersWithEtag);
            }
            catch (Exception)
            {
                await AbortMultiUpload(client, completeUploadUrl);

                throw;
            }
            finally
            {
                UploadProgress?.ChangeState(UploadState.Done);
            }
        }
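CompleteMultiUpload is not shown here, but the S3 Complete Multipart Upload call it wraps expects an XML body listing every part number together with the ETag returned when that part was uploaded, which is exactly why the loop above collects partNumbersWithEtag. A rough sketch of how such a body could be assembled from that list (the real helper may differ):

        // Hypothetical sketch of building the CompleteMultipartUpload request body from the
        // (partNumber, etag) pairs collected above; the real CompleteMultiUpload helper is
        // not shown in this listing. Requires System.Text for StringBuilder.
        private static string BuildCompleteMultipartUploadBody(List<Tuple<int, string>> partNumbersWithEtag)
        {
            var body = new StringBuilder("<CompleteMultipartUpload>");

            foreach (var part in partNumbersWithEtag)
            {
                body.Append("<Part>")
                    .Append("<PartNumber>").Append(part.Item1).Append("</PartNumber>")
                    .Append("<ETag>").Append(part.Item2).Append("</ETag>")
                    .Append("</Part>");
            }

            body.Append("</CompleteMultipartUpload>");

            return body.ToString();
        }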
Example 6
        public async Task PutObject(string key, Stream stream, Dictionary<string, string> metadata)
        {
            await TestConnection();

            if (stream.Length > MaxUploadPutObjectSizeInBytes)
            {
                // for objects over 256MB
                await MultiPartUpload(key, stream, metadata);

                return;
            }

            var url         = $"{GetUrl()}/{key}";
            var now         = SystemTime.UtcNow;
            var payloadHash = RavenAwsHelper.CalculatePayloadHash(stream);

            UploadProgress?.SetTotal(stream.Length);

            // stream is disposed by the HttpClient
            var content = new ProgressableStreamContent(stream, UploadProgress)
            {
                Headers =
                {
                    { "x-amz-date",           RavenAwsHelper.ConvertToString(now) },
                    { "x-amz-content-sha256", payloadHash                         }
                }
            };

            foreach (var metadataKey in metadata.Keys)
            {
                content.Headers.Add("x-amz-meta-" + metadataKey.ToLower(), metadata[metadataKey]);
            }

            var headers = ConvertToHeaders(content.Headers);

            var client = GetClient(TimeSpan.FromHours(24));
            var authorizationHeaderValue = CalculateAuthorizationHeaderValue(HttpMethods.Put, url, now, headers);

            client.DefaultRequestHeaders.Authorization = authorizationHeaderValue;

            var response = await client.PutAsync(url, content, CancellationToken);

            UploadProgress?.ChangeState(UploadState.Done);
            if (response.IsSuccessStatusCode)
            {
                return;
            }

            throw StorageException.FromResponseMessage(response);
        }
Example 7
        public async Task PutBlob(string key, Stream stream, Dictionary<string, string> metadata)
        {
            await TestConnection();

            if (stream.Length > MaxUploadPutBlobInBytes)
            {
                // for blobs over 256MB
                await PutBlockApi(key, stream, metadata);

                return;
            }

            var url = _serverUrlForContainer + "/" + key;

            UploadProgress?.SetTotal(stream.Length);

            var now = SystemTime.UtcNow;
            // stream is disposed by the HttpClient
            var content = new ProgressableStreamContent(stream, UploadProgress)
            {
                Headers =
                {
                    { "x-ms-date",      now.ToString("R")                                    },
                    { "x-ms-version",   AzureStorageVersion                                  },
                    { "x-ms-blob-type", "BlockBlob"                                          },
                    { "Content-Length", stream.Length.ToString(CultureInfo.InvariantCulture) }
                }
            };

            foreach (var metadataKey in metadata.Keys)
            {
                content.Headers.Add("x-ms-meta-" + metadataKey.ToLower(), metadata[metadataKey]);
            }

            var client = GetClient(TimeSpan.FromHours(3));

            client.DefaultRequestHeaders.Authorization = CalculateAuthorizationHeaderValue(HttpMethods.Put, url, content.Headers);

            var response = await client.PutAsync(url, content, CancellationToken);

            UploadProgress?.ChangeState(UploadState.Done);
            if (response.IsSuccessStatusCode)
            {
                return;
            }

            throw StorageException.FromResponseMessage(response);
        }
Example 8
        private async Task PutBlockApi(string key, Stream stream, Dictionary<string, string> metadata)
        {
            var streamLength = stream.Length;

            if (streamLength > TotalBlocksSizeLimitInBytes)
            {
                throw new InvalidOperationException(@"Can't upload more than 4.75TB to Azure, " +
                                                    $"current upload size: {new Size(streamLength).HumaneSize}");
            }

            var blockNumber = 0;
            var blockIds    = new List<string>();
            var baseUrl     = _serverUrlForContainer + "/" + key;
            var client      = GetClient(TimeSpan.FromDays(7));

            UploadProgress?.SetTotal(streamLength);
            UploadProgress?.ChangeType(UploadType.Chunked);

            try
            {
                while (stream.Position < streamLength)
                {
                    var blockNumberInBytes = BitConverter.GetBytes(blockNumber++);
                    var blockIdString      = Convert.ToBase64String(blockNumberInBytes);
                    blockIds.Add(blockIdString);

                    var length           = Math.Min(OnePutBlockSizeLimitInBytes, streamLength - stream.Position);
                    var baseUrlForUpload = baseUrl + "?comp=block&blockid=";
                    var url = baseUrlForUpload + WebUtility.UrlEncode(blockIdString);

                    await PutBlock(stream, client, url, length, retryCount: 0);
                }

                // put block list
                await PutBlockList(baseUrl, client, blockIds, metadata);
            }
            finally
            {
                UploadProgress?.ChangeState(UploadState.Done);
            }
        }
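PutBlockList, called at the end of the try block, commits the blocks uploaded so far; the Azure Put Block List operation takes an XML document listing the base64 block IDs in the order the blob should be assembled. A rough sketch of how such a body could be produced from blockIds, as an assumption about a helper that is not included in this listing:

        // Hypothetical sketch of the Put Block List request body built from the base64 block
        // ids collected above; the real PutBlockList helper is not shown in this listing.
        private static string BuildPutBlockListBody(List<string> blockIds)
        {
            var body = new StringBuilder("<?xml version=\"1.0\" encoding=\"utf-8\"?><BlockList>");

            foreach (var blockId in blockIds)
            {
                // "Latest" tells the service to use the most recently uploaded block with this id.
                body.Append("<Latest>").Append(blockId).Append("</Latest>");
            }

            body.Append("</BlockList>");

            return body.ToString();
        }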