/// <summary>
/// Thread-safely records the response of one completed part upload.
/// </summary>
private void addResponse(UploadPartResponse response)
{
    // Part uploads finish on multiple threads; guard the shared list.
    lock (this.WAIT_FOR_COMPLETION_LOCK)
    {
        this._uploadResponses.Add(response);
    }
}
/// <summary>
/// Uploads part number 1 of an in-progress multipart upload and records its ETag.
/// Errors from the OBS service are reported to the console rather than rethrown.
/// </summary>
static void UploadPart()
{
    try
    {
        var request = new UploadPartRequest()
        {
            BucketName = bucketName,
            ObjectKey = objectName,
            FilePath = filePath,
            PartNumber = 1,
            PartSize = partSize,
            UploadId = uploadId,
            Offset = 100,
        };

        UploadPartResponse response = client.UploadPart(request);

        Console.WriteLine("UploadPart response: {0}", response.StatusCode);
        Console.WriteLine("ETag: {0}", response.ETag);

        // Remember the ETag; it is needed later to complete the multipart upload.
        etag = response.ETag;
    }
    catch (ObsException ex)
    {
        Console.WriteLine("Exception errorcode: {0}, when upload part.", ex.ErrorCode);
        Console.WriteLine("Exception errormessage: {0}", ex.ErrorMessage);
    }
}
/// <summary>
/// Creates an <see cref="UploadPartResponse"/> and fills it from the
/// unmarshaller context.
/// </summary>
public override AmazonWebServiceResponse Unmarshall(XmlUnmarshallerContext context)
{
    var result = new UploadPartResponse();
    UnmarshallResult(context, result);
    return result;
}
/// <summary>
/// Helper to assert that uploaded parts have the same checksum as listed parts.
/// Generally only one checksum is expected to be set.
/// </summary>
/// <param name="uploadPartResponse">Response after uploading a part</param>
/// <param name="partDetail">Response for a single part after listing parts</param>
private static void AssertPartsAreEqual(UploadPartResponse uploadPartResponse, PartDetail partDetail)
{
    Assert.AreEqual(uploadPartResponse.PartNumber, partDetail.PartNumber);
    Assert.AreEqual(uploadPartResponse.ETag, partDetail.ETag);
    // Compare every checksum variant so the one that was set is always covered.
    Assert.AreEqual(uploadPartResponse.ChecksumCRC32C, partDetail.ChecksumCRC32C);
    Assert.AreEqual(uploadPartResponse.ChecksumCRC32, partDetail.ChecksumCRC32);
    Assert.AreEqual(uploadPartResponse.ChecksumSHA1, partDetail.ChecksumSHA1);
    Assert.AreEqual(uploadPartResponse.ChecksumSHA256, partDetail.ChecksumSHA256);
}
/// <summary>
/// Uploads the chunk described by <paramref name="upr"/> and returns its
/// part number / ETag pair for the final CompleteMultipartUpload call.
/// </summary>
private static async Task<PartETag> ProcessChunk(UploadPartRequest upr)
{
    Console.WriteLine(string.Format("Sending chunk {0} starting at position {1}", upr.PartNumber, upr.FilePosition));

    // Upload the chunk, then build a new PartETag once the upload completes.
    UploadPartResponse response = await AWSS3Factory.getS3Client()
        .UploadPartAsync(upr, new System.Threading.CancellationToken());

    return new PartETag(response.PartNumber, response.ETag);
}
/// <summary>
/// Ends the asynchronous request for uploading a part.
/// </summary>
/// <param name="ar">Handle returned by the corresponding Begin call.</param>
/// <returns>Response to the part upload request.</returns>
public UploadPartResponse EndUploadPart(IAsyncResult ar)
{
    UploadPartResponse response = this.EndDoRequest<UploadPartRequest, UploadPartResponse>(ar);

    // The part number is copied back from the original request (the synchronous
    // UploadPart does the same). Pattern matching replaces the previous pair of
    // unchecked 'as' casts, which would have thrown NullReferenceException if
    // the handle was not the expected HttpObsAsyncResult/UploadPartRequest.
    if (ar is HttpObsAsyncResult result && result.AdditionalState is UploadPartRequest request)
    {
        response.PartNumber = request.PartNumber;
    }

    return response;
}
/// <summary>
/// Records a successfully uploaded part so it can be committed later.
/// </summary>
public void RegisterSuccess(int partNumber, UploadPartResponse response)
{
    var trackedPart = _parts[partNumber];

    trackedPart.Details = new CommitMultipartUploadPartDetails()
    {
        Etag = response.ETag,
        PartNum = partNumber
    };
    trackedPart.Complete = true;
}
/// <summary>
/// Runs a full multipart upload with server-side encryption enabled and checks
/// that the encryption settings are echoed back at every stage
/// (create, upload-part, complete).
/// </summary>
public async Task MultipartWithEncryption(S3Provider provider, string bucket, ISimpleClient client, SseAlgorithm algorithm)
{
    string objectKey = nameof(MultipartWithEncryption);

    // Stage 1: create the upload with the requested SSE algorithm.
    CreateMultipartUploadResponse created = await client.CreateMultipartUploadAsync(bucket, objectKey, req => req.SseAlgorithm = algorithm).ConfigureAwait(false);
    Assert.Equal(200, created.StatusCode);

    if (provider == S3Provider.AmazonS3)
    {
        Assert.Equal(algorithm, created.SseAlgorithm);
    }

    if (algorithm == SseAlgorithm.AwsKms)
    {
        Assert.NotNull(created.SseKmsKeyId);
    }

    // Stage 2: upload a single 5 MB part.
    await using MemoryStream partStream = new MemoryStream(new byte[1024 * 1024 * 5]);

    UploadPartResponse uploaded = await client.UploadPartAsync(bucket, objectKey, 1, created.UploadId, partStream).ConfigureAwait(false);
    Assert.Equal(200, uploaded.StatusCode);

    if (provider == S3Provider.AmazonS3)
    {
        Assert.Equal(algorithm, uploaded.SseAlgorithm);
    }

    if (algorithm == SseAlgorithm.AwsKms)
    {
        Assert.NotNull(uploaded.SseKmsKeyId);
    }

    // Stage 3: complete the upload and verify the SSE settings one last time.
    CompleteMultipartUploadResponse completed = await client.CompleteMultipartUploadAsync(bucket, objectKey, created.UploadId, new[] { uploaded }).ConfigureAwait(false);
    Assert.Equal(200, completed.StatusCode);

    if (provider == S3Provider.AmazonS3)
    {
        Assert.Equal(algorithm, completed.SseAlgorithm);
    }

    if (algorithm == SseAlgorithm.AwsKms)
    {
        Assert.NotNull(completed.SseKmsKeyId);
    }
}
/// <summary>
/// Populates <paramref name="response"/> from the headers of an UploadPart
/// reply: server-side-encryption method and ETag.
/// </summary>
private static void UnmarshallResult(XmlUnmarshallerContext context, UploadPartResponse response)
{
    IWebResponseData responseData = context.ResponseData;

    if (responseData.IsHeaderPresent("x-amz-server-side-encryption"))
    {
        response.ServerSideEncryptionMethod = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-server-side-encryption"));
    }
    if (responseData.IsHeaderPresent("ETag"))
    {
        response.ETag = S3Transforms.ToString(responseData.GetHeaderValue("ETag"));
    }
    // (redundant trailing 'return;' removed)
}
/// <summary>
/// End-to-end multipart test: uploads a 10 MB object in two 5 MB parts,
/// completes the upload, reads the whole object back, and exercises the
/// per-part GetObject paths (PartNumber = 0 is invalid, PartNumber = 1 valid).
/// </summary>
public async Task MultipartUpload()
{
    string objectKey = nameof(MultipartUpload);

    CreateMultipartUploadResponse initResp = await MultipartClient.CreateMultipartUploadAsync(BucketName, objectKey).ConfigureAwait(false);
    Assert.True(initResp.IsSuccess);
    Assert.Equal(BucketName, initResp.Bucket);
    Assert.Equal(objectKey, initResp.ObjectKey);
    Assert.NotNull(initResp.UploadId);

    // Sentinel bytes at both ends so the round-trip comparison would catch truncation.
    byte[] file = new byte[1024 * 1024 * 10];
    file[0] = (byte)'a';
    file[file.Length - 1] = (byte)'b';

    // Split into two equal 5 MB parts.
    byte[][] parts = file.Chunk(file.Length / 2).Select(x => x.ToArray()).ToArray();

    UploadPartResponse uploadResp1 = await MultipartClient.UploadPartAsync(BucketName, objectKey, 1, initResp.UploadId, new MemoryStream(parts[0])).ConfigureAwait(false);
    Assert.True(uploadResp1.IsSuccess);
    Assert.NotNull(uploadResp1.ETag);

    UploadPartResponse uploadResp2 = await MultipartClient.UploadPartAsync(BucketName, objectKey, 2, initResp.UploadId, new MemoryStream(parts[1])).ConfigureAwait(false);
    Assert.True(uploadResp2.IsSuccess);
    Assert.NotNull(uploadResp2.ETag);

    CompleteMultipartUploadResponse completeResp = await MultipartClient.CompleteMultipartUploadAsync(BucketName, objectKey, initResp.UploadId, new[] { uploadResp1, uploadResp2 }).ConfigureAwait(false);
    Assert.True(completeResp.IsSuccess);
    // NOTE(review): this repeats the uploadResp2.ETag assertion from above;
    // it was probably meant to check completeResp.ETag — confirm intent.
    Assert.NotNull(uploadResp2.ETag);

    GetObjectResponse getResp = await ObjectClient.GetObjectAsync(BucketName, objectKey).ConfigureAwait(false);

    //Provoke an 'InvalidArgument' error
    GetObjectResponse gResp1 = await ObjectClient.GetObjectAsync(BucketName, nameof(MultipartUpload), req => req.PartNumber = 0).ConfigureAwait(false);
    Assert.False(gResp1.IsSuccess);
    Assert.IsType<InvalidArgumentError>(gResp1.Error);

    // Fetching part 1 alone should return exactly half the file.
    GetObjectResponse gResp2 = await ObjectClient.GetObjectAsync(BucketName, nameof(MultipartUpload), req => req.PartNumber = 1).ConfigureAwait(false);
    Assert.True(gResp2.IsSuccess);
    Assert.Equal(file.Length / 2, (await gResp2.Content.AsDataAsync().ConfigureAwait(false)).Length);

    // Full-object read must round-trip byte-for-byte.
    Assert.Equal(file, await getResp.Content.AsDataAsync().ConfigureAwait(false));
}
/// <summary>
/// Populates <paramref name="response"/> from the headers of an UploadPart
/// reply: server-side-encryption method, ETag, and request-charged flag.
/// </summary>
private static void UnmarshallResult(XmlUnmarshallerContext context, UploadPartResponse response)
{
    // Use the property accessor directly; 'get_ResponseData()' was a
    // decompilation artifact of the same property getter.
    IWebResponseData responseData = context.ResponseData;

    if (responseData.IsHeaderPresent("x-amz-server-side-encryption"))
    {
        response.ServerSideEncryptionMethod = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-server-side-encryption"));
    }
    if (responseData.IsHeaderPresent("ETag"))
    {
        response.ETag = S3Transforms.ToString(responseData.GetHeaderValue("ETag"));
    }
    if (responseData.IsHeaderPresent(S3Constants.AmzHeaderRequestCharged))
    {
        response.RequestCharged = RequestCharged.FindValue(responseData.GetHeaderValue(S3Constants.AmzHeaderRequestCharged));
    }
}
/// <summary>
/// Uploads one part and publishes the outcome through
/// <paramref name="completionSource"/> (result on success, exception on failure).
/// The semaphore slot is released in all cases.
/// </summary>
private static async Task UploadPartAsync(TaskCompletionSource<UploadPartResponse> completionSource, IMultipartOperations operations, string bucketName, string objectKey, byte[] data, int length, int partNumber, string uploadId, SemaphoreSlim semaphore, CancellationToken token)
{
    try
    {
        // Only the first 'length' bytes of the buffer belong to this part.
        using (MemoryStream partStream = new MemoryStream(data, 0, length))
        {
            UploadPartRequest request = new UploadPartRequest(bucketName, objectKey, partNumber, uploadId, partStream);
            UploadPartResponse response = await operations.UploadPartAsync(request, token).ConfigureAwait(false);
            completionSource.SetResult(response);
        }
    }
    catch (Exception ex)
    {
        // Surface the failure to whoever awaits the completion source.
        completionSource.SetException(ex);
    }
    finally
    {
        semaphore.Release();
    }
}
/// <summary>
/// Uploads a single 5 MB part and completes the multipart upload, verifying
/// the part/object ETags and — on AWS — the lifecycle abort/expiration metadata.
/// </summary>
public async Task MultipartSinglePart(S3Provider provider, string bucket, ISimpleClient client)
{
    string objectKey = nameof(MultipartSinglePart);

    CreateMultipartUploadResponse created = await client.CreateMultipartUploadAsync(bucket, objectKey).ConfigureAwait(false);
    Assert.Equal(200, created.StatusCode);
    Assert.Equal(bucket, created.BucketName);
    Assert.Equal(objectKey, created.ObjectKey);
    Assert.NotNull(created.UploadId);

    if (provider == S3Provider.AmazonS3)
    {
        //Test lifecycle expiration
        Assert.Equal(DateTime.UtcNow.AddDays(2).Date, created.AbortsOn!.Value.UtcDateTime.Date);
        Assert.Equal("ExpireAll", created.AbortRuleId);
    }

    // A single 5 MB payload with a sentinel byte up front.
    byte[] payload = new byte[1024 * 1024 * 5];
    payload[0] = (byte)'a';

    await using MemoryStream payloadStream = new MemoryStream(payload);

    UploadPartResponse uploaded = await client.UploadPartAsync(bucket, objectKey, 1, created.UploadId, payloadStream).ConfigureAwait(false);
    Assert.Equal(200, uploaded.StatusCode);
    Assert.Equal("\"10f74ef02085310ccd1f87150b83e537\"", uploaded.ETag);

    CompleteMultipartUploadResponse completed = await client.CompleteMultipartUploadAsync(bucket, objectKey, created.UploadId, new[] { uploaded }).ConfigureAwait(false);
    Assert.Equal(200, completed.StatusCode);
    Assert.Equal("\"bd74e21dfa8678d127240f76e518e9c2-1\"", completed.ETag);

    if (provider == S3Provider.AmazonS3)
    {
        //Test lifecycle expiration
        Assert.Equal(DateTime.UtcNow.AddDays(2).Date, completed.LifeCycleExpiresOn!.Value.UtcDateTime.Date);
        Assert.Equal("ExpireAll", completed.LifeCycleRuleId);
    }
}
/// <summary>
/// Verifies that a multipart upload succeeds when object lock
/// (governance mode, 5-minute retention) is applied at creation time.
/// </summary>
public async Task MultipartLockMode()
{
    string objectKey = nameof(MultipartLockMode);

    CreateMultipartUploadResponse created = await MultipartClient.CreateMultipartUploadAsync(BucketName, objectKey, req =>
    {
        req.LockMode = LockMode.Governance;
        req.LockRetainUntil = DateTimeOffset.UtcNow.AddMinutes(5);
    }).ConfigureAwait(false);
    Assert.True(created.IsSuccess);

    byte[] payload = new byte[1024 * 1024 * 5];

    // The part is uploaded with an explicit Content-MD5 of the payload.
    UploadPartResponse uploaded = await MultipartClient.UploadPartAsync(BucketName, objectKey, 1, created.UploadId, new MemoryStream(payload), req => req.ContentMd5 = CryptoHelper.Md5Hash(payload)).ConfigureAwait(false);
    Assert.True(uploaded.IsSuccess);

    CompleteMultipartUploadResponse completed = await MultipartClient.CompleteMultipartUploadAsync(BucketName, objectKey, created.UploadId, new[] { uploaded }).ConfigureAwait(false);
    Assert.True(completed.IsSuccess);
}
/// <summary>
/// Verifies that RequestCharged is reported on every multipart operation
/// (create, upload-part, list-parts, complete) when the request payer is
/// set to Requester.
/// </summary>
public async Task MultipartRequestPayer()
{
    string objectKey = nameof(MultipartRequestPayer);

    CreateMultipartUploadResponse created = await MultipartClient.CreateMultipartUploadAsync(BucketName, objectKey, req => req.RequestPayer = Payer.Requester).ConfigureAwait(false);
    Assert.True(created.RequestCharged);

    byte[] payload = new byte[1024 * 1024 * 5];

    UploadPartResponse uploaded = await MultipartClient.UploadPartAsync(BucketName, objectKey, 1, created.UploadId, new MemoryStream(payload), req => req.RequestPayer = Payer.Requester).ConfigureAwait(false);
    Assert.True(uploaded.RequestCharged);

    ListPartsResponse listed = await MultipartClient.ListPartsAsync(BucketName, objectKey, created.UploadId, req => req.RequestPayer = Payer.Requester).ConfigureAwait(false);
    Assert.True(listed.RequestCharged);

    CompleteMultipartUploadResponse completed = await MultipartClient.CompleteMultipartUploadAsync(BucketName, objectKey, created.UploadId, new[] { uploaded }, req => req.RequestPayer = Payer.Requester).ConfigureAwait(false);
    Assert.True(completed.RequestCharged);
}
/// <summary>
/// Populates <paramref name="response"/> from the headers of an UploadPart
/// reply: SSE method, ETag, request-charged flag, bucket-key flag, and the
/// four optional checksum headers.
/// </summary>
private static void UnmarshallResult(XmlUnmarshallerContext context, UploadPartResponse response)
{
    IWebResponseData responseData = context.ResponseData;

    if (responseData.IsHeaderPresent("x-amz-server-side-encryption"))
    {
        response.ServerSideEncryptionMethod = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-server-side-encryption"));
    }
    if (responseData.IsHeaderPresent("ETag"))
    {
        response.ETag = S3Transforms.ToString(responseData.GetHeaderValue("ETag"));
    }
    if (responseData.IsHeaderPresent(S3Constants.AmzHeaderRequestCharged))
    {
        response.RequestCharged = RequestCharged.FindValue(responseData.GetHeaderValue(S3Constants.AmzHeaderRequestCharged));
    }
    if (responseData.IsHeaderPresent(S3Constants.AmzHeaderBucketKeyEnabled))
    {
        response.BucketKeyEnabled = S3Transforms.ToBool(responseData.GetHeaderValue(S3Constants.AmzHeaderBucketKeyEnabled));
    }
    // Checksum headers read through the same local rather than re-fetching
    // context.ResponseData for each one (consistent with the reads above).
    if (responseData.IsHeaderPresent("x-amz-checksum-crc32"))
    {
        response.ChecksumCRC32 = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-checksum-crc32"));
    }
    if (responseData.IsHeaderPresent("x-amz-checksum-crc32c"))
    {
        response.ChecksumCRC32C = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-checksum-crc32c"));
    }
    if (responseData.IsHeaderPresent("x-amz-checksum-sha1"))
    {
        response.ChecksumSHA1 = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-checksum-sha1"));
    }
    if (responseData.IsHeaderPresent("x-amz-checksum-sha256"))
    {
        response.ChecksumSHA256 = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-checksum-sha256"));
    }
    // (redundant trailing 'return;' removed)
}
/// <summary>
/// Upload a part.
/// </summary>
/// <param name="request">Parameters in a request for uploading a part</param>
/// <returns>Response to a part upload request</returns>
public UploadPartResponse UploadPart(UploadPartRequest request)
{
    UploadPartResponse response = this.DoRequest<UploadPartRequest, UploadPartResponse>(request, delegate()
    {
        // Validate the required fields before the request goes on the wire.
        if (request.ObjectKey == null)
        {
            throw new ObsException(Constants.InvalidObjectKeyMessage, ErrorType.Sender, Constants.InvalidObjectKey, "");
        }

        if (string.IsNullOrEmpty(request.UploadId))
        {
            throw new ObsException(Constants.InvalidUploadIdMessage, ErrorType.Sender, Constants.InvalidUploadId, "");
        }

        if (request.PartNumber <= 0)
        {
            throw new ObsException(Constants.InvalidPartNumberMessage, ErrorType.Sender, Constants.InvalidPartNumber, "");
        }
    });

    // Copy the part number from the request onto the response for callers.
    response.PartNumber = request.PartNumber;
    return response;
}
/// <summary>
/// Provider-parameterized variant: multipart upload with governance lock mode
/// and a 5-minute retention window.
/// </summary>
public async Task MultipartLockMode(S3Provider _, string bucket, ISimpleClient client)
{
    string objectKey = nameof(MultipartLockMode);

    CreateMultipartUploadResponse created = await client.CreateMultipartUploadAsync(bucket, objectKey, r =>
    {
        r.LockMode = LockMode.Governance;
        r.LockRetainUntil = DateTimeOffset.UtcNow.AddMinutes(5);
    }).ConfigureAwait(false);
    Assert.Equal(200, created.StatusCode);

    byte[] payload = new byte[1024 * 1024 * 5];
    await using MemoryStream payloadStream = new MemoryStream(payload);

    // The part carries an explicit Content-MD5 of the payload.
    UploadPartResponse uploaded = await client.UploadPartAsync(bucket, objectKey, 1, created.UploadId, payloadStream, r => r.ContentMd5 = CryptoHelper.Md5Hash(payload)).ConfigureAwait(false);
    Assert.Equal(200, uploaded.StatusCode);

    CompleteMultipartUploadResponse completed = await client.CompleteMultipartUploadAsync(bucket, objectKey, created.UploadId, new[] { uploaded }).ConfigureAwait(false);
    Assert.Equal(200, completed.StatusCode);
}
/// <summary>
/// Starts a multipart upload, uploads a single 4-byte part, and verifies the
/// part appears in ListParts with the same ETag and size.
/// </summary>
public void TestOrdinary()
{
    String uploadId = this.client.InitiateMultipartUpload(this.bucketName, "test").UploadId;

    UploadPartResponse response = this.client.UploadPart(new UploadPartRequest()
    {
        BucketName = this.bucketName,
        Key = "test",
        UploadId = uploadId,
        PartNumber = 1,
        PartSize = 4,
        InputStream = new MemoryStream(Encoding.Default.GetBytes("data"))
    });

    // Assert.AreEqual takes (expected, actual) — the original had them
    // swapped, which makes failure messages misleading.
    Assert.AreEqual(1, response.PartNumber);
    Assert.IsNotNull(response.ETag);

    List<PartSummary> parts = this.client.ListParts(this.bucketName, "test", uploadId).Parts;
    Assert.AreEqual(1, parts.Count);

    PartSummary part = parts[0];
    Assert.IsNotNull(part);
    Assert.AreEqual(response.ETag, part.ETag);
    Assert.AreEqual(4L, part.Size);
}
/// <summary>
/// Uploads a large file to Backblaze B2 in three parts (5 MB + 5 MB + 64 K),
/// verifying each part's length, part number, and SHA-1, then finishes the
/// large file and checks the combined length.
/// NOTE(review): uses .Result on async calls (blocking) and the
/// Windows-only SHA1Cng — acceptable in a test, but worth modernizing.
/// </summary>
public void BasicUploadLargeFile()
{
    byte[] part1 = new byte[0x500000]; // 5MB
    byte[] part2 = new byte[0x500000]; // 5MB
    byte[] part3 = new byte[0x10000]; // 64K
    FillByteArray(part1);
    FillByteArray(part2);
    FillByteArray(part3);

    // Pre-compute the lowercase hex SHA-1 of each part; B2 verifies these.
    string[] sha1Array = new string[3];
    using (SHA1Cng sha1 = new SHA1Cng())
    {
        sha1Array[0] = BitConverter.ToString(sha1.ComputeHash(part1)).Replace("-", "").ToLowerInvariant();
        sha1Array[1] = BitConverter.ToString(sha1.ComputeHash(part2)).Replace("-", "").ToLowerInvariant();
        sha1Array[2] = BitConverter.ToString(sha1.ComputeHash(part3)).Replace("-", "").ToLowerInvariant();
    }

    string filename = Guid.NewGuid().ToString("N") + ".txt";

    using (BackblazeB2Client client = CreateClient())
    {
        // Start the large file and obtain a part-upload URL + auth token.
        StartLargeFileResponse startLargeFileResponse = client.StartLargeUpload(accountInfo.BucketId, filename).Result;
        GetUploadPartUrlResponse getPartUploadResponse = client.GetUploadPartUrl(startLargeFileResponse.FileId).Result;

        // Part 1 (5 MB).
        using (MemoryStream memoryStream = new MemoryStream(part1))
        {
            UploadPartResponse partUploadResponse = client.UploadPart(
                getPartUploadResponse.UploadUrl,
                getPartUploadResponse.AuthorizationToken,
                1,
                sha1Array[0],
                part1.Length,
                memoryStream)
                .Result;
            Assert.AreEqual(5242880, int.Parse(partUploadResponse.ContentLength));
            Assert.AreEqual(1, Int32.Parse(partUploadResponse.PartNumber));
            Assert.AreEqual(sha1Array[0], partUploadResponse.ContentSha1);
        }

        // Part 2 (5 MB).
        using (MemoryStream memoryStream = new MemoryStream(part2))
        {
            UploadPartResponse partUploadResponse = client.UploadPart(
                getPartUploadResponse.UploadUrl,
                getPartUploadResponse.AuthorizationToken,
                2,
                sha1Array[1],
                part2.Length,
                memoryStream)
                .Result;
            Assert.AreEqual(5242880, int.Parse(partUploadResponse.ContentLength));
            Assert.AreEqual(2, int.Parse(partUploadResponse.PartNumber));
            Assert.AreEqual(sha1Array[1], partUploadResponse.ContentSha1);
        }

        // Part 3 (64 K final part).
        using (MemoryStream memoryStream = new MemoryStream(part3))
        {
            UploadPartResponse partUploadResponse = client.UploadPart(
                getPartUploadResponse.UploadUrl,
                getPartUploadResponse.AuthorizationToken,
                3,
                sha1Array[2],
                part3.Length,
                memoryStream)
                .Result;
            Assert.AreEqual(65536, int.Parse(partUploadResponse.ContentLength));
            Assert.AreEqual(3, int.Parse(partUploadResponse.PartNumber));
            Assert.AreEqual(sha1Array[2], partUploadResponse.ContentSha1);
        }

        // Finish the large file; 10551296 = 5 MB + 5 MB + 64 K.
        FinishLargeFileResponse finishLargeFileResponse = client.FinishLargeFile(
            startLargeFileResponse.FileId,
            sha1Array)
            .Result;
        Assert.AreEqual(accountInfo.BucketId, finishLargeFileResponse.BucketId);
        Assert.AreEqual(10551296, finishLargeFileResponse.ContentLength);
    }
}
/// <summary>
/// Saves a NEW resource. Only saves if resource with the MD5Sum has not previously been added.
/// </summary>
/// <param name="owner">Owner of the resource (not used by this method).</param>
/// <param name="md5OfResource">MD5 of the resource; used as identity and as the S3 object key.</param>
/// <param name="uploadIdentifier">Existing AWS upload id; ignored when partNumber == 1.</param>
/// <param name="fileStream">Stream holding this part's bytes.</param>
/// <param name="partNumber">1-based part index; part 1 initiates the multipart upload.</param>
/// <param name="numberOfParts">Total number of parts (not used by this method).</param>
/// <returns>the AWS Upload ID, or "resource already exists"</returns>
public async Task<string> UploadPartial(ResourceModel.User owner, string md5OfResource, string uploadIdentifier, Stream fileStream, int partNumber, int numberOfParts)
{
    try
    {
        DigitalResource existingResource = Get(md5OfResource);

        if (existingResource == null)
        {
            IAmazonS3 s3Client = new AmazonS3Client();

            string uploadID = uploadIdentifier;

            // The first part starts a new multipart upload and establishes the upload id.
            if (partNumber == 1)
            {
                InitiateMultipartUploadRequest initiateRequest = new InitiateMultipartUploadRequest
                {
                    BucketName = "piccoli",
                    Key = md5OfResource,
                };

                InitiateMultipartUploadResponse initResponse = await s3Client.InitiateMultipartUploadAsync(initiateRequest);
                uploadID = initResponse.UploadId;
            }

            // Upload this part; the whole incoming stream is one part.
            UploadPartRequest uploadRequest = new UploadPartRequest
            {
                BucketName = "piccoli",
                Key = md5OfResource,
                UploadId = uploadID,
                PartNumber = partNumber,
                PartSize = fileStream.Length,
                InputStream = fileStream
            };

            // Await the async API instead of blocking on the sync call inside
            // an async method (the original called s3Client.UploadPart).
            await s3Client.UploadPartAsync(uploadRequest);

            return uploadID;
        }

        return "resource already exists";
    }
    catch (AmazonS3Exception)
    {
        // 'throw;' preserves the original stack trace; the original
        // 'throw ex;' reset it.
        throw;
    }
}
/// <summary>
/// Performs a full multipart upload of <paramref name="data"/>: creates the
/// upload, uploads chunks of <paramref name="partSize"/> bytes with up to
/// <paramref name="numParallelParts"/> in flight, then completes the upload
/// with the parts ordered by part number.
/// </summary>
/// <param name="req">The create-multipart-upload request (bucket, key, SSE settings).</param>
/// <param name="data">Source stream; may be wrapped by registered request wrappers.</param>
/// <param name="partSize">Size of each uploaded part in bytes.</param>
/// <param name="numParallelParts">Maximum number of concurrent part uploads.</param>
/// <param name="onPartResponse">Optional callback invoked per completed part.</param>
/// <param name="token">Cancellation token for the whole operation.</param>
public async Task<CompleteMultipartUploadResponse> MultipartUploadAsync(CreateMultipartUploadRequest req, Stream data, int partSize = 16777216, int numParallelParts = 4, Action<UploadPartResponse>? onPartResponse = null, CancellationToken token = default)
{
    Validator.RequireNotNull(req, nameof(req));
    Validator.RequireNotNull(data, nameof(data));

    // Give registered wrappers (e.g. client-side encryption) a chance to wrap the source.
    foreach (IRequestWrapper wrapper in _requestWrappers)
    {
        if (wrapper.IsSupported(req))
        {
            data = wrapper.Wrap(data, req);
        }
    }

    string bucket = req.BucketName;
    string objectKey = req.ObjectKey;
    byte[]? encryptionKey = null;

    try
    {
        if (req.SseCustomerKey != null)
        {
            // Work on a private copy of the customer key.
            encryptionKey = new byte[req.SseCustomerKey.Length];
            Array.Copy(req.SseCustomerKey, 0, encryptionKey, 0, encryptionKey.Length);
        }

        CreateMultipartUploadResponse initResp = await _multipartOperations.CreateMultipartUploadAsync(req, token).ConfigureAwait(false);

        if (token.IsCancellationRequested)
        {
            return new CompleteMultipartUploadResponse { BucketName = bucket, ObjectKey = objectKey };
        }

        if (!initResp.IsSuccess)
        {
            throw new S3RequestException(initResp, "CreateMultipartUploadRequest was unsuccessful");
        }

        IEnumerable<ArraySegment<byte>> chunks = ReadChunks(data, partSize);

        int partNumber = 0;

        IEnumerable<UploadPartResponse> responses = await ParallelHelper.ExecuteAsync(chunks, async (bytes, innerToken) =>
        {
            // FIX: capture the incremented value atomically. The original
            // incremented and then re-read 'partNumber', which races with the
            // other parallel workers and could assign duplicate/skipped
            // part numbers.
            int currentPart = Interlocked.Increment(ref partNumber);

            using (MemoryStream ms = new MemoryStream(bytes.Array!, 0, bytes.Count))
            {
                UploadPartResponse resp = await _multipartClient.UploadPartAsync(bucket, objectKey, currentPart, initResp.UploadId, ms, uploadPart =>
                {
                    uploadPart.SseCustomerAlgorithm = req.SseCustomerAlgorithm;
                    uploadPart.SseCustomerKey = encryptionKey;
                    uploadPart.SseCustomerKeyMd5 = req.SseCustomerKeyMd5;
                }, innerToken).ConfigureAwait(false);

                onPartResponse?.Invoke(resp);
                return resp;
            }
        }, numParallelParts, token);
        // NOTE(review): chunk→part-number mapping assumes ExecuteAsync starts
        // chunks in source order — confirm against ParallelHelper.

        CompleteMultipartUploadResponse completeResp = await _multipartClient.CompleteMultipartUploadAsync(bucket, objectKey, initResp.UploadId, responses.OrderBy(x => x.PartNumber), null, token).ConfigureAwait(false);

        return completeResp;
    }
    finally
    {
        // Zero the copied key material regardless of outcome.
        if (encryptionKey != null)
        {
            Array.Clear(encryptionKey, 0, encryptionKey.Length);
        }
    }
}
/// <summary>
/// End-to-end multipart upload test using the APM (Begin/End) API of an
/// encrypting S3 client: uploads a 15 MB generated file in three parts,
/// lists and verifies the parts, completes the upload, downloads the object
/// with the decrypting client, and compares it to the original file.
/// KMS-based clients use the synchronous calls for initiate/get, since the
/// branches below special-case IsKMSEncryptionClient.
/// </summary>
public static void MultipartEncryptionTestAPM(IAmazonS3 s3EncryptionClient, IAmazonS3 s3DecryptionClient, string bucketName)
{
    var guid = Guid.NewGuid();
    var filePath = Path.Combine(Path.GetTempPath(), $"multi-{guid}.txt");
    var retrievedFilepath = Path.Combine(Path.GetTempPath(), $"retrieved-{guid}.txt");
    var totalSize = MegaByteSize * 15;

    // Generate a 15 MB temp file to upload in 5 MB parts.
    UtilityMethods.GenerateFile(filePath, totalSize);
    string key = $"key-{guid}";

    Stream inputStream = File.OpenRead(filePath);
    try
    {
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            StorageClass = S3StorageClass.OneZoneInfrequentAccess,
            ContentType = "text/html"
        };

        InitiateMultipartUploadResponse initResponse = null;
        if (IsKMSEncryptionClient(s3EncryptionClient))
        {
            // KMS clients: synchronous initiate.
            initResponse = s3EncryptionClient.InitiateMultipartUpload(initRequest);
        }
        else
        {
            // Non-KMS clients: exercise the Begin/End (APM) path.
            initResponse = s3EncryptionClient.EndInitiateMultipartUpload(
                s3EncryptionClient.BeginInitiateMultipartUpload(initRequest, null, null));
        }

        // Upload part 1
        UploadPartRequest uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MegaByteSize,
            InputStream = inputStream,
        };
        UploadPartResponse up1Response = s3EncryptionClient.EndUploadPart(
            s3EncryptionClient.BeginUploadPart(uploadRequest, null, null));

        // Upload part 2
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MegaByteSize,
            InputStream = inputStream,
        };
        UploadPartResponse up2Response = s3EncryptionClient.EndUploadPart(
            s3EncryptionClient.BeginUploadPart(uploadRequest, null, null));

        // Upload part 3 — no PartSize: the remainder of the stream, flagged as last part.
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream,
            IsLastPart = true
        };
        UploadPartResponse up3Response = s3EncryptionClient.EndUploadPart(
            s3EncryptionClient.BeginUploadPart(uploadRequest, null, null));

        // List all three parts and verify part numbers/ETags match the upload responses.
        ListPartsRequest listPartRequest = new ListPartsRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse = s3EncryptionClient.EndListParts(
            s3EncryptionClient.BeginListParts(listPartRequest, null, null));
        Assert.Equal(3, listPartResponse.Parts.Count);
        Assert.Equal(up1Response.PartNumber, listPartResponse.Parts[0].PartNumber);
        Assert.Equal(up1Response.ETag, listPartResponse.Parts[0].ETag);
        Assert.Equal(up2Response.PartNumber, listPartResponse.Parts[1].PartNumber);
        Assert.Equal(up2Response.ETag, listPartResponse.Parts[1].ETag);
        Assert.Equal(up3Response.PartNumber, listPartResponse.Parts[2].PartNumber);
        Assert.Equal(up3Response.ETag, listPartResponse.Parts[2].ETag);

        // MaxParts = 1 should page the listing down to a single part.
        listPartRequest.MaxParts = 1;
        listPartResponse = s3EncryptionClient.EndListParts(
            s3EncryptionClient.BeginListParts(listPartRequest, null, null));
        Assert.Equal(1, listPartResponse.Parts.Count);

        // Complete the response
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        compRequest.AddPartETags(up1Response, up2Response, up3Response);
        CompleteMultipartUploadResponse compResponse = s3EncryptionClient.EndCompleteMultipartUpload(
            s3EncryptionClient.BeginCompleteMultipartUpload(compRequest, null, null));
        Assert.Equal(bucketName, compResponse.BucketName);
        Assert.NotNull(compResponse.ETag);
        Assert.Equal(key, compResponse.Key);
        Assert.NotNull(compResponse.Location);

        // Get the file back from S3 and make sure it is still the same.
        GetObjectRequest getRequest = new GetObjectRequest()
        {
            BucketName = bucketName,
            Key = key
        };

        GetObjectResponse getResponse = null;
        if (IsKMSEncryptionClient(s3EncryptionClient))
        {
            getResponse = s3DecryptionClient.GetObject(getRequest);
        }
        else
        {
            getResponse = s3DecryptionClient.EndGetObject(
                s3DecryptionClient.BeginGetObject(getRequest, null, null));
        }
        getResponse.WriteResponseStreamToFile(retrievedFilepath);

        // Round-trip check: decrypted download must equal the generated source file.
        UtilityMethods.CompareFiles(filePath, retrievedFilepath);

        GetObjectMetadataRequest metaDataRequest = new GetObjectMetadataRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectMetadataResponse metaDataResponse = s3DecryptionClient.EndGetObjectMetadata(
            s3DecryptionClient.BeginGetObjectMetadata(metaDataRequest, null, null));
        Assert.Equal("text/html", metaDataResponse.Headers.ContentType);
    }
    finally
    {
        // Always close the source stream and delete both temp files.
        inputStream.Close();
        if (File.Exists(filePath))
        {
            File.Delete(filePath);
        }
        if (File.Exists(retrievedFilepath))
        {
            File.Delete(retrievedFilepath);
        }
    }
}
/// <summary>
/// Interactive sample: reads an object key and a file path from the console,
/// then uploads the file to S3/ECS in 2 MB multipart chunks and completes
/// (or aborts, on failure) the upload.
/// </summary>
public static void Main(string[] args)
{
    // create the AWS S3 client
    AmazonS3Client s3 = AWSS3Factory.getS3Client();

    // retrieve the object key/value from user
    Console.Write("Enter the object key: ");
    string key = Console.ReadLine();
    Console.Write("Enter the file location: ");
    string filePath = Console.ReadLine();

    // grab the start time of upload
    DateTime startDate = DateTime.Now;

    // part size for chunking in multi-part
    long partSize = 1024 * 1024 * 2; // 2 MB

    // list of upload part response objects for each part that is uploaded
    List<PartETag> partETags = new List<PartETag>();

    // Step 1: Initialize
    InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest()
    {
        BucketName = AWSS3Factory.S3_BUCKET,
        Key = key,
    };
    InitiateMultipartUploadResponse initResponse = s3.InitiateMultipartUpload(initRequest);

    // get the file and file length
    long contentLength = new FileInfo(filePath).Length;

    Console.WriteLine(string.Format("Starting multi-part upload for object {0}/{1} with file path {2} and size {3} in {4} MB size chunks",
        AWSS3Factory.S3_BUCKET, key, filePath, Convert.ToString(contentLength), partSize / 1024 / 1024));

    try
    {
        // Step 2: Upload parts
        long filePosition = 0;
        for (int i = 1; filePosition < contentLength; i++)
        {
            // get the size of the chunk. Note - the last part can be less than the chunk size
            partSize = Math.Min(partSize, (contentLength - filePosition));

            Console.WriteLine(string.Format("Sending chunk {0} starting at position {1}", i, filePosition));

            // create request to upload a part
            UploadPartRequest uploadRequest = new UploadPartRequest()
            {
                BucketName = AWSS3Factory.S3_BUCKET,
                Key = key,
                UploadId = initResponse.UploadId,
                PartNumber = i,
                FilePosition = filePosition,
                FilePath = filePath,
                PartSize = partSize
            };

            UploadPartResponse partResponse = s3.UploadPart(uploadRequest);
            partETags.Add(new PartETag(partResponse.PartNumber, partResponse.ETag));

            // advance to the next chunk (was 'filePosition = filePosition += partSize;',
            // a redundant double assignment)
            filePosition += partSize;
        }

        // Step 3: complete
        Console.WriteLine("Waiting for completion of multi-part upload");
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest()
        {
            BucketName = AWSS3Factory.S3_BUCKET,
            Key = key,
            UploadId = initResponse.UploadId,
            PartETags = partETags
        };
        s3.CompleteMultipartUpload(compRequest);
    }
    catch (Exception e)
    {
        // Report the failure instead of silently swallowing it, then abort so
        // the partial upload does not linger on the server.
        Console.WriteLine(string.Format("Multi-part upload failed: {0}", e.Message));
        s3.AbortMultipartUpload(new AbortMultipartUploadRequest()
        {
            BucketName = AWSS3Factory.S3_BUCKET,
            Key = key,
            UploadId = initResponse.UploadId
        });
    }

    // grab the end time of upload
    DateTime endDate = DateTime.Now;

    Console.WriteLine(string.Format("Completed multi-part upload for object {0}/{1} with file path: {2}", AWSS3Factory.S3_BUCKET, key, filePath));
    Console.WriteLine(string.Format("Process took: {0} seconds.", (endDate - startDate).TotalSeconds.ToString()));
    Console.ReadLine();
}
/// <summary>
/// Streams an S3 object through GZip compression and multipart-uploads the
/// compressed output to the configured output bucket/key. Returns false if
/// the input already has the zipped extension, true once the compressed
/// upload completes.
/// </summary>
/// <param name="context">Lambda execution context (unused here beyond logging scope).</param>
/// <param name="inputBucketName">Bucket holding the source object.</param>
/// <param name="inputKey">Key of the source object.</param>
private async Task<bool> CompressFile(ILambdaContext context, string inputBucketName, string inputKey)
{
    //Check if file is already zipped
    if (inputKey.EndsWith(_ZippedFileExtension))
    {
        logger.LogLineWithId($"File already appears to be zipped, file extension is: {_ZippedFileExtension}. Function complete");
        return(false);
    }

    //Retrieve input stream
    var obj = await S3Client.GetObjectAsync(new GetObjectRequest { BucketName = inputBucketName, Key = inputKey });
    var inputStream = obj.ResponseStream;
    logger.LogLineWithId("Object response stream obtained");

    //Initiate multipart upload. Output bucket falls back to the input bucket
    //when no output bucket is configured.
    string outputBucketName = !string.IsNullOrEmpty(this.variables.OutputBucket) ? this.variables.OutputBucket : inputBucketName;
    string outputKey = this.GetOutputKey(inputKey);

    InitiateMultipartUploadRequest uploadRequest = new InitiateMultipartUploadRequest { BucketName = outputBucketName, Key = outputKey };
    InitiateMultipartUploadResponse uploadResponse = await S3Client.InitiateMultipartUploadAsync(uploadRequest);
    var uploadPartResponses = new List<PartETag>();
    logger.LogLineWithId($"Upload initiated. Output object: {outputBucketName}/{outputKey}");

    var partNumber = 1;
    var filePartReadSize = this.variables.FilePartReadSize;
    var minimumUploadSize = this.variables.MinimumUploadSize;
    var streamEnded = false;

    //Process input file
    while (!streamEnded)
    {
        using (MemoryStream memoryStream = new MemoryStream())
        {
            //Read chunks of the input file, compressing as we go, until we hit the minimum upload size or the end of the stream
            //(GZipStream leaveOpen: true so the memory stream survives the zip stream's disposal/flush).
            using (GZipStream zipStream = new GZipStream(memoryStream, CompressionLevel.Fastest, true))
            {
                while (memoryStream.Length < minimumUploadSize)
                {
                    //Copy a set number of bytes from the input stream to the compressed memory stream
                    var bytesCopied = CopyBytes(filePartReadSize, inputStream, zipStream);
                    if (bytesCopied == 0)
                    {
                        streamEnded = true;
                        break;
                    }
                }
            }
            logger.LogLineWithId($"Part {partNumber} read and compressed");

            //Check to make sure that we have actually read something in before proceeding
            if (memoryStream.Length == 0)
            {
                break;
            }

            //Upload memory stream to S3 (rewind first so the upload reads from the start)
            memoryStream.Position = 0;
            UploadPartRequest uploadPartRequest = GetUploadPartRequest(outputBucketName, outputKey, uploadResponse.UploadId, partNumber, streamEnded, memoryStream);
            UploadPartResponse uploadPartResponse = await S3Client.UploadPartAsync(uploadPartRequest);
            uploadPartResponses.Add(new PartETag { ETag = uploadPartResponse.ETag, PartNumber = partNumber });
            logger.LogLineWithId($"Part {partNumber} uploaded.");
            partNumber++;
        }

        //Run the garbage collector so that Lambda doesn't run out of memory for large files
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }

    //Complete multipart upload request
    logger.LogLineWithId($"Completing upload request");
    CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest
    {
        BucketName = outputBucketName,
        Key = outputKey,
        UploadId = uploadResponse.UploadId,
        PartETags = uploadPartResponses
    };
    CompleteMultipartUploadResponse compResponse = await S3Client.CompleteMultipartUploadAsync(compRequest);
    logger.LogLineWithId($"Upload request completed");

    inputStream.Dispose();
    return(true);
}
/// <summary>
/// End-to-end test of a manual two-part multipart upload using SSE-C (AES-256 with a
/// customer-supplied key): initiate, upload both parts, complete, then download the
/// whole object and individual parts, verifying the round trip.
/// </summary>
public async Task ManualMultiplePartUpload()
{
    string resourceName = nameof(ManualMultiplePartUpload);

    // 32-byte SSE-C key plus its MD5 digest; both must accompany every request that
    // touches the encrypted object.
    byte[] key = { 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8 };
    byte[] keyMd5 = CryptoHelper.Md5Hash(key);

    InitiateMultipartUploadResponse initResp = await ObjectClient.InitiateMultipartUploadAsync(BucketName, resourceName, request =>
    {
        request.SseCustomerAlgorithm = SseCustomerAlgorithm.Aes256;
        request.SseCustomerKey = key;
        request.SseCustomerKeyMd5 = keyMd5;
    }).ConfigureAwait(false);
    Assert.True(initResp.IsSuccess);
    Assert.Equal(BucketName, initResp.Bucket);
    Assert.Equal(resourceName, initResp.Key);
    Assert.NotNull(initResp.UploadId);

    // 10 MiB payload with sentinel bytes at both ends, split into two equal parts.
    byte[] file = new byte[1024 * 1024 * 10];
    file[0] = (byte)'a';
    file[file.Length - 1] = (byte)'b';
    byte[][] parts = file.Chunk(file.Length / 2).Select(x => x.ToArray()).ToArray();

    UploadPartResponse uploadResp1 = await ObjectClient.UploadPartAsync(BucketName, resourceName, 1, initResp.UploadId, new MemoryStream(parts[0]), request =>
    {
        request.SseCustomerAlgorithm = SseCustomerAlgorithm.Aes256;
        request.SseCustomerKey = key;
        request.SseCustomerKeyMd5 = keyMd5;
    }).ConfigureAwait(false);
    Assert.Equal(SseCustomerAlgorithm.Aes256, uploadResp1.SseCustomerAlgorithm);
    Assert.Equal(keyMd5, uploadResp1.SseCustomerKeyMd5);
    Assert.True(uploadResp1.IsSuccess);
    Assert.NotNull(uploadResp1.ETag);

    UploadPartResponse uploadResp2 = await ObjectClient.UploadPartAsync(BucketName, resourceName, 2, initResp.UploadId, new MemoryStream(parts[1]), request =>
    {
        request.SseCustomerAlgorithm = SseCustomerAlgorithm.Aes256;
        request.SseCustomerKey = key;
        request.SseCustomerKeyMd5 = keyMd5;
    }).ConfigureAwait(false);
    Assert.True(uploadResp2.IsSuccess);
    Assert.NotNull(uploadResp2.ETag);

    CompleteMultipartUploadResponse completeResp = await ObjectClient.CompleteMultipartUploadAsync(BucketName, resourceName, initResp.UploadId, new[] { uploadResp1, uploadResp2 }).ConfigureAwait(false);
    Assert.True(completeResp.IsSuccess);
    // Fix: removed a stray duplicated 'Assert.NotNull(uploadResp2.ETag);' that appeared
    // here after completion — a copy-paste leftover re-checking an already-verified value.

    GetObjectResponse getResp = await ObjectClient.GetObjectAsync(BucketName, resourceName, request =>
    {
        request.SseCustomerAlgorithm = SseCustomerAlgorithm.Aes256;
        request.SseCustomerKey = key;
        request.SseCustomerKeyMd5 = keyMd5;
    }).ConfigureAwait(false);

    // Provoke an 'InvalidArgument' error: part numbers are 1-based, so 0 is rejected.
    GetObjectResponse gResp1 = await ObjectClient.GetObjectAsync(BucketName, nameof(ManualMultiplePartUpload), request =>
    {
        request.PartNumber = 0;
        request.SseCustomerAlgorithm = SseCustomerAlgorithm.Aes256;
        request.SseCustomerKey = key;
        request.SseCustomerKeyMd5 = keyMd5;
    }).ConfigureAwait(false);
    Assert.False(gResp1.IsSuccess);
    Assert.IsType<InvalidArgumentError>(gResp1.Error);

    // Fetching part 1 alone should return exactly half the object's bytes.
    GetObjectResponse gResp2 = await ObjectClient.GetObjectAsync(BucketName, nameof(ManualMultiplePartUpload), request =>
    {
        request.PartNumber = 1;
        request.SseCustomerAlgorithm = SseCustomerAlgorithm.Aes256;
        request.SseCustomerKey = key;
        request.SseCustomerKeyMd5 = keyMd5;
    }).ConfigureAwait(false);
    Assert.True(gResp2.IsSuccess);
    Assert.Equal(file.Length / 2, gResp2.Content.AsStream().Length);

    // The full download must round-trip byte-for-byte.
    Assert.Equal(file, await getResp.Content.AsDataAsync().ConfigureAwait(false));
}
/// <summary>
/// Uploads <paramref name="data"/> as a multipart upload to the bucket/resource named in
/// <paramref name="req"/>, yielding each part's response as it completes, then issues the
/// CompleteMultipartUpload request.
/// </summary>
/// <param name="req">Initiate-request identifying the target bucket and resource.</param>
/// <param name="data">Source stream. Must report a valid Length; assumed to be read from
/// its current position — TODO confirm callers always pass position 0.</param>
/// <param name="partSize">Size of each part in bytes (default 16 MiB).</param>
/// <param name="numParallelParts">Maximum number of concurrently in-flight part uploads.</param>
/// <param name="token">Cancellation token. Cancelling mid-stream stops enumeration without
/// completing (or aborting) the upload server-side.</param>
/// <exception cref="RequestException">Initiation or completion was unsuccessful.</exception>
public async IAsyncEnumerable <UploadPartResponse> MultipartUploadAsync(InitiateMultipartUploadRequest req, Stream data, int partSize = 16777216, int numParallelParts = 4, [EnumeratorCancellation] CancellationToken token = default)
{
    Validator.RequireNotNull(req);
    Validator.RequireNotNull(data);

    // Let registered request wrappers (e.g. payload transformations) replace the stream.
    if (RequestWrappers != null)
    {
        foreach (IRequestWrapper wrapper in RequestWrappers)
        {
            if (wrapper.IsSupported(req))
            {
                data = wrapper.Wrap(data, req);
            }
        }
    }

    string bucket = req.BucketName;
    string resource = req.Resource;

    InitiateMultipartUploadResponse initResp = await InitiateMultipartUploadAsync(req, token).ConfigureAwait(false);

    if (token.IsCancellationRequested)
    {
        yield break;
    }

    if (!initResp.IsSuccess)
    {
        throw new RequestException(initResp.StatusCode, "InitiateMultipartUploadRequest was unsuccessful");
    }

    Queue <Task <UploadPartResponse> > uploads = new Queue <Task <UploadPartResponse> >();

    using (SemaphoreSlim semaphore = new SemaphoreSlim(numParallelParts))
    {
        long offset = 0;

        // Part numbers are 1-based; the semaphore caps how many part uploads run at once.
        for (int i = 1; offset < data.Length; i++)
        {
            await semaphore.WaitAsync(token).ConfigureAwait(false);

            if (token.IsCancellationRequested)
            {
                break;
            }

            long remaining = data.Length - offset;
            long bufferSize = Math.Min(remaining, partSize);
            byte[] partData = new byte[bufferSize];

            // Fix: Stream.ReadAsync may return fewer bytes than requested (the original
            // code ignored its return value, risking upload of a partially-filled,
            // zero-padded buffer). Keep reading until the part buffer is full or the
            // stream ends early.
            int filled = 0;
            while (filled < partData.Length)
            {
                int bytesRead = await data.ReadAsync(partData, filled, partData.Length - filled, token).ConfigureAwait(false);
                if (bytesRead == 0)
                {
                    break;
                }
                filled += bytesRead;
            }

            uploads.Enqueue(UploadPartAsync(bucket, resource, partData, i, initResp.UploadId, semaphore, token));
            offset += bufferSize;
        }

        Queue <UploadPartResponse> responses = new Queue <UploadPartResponse>(uploads.Count);

        while (uploads.TryDequeue(out Task <UploadPartResponse> task))
        {
            // NOTE(review): bailing out here leaves in-flight upload tasks holding the
            // semaphore the enclosing 'using' is about to dispose — pre-existing
            // behavior, confirm before changing.
            if (token.IsCancellationRequested)
            {
                yield break;
            }

            UploadPartResponse response = await task.ConfigureAwait(false);
            responses.Enqueue(response);
            yield return response;
        }

        CompleteMultipartUploadRequest completeReq = new CompleteMultipartUploadRequest(bucket, resource, initResp.UploadId, responses);
        CompleteMultipartUploadResponse completeResp = await CompleteMultipartUploadAsync(completeReq, token).ConfigureAwait(false);

        if (!completeResp.IsSuccess)
        {
            throw new RequestException(completeResp.StatusCode, "CompleteMultipartUploadRequest was unsuccessful");
        }
    }
}
/// <summary>
/// Integration test: uploads a ~15 MB generated file in three sequential parts through the
/// encryption client (instruction-file mode), verifies the uploaded parts via ListParts,
/// completes the upload, then downloads the object and compares it with the source file.
/// NOTE(review): parts 1-3 deliberately read consecutive slices of the same inputStream,
/// so the order of the three UploadPart calls must not change.
/// </summary>
public void MultipartEncryptionTestInstructionFile()
{
    string filePath = @"C:\temp\Upload15MegFileIn3PartsViaStream.txt";
    string retrievedFilepath = @"C:\temp\Upload15MegFileIn3PartsViaStreamRetreived.txt";
    // Slightly-over-1MiB unit so parts do not fall on exact MiB boundaries.
    int MEG_SIZE = (int)Math.Pow(2, 20) + 4001;
    long totalSize = (long)(15 * MEG_SIZE);
    UtilityMethods.GenerateFile(filePath, totalSize);
    string key = "MultipartEncryptionTestInstrcutionFile" + random.Next();

    s3EncryptionClientFileMode.PutBucket(new PutBucketRequest() { BucketName = bucketName });
    Stream inputStream = File.OpenRead(filePath);
    try
    {
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            StorageClass = S3StorageClass.ReducedRedundancy,
            ContentType = "text/html",
            CannedACL = S3CannedACL.PublicRead
        };
        InitiateMultipartUploadResponse initResponse = s3EncryptionClientFileMode.InitiateMultipartUpload(initRequest);

        // Upload part 1 (first 5 * MEG_SIZE bytes of the stream).
        UploadPartRequest uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MEG_SIZE,
            InputStream = inputStream
        };
        UploadPartResponse up1Response = s3EncryptionClientFileMode.UploadPart(uploadRequest);

        // Upload part 2 (next 5 * MEG_SIZE + 4001 bytes).
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MEG_SIZE + 4001,
            InputStream = inputStream
        };
        UploadPartResponse up2Response = s3EncryptionClientFileMode.UploadPart(uploadRequest);

        // Upload part 3: no PartSize, so the remainder of the stream is consumed;
        // IsLastPart tells the encryption client to flush/pad the final block.
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream,
            IsLastPart = true
        };
        UploadPartResponse up3Response = s3EncryptionClientFileMode.UploadPart(uploadRequest);

        // Listed parts must match what was uploaded, in order.
        ListPartsRequest listPartRequest = new ListPartsRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse = s3EncryptionClientFileMode.ListParts(listPartRequest);
        Assert.AreEqual(3, listPartResponse.Parts.Count);
        Assert.AreEqual(up1Response.PartNumber, listPartResponse.Parts[0].PartNumber);
        Assert.AreEqual(up1Response.ETag, listPartResponse.Parts[0].ETag);
        Assert.AreEqual(up2Response.PartNumber, listPartResponse.Parts[1].PartNumber);
        Assert.AreEqual(up2Response.ETag, listPartResponse.Parts[1].ETag);
        Assert.AreEqual(up3Response.PartNumber, listPartResponse.Parts[2].PartNumber);
        Assert.AreEqual(up3Response.ETag, listPartResponse.Parts[2].ETag);

        // MaxParts paging: asking for a single part returns exactly one.
        listPartRequest.MaxParts = 1;
        listPartResponse = s3EncryptionClientFileMode.ListParts(listPartRequest);
        Assert.AreEqual(1, listPartResponse.Parts.Count);

        // Complete the upload with the three collected part ETags.
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        compRequest.AddPartETags(up1Response, up2Response, up3Response);
        CompleteMultipartUploadResponse compResponse = s3EncryptionClientFileMode.CompleteMultipartUpload(compRequest);
        Assert.AreEqual(bucketName, compResponse.BucketName);
        Assert.IsNotNull(compResponse.ETag);
        Assert.AreEqual(key, compResponse.Key);
        Assert.IsNotNull(compResponse.Location);

        // Get the file back from S3 and make sure it is still the same.
        GetObjectRequest getRequest = new GetObjectRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectResponse getResponse = s3EncryptionClientFileMode.GetObject(getRequest);
        getResponse.WriteResponseStreamToFile(retrievedFilepath);
        UtilityMethods.CompareFiles(filePath, retrievedFilepath);

        // The stored content type must survive the round trip.
        GetObjectMetadataRequest metaDataRequest = new GetObjectMetadataRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectMetadataResponse metaDataResponse = s3EncryptionClientFileMode.GetObjectMetadata(metaDataRequest);
        Assert.AreEqual("text/html", metaDataResponse.Headers.ContentType);
    }
    finally
    {
        // Local cleanup regardless of test outcome.
        inputStream.Close();
        if (File.Exists(filePath))
        {
            File.Delete(filePath);
        }
        if (File.Exists(retrievedFilepath))
        {
            File.Delete(retrievedFilepath);
        }
    }
}
/// <summary>
/// Sample code demonstrating common S3 object operations: listing, get, metadata, put
/// (body/file/stream), delete (single and batch), copy, versioning, and a manual
/// three-part multipart upload. Each #region is an independent, self-contained snippet.
/// </summary>
public void ObjectSamples()
{
    {
        #region ListObjects Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // List all objects
        ListObjectsRequest listRequest = new ListObjectsRequest
        {
            BucketName = "SampleBucket",
        };

        ListObjectsResponse listResponse;
        do
        {
            // Get a list of objects
            listResponse = client.ListObjects(listRequest);
            foreach (S3Object obj in listResponse.S3Objects)
            {
                Console.WriteLine("Object - " + obj.Key);
                Console.WriteLine(" Size - " + obj.Size);
                Console.WriteLine(" LastModified - " + obj.LastModified);
                Console.WriteLine(" Storage class - " + obj.StorageClass);
            }

            // Set the marker property to continue from where the last page ended.
            listRequest.Marker = listResponse.NextMarker;
        } while (listResponse.IsTruncated);

        #endregion
    }
    {
        #region GetObject Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetObject request
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request and remember to dispose of the response
        using (GetObjectResponse response = client.GetObject(request))
        {
            using (StreamReader reader = new StreamReader(response.ResponseStream))
            {
                string contents = reader.ReadToEnd();
                Console.WriteLine("Object - " + response.Key);
                Console.WriteLine(" Version Id - " + response.VersionId);
                Console.WriteLine(" Contents - " + contents);
            }
        }

        #endregion
    }
    {
        #region GetObjectMetadata Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetObjectMetadata request
        GetObjectMetadataRequest request = new GetObjectMetadataRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request and view the response
        GetObjectMetadataResponse response = client.GetObjectMetadata(request);
        Console.WriteLine("Content Length - " + response.ContentLength);
        Console.WriteLine("Content Type - " + response.Headers.ContentType);
        if (response.Expiration != null)
        {
            Console.WriteLine("Expiration Date - " + response.Expiration.ExpiryDate);
            Console.WriteLine("Expiration Rule Id - " + response.Expiration.RuleId);
        }

        #endregion
    }
    {
        #region PutObject Sample 1

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request with an inline content body
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            ContentBody = "This is sample content..."
        };

        // Put object
        PutObjectResponse response = client.PutObject(request);

        #endregion
    }
    {
        #region PutObject Sample 2

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request sourced from a file on disk
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            FilePath = "contents.txt"
        };

        // Put object
        PutObjectResponse response = client.PutObject(request);

        #endregion
    }
    {
        #region PutObject Sample 3

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request sourced from an arbitrary input stream
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
        };

        using (FileStream stream = new FileStream("contents.txt", FileMode.Open))
        {
            request.InputStream = stream;

            // Put object
            PutObjectResponse response = client.PutObject(request);
        }

        #endregion
    }
    {
        #region DeleteObject Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a DeleteObject request
        DeleteObjectRequest request = new DeleteObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request
        client.DeleteObject(request);

        #endregion
    }
    {
        #region DeleteObjects Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a DeleteObjects request covering plain, versioned, and nested keys
        DeleteObjectsRequest request = new DeleteObjectsRequest
        {
            BucketName = "SampleBucket",
            Objects = new List <KeyVersion>
            {
                new KeyVersion() { Key = "Item1" },
                // Versioned item
                new KeyVersion() { Key = "Item2", VersionId = "Rej8CiBxcZKVK81cLr39j27Y5FVXghDK", },
                // Item in subdirectory
                new KeyVersion() { Key = "Logs/error.txt" }
            }
        };

        try
        {
            // Issue request
            DeleteObjectsResponse response = client.DeleteObjects(request);
        }
        catch (DeleteObjectsException doe)
        {
            // Catch error and list error details; the exception carries the partial response.
            DeleteObjectsResponse errorResponse = doe.Response;

            foreach (DeletedObject deletedObject in errorResponse.DeletedObjects)
            {
                Console.WriteLine("Deleted item " + deletedObject.Key);
            }
            foreach (DeleteError deleteError in errorResponse.DeleteErrors)
            {
                Console.WriteLine("Error deleting item " + deleteError.Key);
                Console.WriteLine(" Code - " + deleteError.Code);
                Console.WriteLine(" Message - " + deleteError.Message);
            }
        }

        #endregion
    }
    {
        #region CopyObject Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a CopyObject request
        CopyObjectRequest request = new CopyObjectRequest
        {
            SourceBucket = "SampleBucket",
            SourceKey = "Item1",
            DestinationBucket = "AnotherBucket",
            DestinationKey = "Copy1",
            CannedACL = S3CannedACL.PublicRead
        };

        // Issue request
        client.CopyObject(request);

        #endregion
    }
    // Fix: removed a second, byte-identical "CopyObject Sample" region that duplicated
    // the one above (copy-paste leftover with a colliding region name).
    {
        #region ListVersions Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Turn versioning on for a bucket.
        // Fix: the status value must be "Enabled" — the previous "Enable" is not a valid
        // S3 versioning status (valid values are "Enabled" and "Suspended") and the call
        // would be rejected by the service.
        client.PutBucketVersioning(new PutBucketVersioningRequest
        {
            BucketName = "SampleBucket",
            VersioningConfig = new S3BucketVersioningConfig { Status = "Enabled" }
        });

        // Populate bucket with multiple items, each with multiple versions
        PopulateBucket(client, "SampleBucket");

        // Get versions
        ListVersionsRequest request = new ListVersionsRequest
        {
            BucketName = "SampleBucket"
        };

        // Make paged ListVersions calls
        ListVersionsResponse response;
        do
        {
            response = client.ListVersions(request);

            // View information about versions
            foreach (var version in response.Versions)
            {
                Console.WriteLine("Key = {0}, Version = {1}, IsLatest = {2}, LastModified = {3}, Size = {4}",
                    version.Key,
                    version.VersionId,
                    version.IsLatest,
                    version.LastModified,
                    version.Size);
            }

            request.KeyMarker = response.NextKeyMarker;
            request.VersionIdMarker = response.NextVersionIdMarker;
        } while (response.IsTruncated);

        #endregion
    }
    {
        #region Multipart Upload Sample

        int MB = (int)Math.Pow(2, 20);

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Define input stream; parts are read sequentially from it.
        Stream inputStream = Create13MBDataStream();

        // Initiate multipart upload
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };
        InitiateMultipartUploadResponse initResponse = client.InitiateMultipartUpload(initRequest);

        // Upload part 1
        UploadPartRequest uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MB,
            InputStream = inputStream
        };
        UploadPartResponse up1Response = client.UploadPart(uploadRequest);

        // Upload part 2
        uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MB,
            InputStream = inputStream
        };
        UploadPartResponse up2Response = client.UploadPart(uploadRequest);

        // Upload part 3 (no PartSize: consumes the remainder of the stream)
        uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream
        };
        UploadPartResponse up3Response = client.UploadPart(uploadRequest);

        // List parts for current upload
        ListPartsRequest listPartRequest = new ListPartsRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse = client.ListParts(listPartRequest);
        Debug.Assert(listPartResponse.Parts.Count == 3);

        // Complete the multipart upload
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartETags = new List <PartETag>
            {
                new PartETag { ETag = up1Response.ETag, PartNumber = 1 },
                new PartETag { ETag = up2Response.ETag, PartNumber = 2 },
                new PartETag { ETag = up3Response.ETag, PartNumber = 3 }
            }
        };
        CompleteMultipartUploadResponse compResponse = client.CompleteMultipartUpload(compRequest);

        #endregion
    }
}
/// <summary>
/// Async integration test: uploads a ~15 MB generated file in three sequential parts via
/// the encryption client (instruction-file mode), verifies parts via ListParts, completes
/// the upload, then downloads the object and compares it with the source file.
/// NOTE(review): parts 1-3 deliberately read consecutive slices of the same inputStream,
/// so the order of the three UploadPartAsync calls must not change.
/// </summary>
public async Task MultipartEncryptionTestInstructionFile()
{
    // Temp-file paths; registered with _filesToDelete for fixture-level cleanup.
    string filePath = Path.Combine(Path.GetTempPath(), "MulitpartEncryptionTestInstructionFile_upload.txt");
    string retrievedFilepath = Path.Combine(Path.GetTempPath(), "MulitpartEncryptionTestInstructionFile_download.txt");
    int MEG_SIZE = (int)Math.Pow(2, 20);
    // 15 MiB plus 4001 bytes so the final part is not block-aligned.
    long totalSize = (long)(15 * MEG_SIZE) + 4001;
    UtilityMethods.GenerateFile(filePath, totalSize);
    _filesToDelete.Add(filePath);
    string key = "MultipartEncryptionTestInstrcutionFile" + random.Next();

    using (Stream inputStream = File.OpenRead(filePath))
    {
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            StorageClass = S3StorageClass.ReducedRedundancy,
            ContentType = "text/html",
            CannedACL = S3CannedACL.PublicRead
        };
        InitiateMultipartUploadResponse initResponse = await s3EncryptionClientFileMode.InitiateMultipartUploadAsync(initRequest);

        // Upload part 1 (first 5 * MEG_SIZE bytes of the stream).
        UploadPartRequest uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MEG_SIZE,
            InputStream = inputStream
        };
        UploadPartResponse up1Response = await s3EncryptionClientFileMode.UploadPartAsync(uploadRequest);

        // Upload part 2 (next 5 * MEG_SIZE + 4001 bytes).
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MEG_SIZE + 4001,
            InputStream = inputStream
        };
        UploadPartResponse up2Response = await s3EncryptionClientFileMode.UploadPartAsync(uploadRequest);

        // Upload part 3: no PartSize, so the remainder of the stream is consumed;
        // IsLastPart tells the encryption client to flush/pad the final block.
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream,
            IsLastPart = true
        };
        UploadPartResponse up3Response = await s3EncryptionClientFileMode.UploadPartAsync(uploadRequest);

        // Listed parts must match what was uploaded, in order.
        ListPartsRequest listPartRequest = new ListPartsRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse = await s3EncryptionClientFileMode.ListPartsAsync(listPartRequest);
        Assert.Equal(3, listPartResponse.Parts.Count);
        Assert.Equal(up1Response.PartNumber, listPartResponse.Parts[0].PartNumber);
        Assert.Equal(up1Response.ETag, listPartResponse.Parts[0].ETag);
        Assert.Equal(up2Response.PartNumber, listPartResponse.Parts[1].PartNumber);
        Assert.Equal(up2Response.ETag, listPartResponse.Parts[1].ETag);
        Assert.Equal(up3Response.PartNumber, listPartResponse.Parts[2].PartNumber);
        Assert.Equal(up3Response.ETag, listPartResponse.Parts[2].ETag);

        // MaxParts paging: asking for a single part returns exactly one.
        listPartRequest.MaxParts = 1;
        listPartResponse = await s3EncryptionClientFileMode.ListPartsAsync(listPartRequest);
        Assert.Equal(1, listPartResponse.Parts.Count);

        // Complete the upload with the three collected part ETags.
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        compRequest.AddPartETags(up1Response, up2Response, up3Response);
        CompleteMultipartUploadResponse compResponse = await s3EncryptionClientFileMode.CompleteMultipartUploadAsync(compRequest);
        Assert.Equal(bucketName, compResponse.BucketName);
        Assert.NotNull(compResponse.ETag);
        Assert.Equal(key, compResponse.Key);
        Assert.NotNull(compResponse.Location);

        // Get the file back from S3 and make sure it is still the same.
        GetObjectRequest getRequest = new GetObjectRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectResponse getResponse = await s3EncryptionClientFileMode.GetObjectAsync(getRequest);
        await getResponse.WriteResponseStreamToFileAsync(retrievedFilepath, false, System.Threading.CancellationToken.None);
        _filesToDelete.Add(retrievedFilepath);
        UtilityMethods.CompareFiles(filePath, retrievedFilepath);

        // The stored content type must survive the round trip.
        GetObjectMetadataRequest metaDataRequest = new GetObjectMetadataRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectMetadataResponse metaDataResponse = await s3EncryptionClientFileMode.GetObjectMetadataAsync(metaDataRequest);
        Assert.Equal("text/html", metaDataResponse.Headers.ContentType);
    }
}