static void Main(string[] args)
{
    IAmazonS3 s3Client = new AmazonS3Client(Amazon.RegionEndpoint.USEast1);

    // Responses from each CopyPart call; their ETags are required to complete the upload.
    List<CopyPartResponse> copyResponses = new List<CopyPartResponse>();

    InitiateMultipartUploadRequest initiateRequest = new InitiateMultipartUploadRequest
    {
        BucketName = targetBucket,
        Key = targetObjectKey
    };
    InitiateMultipartUploadResponse initResponse = s3Client.InitiateMultipartUpload(initiateRequest);
    string uploadId = initResponse.UploadId;

    try
    {
        // Get object size so we know how many parts to copy.
        GetObjectMetadataRequest metadataRequest = new GetObjectMetadataRequest
        {
            BucketName = sourceBucket,
            Key = sourceObjectKey
        };
        GetObjectMetadataResponse metadataResponse = s3Client.GetObjectMetadata(metadataRequest);
        long objectSize = metadataResponse.ContentLength; // in bytes

        // Copy parts of 5 MB each (the S3 minimum part size for all but the last part).
        long partSize = 5 * (long)Math.Pow(2, 20); // 5 MB
        long bytePosition = 0;
        for (int i = 1; bytePosition < objectSize; i++)
        {
            CopyPartRequest copyRequest = new CopyPartRequest
            {
                DestinationBucket = targetBucket,
                DestinationKey = targetObjectKey,
                SourceBucket = sourceBucket,
                SourceKey = sourceObjectKey,
                UploadId = uploadId,
                FirstByte = bytePosition,
                // Byte ranges are inclusive; clamp the final part to the last byte.
                LastByte = bytePosition + partSize - 1 >= objectSize ? objectSize - 1 : bytePosition + partSize - 1,
                PartNumber = i
            };
            copyResponses.Add(s3Client.CopyPart(copyRequest));
            bytePosition += partSize;
        }

        CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest
        {
            BucketName = targetBucket,
            Key = targetObjectKey,
            UploadId = uploadId
        };
        completeRequest.AddPartETags(copyResponses);
        CompleteMultipartUploadResponse completeUploadResponse = s3Client.CompleteMultipartUpload(completeRequest);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
        // Abort the upload so the already-copied parts do not linger in S3
        // (incomplete multipart uploads continue to accrue storage charges).
        s3Client.AbortMultipartUpload(new AbortMultipartUploadRequest
        {
            BucketName = targetBucket,
            Key = targetObjectKey,
            UploadId = uploadId
        });
    }
}
/// <summary>
/// Finalizes a multipart upload by asking Amazon S3 to assemble all previously uploaded parts.
/// </summary>
/// <param name="key">Unique identifier for an object within a bucket.</param>
/// <param name="bucket">Existing Amazon S3 bucket.</param>
/// <param name="uploadId">Unique identifier for one multipart upload. Can be obtained by <see cref="M:CMS.AmazonStorage.S3MultiPartUploader.InitMultiPartUpload(System.String,System.String)" /> method.</param>
/// <param name="uploadedPartResponses">Responses from Amazon S3 received after uploading each part.</param>
/// <returns>Response from Amazon S3 storage after finishing the multipart upload.</returns>
public CompleteMultipartUploadResponse CompleteMultiPartUploadProcess(string key, string bucket, string uploadId, IEnumerable<UploadPartResponse> uploadedPartResponses)
{
    var completeRequest = new CompleteMultipartUploadRequest
    {
        BucketName = bucket,
        Key = key,
        UploadId = uploadId
    };

    // The ETag of every uploaded part must accompany the completion request.
    completeRequest.AddPartETags(uploadedPartResponses);

    return this.mS3Client.CompleteMultipartUpload(completeRequest);
}
// Tells S3 to assemble all previously uploaded parts into the final object.
internal void CompleteMultipartUpload()
{
    var request = new CompleteMultipartUploadRequest
    {
        BucketName = this.bucketName,
        Key = this.fileName,
        UploadId = this.initResponse.UploadId,
    };

    // Attach the ETags collected from each UploadPart response.
    request.AddPartETags(this.uploadResponses);

    this.client.CompleteMultipartUpload(request);
}
/// <summary>
/// Builds the CompleteMultipartUpload request for the transfer described by the
/// file-transporter request, attaching the collected part ETags and wiring up
/// the request event handler.
/// </summary>
private CompleteMultipartUploadRequest ConstructCompleteMultipartUploadRequest(InitiateMultipartUploadResponse initResponse)
{
    var request = new CompleteMultipartUploadRequest
    {
        BucketName = this._fileTransporterRequest.BucketName,
        Key = this._fileTransporterRequest.Key,
        UploadId = initResponse.UploadId
    };
    request.AddPartETags(this._uploadResponses);

    // Register the handler through the internal interface so it fires before the request is sent.
    ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(this.RequestEventHandler);

    return request;
}
/// <summary>
/// Builds the CompleteMultipartUpload request for the transfer described by the
/// file-transporter request, attaching the collected part ETags and subscribing
/// the request event handler.
/// </summary>
private CompleteMultipartUploadRequest ConstructCompleteMultipartUploadRequest(InitiateMultipartUploadResponse initResponse)
{
    var request = new CompleteMultipartUploadRequest
    {
        BucketName = this._fileTransporterRequest.BucketName,
        Key = this._fileTransporterRequest.Key,
        UploadId = initResponse.UploadId
    };
    request.AddPartETags(this._uploadResponses);

    // Subscribe so the handler fires before the request is sent.
    request.BeforeRequestEvent += this.RequestEventHandler;

    return request;
}
/// <summary>
/// Completes a multipart upload by sending S3 the ETag of every uploaded part.
/// </summary>
/// <param name="bucketName">Bucket that received the parts.</param>
/// <param name="key">Object key of the upload.</param>
/// <param name="uploadId">Identifier returned when the multipart upload was initiated.</param>
/// <param name="partETags">ETag/part-number pairs for all uploaded parts.</param>
/// <returns>The CompleteMultipartUpload response from S3.</returns>
private static async Task<CompleteMultipartUploadResponse> CompleteUploadPart(string bucketName, string key, string uploadId, List<PartETag> partETags)
{
    var request = new CompleteMultipartUploadRequest
    {
        BucketName = bucketName,
        Key = key,
        UploadId = uploadId
    };

    // AddPartETags accepts the whole collection; no need to add tags one at a time.
    request.AddPartETags(partETags);

    return await Sample.Client.CompleteMultipartUploadAsync(request);
}
/// <summary>
/// Waits for all outstanding part uploads, then completes the multipart upload.
/// </summary>
/// <returns>The version id of the newly created object.</returns>
public async Task<string> Complete()
{
    // All part uploads must have finished before S3 can assemble the object.
    var partResponses = await Task.WhenAll(_blocks).ConfigureAwait(false);

    var completeRequest = new CompleteMultipartUploadRequest
    {
        BucketName = _bucket,
        Key = _key,
        UploadId = await _uploadId.ConfigureAwait(false),
    };
    completeRequest.AddPartETags(partResponses);

    var completeResponse = await _client.CompleteMultipartUploadAsync(completeRequest, _ct).ConfigureAwait(false);
    return completeResponse.VersionId;
}
/// <summary>
/// Send a CompleteMultipartUploadRequest request and return the request that was sent.
/// The parts to assemble are discovered by listing the parts already uploaded for
/// <paramref name="uploadId"/>, so callers do not need to track ETags themselves.
/// </summary>
/// <param name="existingBucketName">Bucket that holds the uploaded parts.</param>
/// <param name="keyName">Key of the object being assembled.</param>
/// <param name="uploadId">Identifier of the multipart upload to complete.</param>
/// <param name="token">Token used to cancel the operation.</param>
/// <returns>The request that was sent, including the part ETags that were attached.</returns>
public async Task<CompleteMultipartUploadRequest> CompleteMultipartUploadAsync(string existingBucketName, string keyName, string uploadId, CancellationToken token)
{
    token.ThrowIfCancellationRequested();
    _log.Debug("Called CompleteMultipartUploadAsync with parameters keyName = \"" + keyName + "\" and uploadID = \"" + uploadId + "\".");
    try
    {
        CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest
        {
            BucketName = existingBucketName,
            Key = keyName,
            UploadId = uploadId,
        };

        // In any case request the list of parts so an ETag can be sent for each part.
        var uploadedParts = await this.ListPartsAsync(existingBucketName, keyName, uploadId, token).ConfigureAwait(false);
        List<PartETag> eTags = new List<PartETag>();
        foreach (var part in uploadedParts)
        {
            eTags.Add(new PartETag(part.PartNumber, part.ETag));
        }
        completeRequest.AddPartETags(eTags);

        token.ThrowIfCancellationRequested();
        // The response is not needed by callers; a failure surfaces as an exception.
        await s3Client.CompleteMultipartUploadAsync(completeRequest, token).ConfigureAwait(false);
        return completeRequest;
    }
    catch (Exception e)
    {
        // Cancellation is expected flow and should not be logged as an Amazon error.
        if (!(e is TaskCanceledException || e is OperationCanceledException))
        {
            string messagePart = " with parameters keyName = \"" + keyName + "\" and uploadID = \"" + uploadId + "\"";
            this.LogAmazonException(messagePart, e);
        }
        throw;
    }
}
/// <summary>
/// Copies an object within the bucket using the multipart copy API.
/// </summary>
/// <param name="sourceKey">Key of the object to copy.</param>
/// <param name="destinationKey">Key the copy will be stored under.</param>
/// <param name="objectSize">Size of the source object in bytes.</param>
/// <param name="initiateRequest">Pre-built request used to start the multipart upload.</param>
/// <returns>The response from completing the multipart upload.</returns>
private async Task<CompleteMultipartUploadResponse> MultipartCopy(string sourceKey, string destinationKey, long objectSize, InitiateMultipartUploadRequest initiateRequest)
{
    var partResponses = new List<CopyPartResponse>();
    var chunkSize = 5 * (long)Math.Pow(2, 20); // Part size is 5 MB.

    // Initiate the upload.
    var initResponse = await _s3Client.InitiateMultipartUploadAsync(initiateRequest);

    long offset = 0;
    int partNumber = 1;
    while (offset < objectSize)
    {
        var copyRequest = new CopyPartRequest
        {
            DestinationBucket = _bucket,
            DestinationKey = destinationKey,
            SourceBucket = _bucket,
            SourceKey = sourceKey,
            UploadId = initResponse.UploadId,
            FirstByte = offset,
            // Byte ranges are inclusive; clamp the final part to the last byte.
            LastByte = offset + chunkSize - 1 >= objectSize ? objectSize - 1 : offset + chunkSize - 1,
            PartNumber = partNumber,
        };
        partResponses.Add(await _s3Client.CopyPartAsync(copyRequest));
        offset += chunkSize;
        partNumber++;
    }

    // Set up to complete the copy.
    var completeRequest = new CompleteMultipartUploadRequest
    {
        BucketName = _bucket,
        Key = destinationKey,
        UploadId = initResponse.UploadId
    };
    completeRequest.AddPartETags(partResponses);

    // Complete the copy.
    return await _s3Client.CompleteMultipartUploadAsync(completeRequest);
}
/// <summary>
/// Builds the request that completes the multipart upload, first verifying that
/// every part created for this transfer has reported a response.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when some parts have not completed.</exception>
private CompleteMultipartUploadRequest ConstructCompleteMultipartUploadRequest(InitiateMultipartUploadResponse initResponse)
{
    // Completing with missing parts would produce a truncated object, so fail fast.
    if (this._uploadResponses.Count != this._totalNumberOfParts)
    {
        throw new InvalidOperationException($"Cannot complete multipart upload request. The total number of completed parts ({this._uploadResponses.Count}) does not equal the total number of parts created ({this._totalNumberOfParts}).");
    }

    var request = new CompleteMultipartUploadRequest
    {
        BucketName = this._fileTransporterRequest.BucketName,
        Key = this._fileTransporterRequest.Key,
        UploadId = initResponse.UploadId
    };
    request.AddPartETags(this._uploadResponses);

    // Register the handler through the internal interface so it fires before the request is sent.
    ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(this.RequestEventHandler);

    return request;
}
/// <summary>
/// Signal the upload for this uploadId and fileName is complete
/// </summary>
/// <param name="s3Client">S3 client object.</param>
/// <param name="uploadTarget">Upload target returned from the Upload REST API.</param>
/// <param name="fileName">Name of the file to be uploaded.</param>
/// <param name="uploadId">Upload ID returned from the S3 server.</param>
/// <param name="partResponses">List of upload part responses from the server.</param>
/// <returns>Response from S3 server.</returns>
public static CompleteMultipartUploadResponse CloseUpload(
    AmazonS3Client s3Client,
    string uploadTarget,
    string fileName,
    string uploadId,
    List<UploadPartResponse> partResponses)
{
    // The object key is derived from the upload target and the file name.
    var fileKey = GetFileKey(uploadTarget, fileName);

    var request = new CompleteMultipartUploadRequest
    {
        BucketName = Common.UploadBucketName,
        Key = fileKey,
        UploadId = uploadId,
    };
    request.AddPartETags(partResponses);

    return s3Client.CompleteMultipartUpload(request);
}
/// <summary>
/// Signal the upload for this uploadId and fileName is complete
/// </summary>
/// <param name="s3Client">S3 client object.</param>
/// <param name="uploadTarget">Upload target returned from the Upload REST API.</param>
/// <param name="fileName">Name of the file to be uploaded.</param>
/// <param name="uploadId">Upload ID returned from the S3 server.</param>
/// <param name="partResponses">List of upload part responses from the server.</param>
/// <returns>Response from S3 server.</returns>
/// <exception cref="InvalidDataException">Thrown when any argument is null or no parts were uploaded.</exception>
public static CompleteMultipartUploadResponse CloseUpload(
    AmazonS3Client s3Client,
    string uploadTarget,
    string fileName,
    string uploadId,
    List<UploadPartResponse> partResponses)
{
    // Validate inputs up front. Messages are included so a failure is diagnosable;
    // the exception type is kept for compatibility with existing callers.
    if (s3Client == null || uploadTarget == null || fileName == null || uploadId == null)
    {
        throw new InvalidDataException("CloseUpload requires non-null s3Client, uploadTarget, fileName and uploadId.");
    }
    if (partResponses == null || partResponses.Count <= 0)
    {
        throw new InvalidDataException("CloseUpload requires at least one uploaded part response.");
    }

    string fileKey = GetFileKey(uploadTarget, fileName);
    CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest
    {
        BucketName = Common.UploadBucketName,
        Key = fileKey,
        UploadId = uploadId,
    };
    completeRequest.AddPartETags(partResponses);

    return s3Client.CompleteMultipartUpload(completeRequest);
}
/// <summary>
/// Uploads <paramref name="sourceStream"/> to S3 as a multipart upload, optionally
/// validating the stream against <paramref name="sourceMD5"/>. On any failure the
/// multipart upload is aborted and the exception is rethrown.
/// </summary>
/// <param name="sourceStream">Stream with the data to upload.</param>
/// <param name="sourceMD5">Expected MD5 of the whole stream; null/empty disables validation.</param>
/// <param name="bucketName">Target bucket.</param>
/// <param name="keyName">Target object key.</param>
/// <param name="cancellationToken">Token used to cancel the upload.</param>
private async Task MultiPartUploadAsync(Stream sourceStream, string sourceMD5, string bucketName, string keyName, CancellationToken cancellationToken)
{
    // Create list to store upload part responses.
    var uploadResponses = new List<UploadPartResponse>();

    // Setup information required to initiate the multipart upload.
    var initiateRequest = new InitiateMultipartUploadRequest
    {
        BucketName = bucketName,
        Key = keyName
    };
    var validateMD5 = !string.IsNullOrEmpty(sourceMD5);
    if (validateMD5)
    {
        initiateRequest.Metadata.Add("Content-MD5", sourceMD5);
    }

    // Initiate the upload.
    var initResponse = await _amazonS3.InitiateMultipartUploadAsync(initiateRequest, cancellationToken);
    try
    {
        var blobHasher = validateMD5 ? CopyUtils.GetMD5Hasher() : null;
        var partSize = 10 * 1024 * 1024; // todo: config
        // S3 part numbers are 1-based; the counter is incremented before each upload,
        // so start at 0 so the first part gets number 1 (it previously started at 2).
        var partNo = 0;
        await CopyUtils.CopyAsync(
            (buffer, ct) => CopyUtils.ReadStreamMaxBufferAsync(buffer, sourceStream, ct),
            async (buffer, count, cancellationToken2) =>
            {
                var blockMD5Hash = CopyUtils.GetMD5HashString(buffer, 0, count);
                if (validateMD5)
                {
                    CopyUtils.AppendMDHasherData(blobHasher, buffer, 0, count);
                }
                using (var ms = new MemoryStream(buffer, 0, count))
                {
                    ms.Position = 0;
                    partNo++;
                    var uploadRequest = new UploadPartRequest
                    {
                        BucketName = bucketName,
                        Key = keyName,
                        UploadId = initResponse.UploadId,
                        PartNumber = partNo,
                        PartSize = count,
                        InputStream = ms,
                        MD5Digest = blockMD5Hash
                    };
                    // Upload a part and add the response to our list.
                    var uploadResponse = await _amazonS3.UploadPartAsync(uploadRequest, cancellationToken);
                    uploadResponses.Add(uploadResponse);
                }
            },
            partSize,
            cancellationToken
        );

        if (validateMD5)
        {
            var blobHash = CopyUtils.GetMD5HashString(blobHasher);
            if ((!string.IsNullOrEmpty(sourceMD5)) && (sourceMD5 != blobHash))
            {
                throw new Exception("Invalid destination MD5");
            }
        }

        // Setup to complete the upload.
        var completeRequest = new CompleteMultipartUploadRequest
        {
            BucketName = bucketName,
            Key = keyName,
            UploadId = initResponse.UploadId
        };
        completeRequest.AddPartETags(uploadResponses);

        // Complete the upload.
        await _amazonS3.CompleteMultipartUploadAsync(completeRequest, cancellationToken);
    }
    catch (Exception exception)
    {
        // Fixed message: the placeholder was "{ 0}" (never substituted), and any
        // exception type can land here, not only AmazonS3Exception.
        Console.WriteLine("An exception was thrown: {0}", exception.Message);

        // Abort the upload so the already-uploaded parts do not linger in S3.
        var abortMPURequest = new AbortMultipartUploadRequest
        {
            BucketName = bucketName,
            Key = keyName,
            UploadId = initResponse.UploadId
        };
        await _amazonS3.AbortMultipartUploadAsync(abortMPURequest, cancellationToken);

        // Propagate the failure so callers do not assume the upload succeeded.
        throw;
    }
}
/// <summary>
/// Uploads the configured file to S3 as a multipart upload and reports the elapsed time.
/// On failure the multipart upload is aborted so no orphaned parts remain.
/// </summary>
public static async Task PutObjectAsMultiPartAsync()
{
    Stopwatch sw = Stopwatch.StartNew();

    // SECURITY: never hard-code access keys in source (the previous version embedded a
    // real key pair). Credentials are resolved from the default chain instead
    // (environment variables, shared credentials file, instance profile, ...).
    s3Client = new AmazonS3Client(Amazon.RegionEndpoint.APSoutheast1);

    List<UploadPartResponse> uploadResponses = new List<UploadPartResponse>(); // List to store upload part responses.

    // 1. Initialize.
    var initiateRequest = new InitiateMultipartUploadRequest
    {
        BucketName = bucketName,
        Key = keyName
    };
    var initResponse = await s3Client.InitiateMultipartUploadAsync(initiateRequest);

    // 2. Upload Parts.
    var partNumber = 5;
    var contentLength = new FileInfo(filePath).Length;
    var partSize = contentLength / partNumber;
    long filePosition = 0;
    try
    {
        for (var i = 1; filePosition < contentLength; ++i)
        {
            // Create request to upload a part.
            Console.WriteLine(filePosition);
            var uploadRequest = new UploadPartRequest
            {
                BucketName = bucketName,
                Key = keyName,
                UploadId = initResponse.UploadId,
                PartNumber = i,
                // contentLength / partNumber truncates, so the final part can be
                // smaller than partSize (or there may be a short extra part);
                // clamp so we never ask the SDK to read past end-of-file.
                PartSize = Math.Min(partSize, contentLength - filePosition),
                FilePosition = filePosition,
                FilePath = filePath
            };

            // Upload part and add response to our list.
            uploadResponses.Add(await s3Client.UploadPartAsync(uploadRequest));
            filePosition += partSize;
        }

        // Step 3: complete.
        var completeRequest = new CompleteMultipartUploadRequest
        {
            BucketName = bucketName,
            Key = keyName,
            UploadId = initResponse.UploadId,
        };

        // add ETags for uploaded files
        completeRequest.AddPartETags(uploadResponses);
        var completeUploadResponse = await s3Client.CompleteMultipartUploadAsync(completeRequest);

        sw.Stop();
        Console.WriteLine("Upload completed : {0}", sw.Elapsed.TotalMilliseconds);
    }
    catch (Exception exception)
    {
        Console.WriteLine("Exception occurred: {0}", exception.ToString());
        var abortMPURequest = new AbortMultipartUploadRequest
        {
            BucketName = bucketName,
            Key = keyName,
            UploadId = initResponse.UploadId
        };
        await s3Client.AbortMultipartUploadAsync(abortMPURequest);
    }
}
/// <summary>
/// Copies large file as chunks from Azure BLOB to Amazon S3.
/// Downloads the blob in <c>PartSize</c> ranges and uploads each range as a multipart
/// upload part. On failure the multipart upload is aborted and the exception rethrown.
/// </summary>
/// <returns></returns>
public async Task CopyLargeFileFromAzureBlobToAwsS3()
{
    AmazonS3Client s3Client = new AmazonS3Client(AwsAccessKeyId, AwsSecretKey, Amazon.RegionEndpoint.APSouth1);

    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(StorageAccount); // Create Storage account reference.
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();            // Create the blob client.
    CloudBlobContainer container = blobClient.GetContainerReference(ContainerName); // Retrieve reference to a container.
    // Use the async Azure/AWS APIs throughout — the method previously mixed
    // blocking calls (CreateIfNotExists, FetchAttributes, UploadPart) into async code.
    await container.CreateIfNotExistsAsync().ConfigureAwait(false);
    CloudBlockBlob blob = container.GetBlockBlobReference(BlobFileName);            // Create Blob reference.
    await blob.FetchAttributesAsync().ConfigureAwait(false);                        // Populate blob properties to get the file length.

    var remainingBytes = blob.Properties.Length;
    long readPosition = 0; // Offset / position from where to start reading from BLOB.

    InitiateMultipartUploadRequest initiateMultipartUploadRequest = new InitiateMultipartUploadRequest
    {
        BucketName = AwsS3BucketName,
        Key = TargetFileName
    };

    // Will use UploadId from this response.
    InitiateMultipartUploadResponse initiateMultipartUploadResponse = await s3Client.InitiateMultipartUploadAsync(initiateMultipartUploadRequest).ConfigureAwait(false);
    List<UploadPartResponse> uploadPartResponses = new List<UploadPartResponse>();
    Stopwatch stopwatch = Stopwatch.StartNew();
    try
    {
        int partCounter = 0; // To increment on each read of parts and use it as part number.
        while (remainingBytes > 0)
        {
            // Determine the size when final block reached as it might be less than Part size.
            // Will be PartSize except final block.
            long bytesToCopy = Math.Min(PartSize, remainingBytes);
            using (MemoryStream memoryStream = new MemoryStream())
            {
                // Download this part's byte range from the BLOB.
                await blob.DownloadRangeToStreamAsync(memoryStream, readPosition, bytesToCopy).ConfigureAwait(false);
                memoryStream.Position = 0;
                partCounter++;

                UploadPartRequest uploadRequest = new UploadPartRequest
                {
                    BucketName = AwsS3BucketName,
                    Key = TargetFileName,
                    UploadId = initiateMultipartUploadResponse.UploadId,
                    PartNumber = partCounter,
                    PartSize = bytesToCopy,
                    InputStream = memoryStream
                };
                UploadPartResponse uploadPartResponse = await s3Client.UploadPartAsync(uploadRequest).ConfigureAwait(false);
                uploadPartResponses.Add(uploadPartResponse);

                remainingBytes -= bytesToCopy;
                readPosition += bytesToCopy;
                this.logger.WriteLine($"Uploaded part with part number {partCounter}, size {bytesToCopy}bytes and remaining {remainingBytes}bytes to read.");
            }
        }

        CompleteMultipartUploadRequest completeMultipartUploadRequest = new CompleteMultipartUploadRequest
        {
            BucketName = AwsS3BucketName,
            Key = TargetFileName,
            UploadId = initiateMultipartUploadResponse.UploadId
        };
        completeMultipartUploadRequest.AddPartETags(uploadPartResponses);
        CompleteMultipartUploadResponse completeMultipartUploadResponse = await s3Client.CompleteMultipartUploadAsync(completeMultipartUploadRequest).ConfigureAwait(false);
    }
    catch (Exception exception)
    {
        this.logger.WriteLine($"Exception : {exception.Message}");
        AbortMultipartUploadRequest abortMultipartUploadRequest = new AbortMultipartUploadRequest
        {
            BucketName = AwsS3BucketName,
            Key = TargetFileName,
            UploadId = initiateMultipartUploadResponse.UploadId
        };
        await s3Client.AbortMultipartUploadAsync(abortMultipartUploadRequest).ConfigureAwait(false);
        // Rethrow so callers are not left believing the copy succeeded.
        throw;
    }
    finally
    {
        stopwatch.Stop();
        this.logger.WriteLine($"Execution time in mins: {stopwatch.Elapsed.TotalMinutes}");
    }
}
/// <summary>
/// Uploads a file with multipart upload while throttling bandwidth by delaying
/// <paramref name="sleepMiliSec"/> milliseconds before each part.
/// </summary>
/// <param name="bucketName">Target bucket.</param>
/// <param name="localFullFilename">Full path of the local file to upload.</param>
/// <param name="key">Target object key.</param>
/// <param name="token">Token used to cancel the upload.</param>
/// <param name="sleepMiliSec">Delay inserted before each part upload.</param>
/// <param name="publicReadTrueOrFalse">When true the object is created with a public-read ACL.</param>
/// <returns>True when the upload completed; false when it was cancelled.</returns>
private async Task<bool> UploadObjectBandWidthThrottlingAsync(string bucketName, string localFullFilename, string key, CancellationToken token, int sleepMiliSec, bool publicReadTrueOrFalse = false)
{
    bool bReturn = true;
    List<UploadPartResponse> uploadResponses = new List<UploadPartResponse>();
    InitiateMultipartUploadRequest initiateRequest = new InitiateMultipartUploadRequest
    {
        BucketName = bucketName,
        Key = key
    };
    if (publicReadTrueOrFalse)
    {
        initiateRequest.CannedACL = S3CannedACL.PublicRead;
    }

    IAmazonS3 s3Client = new AmazonS3Client(AccessKey, SecretKey, new AmazonS3Config
    {
        ServiceURL = ServiceUrl,
        BufferSize = 64 * (int)Math.Pow(2, 10),
        ProgressUpdateInterval = this.ProgressUpdateInterval,
        Timeout = new TimeSpan(1, 0, 0, 0, 0)
    });
    InitiateMultipartUploadResponse initResponse = await s3Client.InitiateMultipartUploadAsync(initiateRequest, token);
    long contentLength = new FileInfo(localFullFilename).Length;
    ContentTotalBytes = contentLength;
    long partSize = 5 * (long)Math.Pow(2, 20);

    try
    {
        long filePosition = 0;
        for (int i = 1; filePosition < contentLength; i++)
        {
            if (token.IsCancellationRequested)
            {
                bReturn = false;
                break;
            }

            // Throttle with an asynchronous delay instead of burning a thread-pool
            // thread via Task.Run(() => Thread.Sleep(...)).
            await Task.Delay(sleepMiliSec);

            UploadPartRequest uploadRequest = new UploadPartRequest
            {
                BucketName = bucketName,
                Key = key,
                UploadId = initResponse.UploadId,
                PartNumber = i,
                // Clamp the final part so the SDK is never asked to read past end-of-file.
                PartSize = Math.Min(partSize, contentLength - filePosition),
                FilePosition = filePosition,
                FilePath = localFullFilename
            };
            uploadRequest.StreamTransferProgress += ProgressEventCallback;
            uploadResponses.Add(await s3Client.UploadPartAsync(uploadRequest, token));
            filePosition += partSize;
            CurrentTransferredBytes = filePosition;
        }

        if (bReturn)
        {
            CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest
            {
                BucketName = bucketName,
                Key = key,
                UploadId = initResponse.UploadId
            };
            completeRequest.AddPartETags(uploadResponses);
            CompleteMultipartUploadResponse completeUploadResponse = await s3Client.CompleteMultipartUploadAsync(completeRequest, token);
        }
        else
        {
            // Cancelled mid-transfer: completing now would publish a truncated object,
            // so abort the multipart upload instead.
            await s3Client.AbortMultipartUploadAsync(new AbortMultipartUploadRequest
            {
                BucketName = bucketName,
                Key = key,
                UploadId = initResponse.UploadId
            });
        }
    }
    catch (Exception)
    {
        AbortMultipartUploadRequest abortMPURequest = new AbortMultipartUploadRequest
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        await s3Client.AbortMultipartUploadAsync(abortMPURequest);
        throw;
    }
    finally
    {
        s3Client.Dispose();
    }
    return bReturn;
}
/// <summary>
/// Copies an SSE-C encrypted object to a new key using multipart copy, supplying the
/// customer-provided key for both the source read and the target write.
/// On failure the multipart upload is aborted.
/// </summary>
/// <param name="s3Client">Client used for all S3 calls.</param>
/// <param name="base64Key">Base64-encoded customer-provided encryption key.</param>
private static async Task CopyObjectAsync(IAmazonS3 s3Client, string base64Key)
{
    List<CopyPartResponse> uploadResponses = new List<CopyPartResponse>();

    // 1. Initialize.
    InitiateMultipartUploadRequest initiateRequest = new InitiateMultipartUploadRequest
    {
        BucketName = existingBucketName,
        Key = targetKeyName,
        ServerSideEncryptionCustomerMethod = ServerSideEncryptionCustomerMethod.AES256,
        ServerSideEncryptionCustomerProvidedKey = base64Key,
    };
    InitiateMultipartUploadResponse initResponse = await s3Client.InitiateMultipartUploadAsync(initiateRequest);

    // 2. Upload Parts.
    long partSize = 5 * (long)Math.Pow(2, 20); // 5 MB
    try
    {
        // First find source object size. Because object is stored encrypted with
        // customer provided key you need to provide encryption information in your request.
        GetObjectMetadataRequest getObjectMetadataRequest = new GetObjectMetadataRequest()
        {
            BucketName = existingBucketName,
            Key = sourceKeyName,
            ServerSideEncryptionCustomerMethod = ServerSideEncryptionCustomerMethod.AES256,
            ServerSideEncryptionCustomerProvidedKey = base64Key // source object encryption key
        };
        GetObjectMetadataResponse getObjectMetadataResponse = await s3Client.GetObjectMetadataAsync(getObjectMetadataRequest);

        long filePosition = 0;
        for (int i = 1; filePosition < getObjectMetadataResponse.ContentLength; i++)
        {
            CopyPartRequest copyPartRequest = new CopyPartRequest
            {
                UploadId = initResponse.UploadId,
                // Source.
                SourceBucket = existingBucketName,
                SourceKey = sourceKeyName,
                // Source object is stored using SSE-C. Provide encryption information.
                CopySourceServerSideEncryptionCustomerMethod = ServerSideEncryptionCustomerMethod.AES256,
                CopySourceServerSideEncryptionCustomerProvidedKey = base64Key,
                FirstByte = filePosition,
                // Byte ranges are INCLUSIVE: part i covers
                // [filePosition, filePosition + partSize - 1], clamped to the object size.
                // (The original omitted the "- 1", so consecutive parts overlapped by one byte.)
                LastByte = Math.Min(filePosition + partSize, getObjectMetadataResponse.ContentLength) - 1,
                // Target.
                DestinationBucket = existingBucketName,
                DestinationKey = targetKeyName,
                PartNumber = i,
                // Encryption information for the target object.
                ServerSideEncryptionCustomerMethod = ServerSideEncryptionCustomerMethod.AES256,
                ServerSideEncryptionCustomerProvidedKey = base64Key
            };
            uploadResponses.Add(await s3Client.CopyPartAsync(copyPartRequest));
            filePosition += partSize;
        }

        // Step 3: complete.
        CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest
        {
            BucketName = existingBucketName,
            Key = targetKeyName,
            UploadId = initResponse.UploadId,
        };
        completeRequest.AddPartETags(uploadResponses);
        CompleteMultipartUploadResponse completeUploadResponse = await s3Client.CompleteMultipartUploadAsync(completeRequest);
    }
    catch (Exception exception)
    {
        Console.WriteLine("Exception occurred: {0}", exception.Message);
        AbortMultipartUploadRequest abortMPURequest = new AbortMultipartUploadRequest
        {
            BucketName = existingBucketName,
            Key = targetKeyName,
            UploadId = initResponse.UploadId
        };
        // Await the async abort instead of calling the blocking overload from async code.
        await s3Client.AbortMultipartUploadAsync(abortMPURequest);
    }
}
/// <summary>
/// Uploads a file as a multipart upload, running up to <c>_parallelParts</c> part
/// uploads concurrently. On failure the multipart upload is aborted and the
/// exception is rethrown.
/// </summary>
/// <param name="fileInfo">File to upload.</param>
/// <param name="objectKey">Destination object key.</param>
/// <param name="partSize">Size of each upload part.</param>
private async Task MultipartUploadObject(FileInfo fileInfo, string objectKey, PartSize partSize)
{
    if (fileInfo is null)
    {
        throw new ArgumentNullException(nameof(fileInfo));
    }
    if (partSize is null)
    {
        throw new ArgumentNullException(nameof(partSize));
    }

    var multipartUploadRequest = new InitiateMultipartUploadRequest()
    {
        BucketName = _bucketName,
        Key = objectKey,
    };
    await Initialize().ConfigureAwait(false);
    var multipartUploadResponse = await Client.InitiateMultipartUploadAsync(multipartUploadRequest).ConfigureAwait(false);

    // Size of the part built each iteration; only the final part may be smaller.
    var currentPartSize = (fileInfo.Length > partSize) ? partSize : (int)fileInfo.Length;
    try
    {
        var taskList = new List<Task<UploadPartResponse>>();
        var partResponses = new List<UploadPartResponse>();
        for (var i = 0; partSize * i < fileInfo.Length; i++)
        {
            // Shrink the size BEFORE building the request for the final, partially
            // filled part. (The original adjusted it after the request was built,
            // so the last part claimed more bytes than remained in the file.)
            if ((i + 1) * partSize > fileInfo.Length)
            {
                currentPartSize = (int)fileInfo.Length % partSize;
            }

            var upload = new UploadPartRequest()
            {
                BucketName = _bucketName,
                Key = objectKey,
                UploadId = multipartUploadResponse.UploadId,
                PartNumber = i + 1,
                PartSize = currentPartSize,
                FilePosition = partSize * i,
                FilePath = fileInfo.FullName,
            };

            // Throttle: when the window is full, wait for one in-flight upload and
            // harvest it, then always start the current part. (The original skipped
            // starting the current part whenever the window was full, and removed
            // items from taskList while enumerating it, which throws.)
            if (taskList.Count >= _parallelParts)
            {
                var finished = await Task.WhenAny(taskList).ConfigureAwait(false);
                taskList.Remove(finished);
                partResponses.Add(await finished.ConfigureAwait(false));
            }
            taskList.Add(Client.UploadPartAsync(upload));
        }

        // Drain the remaining in-flight uploads.
        foreach (var task in taskList)
        {
            partResponses.Add(await task.ConfigureAwait(false));
        }

        var completeRequest = new CompleteMultipartUploadRequest
        {
            BucketName = _bucketName,
            Key = objectKey,
            UploadId = multipartUploadResponse.UploadId,
        };
        completeRequest.AddPartETags(partResponses);
        var completeUploadResponse = await Client.CompleteMultipartUploadAsync(completeRequest).ConfigureAwait(false);
    }
    catch
    {
        var abortMultipartUploadRequest = new AbortMultipartUploadRequest
        {
            BucketName = _bucketName,
            Key = objectKey,
            UploadId = multipartUploadResponse.UploadId,
        };
        await Client.AbortMultipartUploadAsync(abortMultipartUploadRequest).ConfigureAwait(false);
        throw;
    }
}
/// <summary>
/// End-to-end async test: uploads a generated 15 MB file in three multipart parts
/// through the encryption client, verifies ListParts/CompleteMultipartUpload
/// responses, then downloads the object with the decryption client and checks it
/// matches the original file byte-for-byte. Temp files are always cleaned up.
/// </summary>
/// <param name="s3EncryptionClient">Client used for the upload side.</param>
/// <param name="s3DecryptionClient">Client used to read the object back.</param>
/// <param name="bucketName">Bucket used for the round trip.</param>
public static async Task MultipartEncryptionTestAsync(AmazonS3Client s3EncryptionClient, AmazonS3Client s3DecryptionClient, string bucketName)
{
    var filePath = Path.GetTempFileName();
    var retrievedFilepath = Path.GetTempFileName();
    var totalSize = MegaBytesSize * 15;
    UtilityMethods.GenerateFile(filePath, totalSize);
    var key = Guid.NewGuid().ToString();
    Stream inputStream = File.OpenRead(filePath);
    try
    {
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            StorageClass = S3StorageClass.OneZoneInfrequentAccess,
            ContentType = "text/html",
        };
        InitiateMultipartUploadResponse initResponse = await s3EncryptionClient.InitiateMultipartUploadAsync(initRequest).ConfigureAwait(false);

        // Upload part 1
        // The same input stream is reused; each UploadPart reads PartSize bytes from it.
        UploadPartRequest uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MegaBytesSize,
            InputStream = inputStream,
        };
        UploadPartResponse up1Response = await s3EncryptionClient.UploadPartAsync(uploadRequest).ConfigureAwait(false);

        // Upload part 2
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MegaBytesSize,
            InputStream = inputStream
        };
        UploadPartResponse up2Response = await s3EncryptionClient.UploadPartAsync(uploadRequest).ConfigureAwait(false);

        // Upload part 3
        // No PartSize: IsLastPart = true makes the part consume the rest of the stream.
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream,
            IsLastPart = true
        };
        UploadPartResponse up3Response = await s3EncryptionClient.UploadPartAsync(uploadRequest).ConfigureAwait(false);

        // ListParts should report all three parts with the ETags we got back.
        ListPartsRequest listPartRequest = new ListPartsRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse = await s3EncryptionClient.ListPartsAsync(listPartRequest).ConfigureAwait(false);
        Assert.Equal(3, listPartResponse.Parts.Count);
        Assert.Equal(up1Response.PartNumber, listPartResponse.Parts[0].PartNumber);
        Assert.Equal(up1Response.ETag, listPartResponse.Parts[0].ETag);
        Assert.Equal(up2Response.PartNumber, listPartResponse.Parts[1].PartNumber);
        Assert.Equal(up2Response.ETag, listPartResponse.Parts[1].ETag);
        Assert.Equal(up3Response.PartNumber, listPartResponse.Parts[2].PartNumber);
        Assert.Equal(up3Response.ETag, listPartResponse.Parts[2].ETag);

        // Pagination: restricting MaxParts to 1 should return a single part.
        listPartRequest.MaxParts = 1;
        listPartResponse = await s3EncryptionClient.ListPartsAsync(listPartRequest).ConfigureAwait(false);
        Assert.Single(listPartResponse.Parts);

        // Complete the response
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        compRequest.AddPartETags(up1Response, up2Response, up3Response);
        CompleteMultipartUploadResponse compResponse = await s3EncryptionClient.CompleteMultipartUploadAsync(compRequest).ConfigureAwait(false);
        Assert.Equal(bucketName, compResponse.BucketName);
        Assert.NotNull(compResponse.ETag);
        Assert.Equal(key, compResponse.Key);
        Assert.NotNull(compResponse.Location);

        // Get the file back from S3 and make sure it is still the same.
        GetObjectRequest getRequest = new GetObjectRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectResponse getResponse = await s3DecryptionClient.GetObjectAsync(getRequest).ConfigureAwait(false);
        await getResponse.WriteResponseStreamToFileAsync(retrievedFilepath, false, CancellationToken.None);
        UtilityMethods.CompareFiles(filePath, retrievedFilepath);

        // Metadata set at initiation must survive the multipart round trip.
        GetObjectMetadataRequest metaDataRequest = new GetObjectMetadataRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectMetadataResponse metaDataResponse = await s3DecryptionClient.GetObjectMetadataAsync(metaDataRequest).ConfigureAwait(false);
        Assert.Equal("text/html", metaDataResponse.Headers.ContentType);
    }
    finally
    {
        // Always release the stream and delete the temp files, pass or fail.
        inputStream.Dispose();
        if (File.Exists(filePath))
        {
            File.Delete(filePath);
        }
        if (File.Exists(retrievedFilepath))
        {
            File.Delete(retrievedFilepath);
        }
    }
}
/// <summary>
/// End-to-end synchronous multipart round trip through an S3 encryption client:
/// generates a 15 MB temp file, uploads it in three parts, verifies ListParts,
/// completes the upload, downloads the object with <paramref name="s3DecryptionClient"/>
/// and compares it byte-for-byte with the original.
/// </summary>
/// <param name="s3EncryptionClient">Client used to initiate, upload and complete the multipart upload.</param>
/// <param name="s3DecryptionClient">Client used to read the object back for verification.</param>
/// <param name="bucketName">Existing bucket that receives the test object.</param>
public static void MultipartEncryptionTest(AmazonS3Client s3EncryptionClient, IAmazonS3 s3DecryptionClient, string bucketName)
{
    var guid = Guid.NewGuid();
    var filePath = Path.Combine(Path.GetTempPath(), $"multi-{guid}.txt");
    var retrievedFilepath = Path.Combine(Path.GetTempPath(), $"retrieved-{guid}.txt");
    var totalSize = MegaByteSize * 15;
    UtilityMethods.GenerateFile(filePath, totalSize);
    var key = $"key-{guid}";

    // All three UploadPart calls read this one stream sequentially, so the
    // order of the part uploads below is significant.
    Stream inputStream = File.OpenRead(filePath);
    try
    {
        var initRequest = new InitiateMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            StorageClass = S3StorageClass.OneZoneInfrequentAccess,
            ContentType = "text/html",
        };
        var initResponse = s3EncryptionClient.InitiateMultipartUpload(initRequest);

        // Upload part 1 (first 5 MB of the stream).
        var uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MegaByteSize,
            InputStream = inputStream
        };
        var up1Response = s3EncryptionClient.UploadPart(uploadRequest);

        // Upload part 2 (next 5 MB).
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MegaByteSize,
            InputStream = inputStream
        };
        var up2Response = s3EncryptionClient.UploadPart(uploadRequest);

        // Upload part 3: no PartSize, so the rest of the stream is sent.
        // IsLastPart marks the final part — presumably required so the
        // encryption client can finalize the ciphertext; confirm against the SDK docs.
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream,
            IsLastPart = true
        };
        var up3Response = s3EncryptionClient.UploadPart(uploadRequest);

        // The service must report all three parts with matching ETags.
        var listPartRequest = new ListPartsRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        var listPartResponse = s3EncryptionClient.ListParts(listPartRequest);
        Assert.Equal(3, listPartResponse.Parts.Count);
        Assert.Equal(up1Response.PartNumber, listPartResponse.Parts[0].PartNumber);
        Assert.Equal(up1Response.ETag, listPartResponse.Parts[0].ETag);
        Assert.Equal(up2Response.PartNumber, listPartResponse.Parts[1].PartNumber);
        Assert.Equal(up2Response.ETag, listPartResponse.Parts[1].ETag);
        Assert.Equal(up3Response.PartNumber, listPartResponse.Parts[2].PartNumber);
        Assert.Equal(up3Response.ETag, listPartResponse.Parts[2].ETag);

        // MaxParts caps the ListParts page size.
        listPartRequest.MaxParts = 1;
        listPartResponse = s3EncryptionClient.ListParts(listPartRequest);
        Assert.Equal(1, listPartResponse.Parts.Count);

        // Complete the multipart upload from the three part ETags.
        var compRequest = new CompleteMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        compRequest.AddPartETags(up1Response, up2Response, up3Response);
        var compResponse = s3EncryptionClient.CompleteMultipartUpload(compRequest);
        Assert.Equal(bucketName, compResponse.BucketName);
        Assert.NotNull(compResponse.ETag);
        Assert.Equal(key, compResponse.Key);
        Assert.NotNull(compResponse.Location);

        // Get the file back from S3 and make sure it is still the same.
        var getRequest = new GetObjectRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        var getResponse = s3DecryptionClient.GetObject(getRequest);
        getResponse.WriteResponseStreamToFile(retrievedFilepath);
        UtilityMethods.CompareFiles(filePath, retrievedFilepath);

        // The content type set at initiation must survive the round trip.
        var metaDataRequest = new GetObjectMetadataRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        var metaDataResponse = s3DecryptionClient.GetObjectMetadata(metaDataRequest);
        Assert.Equal("text/html", metaDataResponse.Headers.ContentType);
    }
    finally
    {
        // Always release the local stream and remove both temp files.
        inputStream.Close();
        if (File.Exists(filePath))
        {
            File.Delete(filePath);
        }
        if (File.Exists(retrievedFilepath))
        {
            File.Delete(retrievedFilepath);
        }
    }
#if ASYNC_AWAIT
    // Run the async version of the same test.
    WaitForAsyncTask(MultipartEncryptionTestAsync(s3EncryptionClient, s3DecryptionClient, bucketName));
#elif AWS_APM_API
    // Run the APM (Begin/End) version of the same test.
    MultipartEncryptionTestAPM(s3EncryptionClient, s3DecryptionClient, bucketName);
#endif
}
/// <summary>
/// Same multipart round trip as <c>MultipartEncryptionTest</c>, but driven through
/// the legacy APM (Begin*/End*) API pairs. For clients identified by
/// <c>IsKMSEncryptionClient</c>, initiate and get fall back to the synchronous calls —
/// presumably the APM path is unsupported for KMS-based clients; confirm.
/// </summary>
/// <param name="s3EncryptionClient">Client used to initiate, upload and complete the multipart upload.</param>
/// <param name="s3DecryptionClient">Client used to read the object back for verification.</param>
/// <param name="bucketName">Existing bucket that receives the test object.</param>
public static void MultipartEncryptionTestAPM(IAmazonS3 s3EncryptionClient, IAmazonS3 s3DecryptionClient, string bucketName)
{
    var guid = Guid.NewGuid();
    var filePath = Path.Combine(Path.GetTempPath(), $"multi-{guid}.txt");
    var retrievedFilepath = Path.Combine(Path.GetTempPath(), $"retrieved-{guid}.txt");
    var totalSize = MegaByteSize * 15;
    UtilityMethods.GenerateFile(filePath, totalSize);
    string key = $"key-{guid}";

    // One stream feeds all three part uploads sequentially; order matters.
    Stream inputStream = File.OpenRead(filePath);
    try
    {
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            StorageClass = S3StorageClass.OneZoneInfrequentAccess,
            ContentType = "text/html"
        };
        InitiateMultipartUploadResponse initResponse = null;
        if (IsKMSEncryptionClient(s3EncryptionClient))
        {
            // KMS clients use the synchronous call instead of Begin/End.
            initResponse = s3EncryptionClient.InitiateMultipartUpload(initRequest);
        }
        else
        {
            initResponse = s3EncryptionClient.EndInitiateMultipartUpload(
                s3EncryptionClient.BeginInitiateMultipartUpload(initRequest, null, null));
        }

        // Upload part 1 (first 5 MB).
        UploadPartRequest uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MegaByteSize,
            InputStream = inputStream,
        };
        UploadPartResponse up1Response = s3EncryptionClient.EndUploadPart(
            s3EncryptionClient.BeginUploadPart(uploadRequest, null, null));

        // Upload part 2 (next 5 MB).
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MegaByteSize,
            InputStream = inputStream,
        };
        UploadPartResponse up2Response = s3EncryptionClient.EndUploadPart(
            s3EncryptionClient.BeginUploadPart(uploadRequest, null, null));

        // Upload part 3: remainder of the stream, flagged as the last part.
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream,
            IsLastPart = true
        };
        UploadPartResponse up3Response = s3EncryptionClient.EndUploadPart(
            s3EncryptionClient.BeginUploadPart(uploadRequest,
                                               null, null));

        // Verify the part listing matches what was uploaded.
        ListPartsRequest listPartRequest = new ListPartsRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse = s3EncryptionClient.EndListParts(
            s3EncryptionClient.BeginListParts(listPartRequest, null, null));
        Assert.Equal(3, listPartResponse.Parts.Count);
        Assert.Equal(up1Response.PartNumber, listPartResponse.Parts[0].PartNumber);
        Assert.Equal(up1Response.ETag, listPartResponse.Parts[0].ETag);
        Assert.Equal(up2Response.PartNumber, listPartResponse.Parts[1].PartNumber);
        Assert.Equal(up2Response.ETag, listPartResponse.Parts[1].ETag);
        Assert.Equal(up3Response.PartNumber, listPartResponse.Parts[2].PartNumber);
        Assert.Equal(up3Response.ETag, listPartResponse.Parts[2].ETag);

        // MaxParts caps the ListParts page size.
        listPartRequest.MaxParts = 1;
        listPartResponse = s3EncryptionClient.EndListParts(
            s3EncryptionClient.BeginListParts(listPartRequest, null, null));
        Assert.Equal(1, listPartResponse.Parts.Count);

        // Complete the multipart upload.
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        compRequest.AddPartETags(up1Response, up2Response, up3Response);
        CompleteMultipartUploadResponse compResponse = s3EncryptionClient.EndCompleteMultipartUpload(
            s3EncryptionClient.BeginCompleteMultipartUpload(compRequest, null, null));
        Assert.Equal(bucketName, compResponse.BucketName);
        Assert.NotNull(compResponse.ETag);
        Assert.Equal(key, compResponse.Key);
        Assert.NotNull(compResponse.Location);

        // Get the file back from S3 and make sure it is still the same.
        GetObjectRequest getRequest = new GetObjectRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectResponse getResponse = null;
        if (IsKMSEncryptionClient(s3EncryptionClient))
        {
            // KMS clients use the synchronous GetObject instead of Begin/End.
            getResponse = s3DecryptionClient.GetObject(getRequest);
        }
        else
        {
            getResponse = s3DecryptionClient.EndGetObject(
                s3DecryptionClient.BeginGetObject(getRequest, null, null));
        }
        getResponse.WriteResponseStreamToFile(retrievedFilepath);
        UtilityMethods.CompareFiles(filePath, retrievedFilepath);

        // The content type set at initiation must survive the round trip.
        GetObjectMetadataRequest metaDataRequest = new GetObjectMetadataRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectMetadataResponse metaDataResponse = s3DecryptionClient.EndGetObjectMetadata(
            s3DecryptionClient.BeginGetObjectMetadata(metaDataRequest, null, null));
        Assert.Equal("text/html", metaDataResponse.Headers.ContentType);
    }
    finally
    {
        // Always release the local stream and remove both temp files.
        inputStream.Close();
        if (File.Exists(filePath))
        {
            File.Delete(filePath);
        }
        if (File.Exists(retrievedFilepath))
        {
            File.Delete(retrievedFilepath);
        }
    }
}
/// <summary>
/// Uploads a generated ~15 MB file (15 MiB + 4001 bytes) in three multipart pieces
/// through the instruction-file-mode encryption client, checks the part listing,
/// completes the upload, then downloads the object and verifies both content and
/// metadata survive the round trip.
/// </summary>
public async Task MultipartEncryptionTestInstructionFile()
{
    string filePath = Path.Combine(Path.GetTempPath(), "MulitpartEncryptionTestInstructionFile_upload.txt");
    string retrievedFilepath = Path.Combine(Path.GetTempPath(), "MulitpartEncryptionTestInstructionFile_download.txt");

    var megSize = (int)Math.Pow(2, 20);
    var totalSize = (long)(15 * megSize) + 4001;
    UtilityMethods.GenerateFile(filePath, totalSize);
    _filesToDelete.Add(filePath);

    var key = "MultipartEncryptionTestInstrcutionFile" + random.Next();

    using (Stream uploadStream = File.OpenRead(filePath))
    {
        var initiateRequest = new InitiateMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            StorageClass = S3StorageClass.ReducedRedundancy,
            ContentType = "text/html",
            CannedACL = S3CannedACL.PublicRead
        };
        var initiateResponse = await s3EncryptionClientFileMode.InitiateMultipartUploadAsync(initiateRequest);

        // Part 1: first 5 MiB of the stream.
        var partRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initiateResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * megSize,
            InputStream = uploadStream
        };
        var firstPart = await s3EncryptionClientFileMode.UploadPartAsync(partRequest);

        // Part 2: next 5 MiB plus the 4001-byte remainder.
        partRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initiateResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * megSize + 4001,
            InputStream = uploadStream
        };
        var secondPart = await s3EncryptionClientFileMode.UploadPartAsync(partRequest);

        // Part 3: whatever is left in the stream, flagged as the final part.
        partRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initiateResponse.UploadId,
            PartNumber = 3,
            InputStream = uploadStream,
            IsLastPart = true
        };
        var thirdPart = await s3EncryptionClientFileMode.UploadPartAsync(partRequest);

        // The service-side part listing must agree with what was uploaded.
        var listRequest = new ListPartsRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initiateResponse.UploadId
        };
        var listResponse = await s3EncryptionClientFileMode.ListPartsAsync(listRequest);
        Assert.Equal(3, listResponse.Parts.Count);
        Assert.Equal(firstPart.PartNumber, listResponse.Parts[0].PartNumber);
        Assert.Equal(firstPart.ETag, listResponse.Parts[0].ETag);
        Assert.Equal(secondPart.PartNumber, listResponse.Parts[1].PartNumber);
        Assert.Equal(secondPart.ETag, listResponse.Parts[1].ETag);
        Assert.Equal(thirdPart.PartNumber, listResponse.Parts[2].PartNumber);
        Assert.Equal(thirdPart.ETag, listResponse.Parts[2].ETag);

        // MaxParts caps the page size of ListParts.
        listRequest.MaxParts = 1;
        listResponse = await s3EncryptionClientFileMode.ListPartsAsync(listRequest);
        Assert.Equal(1, listResponse.Parts.Count);

        // Finish the multipart upload from the three part ETags.
        var completeRequest = new CompleteMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initiateResponse.UploadId
        };
        completeRequest.AddPartETags(firstPart, secondPart, thirdPart);
        var completeResponse = await s3EncryptionClientFileMode.CompleteMultipartUploadAsync(completeRequest);
        Assert.Equal(bucketName, completeResponse.BucketName);
        Assert.NotNull(completeResponse.ETag);
        Assert.Equal(key, completeResponse.Key);
        Assert.NotNull(completeResponse.Location);

        // Pull the object back down and confirm it round-tripped unchanged.
        var getRequest = new GetObjectRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        var getResponse = await s3EncryptionClientFileMode.GetObjectAsync(getRequest);
        await getResponse.WriteResponseStreamToFileAsync(retrievedFilepath, false, System.Threading.CancellationToken.None);
        _filesToDelete.Add(retrievedFilepath);
        UtilityMethods.CompareFiles(filePath, retrievedFilepath);

        // The content type set at initiation must survive the round trip.
        var metadataRequest = new GetObjectMetadataRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        var metadataResponse = await s3EncryptionClientFileMode.GetObjectMetadataAsync(metadataRequest);
        Assert.Equal("text/html", metadataResponse.Headers.ContentType);
    }
}
/// <summary>
/// Provides the actual implementation to move or copy an S3 object. Objects large
/// enough (as decided by <c>UseMultipart</c>) are copied via multipart CopyPart
/// calls run in parallel; smaller objects use a single CopyObject call. When
/// <paramref name="deleteSource"/> is true and the copy succeeded, the source
/// object is deleted afterwards.
/// </summary>
/// <param name="client">The S3 client used for all service calls.</param>
/// <param name="request">The copy request describing source and destination.</param>
/// <param name="partSize">Part size in bytes for the multipart path; must be within the S3 multipart limits.</param>
/// <param name="deleteSource">True to delete the source object after a successful copy (i.e. a move).</param>
/// <param name="useMulitpart">Predicate intended to decide whether multipart copy is used.
/// NOTE(review): this parameter is currently unused — the static <c>UseMultipart</c> helper is called instead; confirm intent.</param>
/// <returns>The request/response pair on success, or <c>null</c> when any exception occurred (errors are swallowed).</returns>
private static async Task<CopyObjectRequestResponse> CopyOrMoveObjectAsync(this IAmazonS3 client, CopyObjectRequest request, long partSize, bool deleteSource, Func<long, long, bool> useMulitpart)
{
    // Retry wrapper: 4 attempts with exponential backoff (100 ms base, 1000 ms cap),
    // treating cancelled operations as retryable.
    ExponentialBackoffAndRetryClient backoffClient = new ExponentialBackoffAndRetryClient(4, 100, 1000)
    {
        ExceptionHandlingLogic = (ex) => ex is OperationCanceledException
    };

    try
    {
        ParameterTests.NonNull(request, "request");
        ParameterTests.OutOfRange(partSize >= Constants.MINIMUM_MULTIPART_PART_SIZE, "partSize", $"The part size must be at least {Constants.MINIMUM_MULTIPART_PART_SIZE} bytes.");
        ParameterTests.OutOfRange(partSize <= Constants.MAXIMUM_MULTIPART_PART_SIZE, "partSize", $"The part size cannot exceed {Constants.MAXIMUM_MULTIPART_PART_SIZE} bytes.");

        // Copying an object onto itself is not allowed (bucket comparison is case-insensitive).
        if (request.SourceKey == request.DestinationKey &&
            request.SourceBucket != null &&
            request.SourceBucket.Equals(request.DestinationBucket, StringComparison.OrdinalIgnoreCase))
        {
            throw new SourceDestinationSameException("The source and destination of the copy operation cannot be the same.", new CopyObjectRequest[] { request });
        }

        // Get the size of the source object; also used later for SourceVersionId.
        GetObjectMetadataRequest metadataRequest = new GetObjectMetadataRequest
        {
            BucketName = request.SourceBucket,
            Key = request.SourceKey
        };
        // FIX: the original wrapped this call in `try { ... } catch (Exception e) { throw e; }`,
        // which only destroyed the stack trace; the wrapper has been removed.
        GetObjectMetadataResponse metadataResponse = await backoffClient.RunAsync(() => client.GetObjectMetadataAsync(metadataRequest));
        long objectSize = metadataResponse.ContentLength; // Length in bytes.

        CopyObjectResponse response = null;

        if (UseMultipart(objectSize, partSize))
        {
            // If it takes more than a 5 GiB part to make 10000 or less parts, then this
            // operation isn't supported for an object of this size.
            if (objectSize / partSize > Constants.MAXIMUM_PARTS)
            {
                throw new NotSupportedException($"The object size, {objectSize}, cannot be broken into {Constants.MAXIMUM_PARTS} or fewer parts using a part size of {partSize} bytes.");
            }

            List<Task<CopyPartResponse>> copyResponses = new List<Task<CopyPartResponse>>();

            // "ObjectLockRetainUntilDate" is excluded from the conversion: it has a
            // nullable backing field that, when set to anything non-null, causes the
            // x-amz-object-lock-retain-until-date header to be sent, which throws if
            // the bucket has no ObjectLockConfiguration.
            InitiateMultipartUploadRequest initiateRequest = request.ConvertTo<InitiateMultipartUploadRequest>("ObjectLockRetainUntilDate");
            initiateRequest.BucketName = request.DestinationBucket;
            initiateRequest.Key = request.DestinationKey;

            InitiateMultipartUploadResponse initiateResponse = await backoffClient.RunAsync(() => client.InitiateMultipartUploadAsync(initiateRequest));

            try
            {
                long bytePosition = 0;
                int counter = 1;

                // Launch all of the part copies concurrently; each task is awaited below.
                while (bytePosition < objectSize)
                {
                    CopyPartRequest copyRequest = request.ConvertTo<CopyPartRequest>("ObjectLockRetainUntilDate");
                    copyRequest.UploadId = initiateResponse.UploadId;
                    copyRequest.FirstByte = bytePosition;
                    // The last part ends at objectSize - 1; every other part spans partSize bytes.
                    copyRequest.LastByte = ((bytePosition + partSize - 1) >= objectSize) ? objectSize - 1 : bytePosition + partSize - 1;
                    copyRequest.PartNumber = counter++;

                    copyResponses.Add(backoffClient.RunAsync(() => client.CopyPartAsync(copyRequest)));

                    bytePosition += partSize;
                }

                // Completion requires the parts in ascending part-number order.
                IEnumerable<CopyPartResponse> responses = (await Task.WhenAll(copyResponses)).OrderBy(x => x.PartNumber);

                // Set up to complete the copy.
                CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest
                {
                    BucketName = request.DestinationBucket,
                    Key = request.DestinationKey,
                    UploadId = initiateResponse.UploadId
                };
                completeRequest.AddPartETags(responses);

                // Complete the copy.
                CompleteMultipartUploadResponse completeUploadResponse = await backoffClient.RunAsync(() => client.CompleteMultipartUploadAsync(completeRequest));

                response = completeUploadResponse.CopyProperties<CopyObjectResponse>();
                response.SourceVersionId = metadataResponse.VersionId;
            }
            catch (AmazonS3Exception)
            {
                // Abort so the partially-uploaded parts don't linger (and accrue storage),
                // then surface the original failure.
                AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest()
                {
                    BucketName = request.DestinationBucket,
                    Key = request.DestinationKey,
                    UploadId = initiateResponse.UploadId
                };
                await backoffClient.RunAsync(() => client.AbortMultipartUploadAsync(abortRequest));
                // FIX: was `throw e;`, which resets the stack trace.
                throw;
            }
        }
        else
        {
            // Small object: a single server-side copy is sufficient.
            response = await backoffClient.RunAsync(() => client.CopyObjectAsync(request));
        }

        if (response.HttpStatusCode != HttpStatusCode.OK)
        {
            throw new AmazonS3Exception($"Could not copy object from s3://{request.SourceBucket}/{request.SourceKey} to s3://{request.DestinationBucket}/{request.DestinationKey}. Received response : {(int)response.HttpStatusCode}");
        }
        else
        {
            // We already checked to make sure the source and destination weren't the same,
            // so it's safe to delete the source object.
            if (deleteSource)
            {
                DeleteObjectRequest deleteRequest = new DeleteObjectRequest()
                {
                    BucketName = request.SourceBucket,
                    Key = request.SourceKey
                };
                DeleteObjectResponse deleteResponse = await backoffClient.RunAsync(() => client.DeleteObjectAsync(deleteRequest));
                if (deleteResponse.HttpStatusCode != HttpStatusCode.NoContent)
                {
                    throw new AmazonS3Exception($"Could not delete s3://{request.SourceBucket}/{request.SourceKey}. Received response : {(int)deleteResponse.HttpStatusCode}");
                }
            }
            return new CopyObjectRequestResponse(request, response);
        }
    }
    catch (Exception)
    {
        // NOTE(review): every failure (including argument validation) is swallowed and
        // surfaced to the caller as null. Preserved because callers may depend on the
        // null contract, but consider rethrowing or logging.
        return null;
    }
}
/// <summary>
/// This method uses the passed client object to perform a multipart
/// copy operation: the source object is copied to the target in 5 MB
/// CopyPart chunks and the upload is then completed from the collected ETags.
/// </summary>
/// <param name="client">An Amazon S3 client object that will be used
/// to perform the copy.</param>
public static async Task MPUCopyObjectAsync(AmazonS3Client client)
{
    // Responses for each copied part; their ETags complete the upload.
    // FIX: removed the unused List<UploadPartResponse> local from the original.
    var copyResponses = new List<CopyPartResponse>();

    // Setup information required to initiate the multipart upload.
    var initiateRequest = new InitiateMultipartUploadRequest
    {
        BucketName = TargetBucket,
        Key = TargetObjectKey,
    };

    // Initiate the upload.
    InitiateMultipartUploadResponse initResponse =
        await client.InitiateMultipartUploadAsync(initiateRequest);

    // Save the upload ID.
    string uploadId = initResponse.UploadId;

    try
    {
        // Get the size of the object.
        var metadataRequest = new GetObjectMetadataRequest
        {
            BucketName = SourceBucket,
            Key = SourceObjectKey,
        };

        GetObjectMetadataResponse metadataResponse =
            await client.GetObjectMetadataAsync(metadataRequest);
        var objectSize = metadataResponse.ContentLength; // Length in bytes.

        // Copy the parts in 5 MB chunks.
        long partSize = 5 * 1024 * 1024;
        long bytePosition = 0;
        for (int i = 1; bytePosition < objectSize; i++)
        {
            var copyRequest = new CopyPartRequest
            {
                DestinationBucket = TargetBucket,
                DestinationKey = TargetObjectKey,
                SourceBucket = SourceBucket,
                SourceKey = SourceObjectKey,
                UploadId = uploadId,
                FirstByte = bytePosition,
                // The final part ends at the last byte of the object.
                LastByte = bytePosition + partSize - 1 >= objectSize ? objectSize - 1 : bytePosition + partSize - 1,
                PartNumber = i,
            };
            copyResponses.Add(await client.CopyPartAsync(copyRequest));

            bytePosition += partSize;
        }

        // Set up to complete the copy.
        var completeRequest = new CompleteMultipartUploadRequest
        {
            BucketName = TargetBucket,
            Key = TargetObjectKey,
            UploadId = initResponse.UploadId,
        };
        completeRequest.AddPartETags(copyResponses);

        // Complete the copy.
        await client.CompleteMultipartUploadAsync(completeRequest);
    }
    catch (AmazonS3Exception e)
    {
        Console.WriteLine($"Error encountered on server. Message:'{e.Message}' when writing an object");
        await TryAbortAsync(client, uploadId);
    }
    catch (Exception e)
    {
        Console.WriteLine($"Unknown encountered on server. Message:'{e.Message}' when writing an object");
        await TryAbortAsync(client, uploadId);
    }
}

/// <summary>
/// Best-effort abort of a failed multipart upload so the already-copied
/// parts do not linger in the target bucket (incomplete parts incur storage
/// costs until aborted). Abort failures are reported but not rethrown.
/// </summary>
/// <param name="client">The S3 client used for the abort call.</param>
/// <param name="uploadId">The upload ID of the multipart upload to abort.</param>
private static async Task TryAbortAsync(AmazonS3Client client, string uploadId)
{
    try
    {
        await client.AbortMultipartUploadAsync(new AbortMultipartUploadRequest
        {
            BucketName = TargetBucket,
            Key = TargetObjectKey,
            UploadId = uploadId,
        });
    }
    catch (Exception e)
    {
        Console.WriteLine($"Could not abort multipart upload. Message:'{e.Message}'");
    }
}
/// <summary>
/// Synchronous multipart round trip through the instruction-file-mode encryption
/// client: creates the bucket, uploads a generated file in three parts, verifies
/// the part listing, completes the upload, then downloads the object and verifies
/// content and metadata.
/// NOTE(review): unlike the async variant, MEG_SIZE here already includes the
/// +4001 slack, so every "5 * MEG_SIZE" part is slightly larger than 5 MiB —
/// looks intentional for odd-sized parts, but confirm.
/// </summary>
public void MultipartEncryptionTestInstructionFile()
{
    string filePath = @"C:\temp\Upload15MegFileIn3PartsViaStream.txt";
    string retrievedFilepath = @"C:\temp\Upload15MegFileIn3PartsViaStreamRetreived.txt";
    int MEG_SIZE = (int)Math.Pow(2, 20) + 4001;
    long totalSize = (long)(15 * MEG_SIZE);
    UtilityMethods.GenerateFile(filePath, totalSize);

    string key = "MultipartEncryptionTestInstrcutionFile" + random.Next();

    // Ensure the target bucket exists before uploading.
    s3EncryptionClientFileMode.PutBucket(new PutBucketRequest() { BucketName = bucketName });

    // One stream feeds all three part uploads sequentially; order matters.
    Stream inputStream = File.OpenRead(filePath);
    try
    {
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            StorageClass = S3StorageClass.ReducedRedundancy,
            ContentType = "text/html",
            CannedACL = S3CannedACL.PublicRead
        };
        InitiateMultipartUploadResponse initResponse = s3EncryptionClientFileMode.InitiateMultipartUpload(initRequest);

        // Upload part 1 (first 5 * MEG_SIZE bytes).
        UploadPartRequest uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MEG_SIZE,
            InputStream = inputStream
        };
        UploadPartResponse up1Response = s3EncryptionClientFileMode.UploadPart(uploadRequest);

        // Upload part 2 (next 5 * MEG_SIZE + 4001 bytes).
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MEG_SIZE + 4001,
            InputStream = inputStream
        };
        UploadPartResponse up2Response = s3EncryptionClientFileMode.UploadPart(uploadRequest);

        // Upload part 3: no PartSize, so the remainder of the stream is sent;
        // IsLastPart marks the final part for the encryption client.
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream,
            IsLastPart = true
        };
        UploadPartResponse up3Response = s3EncryptionClientFileMode.UploadPart(uploadRequest);

        // The service-side part listing must agree with what was uploaded.
        ListPartsRequest listPartRequest = new ListPartsRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse =
            s3EncryptionClientFileMode.ListParts(listPartRequest);
        Assert.AreEqual(3, listPartResponse.Parts.Count);
        Assert.AreEqual(up1Response.PartNumber, listPartResponse.Parts[0].PartNumber);
        Assert.AreEqual(up1Response.ETag, listPartResponse.Parts[0].ETag);
        Assert.AreEqual(up2Response.PartNumber, listPartResponse.Parts[1].PartNumber);
        Assert.AreEqual(up2Response.ETag, listPartResponse.Parts[1].ETag);
        Assert.AreEqual(up3Response.PartNumber, listPartResponse.Parts[2].PartNumber);
        Assert.AreEqual(up3Response.ETag, listPartResponse.Parts[2].ETag);

        // MaxParts caps the ListParts page size.
        listPartRequest.MaxParts = 1;
        listPartResponse = s3EncryptionClientFileMode.ListParts(listPartRequest);
        Assert.AreEqual(1, listPartResponse.Parts.Count);

        // Complete the multipart upload from the three part ETags.
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        compRequest.AddPartETags(up1Response, up2Response, up3Response);
        CompleteMultipartUploadResponse compResponse = s3EncryptionClientFileMode.CompleteMultipartUpload(compRequest);
        Assert.AreEqual(bucketName, compResponse.BucketName);
        Assert.IsNotNull(compResponse.ETag);
        Assert.AreEqual(key, compResponse.Key);
        Assert.IsNotNull(compResponse.Location);

        // Get the file back from S3 and make sure it is still the same.
        GetObjectRequest getRequest = new GetObjectRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectResponse getResponse = s3EncryptionClientFileMode.GetObject(getRequest);
        getResponse.WriteResponseStreamToFile(retrievedFilepath);
        UtilityMethods.CompareFiles(filePath, retrievedFilepath);

        // The content type set at initiation must survive the round trip.
        GetObjectMetadataRequest metaDataRequest = new GetObjectMetadataRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectMetadataResponse metaDataResponse = s3EncryptionClientFileMode.GetObjectMetadata(metaDataRequest);
        Assert.AreEqual("text/html", metaDataResponse.Headers.ContentType);
    }
    finally
    {
        // Always release the local stream and remove both local files.
        inputStream.Close();
        if (File.Exists(filePath))
        {
            File.Delete(filePath);
        }
        if (File.Exists(retrievedFilepath))
        {
            File.Delete(retrievedFilepath);
        }
    }
}
public void SSEMultipartUploadTest() { var testKey = "test.tst"; var size = 1 * 1024 * 1024; var data = new byte[size]; new Random().NextBytes(data); Aes aesEncryption = Aes.Create(); aesEncryption.KeySize = 256; aesEncryption.GenerateKey(); var encKey = Convert.ToBase64String(aesEncryption.Key); Assert.DoesNotThrowAsync(async() => { var initiateRequest = new InitiateMultipartUploadRequest { BucketName = TestBucket, Key = testKey, ServerSideEncryptionCustomerMethod = ServerSideEncryptionCustomerMethod.AES256, ServerSideEncryptionCustomerProvidedKey = encKey }; var initResponse = await _s3Client.InitiateMultipartUploadAsync(initiateRequest); // Assert.IsNotNull(initResponse.ServerSideEncryptionCustomerMethod); // Assert.IsNotNull(initResponse.ServerSideEncryptionCustomerProvidedKeyMD5); try { UploadPartResponse uploadResponse; await using (var stream = new MemoryStream(data)) { UploadPartRequest uploadRequest = new UploadPartRequest { BucketName = TestBucket, Key = testKey, UploadId = initResponse.UploadId, PartNumber = 1, PartSize = size, FilePosition = 0, InputStream = stream, ServerSideEncryptionCustomerMethod = ServerSideEncryptionCustomerMethod.AES256, ServerSideEncryptionCustomerProvidedKey = encKey, }; uploadResponse = await _s3Client.UploadPartAsync(uploadRequest); } CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest { BucketName = TestBucket, Key = testKey, UploadId = initResponse.UploadId }; completeRequest.AddPartETags(uploadResponse); var completeUploadResponse = await _s3Client.CompleteMultipartUploadAsync(completeRequest); } catch { var abortMPURequest = new AbortMultipartUploadRequest { BucketName = TestBucket, Key = testKey, UploadId = initResponse.UploadId }; await _s3Client.AbortMultipartUploadAsync(abortMPURequest); throw; } }); }
private static async Task UploadObjectAsync() { // Create list to store upload part responses. List <UploadPartResponse> uploadResponses = new List <UploadPartResponse>(); // Setup information required to initiate the multipart upload. InitiateMultipartUploadRequest initiateRequest = new InitiateMultipartUploadRequest { BucketName = bucketName, Key = keyName }; // Initiate the upload. InitiateMultipartUploadResponse initResponse = await s3Client.InitiateMultipartUploadAsync(initiateRequest); // Upload parts. long contentLength = new FileInfo(filePath).Length; long partSize = 5 * (long)Math.Pow(2, 20); // 5 MB try { Console.WriteLine("Uploading parts"); long filePosition = 0; for (int i = 1; filePosition < contentLength; i++) { UploadPartRequest uploadRequest = new UploadPartRequest { BucketName = bucketName, Key = keyName, UploadId = initResponse.UploadId, PartNumber = i, PartSize = partSize, FilePosition = filePosition, FilePath = filePath }; // Track upload progress. uploadRequest.StreamTransferProgress += new EventHandler <StreamTransferProgressArgs>(UploadPartProgressEventCallback); // Upload a part and add the response to our list. uploadResponses.Add(await s3Client.UploadPartAsync(uploadRequest)); filePosition += partSize; } // Setup to complete the upload. CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest { BucketName = bucketName, Key = keyName, UploadId = initResponse.UploadId }; completeRequest.AddPartETags(uploadResponses); // Complete the upload. CompleteMultipartUploadResponse completeUploadResponse = await s3Client.CompleteMultipartUploadAsync(completeRequest); } catch (Exception exception) { Console.WriteLine("An AmazonS3Exception was thrown: { 0}", exception.Message); // Abort the upload. AbortMultipartUploadRequest abortMPURequest = new AbortMultipartUploadRequest { BucketName = bucketName, Key = keyName, UploadId = initResponse.UploadId }; await s3Client.AbortMultipartUploadAsync(abortMPURequest); } }
/// <summary>
/// Appends data from <paramref name="stream"/> to a resumable S3 multipart upload.
/// Reads the stream in fixed-size blocks (blockSize each, except a possibly
/// shorter final block), uploads each block as the next S3 part, records the
/// returned part ETag on <paramref name="upload"/> (base64-encoded JSON, joined
/// with spaces in <c>BlockIds</c>), and persists progress to the database after
/// every block so the upload can be resumed. When the total uploaded length
/// reaches the expected length, the multipart upload is completed.
/// </summary>
/// <param name="upload">Tracking entity holding upload id, block counter, block ETags and progress.</param>
/// <param name="stream">Source of the bytes to append; read sequentially.</param>
/// <param name="cancellationToken">Checked between blocks; a cancelled token stops reading but does not fail the call.</param>
/// <returns>The number of bytes written to S3 by this call.</returns>
public override async Task <long> AppendToUploadAsync(Upload upload, Stream stream, CancellationToken cancellationToken)
{
    try
    {
        // First call for this upload: start numbering blocks at zero.
        if (!upload.BlockNumber.HasValue)
        {
            upload.BlockNumber = 0;
        }
        var blobName = upload.Id + upload.Extension;

        // Nothing left to append.
        if (upload.Length == upload.UploadedLength)
        {
            return(0);
        }

        int bytesRead = 0;
        long bytesWritten = 0;

        // No S3 upload started yet: initiate the multipart upload and remember its id.
        if (upload.ProviderUploadId == null)
        {
            // Setup information required to initiate the multipart upload.
            var initiateRequest = new InitiateMultipartUploadRequest
            {
                BucketName = _bucket,
                Key = blobName,
            };

            // Initiate the upload.
            InitiateMultipartUploadResponse initResponse =
                await _s3.InitiateMultipartUploadAsync(initiateRequest, cancellationToken);
            upload.ProviderUploadId = initResponse.UploadId;
        }

        do
        {
            // Cancellation is honored between blocks only, so a partially
            // uploaded block is never left unrecorded.
            if (cancellationToken.IsCancellationRequested)
            {
                _logger.LogDebug("Request to append cancelled for file '{id}'", blobName);
                break;
            }

            var buffer = new byte[blockSize];

            // Fill the buffer completely unless the stream ends first
            // (checking for the last block — it will never reach the full
            // block size exactly).
            int lastBytesRead = 0;
            bytesRead = 0;
            do
            {
                lastBytesRead = await stream.ReadAsync(
                    buffer,
                    bytesRead,
                    // Ensure we don't overread the buffer.
                    blockSize - bytesRead,
                    cancellationToken);
                bytesRead += lastBytesRead;
            } while (bytesRead < blockSize && lastBytesRead > 0);

            // Stream exhausted: nothing more to upload.
            if (bytesRead == 0)
            {
                break;
            }

            using (MemoryStream memoryBufferStream = new MemoryStream(buffer, 0, bytesRead))
            {
                var uploadPartRequest = new UploadPartRequest
                {
                    BucketName = _bucket,
                    Key = blobName,
                    UploadId = upload.ProviderUploadId,
                    PartNumber = upload.BlockNumber.Value + 1, // Amazon S3 part uploads start at one.
                    PartSize = bytesRead,
                    InputStream = memoryBufferStream
                };
                var result = await _s3.UploadPartAsync(uploadPartRequest, cancellationToken);

                // Persist the part's ETag as base64(JSON(PartETag)) appended to the
                // space-separated BlockIds list; decoded again at completion time.
                PartETag partETag = new PartETag(result.PartNumber, result.ETag);
                string serialised = JsonConvert.SerializeObject(partETag);
                string eTag = Convert.ToBase64String(Encoding.UTF8.GetBytes(serialised));
                upload.BlockIds += $"{eTag} ";
            }

            bytesWritten += bytesRead;
            upload.BlockNumber++;
            upload.UploadedLength += bytesRead;
            _logger.LogDebug("Read bytes {bytesRead}, written {bytesWritten}, block number {blockNumber} on file {fileId}", bytesRead, bytesWritten, upload.BlockNumber, blobName);

            // note: cancellation token *not* supplied as this must finish because we have
            // sucessfully uploaded the latest block to S3.
            await DbContext.SaveChangesAsync();
        } while (bytesRead != 0);

        // Everything uploaded: rebuild the part list from BlockIds and complete
        // the multipart upload.
        if (upload.Length == upload.UploadedLength)
        {
            var blockIds = upload.BlockIds.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
            var completeRequest = new CompleteMultipartUploadRequest
            {
                BucketName = _bucket,
                Key = blobName,
                UploadId = upload.ProviderUploadId,
            };
            List <PartETag> partETags = new List <PartETag>();
            foreach (var blockId in blockIds)
            {
                // Reverse of the encoding above: base64 -> JSON -> PartETag.
                string serialised = Encoding.UTF8.GetString(Convert.FromBase64String(blockId));
                PartETag partETag = JsonConvert.DeserializeObject <PartETag>(serialised);
                partETags.Add(partETag);
            }
            completeRequest.AddPartETags(partETags);
            await _s3.CompleteMultipartUploadAsync(completeRequest);
            await DbContext.SaveChangesAsync();
        }
        return(bytesWritten);
    }
    catch (Exception e)
    {
        // Log and rethrow so callers still observe the failure.
        _logger.LogError(e, "Failed to append data");
        throw;
    }
}
/// <summary>
/// Uploads a file to S3 as a resumable multipart upload, coordinating state with the
/// ffApi backend: an upload record is created (or reused) per file name, already-uploaded
/// parts are discovered via ListParts so interrupted transfers resume where they stopped,
/// and the backend record is marked Uploaded once S3 completes the object.
/// All failures are logged and swallowed (best-effort transfer).
/// </summary>
/// <param name="filePath">Path of the local file to upload.</param>
private static async Task UploadFileAsync(string filePath)
{
    try
    {
        var j = new FileInfo(filePath);

        //do some logic...
        var uploadToApi = await ffApi.GetUploadOnFileName(j.Name);
        string uploadId = uploadToApi.S3Id;

        // No usable in-flight upload on record: start a fresh multipart upload on S3
        // and register it with the backend.
        if (uploadToApi.Id == null || uploadToApi.Status == UploadType.Uploaded)
        {
            string key = generateID();
            Amazon.S3.Model.InitiateMultipartUploadRequest request = new InitiateMultipartUploadRequest
            {
                BucketName = bucketName,
                Key = key,
                CannedACL = S3CannedACL.AuthenticatedRead
            };
            request.Metadata.Add("fname", j.Name);
            // NOTE(review): ToString() here is culture-sensitive, so the stored timestamp
            // format varies with the machine's locale — consider an invariant round-trip
            // format ("O"); left unchanged to avoid altering the stored metadata value.
            request.Metadata.Add("transfer-created", DateTime.UtcNow.ToString());
            request.Metadata.Add("up-version", "0.1;closed");

            InitiateMultipartUploadResponse response = await s3Client.InitiateMultipartUploadAsync(request);
            uploadId = response.UploadId;
            uploadToApi = await ffApi.CreateUpload(new Upload
            {
                filename = j.Name,
                S3Id = uploadId,
                Key = key,
                Status = UploadType.Initialized
            });
        }

        Console.WriteLine("uploadid: " + uploadId);

        // Upload parts.
        long contentLength = new FileInfo(filePath).Length;
        long partSize = 50000000; // 50 mb
        Console.WriteLine("part size: " + partSize + " bytes");
        Console.WriteLine("Finding parts");

        // Ask S3 which parts already exist so we can resume after the last complete one.
        ListPartsRequest listPartsRequest = new ListPartsRequest
        {
            BucketName = bucketName,
            Key = uploadToApi.Key,
            UploadId = uploadId
        };
        var listParts = await s3Client.ListPartsAsync(listPartsRequest);
        int currentPart = Math.Max(listParts.NextPartNumberMarker, 0);
        long filePosition = currentPart * partSize;

        for (int i = currentPart + 1; filePosition < contentLength; i++)
        {
            Console.WriteLine("Uploading part index: " + i);
            UploadPartRequest uploadRequest = new UploadPartRequest
            {
                BucketName = bucketName,
                Key = uploadToApi.Key,
                UploadId = uploadId,
                PartNumber = i,
                PartSize = partSize,
                FilePosition = filePosition,
                FilePath = filePath
            };

            // Track upload progress.
            uploadRequest.StreamTransferProgress +=
                new EventHandler <StreamTransferProgressArgs>(UploadPartProgressEventCallback);

            // Upload a part; its ETag is re-fetched from S3 below, so the response is not kept.
            await s3Client.UploadPartAsync(uploadRequest);
            filePosition += partSize;
        }

        // Setup to complete the upload. Re-list ALL parts from S3 — ListParts pages at
        // 1000 parts, so follow NextPartNumberMarker while the response is truncated to
        // avoid completing with a partial part list on large files.
        List <PartETag> partETags = new List <PartETag>();
        ListPartsResponse partsPage;
        listPartsRequest.PartNumberMarker = null;
        do
        {
            partsPage = await s3Client.ListPartsAsync(listPartsRequest);
            foreach (var p in partsPage.Parts)
            {
                partETags.Add(new PartETag { ETag = p.ETag, PartNumber = p.PartNumber });
            }
            listPartsRequest.PartNumberMarker = partsPage.NextPartNumberMarker.ToString();
        } while (partsPage.IsTruncated);

        CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest
        {
            BucketName = bucketName,
            Key = uploadToApi.Key,
            UploadId = uploadId
        };
        completeRequest.AddPartETags(partETags);

        // Complete the upload.
        CompleteMultipartUploadResponse completeUploadResponse =
            await s3Client.CompleteMultipartUploadAsync(completeRequest);

        // Mark the backend record as finished.
        uploadToApi.Status = UploadType.Uploaded;
        await ffApi.UpdateUpload(uploadToApi, uploadId);
    }
    catch (Exception x)
    {
        Log.Error(x.ToString());
    }
}
/// <summary>
/// Uploads the sample file to S3 as a multipart upload encrypted with a
/// customer-provided AES-256 key (SSE-C). Each 5 MB part carries the same key so S3
/// can encrypt it; on success the parts are assembled into the final object, and on
/// any failure the error is printed and the pending multipart upload is aborted so
/// S3 does not keep orphaned parts.
/// </summary>
/// <param name="base64Key">Base64-encoded 256-bit AES key supplied by the client.</param>
/// <param name="s3Client">Client used for all S3 calls.</param>
private static async Task CreateSampleObjUsingClientEncryptionKeyAsync(string base64Key, IAmazonS3 s3Client)
{
    // Collects each part's response; the ETags inside are needed to complete the upload.
    List <UploadPartResponse> partResponses = new List <UploadPartResponse>();

    // 1. Initiate the multipart upload, declaring the customer-provided encryption key.
    InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest
    {
        BucketName = existingBucketName,
        Key = sourceKeyName,
        ServerSideEncryptionCustomerMethod = ServerSideEncryptionCustomerMethod.AES256,
        ServerSideEncryptionCustomerProvidedKey = base64Key
    };
    InitiateMultipartUploadResponse initResponse = await s3Client.InitiateMultipartUploadAsync(initRequest);

    // 2. Upload the file in fixed-size chunks.
    long totalBytes = new FileInfo(filePath).Length;
    long chunkSize = 5 * (long)Math.Pow(2, 20); // 5 MB — the S3 minimum part size.

    try
    {
        long offset = 0;
        int partNumber = 1;
        while (offset < totalBytes)
        {
            // Every part repeats the SSE-C settings; S3 requires the key on each call.
            UploadPartRequest partRequest = new UploadPartRequest
            {
                BucketName = existingBucketName,
                Key = sourceKeyName,
                UploadId = initResponse.UploadId,
                PartNumber = partNumber,
                PartSize = chunkSize,
                FilePosition = offset,
                FilePath = filePath,
                ServerSideEncryptionCustomerMethod = ServerSideEncryptionCustomerMethod.AES256,
                ServerSideEncryptionCustomerProvidedKey = base64Key
            };
            partResponses.Add(await s3Client.UploadPartAsync(partRequest));

            offset += chunkSize;
            partNumber++;
        }

        // 3. Ask S3 to stitch the uploaded parts into the final object.
        CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest
        {
            BucketName = existingBucketName,
            Key = sourceKeyName,
            UploadId = initResponse.UploadId
        };
        completeRequest.AddPartETags(partResponses);
        CompleteMultipartUploadResponse completeUploadResponse =
            await s3Client.CompleteMultipartUploadAsync(completeRequest);
    }
    catch (Exception exception)
    {
        // Report the failure, then abort so S3 discards the already-uploaded parts.
        Console.WriteLine("Exception occurred: {0}", exception.Message);
        AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest
        {
            BucketName = existingBucketName,
            Key = sourceKeyName,
            UploadId = initResponse.UploadId
        };
        await s3Client.AbortMultipartUploadAsync(abortRequest);
    }
}