/// <summary>
/// Completes a previously initiated Glacier multipart upload and returns the
/// resulting archive ID.
/// </summary>
/// <param name="archiveSize">Total size of the assembled archive in bytes.</param>
/// <param name="checksum">SHA256 tree hash of the full archive (lowercase hex).</param>
/// <param name="uploadId">Upload ID returned by InitiateMultipartUpload.</param>
/// <returns>The archive ID assigned by Glacier.</returns>
public string EndMultiPartUpload(long archiveSize, string checksum, string uploadId)
{
    // ArchiveSize is transmitted as a string per the Glacier API contract.
    CompleteMultipartUploadRequest request = new CompleteMultipartUploadRequest()
    {
        ArchiveSize = archiveSize.ToString(),
        Checksum = checksum,
        UploadId = uploadId,
        VaultName = _vault
    };

    CompleteMultipartUploadResponse response = _amazonGlacierClient.CompleteMultipartUpload(request);

    // Read ArchiveId directly off the response, consistent with the other call
    // sites in this file; the intermediate CompleteMultipartUploadResult wrapper
    // is deprecated in current AWS SDK for .NET versions.
    return response.ArchiveId;
}
/// <summary>
/// Finalizes a multipart upload for the given file and returns the ArchiveID
/// assigned by Glacier. The archive checksum is the tree hash computed from the
/// per-part checksums gathered during the upload.
/// </summary>
static string CompleteMPU(string uploadID, AmazonGlacierClient client, List<string> partChecksumList, string archiveToUpload)
{
    var archiveBytes = new FileInfo(archiveToUpload).Length;
    var treeHash = TreeHashGenerator.CalculateTreeHash(partChecksumList);

    var request = new CompleteMultipartUploadRequest
    {
        UploadId = uploadID,
        ArchiveSize = archiveBytes.ToString(),
        Checksum = treeHash,
        VaultName = vaultName
    };

    var response = client.CompleteMultipartUpload(request);
    return response.ArchiveId;
}
/// <summary>
/// Example: complete a Glacier multipart upload with hard-coded sample values
/// and read back the archive ID, checksum, and location from the response.
/// </summary>
public void GlacierCompleteMultipartUpload()
{
    #region 272aa0b8-e44c-4a64-add2-ad905a37984d
    var client = new AmazonGlacierClient();

    var request = new CompleteMultipartUploadRequest
    {
        AccountId = "-",
        ArchiveSize = "3145728",
        Checksum = "9628195fcdbcbbe76cdde456d4646fa7de5f219fb39823836d81f0cc0e18aa67",
        UploadId = "19gaRezEXAMPLES6Ry5YYdqthHOC_kGRCT03L9yetr220UmPtBYKk-OssZtLqyFu7sY1_lR7vgFuJV6NtcV5zpsJ",
        VaultName = "my-vault"
    };

    var response = client.CompleteMultipartUpload(request);

    string archiveId = response.ArchiveId;
    string checksum = response.Checksum;
    string location = response.Location;
    #endregion
}
/// <summary>
/// Uploads a file to an Amazon Glacier vault as a multipart upload, computing
/// the SHA256 tree hash for each part, then records the completed archive in
/// DynamoDB via WriteArchiveToDynamo. A whole-file SHA256 is computed in a
/// background worker fed from the hashBuffers queue.
/// </summary>
/// <param name="vaultName">Target Glacier vault; must be non-empty.</param>
/// <param name="fileToUpload">Path of the local file to upload; must exist.</param>
/// <param name="awsRegion">AWS region hosting the vault and the Dynamo table.</param>
public static void Upload(string vaultName, string fileToUpload, Amazon.RegionEndpoint awsRegion)
{
    try
    {
        if (string.IsNullOrEmpty(vaultName))
        {
            throw new Exception("No vault specified");
        }
        if (string.IsNullOrEmpty(fileToUpload) || !File.Exists(fileToUpload))
        {
            throw new Exception($"Invalid file '{fileToUpload}'");
        }

        // Verify that the dynamo table exists
        InitializeDynamo(awsRegion, dynamoTableName);

        string archiveName = Path.GetFileName(fileToUpload);
        fileSize = new FileInfo(fileToUpload).Length;

        using (var glacier = new AmazonGlacierClient(awsRegion))
        using (FileStream fs = File.OpenRead(fileToUpload))
        using (SHA256Managed sha = new SHA256Managed())
        {
            // Do the SHA256 hash in a background worker to avoid blocking.
            // Parts arrive via the hashBuffers queue after each successful upload.
            ThreadPool.QueueUserWorkItem(unused =>
            {
                byte[] lastBuffer = new byte[] { };
                while (!buffersDone || !hashBuffers.IsEmpty)
                {
                    Tuple<byte[], int> chunkTohash;
                    if (hashBuffers.TryDequeue(out chunkTohash))
                    {
                        sha.TransformBlock(chunkTohash.Item1, 0, chunkTohash.Item2, null, 0);
                        lastBuffer = chunkTohash.Item1;
                    }
                    Thread.Sleep(10);
                }
                // Zero-length final block just flushes the hash state.
                sha.TransformFinalBlock(lastBuffer, 0, 0);
                hashComplete = true;
            });

            long partSize = 128 * 1024 * 1024; // 128 MB parts

            var initUploadRequest = new Amazon.Glacier.Model.InitiateMultipartUploadRequest();
            initUploadRequest.ArchiveDescription = archiveName;
            initUploadRequest.PartSize = partSize;
            initUploadRequest.VaultName = vaultName;
            var initResponse = glacier.InitiateMultipartUpload(initUploadRequest);

            long position = 0;
            fs.Seek(0, SeekOrigin.Begin);
            List<byte[]> treeHashes = new List<byte[]>();

            while (true)
            {
                byte[] buffer = new byte[partSize];
                int bytesRead = fs.Read(buffer, 0, (int)partSize);
                if (bytesRead == 0)
                {
                    break;
                }

                using (MemoryStream ms = new MemoryStream(buffer))
                {
                    ms.Seek(0, SeekOrigin.Begin);
                    // Trim the stream to the bytes actually read (last part may be short).
                    ms.SetLength(bytesRead);

                    byte[] treeHash = HashUtil.ComputeSHA256TreeHash(buffer, bytesRead);
                    treeHashes.Add(treeHash);

                    ms.Seek(0, SeekOrigin.Begin);

                    var uploadRequest = new Amazon.Glacier.Model.UploadMultipartPartRequest();
                    uploadRequest.Body = ms;
                    uploadRequest.UploadId = initResponse.UploadId;
                    uploadRequest.VaultName = vaultName;
                    uploadRequest.StreamTransferProgress += OnTransferProgress;
                    // Glacier expects the per-part tree hash as lowercase hex.
                    uploadRequest.Checksum = BitConverter.ToString(treeHash).Replace("-", "").ToLower();

                    long firstByte = position;
                    long lastByte = position + bytesRead - 1;
                    uploadRequest.Range = $"bytes {firstByte}-{lastByte}/{fileSize}";

                    var uploadResponse = glacier.UploadMultipartPart(uploadRequest);
                }

                // Hand the part buffer to the background SHA256 worker only
                // AFTER it has been uploaded (the worker consumes the queue).
                hashBuffers.Enqueue(new Tuple<byte[], int>(buffer, bytesRead));
                position += bytesRead;
                transferredBytes += bytesRead;
            }

            // Signal the hashing worker that no more parts are coming, then
            // wait for it to drain the queue and finalize the hash.
            buffersDone = true;
            while (!hashComplete)
            {
                Thread.Sleep(10);
            }

            var completeUploadRequest = new Amazon.Glacier.Model.CompleteMultipartUploadRequest();
            completeUploadRequest.ArchiveSize = fileSize.ToString();
            completeUploadRequest.UploadId = initResponse.UploadId;
            completeUploadRequest.VaultName = vaultName;
            // The archive checksum is the tree hash of the per-part tree hashes.
            byte[] fullTreeHash = HashUtil.ComputeSHA256TreeHash(treeHashes.ToArray());
            completeUploadRequest.Checksum = BitConverter.ToString(fullTreeHash).Replace("-", "").ToLower();
            var completeUploadResponse = glacier.CompleteMultipartUpload(completeUploadRequest);

            string fileHash = BitConverter.ToString(sha.Hash).Replace("-", String.Empty);
            Console.WriteLine("File hash: " + fileHash);

            WriteArchiveToDynamo(completeUploadResponse.ArchiveId, awsRegion, vaultName, archiveName, fileSize, fileHash, completeUploadResponse.Location);

            Console.WriteLine("Copy and save the following Archive ID for the next step.");
            // BUG FIX: previously printed initResponse.UploadId (the multipart
            // upload id, useless after completion) under the "Archive ID" label;
            // the archive id comes from the CompleteMultipartUpload response.
            Console.WriteLine("Archive ID: {0}", completeUploadResponse.ArchiveId);
            Console.WriteLine("To continue, press Enter");
            Console.ReadKey();
        }
    }
    catch (AmazonGlacierException e)
    {
        Console.WriteLine(e.Message);
    }
    catch (AmazonServiceException e)
    {
        Console.WriteLine(e.Message);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }

    Console.ReadKey();
}