/// <summary>
/// Builds a <see cref="GetJobOutputResponse"/> from the HTTP response: the optional
/// metadata fields arrive as headers, and the job output itself is the raw body stream.
/// </summary>
/// <param name="context">Unmarshaller context wrapping the HTTP response.</param>
/// <returns>The populated <see cref="GetJobOutputResponse"/>.</returns>
public override AmazonWebServiceResponse Unmarshall(JsonUnmarshallerContext context)
{
    GetJobOutputResponse response = new GetJobOutputResponse();

    // Read each header exactly once (the original fetched each one twice:
    // once for the null test and again for the assignment).
    string checksum = context.ResponseData.GetHeaderValue("x-amz-sha256-tree-hash");
    if (checksum != null)
    {
        response.Checksum = checksum;
    }
    string contentRange = context.ResponseData.GetHeaderValue("Content-Range");
    if (contentRange != null)
    {
        response.ContentRange = contentRange;
    }
    string acceptRanges = context.ResponseData.GetHeaderValue("Accept-Ranges");
    if (acceptRanges != null)
    {
        response.AcceptRanges = acceptRanges;
    }
    string contentType = context.ResponseData.GetHeaderValue("Content-Type");
    if (contentType != null)
    {
        response.ContentType = contentType;
    }
    string archiveDescription = context.ResponseData.GetHeaderValue("x-amz-archive-description");
    if (archiveDescription != null)
    {
        response.ArchiveDescription = archiveDescription;
    }

    response.Status = (int)context.ResponseData.StatusCode;
    // The body is handed over as a stream; the caller owns and must dispose it.
    response.Body = context.Stream;
    return response;
}
/// <summary>
/// Creates a <see cref="GetJobOutputResponse"/> and delegates all field
/// population to the shared <c>UnmarshallResult</c> helper.
/// </summary>
/// <param name="context">Unmarshaller context wrapping the HTTP response.</param>
/// <returns>The populated <see cref="GetJobOutputResponse"/>.</returns>
public override AmazonWebServiceResponse Unmarshall(JsonUnmarshallerContext context)
{
    var result = new GetJobOutputResponse();
    UnmarshallResult(context, result);
    return result;
}
/// <summary>
/// Populates <paramref name="response"/> from the HTTP response: optional metadata
/// fields come from headers, the status code from the response, and the job output
/// itself is the raw body stream.
/// </summary>
/// <param name="context">Unmarshaller context wrapping the HTTP response.</param>
/// <param name="response">Response object to populate.</param>
private static void UnmarshallResult(JsonUnmarshallerContext context, GetJobOutputResponse response)
{
    // Read each header exactly once (the original fetched each one twice:
    // once for the null test and again for the assignment). The redundant
    // trailing `return;` has also been dropped.
    string checksum = context.ResponseData.GetHeaderValue("x-amz-sha256-tree-hash");
    if (checksum != null)
    {
        response.Checksum = checksum;
    }
    string contentRange = context.ResponseData.GetHeaderValue("Content-Range");
    if (contentRange != null)
    {
        response.ContentRange = contentRange;
    }
    string acceptRanges = context.ResponseData.GetHeaderValue("Accept-Ranges");
    if (acceptRanges != null)
    {
        response.AcceptRanges = acceptRanges;
    }
    string contentType = context.ResponseData.GetHeaderValue("Content-Type");
    if (contentType != null)
    {
        response.ContentType = contentType;
    }
    string archiveDescription = context.ResponseData.GetHeaderValue("x-amz-archive-description");
    if (archiveDescription != null)
    {
        response.ArchiveDescription = archiveDescription;
    }

    response.Status = (int)context.ResponseData.StatusCode;
    // The body is handed over as a stream; the caller owns and must dispose it.
    response.Body = context.Stream;
}
/// <summary>
/// Wraps the unmarshalled <c>GetJobOutputResult</c> in the response envelope.
/// </summary>
/// <param name="context">Unmarshaller context wrapping the HTTP response.</param>
/// <returns>The populated <see cref="GetJobOutputResponse"/>.</returns>
public override AmazonWebServiceResponse Unmarshall(JsonUnmarshallerContext context)
{
    var response = new GetJobOutputResponse
    {
        GetJobOutputResult = GetJobOutputResultUnmarshaller.GetInstance().Unmarshall(context)
    };
    return response;
}
/// <summary>
/// Downloads one byte range of a completed Glacier job's output and pairs the
/// body stream with the checksum Glacier reports for that range.
/// </summary>
/// <param name="jobId">Identifier of the completed retrieval job.</param>
/// <param name="start">First byte (inclusive) of the range to fetch.</param>
/// <param name="end">Last byte (inclusive) of the range to fetch.</param>
/// <returns>Part body stream and checksum wrapped in an <see cref="ArchivePartInfo"/>.</returns>
public ArchivePartInfo DownloadArchivePart(string jobId, long start, long end)
{
    var request = new GetJobOutputRequest
    {
        JobId = jobId,
        VaultName = _vault
    };
    request.SetRange(start, end);

    GetJobOutputResponse response = _amazonGlacierClient.GetJobOutput(request);
    GetJobOutputResult partResult = response.GetJobOutputResult;
    return new ArchivePartInfo(partResult.Body, partResult.Checksum);
}
/// <summary>
/// Downloads the output of a completed Glacier job and saves it to <c>s_filePath</c>.
/// </summary>
/// <param name="jobId">Identifier of the completed retrieval job.</param>
/// <param name="client">Glacier client used to fetch the job output.</param>
/// <param name="region">Unused here; kept for signature compatibility with callers.
/// NOTE(review): the client is presumably already bound to this region — confirm.</param>
private static void DownloadOutput(string jobId, AmazonGlacierClient client, Amazon.RegionEndpoint region)
{
    GetJobOutputRequest getJobOutputRequest = new GetJobOutputRequest()
    {
        JobId = jobId,
        VaultName = s_vaultName
    };
    GetJobOutputResponse getJobOutputResponse = client.GetJobOutput(getJobOutputRequest);
    using (Stream webStream = getJobOutputResponse.Body)
    {
        // Bug fix: File.OpenWrite uses FileMode.OpenOrCreate, which does NOT
        // truncate. If s_filePath already existed and was longer than the new
        // download, stale trailing bytes survived and corrupted the file.
        // FileMode.Create truncates any existing file first.
        using (Stream fileToSave = File.Open(s_filePath, FileMode.Create))
        {
            CopyStream(webStream, fileToSave);
        }
    }
}
/// <summary>
/// Downloads the output of a completed Glacier job and saves it to
/// <paramref name="outputPath"/>, overwriting any existing file.
/// </summary>
/// <param name="vaultName">Vault the job belongs to.</param>
/// <param name="jobId">Identifier of the completed retrieval job.</param>
/// <param name="client">Glacier client used to fetch the job output.</param>
/// <param name="outputPath">Destination file path.</param>
private static void DownloadOutput(string vaultName, string jobId, AmazonGlacierClient client, string outputPath)
{
    var request = new GetJobOutputRequest
    {
        JobId = jobId,
        VaultName = vaultName
    };
    GetJobOutputResponse response = client.GetJobOutput(request);

    // FileMode.Create truncates an existing file, so the saved output is
    // exactly what Glacier returned.
    using (Stream source = response.Body)
    using (Stream destination = File.Open(outputPath, FileMode.Create))
    {
        CopyStream(source, destination);
    }
}
/// <summary>
/// Downloads the output of a completed Glacier job and saves it to the file
/// named by <c>request.Description</c>.
/// </summary>
/// <param name="jobId">Identifier of the completed retrieval job.</param>
/// <param name="client">Glacier client used to fetch the job output.</param>
/// <param name="request">Archive request whose Description is used as the destination path.
/// NOTE(review): using a free-text description as a file path looks fragile — confirm callers.</param>
private static void DownloadOutput(string jobId, AmazonGlacier client, AWSArchiveRquest request)
{
    GetJobOutputRequest getJobOutputRequest = new GetJobOutputRequest()
    {
        JobId = jobId,
        VaultName = vaultName
    };
    GetJobOutputResponse getJobOutputResponse = client.GetJobOutput(getJobOutputRequest);
    GetJobOutputResult result = getJobOutputResponse.GetJobOutputResult;
    using (Stream webStream = result.Body)
    {
        // Bug fix: File.OpenWrite uses FileMode.OpenOrCreate, which does NOT
        // truncate — an existing longer file kept stale trailing bytes and the
        // saved archive was corrupt. FileMode.Create truncates first.
        using (Stream fileToSave = File.Open(request.Description, FileMode.Create))
        {
            CopyStream(webStream, fileToSave);
        }
    }
}
/// <summary>
/// Downloads the output of a completed inventory-retrieval job and saves it to
/// <paramref name="filename"/>, overwriting any existing file.
/// </summary>
/// <param name="jobId">Identifier of the completed inventory job.</param>
/// <param name="vaultName">Vault the job belongs to.</param>
/// <param name="filename">Destination file path.</param>
private static void GetInventory(string jobId, string vaultName, string filename)
{
    using (IAmazonGlacier client = new AmazonGlacierClient(RegionEndpoint.EUWest1))
    {
        GetJobOutputRequest getJobOutputRequest = new GetJobOutputRequest()
        {
            JobId = jobId,
            VaultName = vaultName,
        };
        GetJobOutputResponse getJobOutputResponse = client.GetJobOutput(getJobOutputRequest);
        using (Stream webStream = getJobOutputResponse.Body)
        {
            // Bug fix: File.OpenWrite uses FileMode.OpenOrCreate, which does NOT
            // truncate — if the file already existed and was longer, stale trailing
            // bytes survived and the inventory JSON was unparsable. FileMode.Create
            // truncates first.
            using (Stream fileToSave = File.Open(filename, FileMode.Create))
            {
                CopyStream(webStream, fileToSave);
            }
        }
    }
}
/// <summary>
/// Downloads the output of a completed inventory-retrieval job and returns it
/// as a byte array.
/// </summary>
/// <param name="jobId">Identifier of the completed inventory job.</param>
/// <returns>The complete job output bytes.</returns>
public byte[] GetVaultInventory(string jobId)
{
    GetJobOutputRequest inventoryRequest = new GetJobOutputRequest()
    {
        JobId = jobId,
        VaultName = _vault
    };
    GetJobOutputResponse response = _amazonGlacierClient.GetJobOutput(inventoryRequest);

    // Stream.CopyTo replaces the previous ReadByte loop, which made one
    // virtual call per byte and grew a List<byte> element by element —
    // same bytes returned, block-sized copies instead.
    using (Stream webStream = response.GetJobOutputResult.Body)
    using (var buffer = new MemoryStream())
    {
        webStream.CopyTo(buffer);
        return buffer.ToArray();
    }
}
// Downloads the completed Glacier job's output to filePath, resuming with HTTP
// range requests after transient failures, and verifies the downloaded file
// against the SHA-256 tree hash Glacier reported (when one was reported).
internal void Execute()
{
    long contentLength = -1;                // total size reported by the first GetJobOutput call; -1 until known
    string glacierProvidedCheckSum = null;  // tree hash from the first response (null for inventory jobs)
    string rangeValue = null;               // "bytes=N-" resume range; null on the first attempt
    Stream input = null;
    Stream output = null;
    try
    {
        // Make sure the directory exists to write to.
        FileInfo fi = new FileInfo(filePath);
        Directory.CreateDirectory(fi.DirectoryName);

        FileMode fileMode = FileMode.Create; // switched to Append when resuming after an error
        int retryAttempts = 0;
        byte[] buffer = new byte[1024 * 1024 * 5]; // 5 MiB read buffer
        long transferredBytes = 0;

        // Fixed-size (PART_STREAM_HASH_SIZE) rolling window: each time it fills,
        // its tree hash is appended to `hashes` for the final whole-file check.
        MemoryStream partStream = new MemoryStream(new byte[PART_STREAM_HASH_SIZE]);
        LinkedList <string> hashes = new LinkedList <string>();
        while (true)
        {
            try
            {
                output = File.Open(filePath, fileMode, FileAccess.Write, FileShare.None);
                try
                {
                    GetJobOutputRequest getJobOutputRequest = new GetJobOutputRequest()
                    {
                        AccountId = this.options.AccountId,
                        VaultName = this.vaultName,
                        JobId = jobId,
                        Range = rangeValue
                    };
                    // Tag the user agent so this download path is identifiable in request logs.
                    ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)getJobOutputRequest).AddBeforeRequestHandler(new ArchiveTransferManager.UserAgentPostFix("DownloadArchive").UserAgentRequestEventHandlerSync);
                    GetJobOutputResponse jobOutputResponse = this.manager.GlacierClient.GetJobOutput(getJobOutputRequest);
                    if (contentLength < 0)
                    {
                        // First (non-resumed) attempt: capture the full length and expected checksum.
                        contentLength = jobOutputResponse.ContentLength;
                        glacierProvidedCheckSum = jobOutputResponse.Checksum;
                    }

                    input = new BufferedStream(jobOutputResponse.Body);

                    long totalBytesFromGetJobOutput = jobOutputResponse.ContentLength;
                    long bytesReadFromGetJobOutput = 0;
                    int bytesRead = 0;
                    do
                    {
                        bytesRead = input.Read(buffer, 0, buffer.Length);
                        if (bytesRead > 0)
                        {
                            bytesReadFromGetJobOutput += bytesRead;
                            output.Write(buffer, 0, bytesRead);
                            transferredBytes += bytesRead;

                            // Feed the read bytes into the part window, splitting a read
                            // that straddles a part boundary: fill the window to capacity
                            // first, remember how much was consumed in `offset`.
                            int offset = 0;
                            if (partStream.Position + bytesRead > PART_STREAM_HASH_SIZE)
                            {
                                var length = PART_STREAM_HASH_SIZE - (int)partStream.Position;
                                partStream.Write(buffer, 0, length);
                                offset = length;
                            }
                            else
                            {
                                partStream.Write(buffer, 0, bytesRead);
                                offset = bytesRead;
                            }

                            if (partStream.Position == PART_STREAM_HASH_SIZE)
                            {
                                // Window full: hash this part and rewind to reuse the window.
                                partStream.Position = 0;
                                hashes.AddLast(TreeHashGenerator.CalculateTreeHash(partStream));
                            }
                            if (offset != bytesRead)
                            {
                                // Remainder of a boundary-straddling read starts the next part.
                                partStream.Write(buffer, offset, bytesRead - offset);
                            }

                            // Make callback on progress
                            AWSSDKUtils.InvokeInBackground(
                                this.options.StreamTransferProgress,
                                new Runtime.StreamTransferProgressArgs(bytesRead, transferredBytes, contentLength),
                                this.manager);
                        }
                        if (retryAttempts > 0)
                        {
                            retryAttempts = 0; // Reset retry attempts back to 0 since we were able to successfully write more data to disk.
                        }
                    } while (bytesReadFromGetJobOutput < totalBytesFromGetJobOutput);

                    // Compute hash of the last remaining bytes
                    if (partStream.Position != 0)
                    {
                        partStream.SetLength(partStream.Position);
                        partStream.Position = 0;
                        hashes.AddLast(TreeHashGenerator.CalculateTreeHash(partStream));
                    }
                    break; // download complete
                }
                finally
                {
                    // Close the file before any retry so it can be reopened in Append mode.
                    output.Close();
                    output = null;
                    try
                    {
                        if (input != null)
                        {
                            input.Close();
                        }
                    }
                    catch (Exception) { } // best-effort close of the network stream
                }
            }
            catch (Exception e)
            {
                // 404 means the job does not exist (or has expired): not retryable.
                var age = e as AmazonGlacierException;
                if (age != null && age.StatusCode == HttpStatusCode.NotFound)
                {
                    throw;
                }

                // Resume from the bytes already on disk: append mode plus an
                // open-ended byte range starting at the current file length.
                fileMode = FileMode.Append;
                rangeValue = string.Format(CultureInfo.InvariantCulture, "bytes={0}-", new FileInfo(filePath).Length);
                retryAttempts++;
                if (retryAttempts <= DownloadFileCommand.MAX_OPERATION_RETRY)
                {
                    Console.WriteLine("Error and going to retry: {0}", e.Message);
                    Console.WriteLine(e.StackTrace);
                    Thread.Sleep(60 * 1000); // back off one minute before retrying
                }
                else
                {
                    throw; // retries exhausted
                }
            }
        }

        // If the job output is a vault inventory then Glacier does not return back a tree hash.
        if (!string.IsNullOrEmpty(glacierProvidedCheckSum))
        {
            var computedCheckSum = TreeHashGenerator.CalculateTreeHash(hashes);
            if (!string.Equals(glacierProvidedCheckSum, computedCheckSum, StringComparison.OrdinalIgnoreCase))
            {
                throw new AmazonGlacierException("Checksum of the downloaded file does not match the checksum reported by Amazon Glacier.");
            }
        }
    }
    catch (IOException e)
    {
        throw new IOException("Unable to save archive to disk", e);
    }
    finally
    {
        // Defensive cleanup for any path that escaped the inner finally.
        try
        {
            if (input != null)
            {
                input.Close();
            }
        }
        catch (Exception) { }
        try
        {
            if (output != null)
            {
                output.Close();
            }
        }
        catch (Exception) { }
    }
}