/// <summary>
/// Waits for the given Glacier job to complete, then downloads its output
/// (e.g. a vault inventory document) and returns it as a string.
/// </summary>
/// <param name="jobId">Identifier of the Glacier job to poll and read.</param>
/// <param name="token">Cancellation token observed while polling, delaying, and downloading.</param>
/// <returns>The job output body read to the end as text.</returns>
public async Task<string> DownloadInventoryAsync(string jobId, CancellationToken token)
{
    // Poll until the job reports completion; Timeout is the delay between polls.
    var describeReq = new DescribeJobRequest(Vault, jobId);
    var describeResult = await client.DescribeJobAsync(describeReq, token);
    while (!describeResult.Completed)
    {
        await Task.Delay(Timeout, token);
        describeResult = await client.DescribeJobAsync(describeReq, token);
    }

    var req = new GetJobOutputRequest(Vault, jobId, null);
    var result = await client.GetJobOutputAsync(req, token);
    using (var reader = new StreamReader(result.Body))
    {
        return await reader.ReadToEndAsync();
    }
}
/// <summary>
/// Synchronously invokes the GetJobOutput operation using the standard
/// request marshaller and response unmarshaller pair.
/// </summary>
/// <param name="request">The GetJobOutput request to execute.</param>
/// <returns>The service response for the operation.</returns>
internal GetJobOutputResponse GetJobOutput(GetJobOutputRequest request)
{
    return Invoke<GetJobOutputRequest, GetJobOutputResponse>(
        request,
        new GetJobOutputRequestMarshaller(),
        GetJobOutputResponseUnmarshaller.Instance);
}
/// <summary>
/// Downloads the output of a completed Glacier job to a local file,
/// logging and rethrowing any failure.
/// </summary>
/// <param name="jobId">Identifier of the completed Glacier job.</param>
/// <param name="client">Client used to fetch the job output.</param>
/// <param name="vaultName">Name of the vault the job belongs to.</param>
/// <param name="filePath">Destination path; an existing file is overwritten.</param>
public static void DownloadGlacierJobOutput(
    string jobId,
    AmazonGlacierClient client,
    string vaultName,
    string filePath)
{
    try
    {
        var getJobOutputRequest = new GetJobOutputRequest
        {
            JobId = jobId,
            VaultName = vaultName
        };

        var getJobOutputResponse = client.GetJobOutput(getJobOutputRequest);
        using (Stream webStream = getJobOutputResponse.Body)
        using (Stream fileToSave = File.Create(filePath))
        {
            CopyStream(webStream, fileToSave);
        }
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex.Message);
        // "throw;" preserves the original stack trace; "throw ex;" would reset it.
        throw;
    }
}
/// <summary>
/// Initiates the asynchronous execution of the GetJobOutput operation.
/// <seealso cref="Amazon.Glacier.IAmazonGlacier"/>
/// </summary>
///
/// <param name="request">Container for the necessary parameters to execute the GetJobOutput operation.</param>
/// <param name="cancellationToken">
/// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
/// </param>
/// <returns>The task object representing the asynchronous operation.</returns>
public Task<GetJobOutputResponse> GetJobOutputAsync(GetJobOutputRequest request, System.Threading.CancellationToken cancellationToken = default(CancellationToken))
{
    return InvokeAsync<GetJobOutputRequest, GetJobOutputResponse>(
        request,
        new GetJobOutputRequestMarshaller(),
        GetJobOutputResponseUnmarshaller.Instance,
        cancellationToken);
}
/// <summary>
/// Downloads the output of a completed retrieval job and returns it as a byte array.
/// </summary>
/// <param name="jobId">Identifier of the completed Glacier job.</param>
/// <returns>The raw bytes of the job output body.</returns>
public byte[] DownloadFile(string jobId)
{
    var outputRequest = new GetJobOutputRequest
    {
        JobId = jobId,
        VaultName = this.VaultName
    };

    var outputResponse = Service.GetJobOutput(outputRequest);
    return GetBytes(outputResponse.Body);
}
/// <summary>
/// Downloads the output of a completed Glacier job to this.fileName.
/// </summary>
/// <param name="jobId">Identifier of the completed Glacier job.</param>
/// <param name="client">Client used to fetch the job output.</param>
void DownloadOutput(string jobId, AmazonGlacierClient client)
{
    var getJobOutputRequest = new GetJobOutputRequest
    {
        JobId = jobId,
        VaultName = vaultName
    };
    var getJobOutputResponse = client.GetJobOutput(getJobOutputRequest);
    using (Stream webStream = getJobOutputResponse.Body)
    // File.Create truncates an existing file; File.OpenWrite does not, which
    // would leave stale trailing bytes when the new output is shorter.
    using (Stream fileToSave = File.Create(this.fileName))
    {
        CopyStream(webStream, fileToSave);
    }
}
/// <summary>
/// Downloads a completed inventory job's output and returns its JSON payload
/// as the action result.
/// </summary>
/// <param name="jobId">Identifier of the completed inventory-retrieval job.</param>
/// <returns>A JSON result containing the deserialized inventory document.</returns>
public async Task<IActionResult> GetInventoryOutput(string jobId)
{
    GetJobOutputRequest request = new GetJobOutputRequest
    {
        VaultName = S3Settings.VaultName,
        JobId = jobId
    };
    var response = await GlacierClient.GetJobOutputAsync(request);

    // Dispose the reader (and the underlying body stream) and read
    // asynchronously instead of blocking the thread with ReadToEnd.
    using (StreamReader reader = new StreamReader(response.Body))
    {
        string text = await reader.ReadToEndAsync();
        return new JsonResult(JsonConvert.DeserializeObject(text));
    }
}
/// <summary>
/// Creates the range formatted string and set the Range property.
/// </summary>
/// <param name="request">The request whose Range property is assigned.</param>
/// <param name="start">The start of the range.</param>
/// <param name="end">The end of the range. This can be null which would return the data to the end.</param>
public static void SetRange(this GetJobOutputRequest request, long start, long?end)
{
    // Open-ended range ("bytes=N-") when no end is supplied.
    request.Range = end == null
        ? string.Format(CultureInfo.InvariantCulture, "bytes={0}-", start)
        : string.Format(CultureInfo.InvariantCulture, "bytes={0}-{1}", start, end.Value);
}
/// <summary>
/// Downloads the output of a completed Glacier job to s_filePath.
/// </summary>
/// <param name="jobId">Identifier of the completed Glacier job.</param>
/// <param name="client">Client used to fetch the job output.</param>
/// <param name="region">Unused; kept for signature compatibility with callers.</param>
private static void DownloadOutput(string jobId, AmazonGlacierClient client, Amazon.RegionEndpoint region)
{
    GetJobOutputRequest getJobOutputRequest = new GetJobOutputRequest()
    {
        JobId = jobId,
        VaultName = s_vaultName
    };
    GetJobOutputResponse getJobOutputResponse = client.GetJobOutput(getJobOutputRequest);
    using (Stream webStream = getJobOutputResponse.Body)
    // File.Create truncates; File.OpenWrite would leave stale trailing bytes
    // if the existing file were longer than the new output.
    using (Stream fileToSave = File.Create(s_filePath))
    {
        CopyStream(webStream, fileToSave);
    }
}
/// <summary>
/// Downloads a byte range of an archive-retrieval job's output and wraps the
/// body stream and its checksum in an ArchivePartInfo.
/// </summary>
/// <param name="jobId">Identifier of the completed retrieval job.</param>
/// <param name="start">First byte of the range to download.</param>
/// <param name="end">Last byte of the range to download.</param>
/// <returns>Part info carrying the body stream and Glacier-provided checksum.</returns>
public ArchivePartInfo DownloadArchivePart(string jobId, long start, long end)
{
    var partRequest = new GetJobOutputRequest()
    {
        JobId = jobId,
        VaultName = _vault
    };
    partRequest.SetRange(start, end);

    GetJobOutputResult partResult = _amazonGlacierClient.GetJobOutput(partRequest).GetJobOutputResult;
    return new ArchivePartInfo(partResult.Body, partResult.Checksum);
}
/// <summary>
/// Downloads a Glacier archive to the given output stream. If the file has no
/// retrieval job yet, an "archive-retrieval" job is initiated and its id is
/// stored on the file; the method then polls until the job completes and
/// streams the job output into <paramref name="output"/>.
/// </summary>
/// <param name="file">The Glacier file to download; must not be a folder.</param>
/// <param name="output">Destination stream for the archive bytes.</param>
/// <param name="token">Cancellation token observed during polling and copying.</param>
/// <param name="progressCallback">Transfer-progress callback (not forwarded here; the copy passes null).</param>
/// <exception cref="NotSupportedException">Thrown when <paramref name="file"/> is a folder.</exception>
public override async Task DownloadFileAsync(StorageFile file, Stream output, CancellationToken token, Action<TransferProgress> progressCallback)
{
    if (file.IsFolder)
    {
        throw new NotSupportedException("Glacier is not supported directories");
    }

    // Cast once instead of repeating (GlacierFile)file at each use.
    var glacierFile = (GlacierFile)file;
    if (glacierFile.JobId == null)
    {
        // No retrieval job yet: start one and remember its id on the file.
        var initReq = new InitiateJobRequest(Vault, new JobParameters
        {
            ArchiveId = file.Id,
            Type = "archive-retrieval"
        });
        var initResult = await client.InitiateJobAsync(initReq, token);
        glacierFile.JobId = initResult.JobId;
    }
    var jobId = glacierFile.JobId;

    // Poll until the job reports completion; Timeout is the delay between polls.
    var describeReq = new DescribeJobRequest(Vault, jobId);
    var describeResult = await client.DescribeJobAsync(describeReq, token);
    while (!describeResult.Completed)
    {
        await Task.Delay(Timeout, token);
        describeResult = await client.DescribeJobAsync(describeReq, token);
    }

    var req = new GetJobOutputRequest(Vault, jobId, null);
    var result = await client.GetJobOutputAsync(req, token);
    await Common.CopyStreamAsync(result.Body, output, null, result.ContentLength);
}
/// <summary>
/// Downloads the output of a completed Glacier job to the path given by
/// request.Description.
/// </summary>
/// <param name="jobId">Identifier of the completed Glacier job.</param>
/// <param name="client">Client used to fetch the job output.</param>
/// <param name="request">Archive request whose Description field holds the destination path.</param>
private static void DownloadOutput(string jobId, AmazonGlacier client, AWSArchiveRquest request)
{
    GetJobOutputRequest getJobOutputRequest = new GetJobOutputRequest()
    {
        JobId = jobId,
        VaultName = vaultName
    };
    GetJobOutputResponse getJobOutputResponse = client.GetJobOutput(getJobOutputRequest);
    GetJobOutputResult result = getJobOutputResponse.GetJobOutputResult;
    using (Stream webStream = result.Body)
    // File.Create truncates; File.OpenWrite would leave stale trailing bytes
    // if the existing file were longer than the new output.
    using (Stream fileToSave = File.Create(request.Description))
    {
        CopyStream(webStream, fileToSave);
    }
}
/// <summary>
/// Downloads the output of a completed Glacier job to the given path,
/// overwriting any existing file.
/// </summary>
/// <param name="vaultName">Name of the vault the job belongs to.</param>
/// <param name="jobId">Identifier of the completed Glacier job.</param>
/// <param name="client">Client used to fetch the job output.</param>
/// <param name="outputPath">Destination file path.</param>
private static void DownloadOutput(string vaultName, string jobId, AmazonGlacierClient client, string outputPath)
{
    var outputRequest = new GetJobOutputRequest()
    {
        JobId = jobId,
        VaultName = vaultName
    };
    var outputResponse = client.GetJobOutput(outputRequest);

    using (Stream webStream = outputResponse.Body)
    using (Stream fileToSave = File.Open(outputPath, FileMode.Create))
    {
        CopyStream(webStream, fileToSave);
    }
}
/// <summary>
/// Console command handler: polls a Glacier job until it completes, then
/// downloads the first 1 GiB of its output (inventory JSON) to a file.
/// Expected args: aws_key aws_secret region vault_name job_id interval_secs output_filename
/// </summary>
/// <param name="args">Raw command-line arguments as described above.</param>
private static void HandleStatus(string[] args)
{
    // Validate the argument count BEFORE indexing: the original read
    // args[2]/args[5] first and threw IndexOutOfRangeException on short input.
    int interval = 0;
    var region = args.Length >= 7
        ? RegionEndpoint.EnumerableAllRegions.SingleOrDefault(reg => reg.SystemName == args[2])
        : null;
    if (args.Length < 7 || !int.TryParse(args[5], out interval) || region == null)
    {
        Console.WriteLine("args should be aws_key aws_secret region vault_name job_id interval_secs output_filename");
        return;
    }
    var aws_key = args[0];
    var aws_secret = args[1];
    var vault_name = args[3];
    var job_id = args[4];
    var filename = args[6];

    var creds = new BasicAWSCredentials(aws_key, aws_secret);
    // Very long HTTP timeout: the GetJobOutput body for a large inventory can
    // take a long time to stream.
    var config = new AmazonGlacierConfig
    {
        RegionEndpoint = region,
        Timeout = TimeSpan.FromDays(10)
    };
    var client = new AmazonGlacierClient(creds, config);

    var descReq = new DescribeJobRequest(vault_name, job_id);
    do
    {
        Console.WriteLine("Checking status...");
        var jobStatus = client.DescribeJobAsync(descReq).Result;
        if (jobStatus.Completed)
        {
            Console.WriteLine("Job completed.");
            break;
        }
        Console.WriteLine($"Job incomplete.");
        Console.WriteLine($"Job status: {jobStatus.StatusCode}");
        // interval is in SECONDS (per the usage text); Thread.Sleep(int)
        // takes milliseconds, so convert explicitly.
        Thread.Sleep(TimeSpan.FromSeconds(interval));
    } while (true);

    var retrReq = new GetJobOutputRequest(vault_name, job_id, "bytes=0-1073741824");
    var retrievalPromise = client.GetJobOutputAsync(retrReq).Result;
    var json = new StreamReader(retrievalPromise.Body).ReadToEnd();
    File.WriteAllText(filename, json);
    Console.WriteLine($"Output written to {filename}");
}
/// <summary>
/// Downloads the output of a completed inventory-retrieval job to the given
/// file using a client created for the EUWest1 region.
/// </summary>
/// <param name="jobId">Identifier of the completed inventory job.</param>
/// <param name="vaultName">Name of the vault the job belongs to.</param>
/// <param name="filename">Destination file path; an existing file is overwritten.</param>
private static void GetInventory(string jobId, string vaultName, string filename)
{
    using (IAmazonGlacier client = new AmazonGlacierClient(RegionEndpoint.EUWest1))
    {
        GetJobOutputRequest getJobOutputRequest = new GetJobOutputRequest()
        {
            JobId = jobId,
            VaultName = vaultName,
        };
        GetJobOutputResponse getJobOutputResponse = client.GetJobOutput(getJobOutputRequest);
        using (Stream webStream = getJobOutputResponse.Body)
        // File.Create truncates; File.OpenWrite would leave stale trailing
        // bytes if the existing file were longer than the new inventory.
        using (Stream fileToSave = File.Create(filename))
        {
            CopyStream(webStream, fileToSave);
        }
    }
}
/// <summary>
/// Downloads the output of a completed inventory job and returns it as a
/// byte array.
/// </summary>
/// <param name="jobId">Identifier of the completed inventory job.</param>
/// <returns>The raw bytes of the job output body.</returns>
public byte[] GetVaultInventory(string jobId)
{
    GetJobOutputRequest inventoryRequest = new GetJobOutputRequest()
    {
        JobId = jobId,
        VaultName = _vault
    };
    GetJobOutputResponse response = _amazonGlacierClient.GetJobOutput(inventoryRequest);
    using (Stream webStream = response.GetJobOutputResult.Body)
    using (var buffer = new MemoryStream())
    {
        // Buffered copy instead of the original one-ReadByte-per-byte loop,
        // which made a virtual call per byte on a network stream.
        webStream.CopyTo(buffer);
        return buffer.ToArray();
    }
}
/// <summary>
/// Restores an archived image: downloads the completed Glacier job output,
/// reads the archive description for image metadata, and re-uploads the image
/// to the original S3 bucket.
/// </summary>
/// <param name="jobId">Identifier of the completed archive-retrieval job.</param>
/// <returns>201 Created with the uploaded image model.</returns>
public async Task<IActionResult> RestoreArchive(string jobId)
{
    GetJobOutputRequest request = new GetJobOutputRequest
    {
        VaultName = S3Settings.VaultName,
        JobId = jobId
    };
    // Await the async API instead of blocking the request thread on the sync call.
    var response = await GlacierClient.GetJobOutputAsync(request);

    ArchiveDescription description = JsonConvert.DeserializeObject<ArchiveDescription>(response.ArchiveDescription);

    // AWS HashStream doesn't support seeking so we need to copy it back to a MemoryStream
    MemoryStream outputStream = new MemoryStream();
    await response.Body.CopyToAsync(outputStream);
    // Rewind before handing the stream to the uploader: CopyTo leaves
    // Position at the end, which would make the upload read zero bytes.
    outputStream.Position = 0;

    ImageUploadedModel model = await ImageStore.UploadImage(
        S3Settings.OriginalBucketName,
        S3Settings.OriginalBucketUrl,
        description.ObjectKey,
        S3StorageClass.StandardInfrequentAccess,
        S3CannedACL.Private,
        null,
        new ImageInfo
        {
            MimeType = description.ContentType,
            Width = description.Width,
            Height = description.Height,
            Image = outputStream
        }
    );

    return Created(model.ObjectLocation, model);
}
// Downloads the Glacier job output to filePath with resume-on-failure
// semantics: on a transient error the download is retried from the current
// file length via an HTTP byte-range request, and the result is verified
// against the Glacier-provided tree hash when one is returned.
// NOTE(review): retry/offset bookkeeping is order-sensitive; comments only,
// code unchanged.
internal void Execute()
{
    long contentLength = -1;                 // total size, learned from the first response
    string glacierProvidedCheckSum = null;   // tree hash reported by Glacier (null for inventory jobs)
    string rangeValue = null;                // null = full download; set to "bytes=N-" on resume
    Stream input = null;
    Stream output = null;
    try
    {
        // Make sure the directory exists to write too.
        FileInfo fi = new FileInfo(filePath);
        Directory.CreateDirectory(fi.DirectoryName);
        // First attempt truncates; switched to Append when resuming after an error.
        FileMode fileMode = FileMode.Create;
        int retryAttempts = 0;
        byte[] buffer = new byte[1024 * 1024 * 5];
        long transferredBytes = 0;
        // Fixed-size window over the downloaded bytes; each full window
        // contributes one part hash to the final tree-hash computation.
        MemoryStream partStream = new MemoryStream(new byte[PART_STREAM_HASH_SIZE]);
        LinkedList <string> hashes = new LinkedList <string>();
        while (true)
        {
            try
            {
                output = File.Open(filePath, fileMode, FileAccess.Write, FileShare.None);
                try
                {
                    GetJobOutputRequest getJobOutputRequest = new GetJobOutputRequest()
                    {
                        AccountId = this.options.AccountId,
                        VaultName = this.vaultName,
                        JobId = jobId,
                        Range = rangeValue
                    };
                    // Tag the request's user agent so the service can attribute it to this high-level API.
                    ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)getJobOutputRequest).AddBeforeRequestHandler(new ArchiveTransferManager.UserAgentPostFix("DownloadArchive").UserAgentRequestEventHandlerSync);
                    GetJobOutputResponse jobOutputResponse = this.manager.GlacierClient.GetJobOutput(getJobOutputRequest);
                    // Capture total length and expected checksum only once, from the first (full) response.
                    if (contentLength < 0)
                    {
                        contentLength = jobOutputResponse.ContentLength;
                        glacierProvidedCheckSum = jobOutputResponse.Checksum;
                    }
                    input = new BufferedStream(jobOutputResponse.Body);
                    long totalBytesFromGetJobOutput = jobOutputResponse.ContentLength;
                    long bytesReadFromGetJobOutput = 0;
                    int bytesRead = 0;
                    do
                    {
                        bytesRead = input.Read(buffer, 0, buffer.Length);
                        if (bytesRead > 0)
                        {
                            bytesReadFromGetJobOutput += bytesRead;
                            output.Write(buffer, 0, bytesRead);
                            transferredBytes += bytesRead;
                            // Feed the same bytes into the part-hash window,
                            // splitting a read that straddles a window boundary.
                            int offset = 0;
                            if (partStream.Position + bytesRead > PART_STREAM_HASH_SIZE)
                            {
                                // Fill only up to the window boundary; the
                                // remainder is written after the hash below.
                                var length = PART_STREAM_HASH_SIZE - (int)partStream.Position;
                                partStream.Write(buffer, 0, length);
                                offset = length;
                            }
                            else
                            {
                                partStream.Write(buffer, 0, bytesRead);
                                offset = bytesRead;
                            }
                            // Window full: hash it and rewind for the next part.
                            if (partStream.Position == PART_STREAM_HASH_SIZE)
                            {
                                partStream.Position = 0;
                                hashes.AddLast(TreeHashGenerator.CalculateTreeHash(partStream));
                            }
                            // Write the tail of a straddling read into the fresh window.
                            if (offset != bytesRead)
                            {
                                partStream.Write(buffer, offset, bytesRead - offset);
                            }
                            // Make callback on progress
                            AWSSDKUtils.InvokeInBackground(
                                this.options.StreamTransferProgress,
                                new Runtime.StreamTransferProgressArgs(bytesRead, transferredBytes, contentLength),
                                this.manager);
                        }
                        if (retryAttempts > 0)
                        {
                            retryAttempts = 0; // Reset retry attempts back to 0 since we able to successfully write more data to disk.
                        }
                    } while (bytesReadFromGetJobOutput < totalBytesFromGetJobOutput);
                    // Compute hash of the last remaining bytes
                    if (partStream.Position != 0)
                    {
                        partStream.SetLength(partStream.Position);
                        partStream.Position = 0;
                        hashes.AddLast(TreeHashGenerator.CalculateTreeHash(partStream));
                    }
                    break;
                }
                finally
                {
                    output.Close();
                    output = null;
                    try
                    {
                        if (input != null)
                        {
                            input.Close();
                        }
                    }
                    catch (Exception) { }
                }
            }
            catch (Exception e)
            {
                // 404 means the job/vault no longer exists; retrying cannot help.
                var age = e as AmazonGlacierException;
                if (age != null && age.StatusCode == HttpStatusCode.NotFound)
                {
                    throw;
                }
                // Resume: append to what is already on disk and request only
                // the remaining byte range on the next attempt.
                fileMode = FileMode.Append;
                rangeValue = string.Format(CultureInfo.InvariantCulture, "bytes={0}-", new FileInfo(filePath).Length);
                retryAttempts++;
                if (retryAttempts <= DownloadFileCommand.MAX_OPERATION_RETRY)
                {
                    Console.WriteLine("Error and going to retry: {0}", e.Message);
                    Console.WriteLine(e.StackTrace);
                    Thread.Sleep(60 * 1000);
                }
                else
                {
                    throw;
                }
            }
        }
        // If the job output is a vault inventory then Glacier does not return back a tree hash.
        if (!string.IsNullOrEmpty(glacierProvidedCheckSum))
        {
            var computedCheckSum = TreeHashGenerator.CalculateTreeHash(hashes);
            if (!string.Equals(glacierProvidedCheckSum, computedCheckSum, StringComparison.OrdinalIgnoreCase))
            {
                throw new AmazonGlacierException("Checksum of the downloaded file does not match the checksum reported by Amazon Glacier.");
            }
        }
    }
    catch (IOException e)
    {
        throw new IOException("Unable to save archive to disk", e);
    }
    finally
    {
        // Best-effort cleanup; close failures are deliberately swallowed so
        // they do not mask the original exception.
        try
        {
            if (input != null)
            {
                input.Close();
            }
        }
        catch (Exception) { }
        try
        {
            if (output != null)
            {
                output.Close();
            }
        }
        catch (Exception) { }
    }
}