/// <summary>
/// Downloads one archive via the raw Glacier client: creates the SNS topic /
/// SQS queue used for job-completion notification, kicks off the retrieval,
/// and always tears the topic and queue back down afterwards.
/// </summary>
/// <param name="request">Archive descriptor (ID, description, file name) to retrieve.</param>
public static void DownloadFile(AWSArchiveRquest request)
{
    AmazonGlacier glacierClient;
    try
    {
        using (glacierClient = new AmazonGlacierClient(Amazon.RegionEndpoint.USEast1))
        {
            // Setup SNS topic and SQS queue.
            SetupTopicAndQueue();
            RetrieveArchive(glacierClient, request);
        }
        Console.WriteLine("Operations successful. To continue, press Enter");
    }
    catch (Exception e)
    {
        // AmazonGlacierException and AmazonServiceException both derive from
        // Exception and were handled identically, so a single handler suffices.
        Console.WriteLine(e.Message);
    }
    finally
    {
        // Delete SNS topic and SQS queue.
        snsClient.DeleteTopic(new DeleteTopicRequest() { TopicArn = topicArn });
        sqsClient.DeleteQueue(new DeleteQueueRequest() { QueueUrl = queueUrl });
    }
}
/// <summary>
/// Downloads one archive with the high-level ArchiveTransferManager,
/// reporting progress through AWSMoveFilesXDynamo.OnProgress.
/// </summary>
/// <param name="request">Archive descriptor (ID and target description/path).</param>
/// <returns>A fresh, empty AWSArchiveResult (the method does not populate it).</returns>
public static AWSArchiveResult DownloadFile(AWSArchiveRquest request)
{
    using (manager = new ArchiveTransferManager(RegionEndpoint.USEast1))
    {
        if (CheckRequiredFields())
        {
            // FIX: the original wrapped this in a second, outer try whose
            // AmazonGlacierException/AmazonServiceException handlers were
            // unreachable duplicates — the inner try already caught Exception.
            try
            {
                Form1.log.Info("Download a Archive");
                var options = new DownloadOptions();
                options.StreamTransferProgress += AWSMoveFilesXDynamo.OnProgress;
                // Download an archive.
                manager.Download(vaultName, request.ArchiveID, request.Description, options);
            }
            catch (AmazonGlacierException e) { Form1.log.Error(e.Message); }
            catch (AmazonServiceException e) { Form1.log.Error(e.Message); }
            catch (Exception e) { Form1.log.Error(e.Message); }
        }
        return (new AWSArchiveResult());
    }
}
/// <summary>
/// Task body for a staggered download: sleeps for the request's configured
/// delay, then downloads the archive through a per-vault transfer helper.
/// </summary>
/// <param name="are">Archive request carrying the vault name and stagger delay (seconds).</param>
private void DownloadT(AWSArchiveRquest are)
{
    int delayMs = (int)(are.SleepSec * 1000);
    Form1.log.Info("Sleep To Stagger Are Request " + Convert.ToString(delayMs) + "ms " + are.ArchiveID + " " + are.Description + " " + are.FileName);
    Thread.Sleep(delayMs);

    var downloader = new AWSMoveFilesXDynamoMT(are.VaultName);
    downloader.DownloadFile(are);
}
/// <summary>
/// Restore button handler: lets the user pick a Glacier transaction-log CSV,
/// validates its header, and spawns one staggered download task per row
/// (each row delayed one hour after the previous), then waits for all of them.
/// </summary>
private void Restore_Click(object sender, EventArgs e)
{
    String fileFilter = "GlaicerLogs (*GlacierTransationLog*.csv)|*GlacierTransationLog*.csv";//"Text files (*.txt)|*.txt
    fileLogFileDialog.Filter = fileFilter;
    fileLogFileDialog.ShowDialog();

    List<Task> taskList = new List<Task>(); // typed list instead of ArrayList + cast
    int sleepSec = 0;

    // FIX: the StreamReader was never disposed in the original (file handle leak).
    using (StreamReader sr = new StreamReader(fileLogFileDialog.FileName))
    {
        String line = String.Empty;
        String headers = sr.ReadLine(); //AWSVaultName,ArchiveName,ArchiveID,SHA256 Tree Checksum Received,SHA256 Tree Checksum Sent
        string[] headerFields = headers.Split(new char[] { ',' });
        // FIX: String.Compare returns any negative, zero, or positive value, so the
        // original `== -1` test missed headers that compare *greater* than expected.
        if (String.Compare(headerFields[0], "AWSVaultName") != 0)
        {
            throw new Exception("Possible problem with restore file. Expecting AWSVaultName,ArchiveName,ArchiveID,SHA256 Tree Checksum Received,SHA256 Tree Checksum Sent");
        }
        while ((line = sr.ReadLine()) != null)
        {
            // `are` is declared inside the loop so each task's closure captures its own row.
            AWSArchiveRquest are = new AWSArchiveRquest();
            string[] fields = line.Split(new char[] { ',' });
            are.VaultName = fields[0];
            are.Description = fields[1];
            are.FileName = fields[1];
            are.ArchiveID = fields[2];
            are.ChecksumTreeSHA256Compressed = fields[3];
            are.SleepSec = sleepSec;
            sleepSec += (int)(60 * 60 * 1); // stagger each subsequent retrieval by one hour
            taskList.Add(Task.Factory.StartNew(() => DownloadT(are)));
        }
    }
    Task.WaitAll(taskList.ToArray());
}
/// <summary>
/// Streams the output of a completed Glacier retrieval job into the local
/// file named by request.Description.
/// </summary>
/// <param name="jobId">Identifier of the completed archive-retrieval job.</param>
/// <param name="client">Glacier client used to fetch the job output.</param>
/// <param name="request">Request whose Description field is the destination file path.</param>
private static void DownloadOutput(string jobId, AmazonGlacier client, AWSArchiveRquest request)
{
    var outputRequest = new GetJobOutputRequest()
    {
        JobId = jobId,
        VaultName = vaultName
    };
    GetJobOutputResult result = client.GetJobOutput(outputRequest).GetJobOutputResult;

    // Copy the response body straight to disk; both streams are disposed on exit.
    using (Stream webStream = result.Body)
    using (Stream fileToSave = File.OpenWrite(request.Description))
    {
        CopyStream(webStream, fileToSave);
    }
}
/// <summary>
/// Starts an archive-retrieval job for the requested archive (completion is
/// announced on the SNS topic), then blocks in ProcessQueue until the job
/// finishes and the output has been downloaded.
/// </summary>
/// <param name="client">Glacier client used to initiate the job.</param>
/// <param name="request">Archive descriptor carrying the ArchiveID to retrieve.</param>
static void RetrieveArchive(AmazonGlacier client, AWSArchiveRquest request)
{
    // Initiate job.
    var jobParameters = new JobParameters()
    {
        Type = "archive-retrieval",
        ArchiveId = request.ArchiveID,
        Description = "This job is to download archive updated as part of getting started",
        SNSTopic = topicArn,
    };
    var initJobRequest = new InitiateJobRequest()
    {
        VaultName = vaultName,
        JobParameters = jobParameters
    };

    InitiateJobResponse initJobResponse = client.InitiateJob(initJobRequest);

    // Check queue for a message and if job completed successfully, download archive.
    ProcessQueue(initJobResponse.InitiateJobResult.JobId, client, request);
}
/// <summary>
/// Polls the SQS queue (one-minute intervals) for the SNS notification of the
/// given retrieval job; on success downloads the job output, on failure logs
/// it, and in either case deletes the processed queue message.
/// </summary>
/// <param name="jobId">Retrieval job whose completion notification is awaited.</param>
/// <param name="client">Glacier client passed through to DownloadOutput.</param>
/// <param name="request">Archive request passed through to DownloadOutput.</param>
private static void ProcessQueue(string jobId, AmazonGlacier client, AWSArchiveRquest request)
{
    var receiveMessageRequest = new ReceiveMessageRequest()
    {
        QueueUrl = queueUrl,
        MaxNumberOfMessages = 1
    };

    for (bool jobDone = false; !jobDone; )
    {
        var receiveMessageResponse = sqsClient.ReceiveMessage(receiveMessageRequest);
        if (receiveMessageResponse.ReceiveMessageResult.Message.Count == 0)
        {
            // No notification yet — wait a minute and poll again.
            Thread.Sleep(1000 * 60);
            continue;
        }

        Amazon.SQS.Model.Message message = receiveMessageResponse.ReceiveMessageResult.Message[0];
        // SNS wraps the Glacier job notification: the outer JSON's "Message"
        // field is itself a JSON document containing the job's StatusCode.
        Dictionary<string, string> outerLayer = JsonConvert.DeserializeObject<Dictionary<string, string>>(message.Body);
        Dictionary<string, string> fields = JsonConvert.DeserializeObject<Dictionary<string, string>>(outerLayer["Message"]);
        string statusCode = fields["StatusCode"] as string;

        if (string.Equals(statusCode, GlacierUtils.JOB_STATUS_SUCCEEDED, StringComparison.InvariantCultureIgnoreCase))
        {
            Console.WriteLine("Downloading job output");
            DownloadOutput(jobId, client, request); // This where we save job output to the specified file location.
        }
        else if (string.Equals(statusCode, GlacierUtils.JOB_STATUS_FAILED, StringComparison.InvariantCultureIgnoreCase))
        {
            Console.WriteLine("Job failed... cannot download the archive.");
        }

        jobDone = true;
        sqsClient.DeleteMessage(new DeleteMessageRequest() { QueueUrl = queueUrl, ReceiptHandle = message.ReceiptHandle });
    }
}
/// <summary>
/// Instance download: retrieves one archive with the ArchiveTransferManager
/// and, for .tif archives, renames the downloaded file to *.z, verifies its
/// SHA-256 tree hash against the value recorded at upload, decompresses it,
/// and logs the decompressed file's tree hash for verification.
/// </summary>
/// <param name="request">Archive descriptor (ID, description/path, recorded checksum).</param>
/// <returns>A fresh, empty AWSArchiveResult (the method does not populate it).</returns>
public AWSArchiveResult DownloadFile(AWSArchiveRquest request)
{
    archiveRequest = request;
    using (manager = new ArchiveTransferManager(RegionEndpoint.USEast1))
    {
        if (CheckRequiredFields())
        {
            // FIX: removed the original's redundant outer try whose duplicate
            // AmazonGlacierException/AmazonServiceException handlers were
            // unreachable — the try below already catches Exception.
            try
            {
                Form1.log.Info("Download Archive" + request.ArchiveID + " " + request.Description + " " + request.FileName);
                var options = new DownloadOptions();
                options.StreamTransferProgress += OnProgress;
                // Download an archive.
                manager.Download(vaultName, request.ArchiveID, request.Description, options);

                if (request.Description.Contains(".tif"))
                {
                    // The archive was stored compressed: rename the download to *.z,
                    // then decompress back to the original name.
                    String fileName = request.Description;
                    FileInfo fi = new FileInfo(fileName);
                    fi.MoveTo(fileName + ".z");
                    fileName = fileName + ".z";
                    String outfileName = fileName.Replace(".z", "");

                    // FIX: the FileStreams below were never disposed in the original,
                    // leaking two file handles per download.
                    String zipChecksum;
                    using (FileStream inputFile = File.Open(fileName, FileMode.Open, FileAccess.Read))
                    {
                        byte[] treeHash = Form1.ComputeSHA256TreeHash(inputFile);
                        zipChecksum = BitConverter.ToString(treeHash).Replace("-", "").ToLower();
                    }
                    Form1.log.Info(fileName + " Tree SHA 256 Checksum : " + zipChecksum);
                    Form1.log.Info(fileName + " Original Injection Tree SHA 256 Checksum : " + request.ChecksumTreeSHA256Compressed);

                    ParallelCompress.doNotUseTPL = false;
                    ParallelCompress.compressStrictSeqential = false;
                    ParallelCompress.UncompressFast(outfileName, fileName, true);

                    String decompressedChecksum;
                    using (FileStream decompressedFile = File.Open(outfileName, FileMode.Open, FileAccess.Read))
                    {
                        byte[] treeHash = Form1.ComputeSHA256TreeHash(decompressedFile);
                        decompressedChecksum = BitConverter.ToString(treeHash).Replace("-", "").ToLower();
                    }
                    // FIX: the original logged zipChecksum here, so the "decompressed
                    // checksum" line never showed the freshly computed hash.
                    Form1.log.Info(outfileName + " Decmpressed Tree SHA 256 Checksum : " + decompressedChecksum);
                    Form1.log.Info(outfileName + " Decmplressed Original Tree SHA 256 Checksum : " + request.ChecksumTreeSHA256Compressed);
                }
            }
            catch (AmazonGlacierException e) { Form1.log.Error(e.ToString()); }
            catch (AmazonServiceException e) { Form1.log.Error(e.ToString()); }
            catch (Exception e) { Form1.log.Error(e.ToString()); }
        }
        return (new AWSArchiveResult());
    }
}