/// <summary>
/// Completes a Glacier multipart upload: re-reads the archive length, recomputes the
/// whole-file SHA256 tree hash locally, and issues CompleteMultipartUpload.
/// </summary>
/// <param name="uploadID">Upload id returned by InitiateMultipartUpload.</param>
/// <param name="client">Glacier client used to send the completion request.</param>
/// <param name="partChecksumList">Per-part checksums (currently unused here — the
/// full-file tree hash is recomputed from disk instead).</param>
/// <param name="fio">FileInfo of the uploaded archive (length re-read fresh below).</param>
/// <returns>Archive id plus local checksum on success; an empty AWSArchiveResult on any failure.</returns>
AWSArchiveResult CompleteMPU(string uploadID, AmazonGlacier client, List<string> partChecksumList, FileInfo fio)
{
    try
    {
        // Re-read the length from disk rather than trusting a possibly-stale cached
        // FileInfo value (the original also did this, after a dead fio.Length store).
        long fileLength = new FileInfo(archiveToUpload).Length;

        byte[] treeHash;
        // using: the original leaked this stream; File.Open with no FileShare argument
        // defaults to FileShare.None, so a leaked handle keeps the archive locked.
        using (FileStream inputFile = File.Open(archiveToUpload, FileMode.Open, FileAccess.Read))
        {
            treeHash = Form1.ComputeSHA256TreeHash(inputFile);
        }
        String localChecksum = BitConverter.ToString(treeHash).Replace("-", "").ToLower();

        CompleteMultipartUploadRequest completeMPUrequest = new CompleteMultipartUploadRequest()
        {
            UploadId = uploadID,
            ArchiveSize = fileLength.ToString(),
            Checksum = localChecksum,
            VaultName = vaultName
        };
        CompleteMultipartUploadResponse completeMPUresponse = client.CompleteMultipartUpload(completeMPUrequest);

        AWSArchiveResult ar = new AWSArchiveResult();
        ar.ArchiveID = completeMPUresponse.CompleteMultipartUploadResult.ArchiveId;
        ar.Checksum = localChecksum;
        return (ar);
    }
    catch (Exception e)
    {
        // Original contract preserved: swallow, log, and hand back an empty result so
        // callers can detect failure by the missing ArchiveID.
        Form1.log.Error(e.ToString());
        return (new AWSArchiveResult());
    }
}
/// <summary>
/// Uploads a single file to Glacier via ArchiveTransferManager, then records the
/// resulting archive id/checksum (plus the caller-supplied native checksum) as a
/// row in the "FL6119" DynamoDB table.
/// </summary>
/// <param name="filePath">Path of the file to archive.</param>
/// <param name="nativeChecksum">Caller-computed checksum stored alongside the AWS one.</param>
/// <param name="archiveDescription">Description passed to Glacier for this archive.</param>
/// <returns>Archive id and checksum on success; an empty AWSArchiveResult otherwise.</returns>
public AWSArchiveResult UploadFile(String filePath, String nativeChecksum, String archiveDescription)
{
    using (manager = new ArchiveTransferManager(RegionEndpoint.USEast1))
    {
        if (CheckRequiredFields())
        {
            try
            {
                Form1.log.Info("Upload a Archive");
                var result = manager.Upload(vaultName, archiveDescription, filePath);
                archiveId = result.ArchiveId;
                Form1.log.Info("Upload successful. Archive Id : " + result.ArchiveId + "Checksum : " + result.Checksum);

                // NOTE(review): plain-HTTP endpoint — confirm whether https should be used.
                var dynamoConfig = new AmazonDynamoDBConfig();
                dynamoConfig.ServiceURL = "http://dynamodb.us-east-1.amazonaws.com";
                client = new AmazonDynamoDBClient(dynamoConfig);
                Table table = Table.LoadTable(client, "FL6119");

                // NOTE(review): "FileID" holds the archive id while "archiveID" holds the
                // file path, and "NAtiveChecksum" is oddly cased — these look swapped /
                // misspelled but are kept verbatim because readers depend on the keys.
                var record = new Document();
                record["FileID"] = result.ArchiveId;
                record["date"] = System.DateTime.Now;
                record["checksum"] = result.Checksum;
                record["archiveID"] = filePath;
                record["NAtiveChecksum"] = nativeChecksum;
                table.PutItem(record);

                var outcome = new AWSArchiveResult();
                outcome.ArchiveID = result.ArchiveId;
                outcome.Checksum = result.Checksum;
                return (outcome);
            }
            catch (AmazonGlacierException e)
            {
                Form1.log.Error(e.ToString());
            }
            catch (AmazonServiceException e)
            {
                Form1.log.Error(e.ToString());
            }
        }
        // Required fields missing, or the upload / Dynamo write failed.
        return (new AWSArchiveResult());
    }
}
/// <summary>
/// Uploads a file to Glacier using the multipart flow: initiate, upload parts in
/// parallel, then complete with the aggregated tree hash.
/// </summary>
/// <param name="filePath">Path of the archive to upload.</param>
/// <param name="nativeChecksum">Unused by this variant; kept for interface parity with the
/// ArchiveTransferManager-based overload.</param>
/// <param name="arDescription">Effectively ignored — the archive description is always
/// replaced with the file's name (original behavior kept; the dead intermediate
/// assignment of arDescription was removed).</param>
/// <returns>Archive id and checksum on success; an empty AWSArchiveResult on failure.</returns>
public AWSArchiveResult UploadFile(String filePath, String nativeChecksum, String arDescription)
{
    archiveToUpload = filePath;
    FileInfo fio = new FileInfo(filePath);
    archiveDescription = fio.Name;

    AWSArchiveResult ar = new AWSArchiveResult();
    AmazonGlacier client;
    List<string> partChecksumList = new List<string>();
    // Shared queue the parallel part-uploaders push their checksums into.
    SHA256ConcurrentQueue = new ConcurrentQueue<string>();
    try
    {
        using (client = new AmazonGlacierClient(Amazon.RegionEndpoint.USEast1))
        {
            Console.WriteLine("Uploading an archive.");
            string uploadId = InitiateMultipartUpload(client);
            partChecksumList = UploadParts(uploadId, client);
            ar = CompleteMPU(uploadId, client, partChecksumList, fio);
        }
    }
    catch (AmazonGlacierException e)
    {
        Form1.log.Error(e.ToString());
    }
    catch (AmazonServiceException e)
    {
        Form1.log.Error(e.ToString());
    }
    catch (Exception e)
    {
        Form1.log.Error(e.ToString());
    }
    // ar stays empty if any stage above threw.
    return (ar);
}
/// <summary>
/// Button handler: computes the SHA256 tree hash of the current archive, uploads it
/// via AWSMoveFilesXDynamoMT, and logs the sent vs. received checksums.
/// </summary>
private void GlaiceirUpload_Click(object sender, EventArgs e)
{
    try
    {
        String Checksum;
        // Dispose the stream BEFORE uploading: the original leaked it, and
        // File.Open without a FileShare argument defaults to FileShare.None,
        // so the held lock would break the uploader's own re-open of this file.
        using (FileStream inputFile = File.Open(archiveName, FileMode.Open, FileAccess.Read))
        {
            byte[] treeHash = ComputeSHA256TreeHash(inputFile);
            Checksum = BitConverter.ToString(treeHash).Replace("-", "").ToLower();
        }

        AWSMoveFilesXDynamoMT mtdl = new AWSMoveFilesXDynamoMT(awsVaultName);
        AWSArchiveResult ar = mtdl.UploadFile(archiveName, Checksum, archiveName);
        log.Info("Archived " + archiveName);
        log.Info("Archive Checksum In " + Checksum); // typo "Checsum" fixed
        log.Info("Archive Checksum Out " + ar.Checksum);
    }
    catch (Exception exc)
    {
        log.Error(exc.Message);
    }
}
/// <summary>
/// Button handler for retrying a Glacier upload: lets the user pick a tar file,
/// hashes it, uploads it via ArchiveUploadMultipartParallel, and writes a
/// timestamped one-row CSV transaction log.
/// </summary>
private void RetryGalcier_Click(object sender, EventArgs e)
{
    try
    {
        // Timestamp made filename-safe for the per-run transaction log.
        String dtString = DateTime.Now.ToString().Replace("/", "_").Replace(":", "").Replace(" ", "");

        openFileDialog1.ShowDialog();
        String archiveName = openFileDialog1.FileName;
        // Bail out on a cancelled dialog instead of throwing on File.Open and
        // having the exception silently swallowed below.
        if (String.IsNullOrEmpty(archiveName)) { return; }

        log.Info("Calculating SHA256TreeHash on Tar");
        String Checksum;
        // Dispose the stream BEFORE uploading: File.Open defaults to FileShare.None,
        // so the original's leaked handle locked the file against the uploader's re-open.
        using (FileStream inputFile = File.Open(archiveName, FileMode.Open, FileAccess.Read))
        {
            byte[] treeHash = ComputeSHA256TreeHash(inputFile);
            Checksum = BitConverter.ToString(treeHash).Replace("-", "").ToLower();
        }

        // Create the log only after hashing succeeded, so a failed run no longer
        // leaves behind a header-only CSV.
        glacierTransactionLog = new StreamWriter("GlacierTransationLog_" + dtString + ".csv");
        glacierTransactionLog.WriteLine("AWSVaultName" + "," + "ArchiveName" + "," + "ArchiveID" + "," + "SHA256 Tree Checksum Received" + "," + "SHA256 Tree Checksum Sent");

        log.Info("Sending to Glacier: " + archiveName + "With Checksum " + Checksum);
        ArchiveUploadMultipartParallel aruo = new ArchiveUploadMultipartParallel(awsVaultName);
        AWSArchiveResult ar = aruo.UploadFile(archiveName, "", archiveName);
        glacierTransactionLog.WriteLine(awsVaultName + "," + archiveName + "," + ar.ArchiveID + "," + ar.Checksum + "," + Checksum);
        glacierTransactionLog.Flush();
        glacierTransactionLog.Close();
    }
    catch (Exception exc)
    {
        log.Error(exc.Message);
    }
}
// Bulk-transfers a CSV-driven file list to AWS Glacier.
// Reads rows of (server, directory, file) from fileList, tars/compresses them in
// chunks of roughly maxTARGBThreshold GB, uploads each finished tar to Glacier via
// ArchiveUploadMultipartParallel, and records results in CSV transaction logs plus
// DynamoDB. Files that fail to archive are written to a "resubmit" CSV for retry.
// NOTE(review): glacierTransactionLog, TarFileIdLog, ResubmitJobFileIds and the
// StreamReader below are never disposed on all paths — consider using blocks.
private void BulkTransfer(String fileList)
{
    FileInfo fioi = new FileInfo(fileList);
    String fileLogName = fioi.Name.Replace(".csv", "");
    // Filename-safe timestamp shared by all log / recovery files for this run.
    DateTime dt = new DateTime();
    dt = DateTime.Now;
    String dtString = dt.ToString();
    dtString = dtString.Replace("/", "_").Replace(":", "").Replace(" ", "");

    // Per-run Glacier transaction log (vault, archive, id, checksums).
    String transactionLogName = "GlacierTransationLog_" + fileLogName + "_" + dtString + ".csv";
    glacierTransactionLog = new StreamWriter(transactionLogName);
    log.Info("Making ArchiveLog :" + transactionLogName);
    glacierTransactionLog.WriteLine("AWSVaultName" + "," + "ArchiveName" + "," + "ArchiveID" + "," + "SHA256 Tree Checksum Received" + "," + "SHA256 Tree Checksum Sent");

    // Maps each tar name to the input row it came from.
    String tarFileIdLogName = "Tar_FileID_" + fileLogName + "_" + dtString + ".csv";
    TarFileIdLog = new StreamWriter(tarFileIdLogName);
    log.Info("Making ArchiveLog :" + tarFileIdLogName);

    // Rows that failed archiving get appended here for resubmission.
    StreamWriter ResubmitJobFileIds = new StreamWriter(fileLogName + "AWSGlacierCopyResubmit" + dtString + ".csv");

    ArrayList serverName = new ArrayList();
    ArrayList directoryName = new ArrayList();
    ArrayList archiveFileList = new ArrayList();
    ArrayList fileLogLines = new ArrayList();
    String headers = String.Empty;

    // Phase 1: parse the input CSV into parallel lists (column 0 = server,
    // 1 = directory, 2 = file). Header row is copied to the resubmit log.
    try
    {
        StreamReader sr = new StreamReader(fileList);
        headers = sr.ReadLine();
        ResubmitJobFileIds.WriteLine(headers);
        String line = String.Empty;
        while ((line = sr.ReadLine()) != null)
        {
            fileLogLines.Add(line);
            string[] fields = line.Split(new char[] { ',' });
            serverName.Add(fields[0]);
            directoryName.Add(fields[1]);
            archiveFileList.Add(fields[2]);
        }
    }
    catch (Exception ex)
    {
        log.Error(ex.ToString());
    }

    // Phase 2: archive each file into the current tar; whenever the tar passes the
    // size threshold (or we hit the last file), close it, upload it, and start a new one.
    try
    {
        int partNumber = 0;
        createTARAndTxLog(partNumber);
        // We compress, tar and transfer in chunks of size maxTARGBThreshold.
        for (int i = 0; i < archiveFileList.Count; i++)
        {
            bool lastFile = i == archiveFileList.Count - 1 ? true : false;

            // Build the UNC (or local) source path for this row.
            // NOTE(review): serverName[i] is an object from ArrayList, so != against
            // String.Empty is a reference comparison — confirm this matches intent.
            String sourceDirectory = String.Empty;
            if (serverName[i] != String.Empty)
            {
                sourceDirectory = "\\\\" + serverName[i] + "\\" + directoryName[i] + "\\";
            }
            else
            {
                sourceDirectory = directoryName[i] + "\\";
            }

            // Tars are built in the application's base directory.
            System.IO.Directory.SetCurrentDirectory(System.AppDomain.CurrentDomain.BaseDirectory);
            string sinkDirectory = Directory.GetCurrentDirectory();

            // Current size of the in-progress tar, used for the chunk threshold below.
            long bytesTarred = 0;
            FileInfo fiobt = new System.IO.FileInfo(archiveName);
            bytesTarred = fiobt.Length;

            try
            {
                // Compress + add this file to the tar; the helper also collects the
                // Dynamo field/value pairs describing the entry.
                ArrayList awsDynamoFields = new ArrayList();
                ArrayList awsDynamoVals = new ArrayList();
                archiveFile(sinkDirectory, sourceDirectory, (string)archiveFileList[i], awsDynamoFields, awsDynamoVals);
                awsDynamoFields.Add("ArchiveName");
                awsDynamoVals.Add(archiveName);
                string[] fieldsA = (string[])awsDynamoFields.ToArray(typeof(string));
                string[] valsA = (string[])awsDynamoVals.ToArray(typeof(string));
                String dynamoTableName = textBoxAWSDynamoTableName.Text;
                DynamoDBHelper.MakeDynamoEntry(dynamoTableName, fieldsA, valsA);
                TarFileIdLog.WriteLine(archiveName + "," + fileLogLines[i]);
                TarFileIdLog.Flush();
            }
            catch (Exception ex)
            {
                // Archiving this row failed: queue it for resubmission and keep going.
                ResubmitJobFileIds.WriteLine(fileLogLines[i]);
                ResubmitJobFileIds.Flush();
                log.Error(ex.ToString());
            }

            // Chunk boundary: tar exceeded the GB threshold, or this was the last file.
            if (bytesTarred > (Math.Pow(10, 9) * maxTARGBThreshold) || lastFile)
            {
                // Finalize the tar: embed its own tx log as the final entry, then close.
                glacierTransactionLog.Flush();
                tarLog.Flush();
                tarLog.Close();
                TarEntry entry = TarEntry.CreateEntryFromFile(tarLogName);
                archive.WriteEntry(entry, true);
                archive.Close();

                // Copy TarFiles and Logs To Scratch Place:
                // delete the intermediate compressed (*.z) files left in the sink dir.
                DirectoryInfo di = new DirectoryInfo(sinkDirectory);
                FileInfo[] zFiles = di.GetFiles("*.z", SearchOption.TopDirectoryOnly);
                for (int j = 0; j < zFiles.Length; j++)
                {
                    FileInfo fio = zFiles[j];
                    fio.Delete();
                }

                // Write out the remaining (not-yet-uploaded) rows so the job can be
                // recovered from this point if the upload below fails.
                StreamWriter remainingFileLog = new StreamWriter(fileLogName + "_Recover_" + "_" + dtString + ".csv");
                remainingFileLog.WriteLine(headers);
                for (int k = i; k < serverName.Count; k++)
                {
                    remainingFileLog.WriteLine(fileLogLines[k]);
                }
                remainingFileLog.Flush();
                remainingFileLog.Close();

                // Transfer Tar Here: hash the finished tar and upload it to Glacier.
                // NOTE(review): this FileStream is never closed and File.Open defaults to
                // FileShare.None — the uploader re-opens the same file; confirm this works.
                log.Info("Calculating SHA256TreeHash on Tar");
                FileStream inputFile = File.Open(archiveName, FileMode.Open, FileAccess.Read);
                byte[] treeHash = ComputeSHA256TreeHash(inputFile);
                String Checksum = BitConverter.ToString(treeHash).Replace("-", "").ToLower();
                log.Info("Sending to Glacier: " + archiveName + "With Checksum " + Checksum);
                ArchiveUploadMultipartParallel aruo = new ArchiveUploadMultipartParallel(awsVaultName);
                AWSArchiveResult ar = aruo.UploadFile(archiveName, "", archiveName);
                glacierTransactionLog.WriteLine(awsVaultName + "," + archiveName + "," + ar.ArchiveID + "," + ar.Checksum + "," + Checksum);
                glacierTransactionLog.Flush();

                // Make Dynamo Entry for Archive (tar name, Glacier id, checksum).
                ArrayList awsDynamoFields = new ArrayList();
                ArrayList awsDynamoVals = new ArrayList();
                awsDynamoFields.Add("ArchiveName");
                awsDynamoVals.Add(archiveName);
                awsDynamoFields.Add("ArchiveId");
                awsDynamoVals.Add(ar.ArchiveID);
                awsDynamoFields.Add("ArchiveCheckSum");
                awsDynamoVals.Add(ar.Checksum);
                string[] fieldsA = (string[])awsDynamoFields.ToArray(typeof(string));
                string[] valsA = (string[])awsDynamoVals.ToArray(typeof(string));
                String dynamoTableName = textBoxAWSDynamoTableName.Text;
                DynamoDBHelper.MakeDynamoEntry(dynamoTableName, fieldsA, valsA);

                // More rows remain: open the next tar chunk.
                if (!lastFile)
                {
                    partNumber += 1;
                    createTARAndTxLog(partNumber);
                }
            }
        }
        // (A large commented-out "close and send the last TarBall" block was removed
        // here; the lastFile handling above made it redundant.)
    }
    catch (Exception ex)
    {
        log.Error(ex.ToString());
    }
}