public void TestUpdateRange()
{
    // Overwrite the old segment in place: the range starts at the byte offset of
    // the segment being replaced and covers the length of the new content.
    var prefix = ObjectContent_Before.Substring(0, ObjectContent_Before.IndexOf(ObjectContent_Replace_Old));
    int rangeStart = Encoding.UTF8.GetByteCount(prefix);
    int rangeEnd = rangeStart + Encoding.UTF8.GetByteCount(ObjectContent_Replace_New) - 1;

    var updateRequest = (UpdateObjectRequest)new UpdateObjectRequest()
        .WithUpdateRange(rangeStart, rangeEnd)
        .WithBucketName(bucket.BucketName)
        .WithKey(this.objKey)
        .WithContentBody(ObjectContent_Replace_New);
    using (var updateResponse = client.UpdateObject(updateRequest))
    {
    }

    // Fetch the object back and verify the update produced the expected content.
    var getRequest = new GetObjectRequest()
        .WithBucketName(bucket.BucketName)
        .WithKey(this.objKey);
    using (var getResponse = client.GetObject(getRequest))
    {
        string contentBody = getResponse.GetResponseContentBody();
        Assert.AreEqual(ObjectContent_After, contentBody);
    }
}
/// <summary>
/// Reads a string from an S3 bucket
/// </summary>
/// <param name="location">The location of the data you want to read</param>
/// <param name="guid">The guid of the content you're reading</param>
/// <returns>A string interpretation of the data</returns>
public string ReadString(StorageLocations location, string guid)
{
    var keyName = string.Format("{0}/{1}.onx", StorageLocationToString(location), guid);
    var request = new GetObjectRequest().WithBucketName(Bucket).WithKey(keyName);

    // Dispose the response and reader so the underlying HTTP connection is
    // released — the original leaked both, which can exhaust the connection pool.
    using (var response = _client.GetObject(request))
    using (var reader = new StreamReader(response.ResponseStream, Encoding.ASCII))
    {
        return reader.ReadToEnd();
    }
}
/// <summary>
/// Downloads the serialized job blob ("state_&lt;state_id&gt;") from S3 and
/// reconstructs the <c>Job</c> instance from it.
/// </summary>
public static Job DeserializeFromS3(string bucket, string state_id, string aws_id, string aws_secret)
{
    Job result;
    using (AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(aws_id, aws_secret))
    {
        var request = new GetObjectRequest
        {
            BucketName = bucket,
            Key = "state_" + state_id
        };
        using (S3Response response = client.GetObject(request))
        using (Stream body = response.ResponseStream)
        {
            // NOTE(review): BinaryFormatter is unsafe on untrusted data — confirm
            // the bucket contents are fully trusted before keeping this deserializer.
            IFormatter formatter = new BinaryFormatter();
            result = (Job)formatter.Deserialize(body);
        }
    }
    return result;
}
/// <summary>
/// Downloads the source photo from S3 and hands its stream to the resize/upload pipeline.
/// </summary>
public static void ResizeImageAndUpload(AmazonS3 anAmazonS3Client, string aBucketName, string aCurrentPhotoName, string aNewImageName, int aSize)
{
    GetObjectRequest downloadRequest = new GetObjectRequest()
        .WithBucketName(aBucketName)
        .WithKey(aCurrentPhotoName);
    GetObjectResponse downloadResponse = anAmazonS3Client.GetObject(downloadRequest);
    // NOTE(review): the response is not disposed here because its stream is consumed
    // downstream — verify ResizeAndUpload closes the stream when it finishes.
    Stream sourceStream = downloadResponse.ResponseStream;
    ResizeAndUpload(sourceStream, anAmazonS3Client, aBucketName, aNewImageName, aSize);
}
/// <summary>
/// Reads an object from S3
/// </summary>
/// <param name="bucket">The name of the bucket where the object lives</param>
/// <param name="key">The name of the key to use</param>
public GetObjectResult get(string bucket, string key)
{
    // Sign the call with a millisecond-resolution timestamp, then delegate
    // to the underlying client.
    DateTime timestamp = AWSDateFormatter.GetCurrentTimeResolvedToMillis();
    string signature = makeSignature("GetObject", timestamp);
    return s3.GetObject(bucket, key, false, true, true, awsAccessKeyId, timestamp, true, signature, null);
}
/// <summary>
/// Resolves a virtual path to an S3-backed virtual file, or null when the
/// path is empty or the key does not exist in the bucket.
/// </summary>
public override IVirtualFile GetFile(string virtualPath)
{
    // An empty path can never resolve to a file.
    if (string.IsNullOrEmpty(virtualPath))
    {
        return null;
    }

    var filePath = SanitizePath(virtualPath);
    try
    {
        var response = AmazonS3.GetObject(new GetObjectRequest
        {
            Key = filePath,
            BucketName = BucketName,
        });

        // Wrap the S3 response in a virtual file rooted at its parent directory.
        var dirPath = GetDirPath(filePath);
        var parentDir = new S3VirtualDirectory(this, dirPath, GetParentDirectory(dirPath));
        return new S3VirtualFile(this, parentDir).Init(response);
    }
    catch (AmazonS3Exception ex)
    {
        // A missing key maps to "no file"; any other S3 failure is a real error.
        if (ex.StatusCode == HttpStatusCode.NotFound)
        {
            return null;
        }
        throw;
    }
}
/// <summary>
/// Downloads an object (optionally a specific version) from the configured
/// bucket and returns its contents as a byte array.
/// </summary>
/// <param name="sObjectKey">The S3 key of the object to fetch.</param>
/// <param name="sVersionId">Optional version id; ignored when null or empty.</param>
/// <returns>The full object body.</returns>
public byte[] FetchFile(string sObjectKey, string sVersionId)
{
    AmazonS3 client = AWSClientFactory.CreateAmazonS3Client(S3ACCESSKEY, S3SECRETKEY);
    string BUCKET_NAME = ConfigurationManager.AppSettings["AWSBUCKET"];

    GetObjectRequest request = new GetObjectRequest();
    request.WithKey(sObjectKey);
    request.WithBucketName(BUCKET_NAME);
    // Guard against null as well as "" (the original only compared against "").
    if (!string.IsNullOrEmpty(sVersionId))
    {
        request.WithVersionId(sVersionId);
    }

    // Dispose the response so the HTTP connection is released (the original
    // leaked it), and use a fixed-size copy buffer instead of one sized to
    // ContentLength (which allocated the whole object size up front).
    using (GetObjectResponse response = client.GetObject(request))
    using (MemoryStream ms = new MemoryStream())
    {
        byte[] buffer = new byte[8192];
        int read;
        while ((read = response.ResponseStream.Read(buffer, 0, buffer.Length)) > 0)
        {
            ms.Write(buffer, 0, read);
        }
        return ms.ToArray();
    }
}
/// <summary>
/// Demo: fetches the sample object, prints its title metadata, and saves it
/// to the desktop unless a copy already exists.
/// </summary>
static void ReadingAnObject()
{
    try
    {
        var request = new GetObjectRequest().WithBucketName(bucketName).WithKey(keyName);
        using (GetObjectResponse response = client.GetObject(request))
        {
            // Surface the custom title stored in the object's metadata.
            string title = response.Metadata["x-amz-meta-title"];
            Console.WriteLine("The object's title is {0}", title);

            string dest = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Desktop), keyName);
            if (!File.Exists(dest))
            {
                response.WriteResponseStreamToFile(dest);
            }
        }
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        bool credentialProblem =
            amazonS3Exception.ErrorCode != null &&
            (amazonS3Exception.ErrorCode.Equals("InvalidAccessKeyId") ||
             amazonS3Exception.ErrorCode.Equals("InvalidSecurity"));
        if (credentialProblem)
        {
            Console.WriteLine("Please check the provided AWS Credentials.");
            Console.WriteLine("If you haven't signed up for Amazon S3, please visit http://aws.amazon.com/s3");
        }
        else
        {
            Console.WriteLine("An error occurred with the message '{0}' when reading an object", amazonS3Exception.Message);
        }
    }
}
/// <summary>
/// Copies this file to the location indicated by the passed in S3FileInfo.
/// If the file already exists in S3 and overwrite is set to false than an ArgumentException is thrown.
/// </summary>
/// <param name="file">The target location to copy this file to.</param>
/// <param name="overwrite">Determines whether the file can be overwritten.</param>
/// <exception cref="T:System.IO.IOException">If the file already exists in S3 and overwrite is set to false.</exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>S3FileInfo of the newly copied file.</returns>
public S3FileInfo CopyTo(S3FileInfo file, bool overwrite)
{
    if (!overwrite && file.Exists)
    {
        throw new IOException("File already exists");
    }

    if (SameClient(file))
    {
        // Same client: let S3 perform a server-side copy.
        var copyRequest = new CopyObjectRequest
        {
            DestinationBucket = file.BucketName,
            DestinationKey = S3Helper.EncodeKey(file.ObjectKey),
            SourceBucket = bucket,
            SourceKey = S3Helper.EncodeKey(key)
        };
        copyRequest.BeforeRequestEvent += S3Helper.FileIORequestEventHandler;
        s3Client.CopyObject(copyRequest);
    }
    else
    {
        // Different clients: download from the source and stream into the target.
        var downloadRequest = new GetObjectRequest
        {
            BucketName = bucket,
            Key = S3Helper.EncodeKey(key)
        };
        downloadRequest.BeforeRequestEvent += S3Helper.FileIORequestEventHandler;
        var downloadResponse = s3Client.GetObject(downloadRequest);
        using (Stream stream = downloadResponse.ResponseStream)
        {
            var uploadRequest = new PutObjectRequest
            {
                BucketName = file.BucketName,
                Key = S3Helper.EncodeKey(file.ObjectKey),
                InputStream = stream
            };
            uploadRequest.BeforeRequestEvent += S3Helper.FileIORequestEventHandler;
            file.S3Client.PutObject(uploadRequest);
        }
    }
    return file;
}
/// <summary>
/// Issues a GET for the object and returns the AmazonId2 diagnostic identifier
/// from the response.
/// </summary>
/// <param name="bucketName">Bucket containing the object.</param>
/// <param name="objectKey">Key of the object.</param>
/// <returns>The x-amz-id-2 value of the GET response.</returns>
public string GetObjectInformation(string bucketName, string objectKey)
{
    GetObjectRequest request = new GetObjectRequest();
    request.WithBucketName(bucketName).WithKey(objectKey);

    // Dispose the response — the original leaked the HTTP connection and
    // the unread response stream.
    using (GetObjectResponse response = _client.GetObject(request))
    {
        return response.AmazonId2;
    }
}
/// <summary>
/// Fetches an object and writes its body to the console, decoded with the
/// system default encoding.
/// </summary>
private static void GetObject(AmazonS3 s3Client, string bucket, string key)
{
    var request = new GetObjectRequest().WithBucketName(bucket).WithKey(key);
    using (var response = s3Client.GetObject(request))
    {
        // Buffer the whole object before decoding it.
        var buffered = new MemoryStream();
        response.ResponseStream.CopyTo(buffered);
        var content = Encoding.Default.GetString(buffered.ToArray());
        Console.WriteLine(content);
    }
}
/// <summary>
/// Download a file and save it to a specified location
/// </summary>
/// <param name="bucketName">The name of the bucket.</param>
/// <param name="key">The key of the file to download.</param>
/// <param name="file">The path for the file to be saved to.</param>
/// <param name="timeoutMilliseconds">The timeout in milliseconds.</param>
public void DownloadFile(string bucketName, string key, string file, int timeoutMilliseconds)
{
    var request = new GetObjectRequest
    {
        BucketName = bucketName,
        Key = key,
        Timeout = timeoutMilliseconds
    };

    // Dispose the response so the HTTP connection is released (the original
    // leaked it). A failed GET throws AmazonS3Exception, so no separate
    // status check is required here (resolves the old TODO).
    using (var response = _amazonS3.GetObject(request))
    {
        response.WriteResponseStreamToFile(file);
    }
}
/// <summary>
/// Copies this file to the location indicated by the passed in S3FileInfo.
/// If the file already exists in S3 and overwrite is set to false than an ArgumentException is thrown.
/// </summary>
/// <param name="file">The target location to copy this file to.</param>
/// <param name="overwrite">Determines whether the file can be overwritten.</param>
/// <exception cref="T:System.IO.IOException">If the file already exists in S3 and overwrite is set to false.</exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>S3FileInfo of the newly copied file.</returns>
public S3FileInfo CopyTo(S3FileInfo file, bool overwrite)
{
    if (!overwrite)
    {
        if (file.Exists)
        {
            throw new IOException("File already exists");
        }
    }
    if (SameClient(file))
    {
        // Same client: server-side copy within S3. The fluent With* setters return
        // the base request type, hence the trailing cast back to CopyObjectRequest.
        s3Client.CopyObject(new CopyObjectRequest()
            .WithDestinationBucket(file.BucketName)
            .WithDestinationKey(S3Helper.EncodeKey(file.ObjectKey))
            .WithSourceBucket(bucket)
            .WithSourceKey(S3Helper.EncodeKey(key))
            .WithBeforeRequestHandler(S3Helper.FileIORequestEventHandler) as CopyObjectRequest);
    }
    else
    {
        // Different clients: download from the source client and re-upload via the
        // target file's client, streaming the body straight across.
        // NOTE(review): the GetObjectResponse itself is never disposed — only its
        // stream is; confirm closing the stream releases the connection in this SDK.
        using (Stream stream = s3Client.GetObject(new GetObjectRequest()
            .WithBucketName(bucket)
            .WithKey(S3Helper.EncodeKey(key))
            .WithBeforeRequestHandler(S3Helper.FileIORequestEventHandler) as GetObjectRequest)
            .ResponseStream)
        {
            file.S3Client.PutObject((PutObjectRequest) new PutObjectRequest()
                .WithBucketName(file.BucketName)
                .WithKey(S3Helper.EncodeKey(file.ObjectKey))
                .WithInputStream(stream)
                .WithBeforeRequestHandler(S3Helper.FileIORequestEventHandler));
        }
    }
    return(file);
}
/// <summary>
/// Downloads s3://bucket/key to the given local file and prints the first
/// metadata key of the response.
/// </summary>
/// <param name="bucket">Source bucket.</param>
/// <param name="key">Source object key.</param>
/// <param name="fileName">Local destination path.</param>
public static void Get(string bucket, string key, string fileName)
{
    AmazonS3 s3Client = AWSClientFactory.CreateAmazonS3Client();
    Console.WriteLine("Download File " + bucket + ":" + key + " to " + fileName);

    GetObjectRequest get_req = new GetObjectRequest();
    get_req.BucketName = bucket;
    get_req.Key = key;

    // Dispose the response to release the connection (the original leaked it);
    // the unused `FileInfo file` local was also removed.
    using (GetObjectResponse get_res = s3Client.GetObject(get_req))
    {
        get_res.WriteResponseStreamToFile(fileName);
        Console.WriteLine(get_res.Metadata.AllKeys.FirstOrDefault());
    }
}
/// <summary>
/// Downloads the configured S3 object (BUCKET_NAME/S3_KEY), caches it to a
/// local file, and returns the image re-encoded as GIF bytes.
/// </summary>
/// <param name="s3Client">Client used to fetch the object.</param>
/// <returns>The image bytes re-encoded in GIF format.</returns>
public static byte[] getobject(AmazonS3 s3Client)
{
    GetObjectRequest request = new GetObjectRequest();
    request.BucketName = BUCKET_NAME;
    request.Key = S3_KEY;

    // NOTE(review): the destination path is hard-coded; consider making it a parameter.
    const string localPath = @"C:\Users\Mohammad\Desktop\Zahra.jpg";

    // Dispose the response (the original leaked the HTTP connection).
    using (GetObjectResponse res = s3Client.GetObject(request))
    {
        res.WriteResponseStreamToFile(localPath);
    }

    // Dispose the GDI+ Image and MemoryStream — the original leaked both, and
    // the leaked Image also kept the cached file locked on disk.
    using (Image img = Image.FromFile(localPath))
    using (MemoryStream ms = new MemoryStream())
    {
        img.Save(ms, System.Drawing.Imaging.ImageFormat.Gif);
        return ms.ToArray();
    }
}
// Fetches a package stream, preferring the public packages URL cache when enabled,
// otherwise reading directly from the S3 bucket.
public Stream GetFile(string folderName, string fileName, bool useCache)
{
    //folder ignored - packages stored on top level of S3 bucket
    if (String.IsNullOrWhiteSpace(folderName))
    {
        throw new ArgumentNullException("folderName");
    }
    if (String.IsNullOrWhiteSpace(fileName))
    {
        throw new ArgumentNullException("fileName");
    }
    if (useCache && !string.IsNullOrWhiteSpace(clientContext.PackagesUrl))
    {
        // Cached path: fetch over plain HTTP from the configured packages URL.
        var url = new Uri(string.Format("{0}/{1}", clientContext.PackagesUrl, fileName));
        WebRequest request = WebRequest.Create(url);
        WebResponse response = request.GetResponse();
        return(response.GetResponseStream());
    }
    else
    {
        var request = new GetObjectRequest();
        request.WithBucketName(clientContext.BucketName);
        request.WithKey(fileName);
        // Generous 30-minute timeout for large package downloads.
        request.WithTimeout((int)TimeSpan.FromMinutes(30).TotalMilliseconds);
        using (AmazonS3 client = clientContext.CreateInstance())
        {
            try
            {
                S3Response response = WrapRequestInErrorHandler(() => client.GetObject(request));
                if (response != null)
                {
                    // NOTE(review): the response stream is returned while the client is
                    // disposed by this using block — confirm the stream remains readable
                    // after client disposal, and that callers dispose the stream.
                    return(response.ResponseStream);
                }
            }
            catch (Exception)
            {
                //hate swallowing an error
            }
            // Any failure (or null response) degrades to "no stream available".
            return(null);
        }
    }
}
// Fetches a file stream, preferring the public images URL cache when enabled,
// otherwise reading "<folder>/<file>" directly from the S3 bucket.
public Stream get_file(string folderName, string fileName, bool useCache)
{
    // It's allowed to have an empty folder name.
    // if (String.IsNullOrWhiteSpace(folderName)) throw new ArgumentNullException("folderName");
    if (String.IsNullOrWhiteSpace(fileName))
    {
        throw new ArgumentNullException("fileName");
    }
    // Normalize the folder to end in exactly one "/" (or be empty), then prefix the file name.
    folderName = (string.IsNullOrEmpty(folderName) ? String.Empty : folderName.Substring(folderName.Length - 1, 1) == "/" ? folderName : folderName + "/");
    fileName = string.Format("{0}{1}", folderName, fileName);
    if (useCache && !string.IsNullOrWhiteSpace(clientContext.ImagesUrl))
    {
        // Cached path: fetch over plain HTTP from the configured images URL.
        var url = new Uri(string.Format("{0}/{1}", clientContext.ImagesUrl, fileName));
        WebRequest request = WebRequest.Create(url);
        WebResponse response = request.GetResponse();
        return(response.GetResponseStream());
    }
    else
    {
        var request = new GetObjectRequest();
        request.WithBucketName(clientContext.BucketName);
        request.WithKey(fileName);
        // Generous 30-minute timeout for large downloads.
        request.WithTimeout((int)TimeSpan.FromMinutes(30).TotalMilliseconds);
        using (AmazonS3 client = clientContext.create_instance())
        {
            try
            {
                S3Response response = wrap_request_in_error_handler(() => client.GetObject(request));
                if (response != null)
                {
                    // NOTE(review): the response stream is returned while the client is
                    // disposed by this using block — confirm the stream remains readable
                    // after client disposal, and that callers dispose the stream.
                    return(response.ResponseStream);
                }
            }
            catch (Exception)
            {
                //hate swallowing an error
            }
            // Any failure (or null response) degrades to "no stream available".
            return(null);
        }
    }
}
/// <summary>
/// Streams the stored file for the given id out of the user's S3 bucket as an
/// HTTP response.
/// </summary>
/// <param name="fileId">Repository id of the file record.</param>
/// <param name="userData">Account whose bucket holds the file.</param>
/// <returns>200 response whose content streams the S3 object body.</returns>
private HttpResponseMessage DownloadFile(int fileId, Account userData)
{
    var fileData = _readOnlyRepository.First <File>(x => x.Id == fileId);
    var objectRequest = new GetObjectRequest
    {
        BucketName = userData.BucketName,
        Key = fileData.Url + fileData.Name
    };
    var file = AWSClient.GetObject(objectRequest);

    // Stream the object directly to the caller. The original first read part of
    // ResponseStream into a throwaway buffer (a single Read() is not guaranteed
    // to fill it) and then handed the already-consumed stream to StreamContent,
    // truncating/corrupting the download.
    HttpResponseMessage response = new HttpResponseMessage();
    response.StatusCode = HttpStatusCode.OK;
    response.Content = new StreamContent(file.ResponseStream);
    return response;
}
/// <summary>
/// Loads an S3 object into memory and returns it as a readable stream
/// positioned at the start.
/// </summary>
/// <param name="fileName">Key of the object to load.</param>
/// <returns>A MemoryStream containing the full object body.</returns>
/// <exception cref="ArgumentException">When fileName is null or empty.</exception>
/// <exception cref="StorageException">When the bucket/object is missing or any other failure occurs.</exception>
public Stream LoadObject(string fileName)
{
    if (String.IsNullOrEmpty(fileName))
    {
        throw new ArgumentException("File name is required.", "fileName");
    }
    try
    {
        using (AmazonS3 client = CreateS3Client())
        {
            if (DoesBucketExist(client) == false)
            {
                throw new StorageException(String.Format("AWS S3 bucket [{0}] does not exist. File not found.", BucketName));
            }
            if (DoesObjectExist(client, fileName) == false)
            {
                throw new StorageException(
                    String.Format("AWS S3 object [{0}/{1}] does not exist. File not found.", BucketName, fileName));
            }
            GetObjectRequest objectRequest = new GetObjectRequest().WithBucketName(BucketName).WithKey(fileName);
            using (GetObjectResponse objectResponse = client.GetObject(objectRequest))
            {
                // Copy into a MemoryStream before the response and client are
                // disposed: the original returned ResponseStream out of both
                // using blocks, handing callers an already-disposed network stream.
                var buffer = new MemoryStream();
                objectResponse.ResponseStream.CopyTo(buffer);
                buffer.Position = 0;
                return buffer;
            }
        }
    }
    catch (AmazonS3Exception s3Ex)
    {
        throw AnAwsRelatedException(s3Ex);
    }
    catch (Exception ex)
    {
        throw new StorageException("An error occurred while processing your request.", ex);
    }
}
/// <summary>
/// Demo: fetches the sample object, prints its title metadata, and writes the
/// body to the desktop in 32 KB chunks unless a copy already exists.
/// </summary>
static void ReadingAnObject()
{
    try
    {
        var request = new GetObjectRequest().WithBucketName(bucketName).WithKey(keyName);
        using (S3Response response = client.GetObject(request))
        {
            string title = response.Metadata["x-amz-meta-title"];
            Console.WriteLine("The object's title is {0}", title);

            string dest = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Desktop), keyName);
            if (!File.Exists(dest))
            {
                // Copy the response body to disk chunk by chunk.
                using (Stream s = response.ResponseStream)
                using (FileStream fs = new FileStream(dest, FileMode.Create, FileAccess.Write))
                {
                    byte[] chunk = new byte[32768];
                    int n;
                    do
                    {
                        n = s.Read(chunk, 0, chunk.Length);
                        fs.Write(chunk, 0, n);
                    } while (n > 0);
                    fs.Flush();
                }
            }
        }
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        bool credentialProblem =
            amazonS3Exception.ErrorCode != null &&
            (amazonS3Exception.ErrorCode.Equals("InvalidAccessKeyId") ||
             amazonS3Exception.ErrorCode.Equals("InvalidSecurity"));
        if (credentialProblem)
        {
            Console.WriteLine("Please check the provided AWS Credentials.");
            Console.WriteLine("If you haven't signed up for Amazon S3, please visit http://aws.amazon.com/s3");
        }
        else
        {
            Console.WriteLine("An error occurred with the message '{0}' when reading an object", amazonS3Exception.Message);
        }
    }
}
/// <summary>
/// Downloads a gzip-compressed S3 object, decompresses it in memory, splits
/// the ASCII text on the table's column delimiters into <c>Lines</c>, and
/// resets the line cursor.
/// </summary>
public bool OpenFile(object FileObject, ITableMetadata TableMeta)
{
    var fileName = (S3Object)FileObject;
    AmazonS3 s3 = AWSClientFactory.CreateAmazonS3Client();
    var gor = new GetObjectRequest().WithBucketName(fileName.BucketName).WithKey(fileName.Key);
    var file = s3.GetObject(gor);

    string text = "";
    using (var ms = new MemoryStream())
    {
        System.Diagnostics.Trace.WriteLine("Started reading file");
        file.ResponseStream.CopyTo(ms); //actually fetches the file from S3
        System.Diagnostics.Trace.WriteLine("Finished reading file");

        // The object is gzip-compressed; inflate it fully into memory.
        using (var gzipStream = new GZipStream(new MemoryStream(ms.ToArray()), CompressionMode.Decompress))
        {
            System.Diagnostics.Trace.WriteLine("Decompressing file");
            using (MemoryStream memory = new MemoryStream())
            {
                byte[] chunk = new byte[4096];
                int n;
                while ((n = gzipStream.Read(chunk, 0, chunk.Length)) > 0)
                {
                    memory.Write(chunk, 0, n);
                }
                text = ASCIIEncoding.ASCII.GetString(memory.ToArray());
            }
            System.Diagnostics.Trace.WriteLine("Finished decompressing file");
        }
    }

    Lines = text.Split(TableMeta.ColumnDelimiters, StringSplitOptions.RemoveEmptyEntries);
    System.Diagnostics.Trace.WriteLine("Finished reading file");
    lineCount = -1;
    return true;
}
// Lazily yields every staged input file for the current job's simulation,
// downloading each object's bytes from the "Simulations" bucket.
public IEnumerable <SimpleFile> GetSimulationInputFiles()
{
    // S3:URL in description
    string bucketName = "Simulations";
    string key = job.SimulationId.ToString();
    //string dest = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Desktop), "name.bin");
    IAWSContext awsCtx = Turbine.Consumer.AWS.AppUtility.GetContext();
    byte[] bytes;
    using (AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(awsCtx.AccessKey, awsCtx.SecretKey))
    {
        // List everything under /<simulation-id>/StagedInputFiles/.
        ListObjectsRequest listObjectsRequest = new ListObjectsRequest()
            .WithBucketName(bucketName)
            .WithDelimiter("/")
            .WithPrefix(String.Format("/{0}/StagedInputFiles/", key));
        using (ListObjectsResponse listObjectsResponse = client.ListObjects(listObjectsRequest))
        {
            foreach (S3Object obj in listObjectsResponse.S3Objects)
            {
                // NOTE(review): obj.Key from ListObjects is normally the FULL key;
                // re-appending it to the prefix here looks suspicious — verify the
                // resulting key against actual bucket layout.
                GetObjectRequest getObjectRequest = new GetObjectRequest()
                    .WithBucketName(bucketName)
                    .WithKey(String.Format("/{0}/StagedInputFiles/{1}", key, obj.Key));
                using (S3Response getObjectResponse = client.GetObject(getObjectRequest))
                {
                    using (System.IO.Stream s = getObjectResponse.ResponseStream)
                    {
                        // Buffer the whole object body into memory.
                        using (var ms = new System.IO.MemoryStream())
                        {
                            s.CopyTo(ms);
                            bytes = ms.ToArray();
                        }
                    }
                }
                var f = new SimpleFile()
                {
                    content = bytes, name = obj.Key
                };
                // Iterator: the client stays open until enumeration completes.
                yield return(f);
            }
        }
    }
}
// Downloads the serialized inputs dictionary for the given id from S3,
// normalizes whitespace in every value, drops whitespace-only entries, and
// stores the result in _inputs.
public void DeserializeInputsFromS3(string input_bucket, Guid input_id, string aws_secret)
{
    // S3 filename
    var key = "euses_inputs_" + input_id.ToString();
    // temporary storage
    var inputs = new Dictionary <AST.Address, string>();
    // download Job from S3
    using (AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(_id, aws_secret))
    {
        GetObjectRequest getObjectRequest = new GetObjectRequest()
        {
            BucketName = input_bucket, Key = key
        };
        using (S3Response getObjectResponse = client.GetObject(getObjectRequest))
        {
            using (Stream s = getObjectResponse.ResponseStream)
            {
                // deserialize
                // NOTE(review): BinaryFormatter is unsafe on untrusted data — confirm
                // the bucket contents are fully trusted.
                IFormatter formatter = new BinaryFormatter();
                inputs = (Dictionary <AST.Address, string>)formatter.Deserialize(s);
            }
        }
    }
    // sanity check
    // NOTE(review): this asserts on the _inputs FIELD before it is reassigned
    // below, i.e. it checks the previous contents — verify whether `inputs`
    // (the freshly downloaded data) was intended here instead.
    System.Diagnostics.Debug.Assert(_inputs.Select(pair => pair.Key).Distinct().Count() == _inputs.Count());
    // remove all leading and trailing whitespace from each string; Turkers will not be able to see it
    var ws = new System.Text.RegularExpressions.Regex(@"\s+");
    inputs = inputs.Select(pair => new KeyValuePair <AST.Address, string>(pair.Key, ws.Replace(pair.Value.Trim(), " "))).ToDictionary(pair => pair.Key, pair => pair.Value);
    // exclude strings that contain zero or more occurrences of only whitespace
    // and replace runs of whitespaces with a single space
    var r = new System.Text.RegularExpressions.Regex(@"^\s*$");
    _inputs = inputs.Where(pair => !r.IsMatch(pair.Value)).ToDictionary(pair => pair.Key, pair => pair.Value);
}
/// <summary>
/// Downloads a gzip-compressed object from the configured bucket, inflates it
/// in memory, and returns the ASCII text split on the configured delimiters.
/// </summary>
private String[] ReadS3File(S3Object fileName)
{
    var gor = new GetObjectRequest().WithBucketName(BUCKET).WithKey(fileName.Key);
    var file = s3.GetObject(gor);

    string text = "";
    using (var ms = new MemoryStream())
    {
        Trace.WriteLine("Started reading file");
        file.ResponseStream.CopyTo(ms); //actually fetches the file from S3
        Trace.WriteLine("Finished reading file");

        // Inflate the gzip payload fully into memory.
        using (var gzipStream = new GZipStream(new MemoryStream(ms.ToArray()), CompressionMode.Decompress))
        {
            Trace.WriteLine("Decompressing file");
            using (MemoryStream memory = new MemoryStream())
            {
                byte[] chunk = new byte[4096];
                int n;
                while ((n = gzipStream.Read(chunk, 0, chunk.Length)) > 0)
                {
                    memory.Write(chunk, 0, n);
                }
                text = ASCIIEncoding.ASCII.GetString(memory.ToArray());
            }
            Trace.WriteLine("Finished decompressing file");
        }
    }

    var lines = text.Split(delimiters, StringSplitOptions.RemoveEmptyEntries);
    Trace.WriteLine("Finished reading file");
    return lines;
}
/// <summary>
/// Downloads the clipping image at "&lt;clipping&gt;/&lt;S3_KEY&gt;" from bucket
/// "&lt;cmap&gt;0" and returns its raw bytes, or null on failure.
/// </summary>
/// <param name="s3Client">Client to use; disposed by this method.</param>
/// <param name="cmap">Bucket name prefix ("0" is appended).</param>
/// <param name="clipping">Key prefix for the object.</param>
/// <returns>The object bytes, or null when the GET fails.</returns>
public static byte[] GetFile(AmazonS3 s3Client, string cmap, string clipping)
{
    // The client is owned (and disposed) by this call.
    using (s3Client)
    {
        MemoryStream file = new MemoryStream();
        try
        {
            GetObjectResponse r = s3Client.GetObject(new GetObjectRequest()
            {
                BucketName = cmap + "0", Key = clipping + "/" + S3_KEY
            });
            try
            {
                // long transferred = 0L;
                // Copy the response body into the MemoryStream in 8 KB chunks.
                BufferedStream stream2 = new BufferedStream(r.ResponseStream);
                byte[] buffer = new byte[0x2000];
                int count = 0;
                while ((count = stream2.Read(buffer, 0, buffer.Length)) > 0)
                {
                    file.Write(buffer, 0, count);
                }
            }
            finally
            {
                // NOTE(review): empty finally — the response/streams are never
                // disposed here; consider using blocks.
            }
            Console.WriteLine();
            return(file.ToArray());
        }
        catch (AmazonS3Exception)
        {
            // NOTE(review): failure is signalled via the static `imgfound` flag
            // plus a null return — callers must check both; the exception detail
            // is swallowed.
            imgfound = false;
            Console.WriteLine("Oops!");
        }
    }
    return(null);
}
public void TestAppendObject()
{
    // Append the delta; the service must report the append offset as the byte
    // length of the pre-existing content.
    var expectedOffset = Encoding.UTF8.GetByteCount(ObjectContent_Before);
    var appendRequest = (AppendObjectRequest)new AppendObjectRequest()
        .WithBucketName(bucket.BucketName)
        .WithKey(this.objKey)
        .WithContentBody(ObjectContent_Delta);
    using (var appendResponse = client.AppendObject(appendRequest))
    {
        Assert.AreEqual(expectedOffset, appendResponse.AppendOffset);
    }

    // The object should now contain the combined (before + delta) content.
    var getRequest = new GetObjectRequest()
        .WithBucketName(bucket.BucketName)
        .WithKey(this.objKey);
    using (var getResponse = client.GetObject(getRequest))
    {
        Assert.AreEqual(ObjectContent_After, getResponse.GetResponseContentBody());
    }
}
/// <summary>
/// Verifies that the given S3 log file can be fetched using credentials loaded
/// from the credential file.
/// </summary>
/// <param name="s3FileName">Key of the log file to probe.</param>
/// <param name="credentialFilePath">Path to the stored S3 credentials.</param>
/// <returns>True when the object could be fetched; false otherwise (failures are logged).</returns>
public static bool ReadLogFile(string s3FileName, string credentialFilePath)
{
    Type t = System.Reflection.MethodBase.GetCurrentMethod().DeclaringType;
    try
    {
        if (ReadS3Credentials(credentialFilePath) == false)
        {
            LogEvents.S3NoCredentials(t);
            return false;
        }
        AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(_accessKeyId, _secretAccessKey);
        GetObjectRequest request = new GetObjectRequest();
        request.WithBucketName(_bucketName).WithKey(s3FileName);
        // Dispose the response so its connection/stream are released — the
        // original leaked it (the response body is never read; a successful
        // GET alone proves the file is accessible).
        using (S3Response responseWithMetadata = client.GetObject(request))
        {
            return true;
        }
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        LogEvents.S3Error(t, amazonS3Exception);
        return false;
    }
}
/// <summary>
/// Pulls the saved configuration file out of the backup bucket, reloads the
/// settings from it, and reports success or failure on the error label.
/// </summary>
private void RestoreFromAmazon(AmazonS3 client)
{
    try
    {
        var request = new GetObjectRequest()
            .WithBucketName(this.BucketName)
            .WithKey(FileLocations.CONFIG_FILENAME);
        using (GetObjectResponse response = client.GetObject(request))
        {
            // Overwrite the local configuration and make the app pick it up.
            response.WriteResponseStreamToFile(settings.FileLocations.Configuration);
            settings.ForceReload();
        }
        this.ErrorLabel.ForeColor = Color.Black;
        this.ErrorLabel.Text = "The restore was a success!";
    }
    catch (Exception exc)
    {
        // Any failure is surfaced to the user on the label rather than thrown.
        this.ErrorLabel.ForeColor = Color.Red;
        this.ErrorLabel.Text = exc.Message;
    }
}
/// <summary>
/// Downloads the object for the given key from BUCKET_NAME and returns it as
/// an in-memory stream, or null on failure.
/// </summary>
/// <param name="s3Client">Client to use; disposed by this method.</param>
/// <param name="filekey">Key of the object; also stored in the static S3_KEY.</param>
/// <returns>A MemoryStream with the object bytes (position at end), or null.</returns>
public static System.IO.MemoryStream GetFile(AmazonS3 s3Client, string filekey)
{
    // The client is owned (and disposed) by this call.
    using (s3Client)
    {
        // NOTE(review): mutates shared static state as a side effect — not thread-safe.
        S3_KEY = filekey;
        System.IO.MemoryStream file = new System.IO.MemoryStream();
        try
        {
            GetObjectResponse r = s3Client.GetObject(new GetObjectRequest()
            {
                BucketName = BUCKET_NAME, Key = S3_KEY
            });
            try
            {
                long transferred = 0L;
                // Copy the response body into the MemoryStream in 8 KB chunks.
                System.IO.BufferedStream stream2 = new System.IO.BufferedStream(r.ResponseStream);
                byte[] buffer = new byte[0x2000];
                int count = 0;
                while ((count = stream2.Read(buffer, 0, buffer.Length)) > 0)
                {
                    file.Write(buffer, 0, count);
                }
            }
            finally
            {
                // NOTE(review): empty finally — the response/streams are never
                // disposed; consider using blocks.
            }
            return(file);
        }
        catch (AmazonS3Exception)
        {
            //Show exception
            // NOTE(review): the exception is swallowed; callers only see a null return.
        }
    }
    return(null);
}
/// <summary>
/// Restores the previous session cookie by downloading its name and value
/// from the "intelrecruiter" S3 bucket.
/// </summary>
private void getLastSession()
{
    string cookieName;
    string cookieValue;
    // SECURITY(review): AWS credentials are hard-coded in source — they should be
    // revoked and moved to configuration/secure credential storage.
    using (client = Amazon.AWSClientFactory.CreateAmazonS3Client("AKIAJ47VSG7WMA62WLCA", "3tqlHujlftpk6j/z5OtDw2eg9N2FJtz1RwL8bEa3"))
    {
        // Use using blocks so responses and readers are disposed even on failure —
        // the original never disposed the StreamReaders and skipped response
        // disposal entirely when an exception was thrown.
        GetObjectRequest txtRequest = new GetObjectRequest();
        txtRequest.WithBucketName("intelrecruiter")
            .WithKey(nsbeCookieValueKey);
        using (var valueResponse = client.GetObject(txtRequest))
        using (var valueReader = new StreamReader(valueResponse.ResponseStream))
        {
            cookieValue = valueReader.ReadToEnd();
        }

        txtRequest = new GetObjectRequest();
        txtRequest.WithBucketName("intelrecruiter")
            .WithKey(nsbeCookieNameKey);
        using (var nameResponse = client.GetObject(txtRequest))
        using (var nameReader = new StreamReader(nameResponse.ResponseStream))
        {
            cookieName = nameReader.ReadToEnd();
        }
    }
    nsbeCookie = new Cookie(cookieName, cookieValue, "/");
}
// Entry point: two-way mirror between Amazon S3 and Rackspace Cloud Files.
// Phase 1 streams every (non-excluded, same-region) S3 bucket into the
// configured backup container on Rackspace; phase 2 streams every
// (non-excluded) Rackspace container into the configured S3 backup bucket.
// Objects are skipped when ETag/hash values already match.
static void Main(string[] args)
{
    startTime = DateTime.Now;
    //Catch exceptions
    AppDomain currentDomain = AppDomain.CurrentDomain;
    currentDomain.UnhandledException += new UnhandledExceptionEventHandler(MyHandler);
    //Catch ctrl+c to we can put out our summary
    Console.CancelKeyPress += (sender, eventArgs) =>
    {
        eventArgs.Cancel = false;
        WriteLog("!!CANCELLED!!");
        keepRunning = false;
        PrintSummary();
    };
    ServicePointManager.Expect100Continue = true;
    // SECURITY(review): SSL3 is broken and rejected by modern endpoints — this
    // should be TLS; left unchanged here as a behavior-affecting change.
    ServicePointManager.SecurityProtocol = SecurityProtocolType.Ssl3;

    // Config file: default name, overridable by the first CLI argument. When it
    // is missing, write a template and exit so the user can fill it in.
    string strConfigFile = "CrossCloudBackup.xml";
    if (args.Length > 0)
    {
        strConfigFile = args[0];
    }
    if (!File.Exists(strConfigFile))
    {
        new XDocument(
            new XDeclaration("1.0", "utf-8", "yes"),
            new XComment("CrossCloudBackup Local Config File"),
            new XElement("root",
                new XElement("AWSKey", "someValue"),
                new XElement("AWSSecret", "someValue"),
                new XElement("AWSRegion", "eu-west-1"),
                new XElement("RSUsername", "someValue"),
                new XElement("RSAPIKey", "someValue"),
                new XElement("RSUseServiceNet", "false"),
                new XElement("RSLocation", "UK"),
                new XElement("ExcludeBuckets", ""),
                new XElement("ExcludeContainers", ""),
                //new XElement("MirrorAll", "true"), /TODO: Add Selective Sync
                new XElement("RSBackupContainer", "s3-backup"),
                new XElement("S3BackupBucket", "rs-backup")
                //new XElement("TransferThreads", "3") //TODO: Add Threading
                )
            )
        .Save(strConfigFile);
        Console.WriteLine(strConfigFile + " not found, blank one created.");
        Console.WriteLine("Press enter to exit...");
        Console.ReadLine();
        Environment.Exit(1);
    }
    //We know the config file exists, so open and read values
    XDocument config = XDocument.Load(strConfigFile);
    //Get AWS config
    string AWSKey = config.Element("root").Element("AWSKey").Value;
    string AWSSecret = config.Element("root").Element("AWSSecret").Value;
    // Map the configured region string onto an SDK RegionEndpoint (default EUWest1).
    RegionEndpoint region = RegionEndpoint.EUWest1;
    switch (config.Element("root").Element("AWSRegion").Value)
    {
    case "eu-west-1":
        region = RegionEndpoint.EUWest1;
        break;

    case "sa-east-1":
        region = RegionEndpoint.SAEast1;
        break;

    case "us-east-1":
        region = RegionEndpoint.USEast1;
        break;

    case "ap-northeast-1":
        region = RegionEndpoint.APNortheast1;
        break;

    case "us-west-2":
        region = RegionEndpoint.USWest2;
        break;

    case "us-west-1":
        region = RegionEndpoint.USWest1;
        break;

    case "ap-southeast-1":
        region = RegionEndpoint.APSoutheast1;
        break;

    case "ap-southeast-2":
        region = RegionEndpoint.APSoutheast2;
        break;

    default:
        region = RegionEndpoint.EUWest1;
        break;
    }
    //Create a connection to S3
    WriteLog("Connecting to S3");
    S3Client = AWSClientFactory.CreateAmazonS3Client(AWSKey, AWSSecret, region);
    //Get RS config
    Rackspace.CloudFiles.Utils.AuthUrl rsRegion = Rackspace.CloudFiles.Utils.AuthUrl.US;
    switch (config.Element("root").Element("RSLocation").Value)
    {
    case "UK":
        rsRegion = Rackspace.CloudFiles.Utils.AuthUrl.UK;
        break;

    case "US":
        rsRegion = Rackspace.CloudFiles.Utils.AuthUrl.US;
        break;

    case "Mosso":
        rsRegion = Rackspace.CloudFiles.Utils.AuthUrl.Mosso;
        break;
    }
    //Create connection to Rackspace
    WriteLog("Connecting to Rackspace Cloud Files");
    RSConnection = new Connection(new UserCredentials(config.Element("root").Element("RSUsername").Value, config.Element("root").Element("RSAPIKey").Value, rsRegion), Convert.ToBoolean(config.Element("root").Element("RSUseServiceNet").Value));
    //Get exclusions
    string[] excludeBuckets = config.Element("root").Element("ExcludeBuckets").Value.Split(',');
    string[] excludeContainers = config.Element("root").Element("ExcludeContainers").Value.Split(',');

    //First process all the S3 buckets and stream right into Rackspace container.
    WriteLog("Listing S3 Buckets");
    ListBucketsResponse response = S3Client.ListBuckets();
    WriteLog("Found " + response.Buckets.Count() + " buckets");
    foreach (S3Bucket bucket in response.Buckets)
    {
        if (bucket.BucketName == config.Element("root").Element("S3BackupBucket").Value)
        {
            WriteLog("Skipping " + bucket.BucketName + " as backup folder");
        }
        else if (excludeBuckets.Contains(bucket.BucketName))
        {
            WriteLog("Skipping " + bucket.BucketName + " as in exclusions");
        }
        else
        {
            //We need to know if the bucket is in the right region, otherwise it will error
            GetBucketLocationResponse locResponse = S3Client.GetBucketLocation(new GetBucketLocationRequest().WithBucketName(bucket.BucketName));
            if (locResponse.Location == config.Element("root").Element("AWSRegion").Value)
            {
                WriteLog("Processing " + bucket.BucketName);
                //Get list of files
                ListObjectsRequest request = new ListObjectsRequest();
                request.BucketName = bucket.BucketName;
                do
                {
                    // Paged listing: one page per iteration, continued via Marker below.
                    ListObjectsResponse filesResponse = S3Client.ListObjects(request);
                    WriteLog("Found " + filesResponse.S3Objects.Count() + " files");
                    if (filesResponse.IsTruncated)
                    {
                        WriteLog("there are additional pages of files");
                    }
                    foreach (S3Object file in filesResponse.S3Objects)
                    {
                        bool bolTransfer = false;
                        //See if it exists on Rackspace
                        string uri = RSConnection.StorageUrl + "/" + config.Element("root").Element("RSBackupContainer").Value + "/" + bucket.BucketName + "/" + file.Key;
                        try
                        {
                            var req = (HttpWebRequest)WebRequest.Create(uri);
                            req.Headers.Add("X-Auth-Token", RSConnection.AuthToken);
                            req.Method = "HEAD";
                            //Compare Etags to see if we need to sync
                            using (var resp = req.GetResponse() as HttpWebResponse)
                            {
                                if ("\"" + resp.Headers["eTag"] + "\"" != file.ETag)
                                {
                                    bolTransfer = true;
                                }
                            }
                        }
                        catch (System.Net.WebException e)
                        {
                            // A 404 on HEAD means the object is missing on Rackspace → upload it.
                            if (e.Status == WebExceptionStatus.ProtocolError && ((HttpWebResponse)e.Response).StatusCode == HttpStatusCode.NotFound)
                            {
                                //Item not found, so upload
                                bolTransfer = true;
                            }
                            //WriteLog("End Request to " + uri);
                        }
                        if (file.StorageClass == "GLACIER")
                        {
                            bolTransfer = false; //We can't get things out of Glacier, but they are still listed here.
                        }
                        if (bolTransfer)
                        {
                            WriteLog("Syncing " + file.Key);
                            using (GetObjectResponse getResponse = S3Client.GetObject(new GetObjectRequest().WithBucketName(bucket.BucketName).WithKey(file.Key)))
                            {
                                using (Stream s = getResponse.ResponseStream)
                                {
                                    //We can stream right from s3 to CF, no need to store in memory or filesystem.
                                    var req = (HttpWebRequest)WebRequest.Create(uri);
                                    req.Headers.Add("X-Auth-Token", RSConnection.AuthToken);
                                    req.Method = "PUT";
                                    req.SendChunked = true;
                                    req.AllowWriteStreamBuffering = false;
                                    req.Timeout = -1;
                                    using (Stream stream = req.GetRequestStream())
                                    {
                                        byte[] data = new byte[8192];
                                        int bytesRead = 0;
                                        while ((bytesRead = s.Read(data, 0, data.Length)) > 0)
                                        {
                                            stream.Write(data, 0, bytesRead);
                                        }
                                        stream.Flush();
                                        stream.Close();
                                    }
                                    req.GetResponse().Close();
                                }
                            }
                            intTransferred++;
                            bytesTransferred += file.Size;
                        }
                        else
                        {
                            WriteLog("Skipping " + file.Key);
                            intSkipped++;
                        }
                        //Check our exit condition
                        if (!keepRunning)
                        {
                            break;
                        }
                    }
                    //Loop if there is more than 1000 files
                    if (filesResponse.IsTruncated)
                    {
                        request.Marker = filesResponse.NextMarker;
                    }
                    else
                    {
                        request = null;
                    }
                    if (!keepRunning)
                    {
                        break;
                    }
                } while (request != null);
            }
        }
        if (!keepRunning)
        {
            break;
        }
    }

    //Now get all the Rackspace containers and stream them to Amazon
    WriteLog("Listing CF Containers");
    List <string> lstContainers = RSConnection.GetContainers();
    WriteLog("Found " + lstContainers.Count() + " containers");
    foreach (string container in lstContainers)
    {
        if (container == config.Element("root").Element("RSBackupContainer").Value)
        {
            WriteLog("Skipping " + container + " as backup folder");
        }
        else if (excludeContainers.Contains(container))
        {
            WriteLog("Skipping " + container + " as in exclusions");
        }
        else
        {
            WriteLog("Processing " + container);
            XmlDocument containerInfo = RSConnection.GetContainerInformationXml(container);
            do
            {
                int filesCount = containerInfo.GetElementsByTagName("object").Count;
                WriteLog("Found " + filesCount + " files");
                foreach (XmlNode file in containerInfo.GetElementsByTagName("object"))
                {
                    bool bolTransfer = false;
                    string strBucketName = config.Element("root").Element("S3BackupBucket").Value;
                    string strKey = container + file.SelectSingleNode("name").InnerText;
                    //See if the file exists on s3
                    try
                    {
                        GetObjectMetadataResponse metaResp = S3Client.GetObjectMetadata(new GetObjectMetadataRequest().WithBucketName(strBucketName).WithKey(strKey));
                        //Compare the etags
                        if (metaResp.ETag != "\"" + file.SelectSingleNode("hash").InnerText + "\"")
                        {
                            bolTransfer = true;
                        }
                    }
                    catch (Amazon.S3.AmazonS3Exception e)
                    {
                        // Missing object (or any metadata failure) → transfer it.
                        bolTransfer = true;
                    }
                    if (bolTransfer)
                    {
                        WriteLog("Syncing " + file.SelectSingleNode("name").InnerText);
                        //God the C# binding sucks, so let's stream manually
                        string uri = RSConnection.StorageUrl + "/" + container + "/" + file.SelectSingleNode("name").InnerText;
                        var req = (HttpWebRequest)WebRequest.Create(uri);
                        req.Headers.Add("X-Auth-Token", RSConnection.AuthToken);
                        req.Method = "GET";
                        using (var resp = req.GetResponse() as HttpWebResponse)
                        {
                            using (Stream s = resp.GetResponseStream())
                            {
                                // Hand-rolled AWS v2 signature: HMAC-SHA1 over the
                                // canonical PUT string, sent as an Authorization header.
                                string today = String.Format("{0:ddd,' 'dd' 'MMM' 'yyyy' 'HH':'mm':'ss' 'zz00}", DateTime.Now);
                                string stringToSign = "PUT\n" + "\n" + file.SelectSingleNode("content_type").InnerText + "\n" + "\n" + "x-amz-date:" + today + "\n" + "/" + strBucketName + "/" + strKey;
                                Encoding ae = new UTF8Encoding();
                                HMACSHA1 signature = new HMACSHA1(ae.GetBytes(AWSSecret));
                                string encodedCanonical = Convert.ToBase64String(signature.ComputeHash(ae.GetBytes(stringToSign)));
                                string authHeader = "AWS " + AWSKey + ":" + encodedCanonical;
                                string uriS3 = "https://" + strBucketName + ".s3.amazonaws.com/" + strKey;
                                var reqS3 = (HttpWebRequest)WebRequest.Create(uriS3);
                                reqS3.Headers.Add("Authorization", authHeader);
                                reqS3.Headers.Add("x-amz-date", today);
                                reqS3.ContentType = file.SelectSingleNode("content_type").InnerText;
                                reqS3.ContentLength = Convert.ToInt32(file.SelectSingleNode("bytes").InnerText);
                                reqS3.Method = "PUT";
                                reqS3.AllowWriteStreamBuffering = false;
                                if (reqS3.ContentLength == -1L)
                                {
                                    reqS3.SendChunked = true;
                                }
                                // Stream the Rackspace body straight into the S3 PUT.
                                using (Stream streamS3 = reqS3.GetRequestStream())
                                {
                                    byte[] data = new byte[32768];
                                    int bytesRead = 0;
                                    while ((bytesRead = s.Read(data, 0, data.Length)) > 0)
                                    {
                                        streamS3.Write(data, 0, bytesRead);
                                    }
                                    streamS3.Flush();
                                    streamS3.Close();
                                }
                                reqS3.GetResponse().Close();
                            }
                        }
                        intTransferred++;
                        bytesTransferred += Convert.ToInt64(file.SelectSingleNode("bytes").InnerText);
                    }
                    else
                    {
                        WriteLog("Skipping " + file.SelectSingleNode("name").InnerText);
                        intSkipped++;
                    }
                    //Check our exit condition
                    if (!keepRunning)
                    {
                        break;
                    }
                }
                if (filesCount < 10000)
                {
                    // Fewer than a full page: listing is complete.
                    containerInfo = null;
                }
                else
                {
                    //Fetch the next list, but the Rackspace binding doesn't support markers with XML responses....
                    try
                    {
                        string uri = RSConnection.StorageUrl + "/" + container + "?format=xml&marker=" + Uri.EscapeUriString(containerInfo.FirstChild.NextSibling.LastChild.SelectSingleNode("name").InnerText);
                        var req = (HttpWebRequest)WebRequest.Create(uri);
                        req.Headers.Add("X-Auth-Token", RSConnection.AuthToken);
                        req.Method = "GET";
                        using (var resp = req.GetResponse() as HttpWebResponse)
                        {
                            using (var reader = new System.IO.StreamReader(resp.GetResponseStream(), ASCIIEncoding.ASCII))
                            {
                                string responseText = reader.ReadToEnd();
                                containerInfo.LoadXml(responseText);
                            }
                        }
                    }
                    catch (System.Net.WebException e)
                    {
                        // 404 on the marker page means there are no further pages.
                        if (e.Status == WebExceptionStatus.ProtocolError && ((HttpWebResponse)e.Response).StatusCode == HttpStatusCode.NotFound)
                        {
                            containerInfo = null;
                        }
                    }
                }
            } while (containerInfo != null);
        }
    }
    if (keepRunning)
    {
        WriteLog("Completed");
        PrintSummary();
    }
}
/// <summary>
/// Downloads an object from the configured bucket (BUCKET_NAME) into an in-memory stream.
/// </summary>
/// <param name="s3Client">The S3 client to use. NOTE: this method disposes the client
/// before returning (pre-existing contract, preserved here) — callers must not reuse it.</param>
/// <param name="filekey">The key of the object to fetch; also stored into the static S3_KEY
/// field as a side effect (pre-existing behavior, preserved for existing callers).</param>
/// <returns>A MemoryStream rewound to position 0 containing the object's bytes, or null
/// when the download fails with an AmazonS3Exception.</returns>
public static System.IO.MemoryStream GetFile(AmazonS3 s3Client, string filekey)
{
    using (s3Client)
    {
        S3_KEY = filekey;
        try
        {
            // Dispose the response and its underlying network stream once copied;
            // the original implementation leaked both the GetObjectResponse and the
            // BufferedStream it wrapped around ResponseStream.
            using (GetObjectResponse r = s3Client.GetObject(new GetObjectRequest() { BucketName = BUCKET_NAME, Key = S3_KEY }))
            using (System.IO.Stream source = r.ResponseStream)
            {
                System.IO.MemoryStream file = new System.IO.MemoryStream();
                byte[] buffer = new byte[0x2000];
                int count;
                while ((count = source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    file.Write(buffer, 0, count);
                }
                // Rewind so callers can read the content immediately; the original
                // returned the stream positioned at the end.
                file.Position = 0;
                return file;
            }
        }
        catch (AmazonS3Exception)
        {
            // Failure is deliberately surfaced as null (original swallowed silently).
            // TODO(review): consider logging the exception details before returning.
            return null;
        }
    }
}