public GetObject ( GetObjectRequest request ) : GetObjectResponse | ||
request | GetObjectRequest | Container for the necessary parameters to execute the GetObject service method. |
Returns | GetObjectResponse |
/// <summary>
/// Demo: lists all buckets, downloads every file from the first bucket, uploads a
/// sample file into a new folder, and opens a pre-signed download URL for it.
/// </summary>
static void Main()
{
    // SECURITY FIX: AWS credentials were hard-coded in source here. Use the SDK's
    // default credential chain instead (environment variables, shared credentials
    // file, or instance profile). Never commit access keys.
    AmazonS3Client s3Client = new AmazonS3Client();

    // Display all S3 buckets
    ListBucketsResponse buckets = s3Client.ListBuckets();
    foreach (var bucket in buckets.Buckets)
    {
        Console.WriteLine(bucket.BucketName);
    }

    // Guard: nothing to do if the account has no buckets (indexing [0] would throw).
    if (buckets.Buckets.Count == 0)
    {
        Console.WriteLine("No buckets available.");
        return;
    }

    // Display and download the files in the first S3 bucket
    string bucketName = buckets.Buckets[0].BucketName;
    Console.WriteLine("Objects in bucket '{0}':", bucketName);
    ListObjectsResponse objects = s3Client.ListObjects(new ListObjectsRequest() { BucketName = bucketName });
    foreach (var s3Object in objects.S3Objects)
    {
        Console.WriteLine("\t{0} ({1})", s3Object.Key, s3Object.Size);
        if (s3Object.Size > 0)
        {
            // We have a file (not a directory) --> download it.
            // FIX: dispose the response so the HTTP stream is released per object.
            using (GetObjectResponse objData = s3Client.GetObject(
                new GetObjectRequest() { BucketName = bucketName, Key = s3Object.Key }))
            {
                string s3FileName = new FileInfo(s3Object.Key).Name;
                SaveStreamToFile(objData.ResponseStream, s3FileName);
            }
        }
    }

    // Create a new directory and upload a file in it
    string path = "uploads/new_folder_" + DateTime.Now.Ticks;
    string newFileName = "example.txt";
    string fullFileName = path + "/" + newFileName;
    string fileContents = "This is an example file created through the Amazon S3 API.";
    s3Client.PutObject(new PutObjectRequest() { BucketName = bucketName, Key = fullFileName, ContentBody = fileContents });
    Console.WriteLine("Created a file in Amazon S3: {0}", fullFileName);

    // Share the uploaded file and get a download URL
    string uploadedFileUrl = s3Client.GetPreSignedURL(new GetPreSignedUrlRequest()
    {
        BucketName = bucketName,
        Key = fullFileName,
        Expires = DateTime.Now.AddYears(5)
    });
    Console.WriteLine("File download URL: {0}", uploadedFileUrl);
    System.Diagnostics.Process.Start(uploadedFileUrl);
}
/// <summary>
/// Downloads S3 object <c>key</c> from <c>bucket</c> into the local path <c>file</c>,
/// retrying on failure until the <c>retry</c> counter is exhausted.
/// </summary>
/// <param name="obj">Opaque state passed through on retry (unused by the copy itself).</param>
public void Run(Object obj)
{
    // counter for retries
    if (retry > 0)
    {
        // make the amazon client
        AmazonS3Client s3Client = new AmazonS3Client(awsKey, awsSecret, RegionEndpoint.GetBySystemName(region));
        try
        {
            // request object with file
            GetObjectRequest req = new GetObjectRequest();
            req.BucketName = bucket;
            req.Key = key;

            // FIX: wrap both streams in using blocks — the original only closed
            // them on the success path, leaking the response and the local file
            // handle whenever the copy threw (which is exactly the retry path).
            using (GetObjectResponse res = s3Client.GetObject(req))
            {
                // test for paths in key name and mirror them locally
                if (file.IndexOf('/') > 0)
                {
                    string filepath = file.Remove(file.LastIndexOf('/')).Replace('/', '\\');
                    if (!Directory.Exists(filepath))
                    {
                        Directory.CreateDirectory(filepath);
                    }
                }

                // establish a local file and transfer the stream into it
                using (FileStream fs = File.Create(file.Replace('/', '\\')))
                {
                    byte[] buffer = new byte[8 * 1024];
                    int len;
                    while ((len = res.ResponseStream.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        fs.Write(buffer, 0, len);
                    }
                }
            }

            // feedback
            Console.WriteLine("File '" + key + "' in bucket '" + bucket + "' has been downloaded");
        }
        catch (Exception e)
        {
            // output error
            Console.WriteLine("Warning getting file '" + key + "' in bucket '" + bucket + "': " + e.Message + " (operation will be retried)");
            // lower the counter and rerun
            retry = retry - 1;
            Run(obj);
        }
    }
    else
    {
        // exceeded retries, time to output error
        // FIX: message said "adding" but this method downloads ("gets") the file.
        Console.WriteLine("Error getting file '" + file + "' in bucket '" + bucket + "'");
    }
}
/// <summary>
/// Reads the stored version key for the given app on the given target from S3.
/// Returns null when the version-info object does not exist (HTTP 404).
/// </summary>
public Guid? GetTargetAppVersion(Guid targetKey, Guid appKey)
{
    try
    {
        using (var client = new AmazonS3Client(Context.AwsAccessKeyId, Context.AwsSecretAccessKey))
        {
            var request = new GetObjectRequest()
            {
                BucketName = Context.BucketName,
                Key = GetTargetAppVersionInfoPath(targetKey, appKey),
            };
            using (var res = client.GetObject(request))
            using (var stream = res.ResponseStream)
            {
                return Utils.Serialisation.ParseKey(stream);
            }
        }
    }
    catch (AmazonS3Exception awsEx)
    {
        // A missing object simply means no version has been recorded yet.
        if (awsEx.StatusCode == System.Net.HttpStatusCode.NotFound)
        {
            return null;
        }
        throw new DeploymentException(string.Format("Failed getting version for app with key \"{0}\" and target with the key \"{1}\".", appKey, targetKey), awsEx);
    }
}
/// <summary>
/// Round-trip demo: uploads the test file, downloads it back, then opens a
/// one-hour pre-signed URL for it. Errors are reported to the console.
/// </summary>
static void Main(string[] args)
{
    try
    {
        var client = new AmazonS3Client();

        var putRequest = new PutObjectRequest { BucketName = BUCKET_NAME, FilePath = TEST_FILE };
        PutObjectResponse putResponse = client.PutObject(putRequest);

        var getRequest = new GetObjectRequest { BucketName = BUCKET_NAME, Key = TEST_FILE };
        GetObjectResponse getResponse = client.GetObject(getRequest);
        getResponse.WriteResponseStreamToFile(@"c:\talk\" + TEST_FILE);

        var urlRequest = new GetPreSignedUrlRequest { BucketName = BUCKET_NAME, Key = TEST_FILE, Expires = DateTime.Now.AddHours(1) };
        var url = client.GetPreSignedURL(urlRequest);
        OpenURL(url);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
}
/// <summary>
/// Downloads the Bermuda config from S3, deserializes it into a compute node,
/// patches in a default table DataType when missing, and initializes the node.
/// Returns null when the download or deserialization fails (error is traced).
/// </summary>
public IComputeNode GetComputeNode()
{
    IComputeNode compute_node = null;
    try
    {
        //amazon client
        using (var client = new AmazonS3Client())
        {
            //download request
            using (var response = client.GetObject(new GetObjectRequest()
                .WithBucketName(AmazonBucket)
                .WithKey(BermudaConfig)))
            {
                using (StreamReader reader = new StreamReader(response.ResponseStream))
                {
                    //read the file
                    string data = reader.ReadToEnd();
                    //deserialize
                    compute_node = new ComputeNode().DeserializeComputeNode(data);
                    // NOTE(review): this chain throws NullReferenceException if the
                    // config contains no catalogs or no tables — it assumes a fully
                    // populated config; confirm upstream guarantees. The NRE would
                    // be swallowed by the catch below, returning null.
                    if(compute_node.Catalogs.Values.Cast<ICatalog>().FirstOrDefault().CatalogMetadata.Tables.FirstOrDefault().Value.DataType == null)
                        compute_node.Catalogs.Values.Cast<ICatalog>().FirstOrDefault().CatalogMetadata.Tables.FirstOrDefault().Value.DataType = typeof(UDPTestDataItems);
                    compute_node.Init(CurrentInstanceIndex, AllNodeEndpoints.Count());
                }
            }
        }
    }
    catch (Exception ex)
    {
        // Best-effort: trace and fall through to return null.
        Trace.WriteLine(ex.ToString());
    }
    return compute_node;
}
/// <summary>
/// Downloads the Bermuda config from S3, deserializes it into a compute node and
/// initializes it. Returns null when anything fails (the error is traced).
/// </summary>
public IComputeNode GetComputeNode()
{
    IComputeNode node = null;
    try
    {
        //amazon client
        using (var client = new AmazonS3Client())
        {
            //download request
            var request = new GetObjectRequest().WithBucketName(AmazonBucket).WithKey(BermudaConfig);
            using (var response = client.GetObject(request))
            using (var reader = new StreamReader(response.ResponseStream))
            {
                //read and deserialize the config file
                var serialized = reader.ReadToEnd();
                node = new ComputeNode().DeserializeComputeNode(serialized);
                node.Init(CurrentInstanceIndex, AllNodeEndpoints.Count());
            }
        }
    }
    catch (Exception ex)
    {
        Trace.WriteLine(ex.ToString());
    }
    return node;
}
/// <summary>
/// Uploads a photo (via PutObj), then downloads it back into the local
/// Pictures folder and waits for a key press.
/// </summary>
public static void Main(string[] args)
{
    // Create a client
    var client = new AmazonS3Client();
    var time = PutObj(client);
    System.Console.WriteLine(time);

    // Build the GetObject request for the object we just uploaded
    var getObjRequest = new GetObjectRequest
    {
        BucketName = "com.loofah.photos",
        Key = time
    };

    // Issue request and remember to dispose of the response
    using (var getObjResponse = client.GetObject(getObjRequest))
    {
        getObjResponse.WriteResponseStreamToFile("C:\\Users\\Ryan\\Pictures\\" + time + ".jpg", false);
    }
    System.Console.Read();
}
// Sets up the student's input bucket with sample data files retrieved from the lab bucket.
// Each lab object is read fully as text and re-uploaded under its bare file name
// (path prefix stripped) into the student input bucket.
public static void Setup(AmazonS3Client s3ForStudentBuckets)
{
    RegionEndpoint region = RegionEndpoint.USWest2;
    AmazonS3Client s3ForLabBucket;
    string textContent = null;
    bool exist = false; // NOTE(review): never used — candidate for removal
    s3ForLabBucket = new AmazonS3Client(region);
    DataTransformer.CreateBucket(DataTransformer.InputBucketName);
    for (int i = 0; i < labBucketDataFileKeys.Length; i++)
    {
        GetObjectRequest requestForStream = new GetObjectRequest
        {
            BucketName = labS3BucketName,
            Key = labBucketDataFileKeys[i]
        };
        using (GetObjectResponse responseForStream = s3ForLabBucket.GetObject(requestForStream))
        {
            using (StreamReader reader = new StreamReader(responseForStream.ResponseStream))
            {
                // Reads the whole object as text — assumes text-only sample data;
                // binary objects would be corrupted by this round-trip (confirm).
                textContent = reader.ReadToEnd();
                PutObjectRequest putRequest = new PutObjectRequest
                {
                    BucketName = DataTransformer.InputBucketName,
                    Key = labBucketDataFileKeys[i].ToString().Split('/').Last(),
                    ContentBody = textContent
                };
                // Preserve the original byte count as user metadata.
                putRequest.Metadata.Add("ContentLength", responseForStream.ContentLength.ToString());
                s3ForStudentBuckets.PutObject(putRequest);
            }
        }
    }
}
/// <summary>
/// Fetches an S3 object; the caller owns (and must dispose) the returned
/// response. Returns null when the object does not exist.
/// </summary>
public static GetObjectResponse GetFile(AwsCommonParams commonParams, string bucketName, string filePath)
{
    // Keys are stored without a leading '/', so normalize before the lookup;
    // this mirrors the edit counterpart routine used for verification.
    var normalizedPath = filePath.StartsWith("/") ? filePath.Substring(1) : filePath;

    using (var s3 = new Amazon.S3.AmazonS3Client(
        commonParams.ResolveCredentials(), commonParams.RegionEndpoint))
    {
        var request = new Amazon.S3.Model.GetObjectRequest
        {
            BucketName = bucketName,
            Key = normalizedPath,
        };
        try
        {
            return s3.GetObject(request);
        }
        catch (AmazonS3Exception ex) when (ex.StatusCode == HttpStatusCode.NotFound)
        {
            // Missing object is an expected outcome, not an error.
            return null;
        }
    }
}
/// <summary>
/// Loads and parses the entity index stored at <paramref name="path"/> in S3.
/// A missing object yields an empty index; other S3 failures are wrapped in
/// a DeploymentException.
/// </summary>
public EntityIndex LoadIndex(string path)
{
    try
    {
        using (var client = new AmazonS3Client(Context.AwsAccessKeyId, Context.AwsSecretAccessKey))
        {
            var request = new GetObjectRequest()
            {
                BucketName = Context.BucketName,
                Key = path,
            };
            using (var res = client.GetObject(request))
            {
                var index = new EntityIndex() { ETag = res.ETag };
                using (var stream = res.ResponseStream)
                {
                    index.Entries = ParseIndex(stream);
                }
                return index;
            }
        }
    }
    catch (AmazonS3Exception awsEx)
    {
        if (awsEx.StatusCode != System.Net.HttpStatusCode.NotFound)
        {
            throw new DeploymentException("Failed loading group index", awsEx);
        }
        // No index stored yet: behave as if it were empty.
        return new EntityIndex() { Entries = new List<EntityIndexEntry>(), ETag = "" };
    }
}
/// <summary>
/// Downloads every object under <paramref name="s3FolderName"/> in the bucket to
/// <paramref name="saveFolder"/> (via a ".tran" temp file moved into place with
/// retries), optionally deleting each object from S3 after download.
/// </summary>
/// <param name="onFileDownload">Optional hook invoked with each GetObjectResponse before it is written to disk.</param>
/// <param name="onFileDelete">Optional hook invoked with each DeleteObjectResponse.</param>
/// <exception cref="ArgumentException">When <paramref name="saveFolder"/> does not exist.</exception>
public static void DownloadFiles(string accessKey, string secretKey, string bucketName, string s3FolderName, string saveFolder, bool removeFromS3AfterDownload, Action<GetObjectResponse> onFileDownload = null, Action<DeleteObjectResponse> onFileDelete = null)
{
    Log.DebugFormat("Starting download of files like '{0}' to '{1}'", bucketName + "/" + s3FolderName, saveFolder);
    if (!Directory.Exists(saveFolder))
    {
        Log.ErrorFormat("Couldn't find folder {0}", saveFolder);
        throw new ArgumentException(string.Format("Could not find folder {0}", saveFolder));
    }
    using (var client = new AmazonS3Client(accessKey, secretKey, new AmazonS3Config { ServiceURL = "http://s3.amazonaws.com" }))
    {
        var marker = default(string);
        do
        {
            var listResponse = ListFiles(accessKey, secretKey, bucketName, s3FolderName, marker);
            foreach (var f in listResponse.S3Objects.Select(x => x.Key))
            {
                var req = new GetObjectRequest { BucketName = bucketName, Key = f };
                var match = Regex.Match(f, FilepathPattern);
                var filename = match.Groups["filename_without_extension"].Value;
                var extension = match.Groups["extension"].Value;
                var savePath = Path.Combine(saveFolder, filename + "." + extension);
                var transferPath = savePath + ".tran";
                Log.DebugFormat("Downloading '{0}' to '{1}'", bucketName + "/" + f, transferPath);

                // FIX: dispose each GetObjectResponse — the original leaked one
                // HTTP response stream per downloaded file.
                using (var res = client.GetObject(req))
                {
                    if (onFileDownload != null)
                    {
                        Log.Debug("Running onFileDownload filter");
                        onFileDownload(res);
                    }
                    res.WriteResponseStreamToFile(transferPath, false);
                }

                if (removeFromS3AfterDownload)
                {
                    var deleteRequest = new DeleteObjectRequest { BucketName = bucketName, Key = f };
                    Log.DebugFormat("Deleting '{0}' from S3", bucketName + "/" + f);
                    var deleteResponse = client.DeleteObject(deleteRequest);
                    if (onFileDelete != null)
                    {
                        Log.Debug("Running onFileDelete filter");
                        onFileDelete(deleteResponse);
                    }
                }

                //try to move the file to it's original save spot
                Log.DebugFormat("Moving file '{0}' to '{1}'", transferPath, savePath);
                for (var retryCount = 0; retryCount < 3; retryCount++)
                {
                    try
                    {
                        File.Move(transferPath, savePath);
                        break;
                    }
                    catch (Exception ex)
                    {
                        if (retryCount == 2)
                        {
                            Log.Error("Failed to move file. Exceeded retry count", ex);
                            throw;
                        }
                        Log.ErrorFormat("Failed to move file from '{0}', to '{1}'. Retry: {2}", transferPath, savePath, retryCount);
                        Thread.Sleep(1000);
                    }
                }
            }
            // Continue paging while the listing is truncated.
            marker = listResponse.IsTruncated ? listResponse.NextMarker : default(string);
        } while (marker != default(string));
    }
    Log.Debug("Finished downloading files from s3");
}
/// <summary>
/// Handler for the Download button: after user confirmation, backs up the current
/// minecraft.jar and downloads the selected release from S3 over it.
/// </summary>
private void DownloadClick(object sender, RoutedEventArgs e)
{
    TaskDialogOptions o = new TaskDialogOptions
    {
        ShowMarqueeProgressBar = true,
        MainInstruction = "Press OK to download selected release... (MCLauncher will freeze! Do not close!)",
        MainIcon = VistaTaskDialogIcon.Information,
        EnableCallbackTimer = true,
        CustomButtons = new[] { "Cancel", "OK" }
    };
    // NOTE(review): null credentials — presumably the bucket allows anonymous
    // reads; confirm, otherwise load real credentials from configuration.
    string SecretKey = null;
    string PublicKey = null;
    Release Selected = (Release)JarList.SelectedItem;
    AmazonS3Client Client = new AmazonS3Client(PublicKey, SecretKey);
    GetObjectRequest Request = new GetObjectRequest { BucketName = "assets.minecraft.net", Key = Selected.Key };

    TaskDialogResult tdr = TaskDialog.Show(o);
    if (tdr.CustomButtonResult == 0)
        return;

    Directory.CreateDirectory(Globals.LauncherDataPath + "/Minecraft/bin/");
    try
    {
        // Keep a backup of the current jar before overwriting it.
        File.Copy(Globals.LauncherDataPath + "/Minecraft/bin/minecraft.jar", Globals.LauncherDataPath + "/Minecraft/OldMinecraft.jar", true);
        File.Delete(Globals.LauncherDataPath + "/Minecraft/bin/minecraft.jar");
    }
    catch (FileNotFoundException)
    {
        // No existing jar to back up — first download; nothing to do.
    }

    // FIX: dispose the response so the HTTP stream is released after the write.
    using (GetObjectResponse Result = Client.GetObject(Request))
    {
        Result.WriteResponseStreamToFile(Globals.LauncherDataPath + "/Minecraft/bin/minecraft.jar");
    }
}
/// <summary>
/// Downloads the document with the given id: looks up its name, MIME type and S3
/// key in MySQL, fetches the object from the "hotnotes" bucket and returns it as
/// a file download named "&lt;Nom&gt;.&lt;extension&gt;". Falls through to the
/// default view when the id is unknown.
/// </summary>
public ActionResult Descarregar(int Id)
{
    Log.Info("Descarregar document " + Id);
    using (MySqlConnection connection = new MySqlConnection(ConnectionString))
    // FIX: command, reader and memory stream are now disposed (the originals leaked).
    using (MySqlCommand cmd = new MySqlCommand("SELECT Nom, MimeType, KeyAmazon FROM Documents WHERE Id = @Id", connection))
    {
        cmd.Parameters.AddWithValue("@Id", Id);
        connection.Open();
        using (MySqlDataReader reader = cmd.ExecuteReader())
        {
            if (reader.Read())
            {
                string Nom = reader.GetString(reader.GetOrdinal("Nom"));
                string MimeType = reader.GetString(reader.GetOrdinal("MimeType"));
                string KeyAmazon = reader.GetString(reader.GetOrdinal("KeyAmazon"));
                using (IAmazonS3 client = new AmazonS3Client(AmazonEndPoint))
                {
                    GetObjectRequest getRequest = new GetObjectRequest();
                    getRequest.BucketName = "hotnotes";
                    getRequest.Key = KeyAmazon;
                    using (GetObjectResponse response = client.GetObject(getRequest))
                    using (MemoryStream ms = new MemoryStream())
                    {
                        response.ResponseStream.CopyTo(ms);
                        // Use the extension of the stored S3 key for the download name.
                        string[] parts = response.Key.Split('.');
                        string extensio = parts[parts.Length - 1];
                        return File(ms.ToArray(), MimeType, Nom + "." + extensio);
                    }
                }
            }
        }
    }
    return View();
}
/// <summary>
/// Downloads all files of the stored version <paramref name="key"/> into
/// <paramref name="directory"/>, paging through the S3 listing and fetching
/// objects in parallel. The ".info" marker object is skipped.
/// </summary>
/// <param name="mergeExistingFiles">When false, the target directory must be empty.</param>
/// <exception cref="ArgumentException">Directory missing or not empty.</exception>
/// <exception cref="VersionNotFoundException">No such version.</exception>
/// <exception cref="DeploymentException">Any S3/download failure.</exception>
public void PullVersion(Guid key, DirectoryInfo directory, bool mergeExistingFiles = false)
{
    if (!directory.Exists)
        throw new ArgumentException("Directory must exist.", "directory");
    if (!VersionExists(key))
        throw new VersionNotFoundException(string.Format("Could not find the version with key: {0}", key));
    if (!mergeExistingFiles)
    {
        if (directory.EnumerateFileSystemInfos().Any())
            throw new ArgumentException("Target directory is not empty.");
    }
    try
    {
        var ignorePaths = new string[1] { ".info" };
        using (var client = new AmazonS3Client(Context.AwsAccessKeyId, Context.AwsSecretAccessKey))
        {
            bool more = true;
            string lastResult = null;
            string prefix = string.Format("{0}/{1}/", STR_VERSIONS_CONTAINER_PATH, key.ToString("N"));
            while (more)
            {
                // FIX: the continuation key belongs in Marker, not Delimiter —
                // Delimiter groups keys and would silently break pagination.
                using (var listResponse = client.ListObjects(new ListObjectsRequest()
                {
                    BucketName = Context.BucketName,
                    Prefix = prefix,
                    Marker = lastResult,
                }))
                {
                    listResponse.S3Objects
                        .Where(obj => !ignorePaths.Any(ignore => obj.Key == String.Format("{0}{1}", prefix, ignore)))
                        .AsParallel()
                        .ForAll(s3obj =>
                        {
                            using (var getResponse = client.GetObject(new GetObjectRequest()
                            {
                                BucketName = Context.BucketName,
                                Key = s3obj.Key,
                            }))
                            {
                                getResponse.WriteResponseStreamToFile(Utils.Files.GetLocalAbsolutePath(s3obj.Key, prefix, directory.FullName));
                            }
                        });
                    // FIX: only advance the marker while truncated; the original also
                    // called Last() unconditionally, which throws on an empty page.
                    more = listResponse.IsTruncated;
                    if (more)
                    {
                        lastResult = listResponse.S3Objects.Last().Key;
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        // FIX: message said "pushing" in a pull operation.
        throw new DeploymentException(string.Format("Failed pulling version with key \"{0}\"", key), ex);
    }
}
/// <summary>
/// Returns true when the given object exists and is readable in the bucket.
/// Any failure (missing object, auth, network) is treated as "does not exist".
/// </summary>
public bool S3ObjectExists(string bucket, string key)
{
    string AWSAccessKey = ConfigurationManager.AppSettings["AWSAccessKey"];
    string AWSSecretKey = ConfigurationManager.AppSettings["AWSSecretKey"];
    using (AmazonS3Client client = new AmazonS3Client(AWSAccessKey, AWSSecretKey))
    {
        GetObjectRequest request = new GetObjectRequest();
        request.BucketName = bucket;
        request.Key = key;
        try
        {
            // FIX: dispose the response — the original leaked the HTTP stream.
            // (GetObjectMetadata would avoid downloading the body entirely.)
            using (S3Response response = client.GetObject(request))
            {
                if (response.ResponseStream != null)
                {
                    return true;
                }
            }
        }
        catch (AmazonS3Exception)
        {
            return false;
        }
        catch (WebException)
        {
            return false;
        }
        catch (Exception)
        {
            return false;
        }
    }
    return false;
}
/// <summary>
/// Downloads an object from S3 and returns its contents as a string.
/// </summary>
/// <param name="PathName">Bucket name.</param>
/// <param name="FileName">Object key.</param>
/// <param name="Data">Receives the file contents, or null on failure.</param>
/// <returns>True when the download and read succeeded.</returns>
public bool ReadFile(string PathName, string FileName, out string Data)
{
    Data = null;
    try
    {
        //amazon client
        using (var client = new AmazonS3Client())
        {
            //download request
            var request = new GetObjectRequest().WithBucketName(PathName).WithKey(FileName);
            using (var response = client.GetObject(request))
            using (var reader = new StreamReader(response.ResponseStream))
            {
                //read the whole object
                Data = reader.ReadToEnd();
                return true;
            }
        }
    }
    catch (Exception ex)
    {
        Trace.WriteLine(ex.ToString());
        return false;
    }
}
/// <summary>Reads an AWS S3 object and returns its contents as a seekable stream.</summary>
/// <param name="pKey">Object key within the configured bucket.</param>
/// <returns>A MemoryStream positioned at 0; the caller owns it.</returns>
public Stream ReadObjectData(string pKey)
{
    try
    {
        using (AmazonS3Client client = new AmazonS3Client())
        {
            GetObjectRequest request = new GetObjectRequest { BucketName = strAwsBucketName, Key = pKey };
            // FIX: buffer the payload before the client/response are disposed —
            // the original returned the live ResponseStream out of the using
            // block, handing callers a stream backed by a disposed client.
            using (GetObjectResponse response = client.GetObject(request))
            {
                var buffer = new MemoryStream();
                response.ResponseStream.CopyTo(buffer);
                buffer.Position = 0;
                return buffer;
            }
        }
    }
    catch (AmazonS3Exception)
    {
        // FIX: `throw amazonS3Exception;` reset the stack trace; bare throw preserves it.
        throw;
    }
}
/// <summary>
/// Fetches the log entry stored in S3 for the given instance, timestamp and
/// status. Throws LogEntryNotFoundException on 404, DeploymentException on any
/// other S3 failure.
/// </summary>
public LogEntry GetLogEntry(Guid instanceKey, DateTime timestamp, LogStatus status)
{
    try
    {
        using (var client = new AmazonS3Client(Context.AwsAccessKeyId, Context.AwsSecretAccessKey))
        {
            var request = new GetObjectRequest()
            {
                BucketName = Context.BucketName,
                Key = GetInstanceLogEntryPath(instanceKey, timestamp, status),
            };
            using (var res = client.GetObject(request))
            using (var stream = res.ResponseStream)
            {
                return new LogEntry(stream);
            }
        }
    }
    catch (AmazonS3Exception awsEx)
    {
        if (awsEx.StatusCode == System.Net.HttpStatusCode.NotFound)
        {
            throw new LogEntryNotFoundException(string.Format("Could not find the log entry at timestamp {0} for instance with key: {1}", timestamp, instanceKey), awsEx);
        }
        throw new DeploymentException(string.Format("Failed getting the log entry at timestamp {0} for instance with key: {1}", timestamp, instanceKey), awsEx);
    }
}
/// <summary>
/// Downloads an object and returns its contents as a seekable stream the caller owns.
/// </summary>
public Stream GetFileStream(string keyName)
{
    using (var s3 = new AmazonS3Client(_connectionInfo.AccessKey, _connectionInfo.SecretKey, new AmazonS3Config { ServiceURL = "http://s3.amazonaws.com" }))
    {
        var req = new GetObjectRequest
        {
            BucketName = _connectionInfo.BucketName,
            Key = keyName
        };
        // FIX: the original returned the live ResponseStream while disposing the
        // client on exit, leaving callers with a stream backed by a disposed
        // connection. Buffer the payload first and return an independent stream.
        using (var response = s3.GetObject(req))
        {
            var buffer = new MemoryStream();
            response.ResponseStream.CopyTo(buffer);
            buffer.Position = 0;
            return buffer;
        }
    }
}
/// <summary>
/// Loads the instance record stored under the instance container in S3.
/// Throws InstanceNotFoundException on 404, DeploymentException otherwise.
/// </summary>
public Instance GetInstance(Guid instanceKey)
{
    try
    {
        using (var client = new AmazonS3Client(Context.AwsAccessKeyId, Context.AwsSecretAccessKey))
        {
            var infoKey = string.Format("{0}/{1}/{2}", STR_INSTANCES_CONTAINER_PATH, instanceKey.ToString("N"), STR_INFO_FILE_NAME);
            using (var res = client.GetObject(new GetObjectRequest() { BucketName = Context.BucketName, Key = infoKey }))
            using (var stream = res.ResponseStream)
            {
                return new Instance(stream);
            }
        }
    }
    catch (AmazonS3Exception awsEx)
    {
        if (awsEx.StatusCode == System.Net.HttpStatusCode.NotFound)
        {
            throw new InstanceNotFoundException(string.Format("Could not find the instance with key: {0}", instanceKey), awsEx);
        }
        throw new DeploymentException(string.Format("Failed getting instance with key \"{0}\"", instanceKey), awsEx);
    }
}
/* Sample call for upload:-
   byte[] array = new byte[1024*1024*1024];
   Random random = new Random();
   random.NextBytes(array);
   double timeTaken_Upload = Experiment.doRawCloudPerf(array, SynchronizerType.Azure, SynchronizeDirection.Upload, "fooContainer", "fooBlob");
   double timeTaken_Download = Experiment.doRawCloudPerf(array, SynchronizerType.Azure, SynchronizeDirection.Download, "fooContainer", "fooBlob");
*/
// Measures the wall-clock time (ms) to upload or download `input` against Azure
// blob storage or Amazon S3, depending on synchronizerType. Credentials come
// from app settings; container/blob names default to "testingraw"/a new GUID.
// NOTE(review): transfer exceptions are swallowed, so a failed transfer still
// returns a (meaningless) elapsed time — confirm this is intended for the
// benchmark. `exp_directory` is unused here.
public static double doRawCloudPerf(byte[] input, SynchronizerType synchronizerType, SynchronizeDirection syncDirection, string exp_directory, Logger logger, string containerName=null, string blobName=null)
{
    string accountName = ConfigurationManager.AppSettings.Get("AccountName");
    string accountKey = ConfigurationManager.AppSettings.Get("AccountSharedKey");
    DateTime begin=DateTime.Now, end=DateTime.Now;
    if (synchronizerType == SynchronizerType.Azure)
    {
        #region azure download/upload
        if (containerName==null)
            containerName = "testingraw";
        if(blobName==null)
            blobName = Guid.NewGuid().ToString();
        CloudStorageAccount storageAccount = new CloudStorageAccount(new StorageCredentialsAccountAndKey(accountName, accountKey), true);
        CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
        CloudBlobContainer container = blobClient.GetContainerReference(containerName);
        if (syncDirection == SynchronizeDirection.Upload)
        {
            logger.Log("Start Stream Append");
            container.CreateIfNotExist();
            // Timed section: upload only (container creation excluded).
            begin = DateTime.UtcNow;//////////////////////////////////////
            try
            {
                using (MemoryStream memoryStream = new System.IO.MemoryStream(input))
                {
                    CloudBlockBlob blockBlob = container.GetBlockBlobReference(blobName);
                    blockBlob.UploadFromStream(memoryStream);
                }
            }
            catch (Exception e)
            {
            }
            end = DateTime.UtcNow;//////////////////////////////////////
            logger.Log("End Stream Append");
        }
        if (syncDirection == SynchronizeDirection.Download)
        {
            logger.Log("Start Stream Get");
            logger.Log("Start Stream GetAll");
            try
            {
                CloudBlockBlob blockBlob = container.GetBlockBlobReference(blobName);
                // First download warms the path; only the second one is timed.
                byte[] blobContents = blockBlob.DownloadByteArray();
                //if (File.Exists(blobName))
                //    File.Delete(blobName);
                begin = DateTime.UtcNow;//////////////////////////////////////
                // using (FileStream fs = new FileStream(blobName, FileMode.OpenOrCreate))
                // {
                byte[] contents = blockBlob.DownloadByteArray();
                //     fs.Write(contents, 0, contents.Length);
                // }
            }
            catch (Exception e)
            {
            }
            end = DateTime.UtcNow;//////////////////////////////////////
            logger.Log("End Stream Get");
            logger.Log("End Stream GetAll");
        }
        #endregion
    }
    else if (synchronizerType == SynchronizerType.AmazonS3)
    {
        #region amazon s3 stuff
        if (containerName == null)
            containerName = "testingraw";
        if (blobName == null)
            blobName = Guid.NewGuid().ToString();
        AmazonS3Client amazonS3Client = new AmazonS3Client(accountName, accountKey);
        if (syncDirection == SynchronizeDirection.Upload)
        {
            // NOTE(review): this loop only scans the bucket list; PutBucket is
            // called regardless of whether the bucket already exists — confirm.
            ListBucketsResponse response = amazonS3Client.ListBuckets();
            foreach (S3Bucket bucket in response.Buckets)
            {
                if (bucket.BucketName == containerName)
                {
                    break;
                }
            }
            amazonS3Client.PutBucket(new PutBucketRequest().WithBucketName(containerName));
            // Timed section: in-memory copy + PutObject.
            begin = DateTime.UtcNow;//////////////////////////////////////
            MemoryStream ms = new MemoryStream();
            ms.Write(input, 0, input.Length);
            PutObjectRequest request = new PutObjectRequest();
            request.WithBucketName(containerName);
            request.WithKey(blobName);
            request.InputStream = ms;
            amazonS3Client.PutObject(request);
            end = DateTime.UtcNow;//////////////////////////////////////
        }
        if (syncDirection == SynchronizeDirection.Download)
        {
            if (File.Exists(blobName))
                File.Delete(blobName);
            // Timed section: GetObject + write to a local file.
            begin = DateTime.UtcNow;//////////////////////////////////////
            GetObjectRequest request = new GetObjectRequest();
            request.WithBucketName(containerName);
            request.WithKey(blobName);
            GetObjectResponse response = amazonS3Client.GetObject(request);
            var localFileStream = File.Create(blobName);
            response.ResponseStream.CopyTo(localFileStream);
            localFileStream.Close();
            end = DateTime.UtcNow;//////////////////////////////////////
        }
        #endregion
    }
    else
    {
        throw new InvalidDataException("syncronizer type is not valid");
    }
    return (end - begin).TotalMilliseconds;// return total time to upload in milliseconds
}
/// <summary>
/// Fetches an S3 object; the caller owns (and must dispose) the returned
/// response. Returns null when the object does not exist.
/// </summary>
public static GetObjectResponse GetFile(AwsCommonParams commonParams, string bucketName, string filePath)
{
    // Keys never carry a leading '/'; strip it so we don't create an empty
    // leading path segment (matches the edit counterpart routine).
    var key = filePath;
    if (key.StartsWith("/"))
    {
        key = key.Substring(1);
    }
    using (var s3 = new Amazon.S3.AmazonS3Client(commonParams.ResolveCredentials(), commonParams.RegionEndpoint))
    {
        try
        {
            return s3.GetObject(new Amazon.S3.Model.GetObjectRequest
            {
                BucketName = bucketName,
                Key = key,
            });
        }
        catch (AmazonS3Exception ex) when (ex.StatusCode == HttpStatusCode.NotFound)
        {
            // A missing object is an expected outcome, not an error.
            return null;
        }
    }
}
// Re-keys uploaded ad images: for each temp file name, copies the S3 object to a
// canonical "<adId>_<n><extension>" key with public-read ACL, records an AdImage
// row in the database, then deletes the original temp object from S3.
// NOTE(review): the loop starts at i = 1, so filenames[0] is never processed —
// confirm the first slot is intentionally skipped (e.g. reserved for a primary image).
// NOTE(review): the empty catch swallows every S3/DB error per file.
public void ReplaceAdImages(ref Ad ad, FileName[] filenames)
{
    string newFileName = "";
    int count = 1;
    var id = ad.Id;
    var imaa = db.AdImages.Where(x => x.adId.Equals(id)).Count();
    // Continue numbering after the images already stored for this ad.
    count = imaa + 1;
    for (int i = 1; i < filenames.Length; i++)
    {
        IAmazonS3 client;
        try
        {
            using (client = new AmazonS3Client(Amazon.RegionEndpoint.USEast1))
            {
                GetObjectRequest request = new GetObjectRequest
                {
                    BucketName = _bucketName,
                    Key = _folderName + filenames[i].fileName
                };
                using (GetObjectResponse response = client.GetObject(request))
                {
                    string filename = filenames[i].fileName;
                    // NOTE(review): this checks the LOCAL filesystem, not S3 —
                    // verify that is the intended guard.
                    if (!System.IO.File.Exists(filename))
                    {
                        string extension = System.IO.Path.GetExtension(filenames[i].fileName);
                        newFileName = ad.Id.ToString() + "_" + count + extension;
                        client = new AmazonS3Client(Amazon.RegionEndpoint.USEast1);
                        CopyObjectRequest request1 = new CopyObjectRequest()
                        {
                            SourceBucket = _bucketName,
                            SourceKey = _folderName + filename,
                            DestinationBucket = _bucketName,
                            CannedACL = S3CannedACL.PublicRead,//PERMISSION TO FILE PUBLIC ACCESIBLE
                            DestinationKey = _folderName + newFileName
                        };
                        CopyObjectResponse response1 = client.CopyObject(request1);
                        AdImage image = new AdImage();
                        image.imageExtension = extension;
                        image.adId = ad.Id;
                        db.AdImages.Add(image);
                        db.SaveChanges();
                        count++;
                        DeleteObjectRequest deleteObjectRequest = new DeleteObjectRequest
                        {
                            BucketName = _bucketName,
                            Key = _folderName + filenames[i].fileName
                        };
                        AmazonS3Config config = new AmazonS3Config();
                        config.ServiceURL = "https://s3.amazonaws.com/";
                        using (client = Amazon.AWSClientFactory.CreateAmazonS3Client(
                            _awsAccessKey, _awsSecretKey, config))
                        {
                            client.DeleteObject(deleteObjectRequest);
                        }
                    }
                }
            }
        }
        catch (Exception e)
        {
        }
    }
}
/// <summary>
/// Downloads s3://bucketName/key and writes it to the local path <paramref name="filename"/>.
/// </summary>
public static void DownloadFileFromBucket(string bucketName, string key, string filename)
{
    using (var client = new AmazonS3Client(Settings.AccessKey, Settings.Secret))
    using (var response = client.GetObject(new GetObjectRequest() { BucketName = bucketName, Key = key }))
    {
        response.WriteResponseStreamToFile(filename);
    }
}
/// <summary>
/// Downloads s3://bucketName/key and returns its contents as a string.
/// </summary>
public static string ReadFromBucket(string bucketName, string key)
{
    using (var client = new AmazonS3Client(Settings.AccessKey, Settings.Secret))
    {
        var request = new GetObjectRequest() { BucketName = bucketName, Key = key };
        // FIX: dispose the GetObjectResponse — the original leaked the HTTP
        // stream (it was read but never released).
        using (var response = client.GetObject(request))
        {
            return response.ResponseStream.ToContentString();
        }
    }
}
/// <summary>
/// SDK sample catalog for the S3 object APIs: listing, get, metadata, put
/// (body/file/stream), delete (single and batch), copy, versioning and
/// multipart upload. Each region is an independent, self-contained example.
/// </summary>
public void ObjectSamples()
{
    {
        #region ListObjects Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // List all objects
        ListObjectsRequest listRequest = new ListObjectsRequest
        {
            BucketName = "SampleBucket",
        };
        ListObjectsResponse listResponse;
        do
        {
            // Get a list of objects
            listResponse = client.ListObjects(listRequest);
            foreach (S3Object obj in listResponse.S3Objects)
            {
                Console.WriteLine("Object - " + obj.Key);
                Console.WriteLine(" Size - " + obj.Size);
                Console.WriteLine(" LastModified - " + obj.LastModified);
                Console.WriteLine(" Storage class - " + obj.StorageClass);
            }
            // Set the marker property
            listRequest.Marker = listResponse.NextMarker;
        } while (listResponse.IsTruncated);
        #endregion
    }
    {
        #region GetObject Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // Create a GetObject request
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };
        // Issue request and remember to dispose of the response
        using (GetObjectResponse response = client.GetObject(request))
        {
            using (StreamReader reader = new StreamReader(response.ResponseStream))
            {
                string contents = reader.ReadToEnd();
                Console.WriteLine("Object - " + response.Key);
                Console.WriteLine(" Version Id - " + response.VersionId);
                Console.WriteLine(" Contents - " + contents);
            }
        }
        #endregion
    }
    {
        #region GetObjectMetadata Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // Create a GetObjectMetadata request
        GetObjectMetadataRequest request = new GetObjectMetadataRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };
        // Issue request and view the response
        GetObjectMetadataResponse response = client.GetObjectMetadata(request);
        Console.WriteLine("Content Length - " + response.ContentLength);
        Console.WriteLine("Content Type - " + response.Headers.ContentType);
        if (response.Expiration != null)
        {
            Console.WriteLine("Expiration Date - " + response.Expiration.ExpiryDate);
            Console.WriteLine("Expiration Rule Id - " + response.Expiration.RuleId);
        }
        #endregion
    }
    {
        #region PutObject Sample 1
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // Create a PutObject request
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            ContentBody = "This is sample content..."
        };
        // Put object
        PutObjectResponse response = client.PutObject(request);
        #endregion
    }
    {
        #region PutObject Sample 2
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // Create a PutObject request
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            FilePath = "contents.txt"
        };
        // Put object
        PutObjectResponse response = client.PutObject(request);
        #endregion
    }
    {
        #region PutObject Sample 3
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // Create a PutObject request
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
        };
        using (FileStream stream = new FileStream("contents.txt", FileMode.Open))
        {
            request.InputStream = stream;
            // Put object
            PutObjectResponse response = client.PutObject(request);
        }
        #endregion
    }
    {
        #region DeleteObject Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // Create a DeleteObject request
        DeleteObjectRequest request = new DeleteObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };
        // Issue request
        client.DeleteObject(request);
        #endregion
    }
    {
        #region DeleteObjects Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // Create a DeleteObject request
        DeleteObjectsRequest request = new DeleteObjectsRequest
        {
            BucketName = "SampleBucket",
            Objects = new List<KeyVersion>
            {
                new KeyVersion() {Key = "Item1"},
                // Versioned item
                new KeyVersion() { Key = "Item2", VersionId = "Rej8CiBxcZKVK81cLr39j27Y5FVXghDK", },
                // Item in subdirectory
                new KeyVersion() { Key = "Logs/error.txt"}
            }
        };
        try
        {
            // Issue request
            DeleteObjectsResponse response = client.DeleteObjects(request);
        }
        catch (DeleteObjectsException doe)
        {
            // Catch error and list error details
            DeleteObjectsResponse errorResponse = doe.Response;
            foreach (DeletedObject deletedObject in errorResponse.DeletedObjects)
            {
                Console.WriteLine("Deleted item " + deletedObject.Key);
            }
            foreach (DeleteError deleteError in errorResponse.DeleteErrors)
            {
                Console.WriteLine("Error deleting item " + deleteError.Key);
                Console.WriteLine(" Code - " + deleteError.Code);
                Console.WriteLine(" Message - " + deleteError.Message);
            }
        }
        #endregion
    }
    {
        #region CopyObject Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // Create a CopyObject request
        CopyObjectRequest request = new CopyObjectRequest
        {
            SourceBucket = "SampleBucket",
            SourceKey = "Item1",
            DestinationBucket = "AnotherBucket",
            DestinationKey = "Copy1",
            CannedACL = S3CannedACL.PublicRead
        };
        // Issue request
        client.CopyObject(request);
        #endregion
    }
    {
        // NOTE(review): this block duplicates the CopyObject sample above verbatim.
        #region CopyObject Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // Create a CopyObject request
        CopyObjectRequest request = new CopyObjectRequest
        {
            SourceBucket = "SampleBucket",
            SourceKey = "Item1",
            DestinationBucket = "AnotherBucket",
            DestinationKey = "Copy1",
            CannedACL = S3CannedACL.PublicRead
        };
        // Issue request
        client.CopyObject(request);
        #endregion
    }
    {
        #region ListVersions Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // Turn versioning on for a bucket
        client.PutBucketVersioning(new PutBucketVersioningRequest
        {
            BucketName = "SampleBucket",
            VersioningConfig = new S3BucketVersioningConfig { Status = "Enable" }
        });
        // Populate bucket with multiple items, each with multiple versions
        PopulateBucket(client, "SampleBucket");
        // Get versions
        ListVersionsRequest request = new ListVersionsRequest
        {
            BucketName = "SampleBucket"
        };
        // Make paged ListVersions calls
        ListVersionsResponse response;
        do
        {
            response = client.ListVersions(request);
            // View information about versions
            foreach (var version in response.Versions)
            {
                Console.WriteLine("Key = {0}, Version = {1}, IsLatest = {2}, LastModified = {3}, Size = {4}",
                    version.Key,
                    version.VersionId,
                    version.IsLatest,
                    version.LastModified,
                    version.Size);
            }
            request.KeyMarker = response.NextKeyMarker;
            request.VersionIdMarker = response.NextVersionIdMarker;
        } while (response.IsTruncated);
        #endregion
    }
    {
        #region Multipart Upload Sample
        int MB = (int)Math.Pow(2, 20);
        // Create a client
        AmazonS3Client client = new AmazonS3Client();
        // Define input stream
        Stream inputStream = Create13MBDataStream();
        // Initiate multipart upload
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };
        InitiateMultipartUploadResponse initResponse = client.InitiateMultipartUpload(initRequest);
        // Upload part 1
        UploadPartRequest uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MB,
            InputStream = inputStream
        };
        UploadPartResponse up1Response = client.UploadPart(uploadRequest);
        // Upload part 2
        uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MB,
            InputStream = inputStream
        };
        UploadPartResponse up2Response = client.UploadPart(uploadRequest);
        // Upload part 3 (final part: no PartSize, consumes the rest of the stream)
        uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream
        };
        UploadPartResponse up3Response = client.UploadPart(uploadRequest);
        // List parts for current upload
        ListPartsRequest listPartRequest = new ListPartsRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse = client.ListParts(listPartRequest);
        Debug.Assert(listPartResponse.Parts.Count == 3);
        // Complete the multipart upload
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartETags = new List<PartETag>
            {
                new PartETag { ETag = up1Response.ETag, PartNumber = 1 },
                new PartETag { ETag = up2Response.ETag, PartNumber = 2 },
                new PartETag { ETag = up3Response.ETag, PartNumber = 3 }
            }
        };
        CompleteMultipartUploadResponse compResponse = client.CompleteMultipartUpload(compRequest);
        #endregion
    }
}