/// <summary>
/// Retrieves the list of S3 objects stored under <paramref name="path"/> in the given bucket.
/// </summary>
/// <param name="bucketName">Bucket to list.</param>
/// <param name="path">Key prefix to list under.</param>
/// <returns>The objects reported by the S3 listing call.</returns>
public async Task<List<S3Object>> GetSourceFileListAsync(string bucketName, string path)
{
    var service = new S3Service();
    return await service.ListingObjectsAsync(bucketName, path);
}
/// <summary>
/// Uploads each image to S3 and records a ReturnImage row linking it to the given return.
/// </summary>
/// <param name="Images">Image payloads/paths accepted by S3Service.UploadObject.</param>
/// <param name="ReturnId">The return the images belong to.</param>
/// <returns>True when all rows were saved; false on any failure (best-effort, errors swallowed).</returns>
async Task<bool> SaveImages(List<string> Images, int ReturnId)
{
    try
    {
        foreach (var i in Images)
        {
            var img = new ReturnImage();
            var imageResponse = await S3Service.UploadObject(i);
            // Removed: an unused JsonResult local that was created and discarded per iteration.
            if (imageResponse.Success)
            {
                img.Image = $"https://pistis.s3.us-east-2.amazonaws.com/{imageResponse.FileName}";
            }
            // Image stays null when the upload fails; the row is still persisted.
            img.IsActive = true;
            img.ReturnId = ReturnId;
            db.ReturnImage.Add(img);
        }
        db.SaveChanges();
        return true;
    }
    catch (Exception)
    {
        // Fixed: the caught exception variable was never used (compiler warning).
        // NOTE(review): failures are reported only via the false return; consider logging.
        return false;
    }
}
/// <summary>
/// Creates the view model: resolves the S3 service provider and starts with an empty account list.
/// </summary>
public ConfigurationViewModel()
{
    _amazonS3Service = GetProvider<S3Service>();
    Accounts = new ObservableCollection<AccountModelBase>(new List<AccountModelBase>());
}
/// <summary>
/// Redirects the current response to the S3 URL of the requested blob — a signed,
/// expiring URL when authorization is enabled, otherwise the plain public URL.
/// </summary>
/// <param name="context">The HTTP context of the current request.</param>
/// <exception cref="BlobDownloadException">
/// Thrown with status 400 when the blob info or provider is missing, or the provider
/// is not an <see cref="S3StorageProvider"/>.
/// </exception>
protected override void ProcessRequest(System.Web.HttpContext context)
{
    // Merged the two identical null guards; all three failure paths now use the
    // same message ("Not found" — casing was previously inconsistent: "Not Found").
    if (this._blobInfo == null || this._provider == null)
    {
        throw new BlobDownloadException(400, "Not found");
    }
    if (!(this._provider is S3StorageProvider))
    {
        throw new BlobDownloadException(400, "Not found");
    }
    S3StorageProvider provider = (S3StorageProvider)_provider;
    S3Service service = provider.Service;
    string uid = this._blobInfo.Uid.ToString();
    string url = _AuthEnable
        ? service.GetAuthorizedUrl(provider.BucketName, uid, DateTime.UtcNow.Add(provider.ExpirationTimeSpan))
        : service.GetUrl(provider.BucketName, uid);
    context.Response.Redirect(url);
}
/// <summary>
/// Resolves protected configuration by fetching the raw XML from S3 and returning its root node.
/// Connection parameters come from the &lt;s3ProviderInfo&gt; element of the encrypted node.
/// </summary>
/// <param name="encryptedNode">The EncryptedData node carrying the s3ProviderInfo attributes.</param>
/// <returns>The first child node of the document retrieved from S3.</returns>
public override System.Xml.XmlNode Decrypt(System.Xml.XmlNode encryptedNode)
{
    // Pull the S3 connection parameters out of the provider-info element.
    XmlNode settingsNode = encryptedNode.SelectSingleNode("/EncryptedData/s3ProviderInfo");
    string awsAccessKey = settingsNode.Attributes["s3AccessKey"].Value;
    string awsSecretKey = settingsNode.Attributes["s3SecretKey"].Value;
    string bucketName = settingsNode.Attributes["s3BucketName"].Value;
    string objectKey = settingsNode.Attributes["objectKey"].Value;

    // Fetch the raw configuration document from S3 over SSL.
    var service = new S3Service
    {
        AccessKeyID = awsAccessKey,
        SecretAccessKey = awsSecretKey,
        UseSsl = true,
        UseSubdomains = true
    };
    string xmlRaw = service.GetObjectString(bucketName, objectKey);

    // Parse the payload and hand back the document's root node.
    var doc = new XmlDocument();
    doc.LoadXml(xmlRaw);
    return doc.ChildNodes[0];
}
/// <summary>
/// Builds the S3 client from the injected Amazon storage settings (subdomain addressing enabled).
/// </summary>
public AmazonS3StorageSource(IAmazonStorageSettings settings)
{
    _settings = settings;
    _service = new S3Service
    {
        AccessKeyID = settings.AccessKey,
        SecretAccessKey = settings.SecretAccessKey,
        UseSubdomains = true
    };
}
/// <summary>
/// Wires up the repositories, Jikan client and S3 service needed by an
/// <see cref="InsertSeiyuuHandler"/> over the given database context.
/// </summary>
/// <param name="dbContext">The Seiyuu-Moe database context shared by all repositories.</param>
/// <returns>A fully configured handler.</returns>
private static InsertSeiyuuHandler CreateHandler(SeiyuuMoeContext dbContext)
{
    var scheduleConfiguration = ConfigurationReader.MalBgJobsScheduleConfiguration;

    var animeRepository = new AnimeRepository(dbContext);
    var seiyuuRepository = new SeiyuuRepository(dbContext);
    var characterRepository = new CharacterRepository(dbContext);
    var animeRoleRepository = new AnimeRoleRepository(dbContext);
    var seasonRepository = new SeasonRepository(dbContext);

    var jikanUrl = ConfigurationReader.JikanUrl; // fixed: stray double semicolon removed
    // SuppressException: Jikan call failures surface as null results instead of throwing.
    var jikanConfiguration = new JikanClientConfiguration { Endpoint = jikanUrl, SuppressException = true };
    var jikanService = new JikanService(new Jikan(jikanConfiguration));

    var s3Client = new S3Service();

    return new InsertSeiyuuHandler(
        scheduleConfiguration.InsertSeiyuuBatchSize,
        scheduleConfiguration.DelayBetweenCallsInSeconds,
        seiyuuRepository,
        seasonRepository,
        characterRepository,
        animeRepository,
        animeRoleRepository,
        jikanService,
        s3Client);
}
/// <summary>Deletes a known source archive from the bucket named by SCREEN3_S3_BUCKET.</summary>
public void TestDeleteObjectAsync()
{
    var client = new S3Service();
    var bucketName = Environment.GetEnvironmentVariable("SCREEN3_S3_BUCKET");
    // NOTE(review): blocks on async work via .Wait(); an async Task test method would be cleaner.
    client.DeleteObject(bucketName, @"source/1997-2006.zip").Wait();
}
/// <summary>Lists objects under the "source" prefix of the bucket named by SCREEN3_S3_BUCKET.</summary>
public void TestListObjectsAsync()
{
    var client = new S3Service();
    var bucketName = Environment.GetEnvironmentVariable("SCREEN3_S3_BUCKET");
    // NOTE(review): blocks on async work via .Wait(); an async Task test method would be cleaner.
    client.ListingObjectsAsync(bucketName, "source").Wait();
}
/// <summary>
/// Runs the image creation process for the supplied config and uploads the
/// resulting archive to S3, returning the upload result string.
/// </summary>
public string GenerateImages([FromBody] GtrConfig config)
{
    const string fileName = "Glory to Rome Images";
    var bytes = new ImageCreationProcess().Run(config);
    return S3Service.UploadZipToS3(bytes, fileName);
}
/// <summary>Downloads a known source archive from the configured bucket into a temp folder.</summary>
public void TestDownloadFileFromS3Async()
{
    var client = new S3Service();
    var bucketName = Environment.GetEnvironmentVariable("SCREEN3_S3_BUCKET");
    const string targetFolder = "/tmp/screen3_temp_files/originSourceFiles/";
    // NOTE(review): blocks on async work via .Wait(); an async Task test method would be cleaner.
    client.DownloadFileFromS3Async(bucketName, "source/1997-2006.zip", targetFolder).Wait();
}
/// <summary>
/// Downloads an S3 object into the given temp folder and returns the resulting local file name.
/// Logs the call parameters first for Lambda diagnostics.
/// </summary>
public async Task<String> DownloadFileAsync(string bucketName, string keyName, string tempFolder)
{
    LambdaLogger.Log($"In DownloadFileAsync. bucketName: {bucketName}, keyName: {keyName}, tempFolder: {tempFolder}.\n");
    var service = new S3Service();
    return await service.DownloadFileFromS3Async(bucketName, keyName, tempFolder);
}
/// <summary>
/// Reads bucket, temp-folder and table names from the environment, then wires up
/// the S3 service and the stock data-access layer.
/// </summary>
public StockLoadController()
{
    this.S3_Bucket_Name = Environment.GetEnvironmentVariable("SCREEN3_S3_BUCKET");
    this.Temp_Folder = Environment.GetEnvironmentVariable("SCREEN3_TEMP_FOLDER");
    this.s3service = new S3Service();
    this.dal = new StockServiceDAL(Environment.GetEnvironmentVariable("SCREEN3_ASX300_TABLE_NAME"));
}
/// <summary>
/// Creates an empty "folder" object in the bucket using the current <c>FolderKey</c>.
/// S3 has no real folders; a zero-byte object whose key is the folder path acts as one.
/// </summary>
/// <returns>Always true when the folder object was added.</returns>
/// <exception cref="ArgumentNullException">Thrown when <c>FolderKey</c> is null or empty.</exception>
public override bool CreateFolder()
{
    if (string.IsNullOrEmpty(FolderKey))
    {
        // Fixed: the exception previously named a non-existent parameter "name";
        // the value actually being validated is FolderKey.
        throw new ArgumentNullException(nameof(FolderKey), "Folder name can not be null.");
    }
    S3Service.AddObject(bucketName, FolderKey, 0, null, stream => { });
    return true;
}
/// <summary>Creates the S3 service and the bucket context the tests operate on.</summary>
internal virtual void InitTest(Settings settings)
{
    s3 = new S3Service
    {
        AccessKeyID = settings.AccessKeyID,
        SecretAccessKey = settings.SecretAccessKey
    };
    bucket = new BucketContext
    {
        BucketName = settings.TestBucketName,
        Service = s3
    };
}
// NOTE(review): despite its name, this method only *downloads* "myfile.jpg" and
// discards the result — no upload happens. Confirm intent (rename, or change the call).
public void UploadToS3()
{
    var service = new S3Service(new AwsCredentials
    {
        //S3 Service Region
        RegionEndpoint = RegionEndpoint.USEast1
    }, "bucketname");
    var content = service.Download("myfile.jpg"); // result is unused
}
/// <summary>
/// Creates a LitS3 client with the given credentials, using subdomain addressing
/// over plain HTTP, and raises the connection cap for parallel transfers.
/// </summary>
public LitS3Service(string accessKey, string secretKey)
{
    ServicePointManager.DefaultConnectionLimit = 100;
    client = new S3Service
    {
        AccessKeyID = accessKey,
        SecretAccessKey = secretKey,
        UseSsl = false, // NOTE: traffic is unencrypted
        UseSubdomains = true
    };
}
/// <summary>
/// Builds Aeons End labels for the expansions named in the request and uploads the PDF to S3.
/// </summary>
public ActionResult<string> GenerateAeonsEnd([FromBody] GenerateAeonsEndRequest request)
{
    // Index every expansion by its friendly name so the request can refer to them by name.
    var expansionsByName = Enum.GetValues(typeof(Expansion))
        .Cast<Expansion>()
        .ToDictionary(e => e.GetFriendlyName());

    var selectedExpansions = request.SelectedExpansionNames
        .Select(n => expansionsByName[n])
        .ToList();

    var pdfBytes = AeonsEndLabels.CreateLabels(selectedExpansions);
    return S3Service.UploadPdfToS3(pdfBytes, "AeonsEndLabels");
}
/// <summary>
/// Builds Legendary labels for the expansions named in the request (optionally
/// including special setup cards) and uploads the PDF to S3.
/// </summary>
public ActionResult<string> GenerateLegendary([FromBody] GenerateLegendaryRequest request)
{
    // Index every expansion by its display name so the request can refer to them by name.
    var expansionsByName = Enum.GetValues(typeof(Legendary.Enums.Expansion))
        .Cast<Legendary.Enums.Expansion>()
        .ToDictionary(e => e.GetExpansionName());

    var selectedExpansions = request.SelectedExpansionNames
        .Select(n => expansionsByName[n])
        .ToList();

    var pdfBytes = LegendaryLabels.CreateLabels(selectedExpansions, request.IncludeSpecialSetupCards);
    return S3Service.UploadPdfToS3(pdfBytes, "LegendaryLabels");
}
/// <summary>
/// Builds an S3 service from the program configuration and attaches the
/// pre-authorization callback.
/// </summary>
static S3Service GetService()
{
    var service = new S3Service
    {
        AccessKeyID = Program.Config.AccessKey,
        SecretAccessKey = Program.Config.SecretKey,
        Host = Program.Config.Region,
    };
    service.BeforeAuthorize += service_BeforeAuthorize;
    return service;
}
/// <summary>
/// Creates the store using AMAZON_* environment credentials and creates the
/// backing bucket when it does not exist yet.
/// </summary>
public S3Store()
{
    _S3Service = new S3Service
    {
        AccessKeyID = System.Environment.GetEnvironmentVariable("AMAZON_ACCESS_KEY_ID"),
        SecretAccessKey = System.Environment.GetEnvironmentVariable("AMAZON_SECRET_ACCESS_KEY")
    };
    var access = _S3Service.QueryBucket(_bucketName);
    if (access == BucketAccess.NoSuchBucket)
    {
        _S3Service.CreateBucket(_bucketName);
    }
}
/// <summary>
/// Demo entry point: lists all buckets, then all objects under "static.teamlab.com".
/// </summary>
static void Main(string[] args)
{
    // SECURITY FIX: a real-looking AWS key pair was hard-coded here. Those credentials
    // must be treated as compromised and rotated; the service now reads them from the
    // environment instead of source.
    var service = new S3Service
    {
        AccessKeyID = System.Environment.GetEnvironmentVariable("AWS_ACCESS_KEY_ID"),
        SecretAccessKey = System.Environment.GetEnvironmentVariable("AWS_SECRET_ACCESS_KEY"),
    };
    var buckets = service.GetAllBuckets();
    var objects = service
        .ListAllObjects("static.teamlab.com", "")
        .ToList();
}
/// <summary>
/// Passes the shared services through to the base controller and builds the S3 helper
/// around the injected client and configuration.
/// </summary>
public RecipeController(
    ApplicationDbContext context,
    IConfiguration configuration,
    UserManager<IdentityUser> userManager,
    SignInManager<IdentityUser> signInManager,
    RoleManager<IdentityRole> roleManager,
    IAmazonSimpleEmailService client,
    IAmazonS3 s3Client
) : base(context, configuration, roleManager, client, userManager, signInManager)
{
    _s3Service = new S3Service(s3Client, _configuration);
}
/// <summary>
/// Probes the configured bucket with the current credentials and maps the outcome
/// (or any S3 error) to a <see cref="TestConnectivityResult"/>, showing a message
/// box for recognizable S3 failures.
/// </summary>
public static TestConnectivityResult testConnectivity()
{
    try
    {
        S3Service service = GetService();
        BucketAccess access = service.QueryBucket(Program.Config.BucketName);
        if (access.HasFlag(BucketAccess.NoSuchBucket))
        {
            return TestConnectivityResult.BUCKET_DOESNT_EXIST;
        }
        if (access.HasFlag(BucketAccess.NotAccessible))
        {
            return TestConnectivityResult.INACCESSIBLE;
        }
        if (access.HasFlag(BucketAccess.Accessible))
        {
            return TestConnectivityResult.OK;
        }
        return TestConnectivityResult.UNKNOWN;
    }
    catch (S3Exception e)
    {
        switch (e.ErrorCode)
        {
            case S3ErrorCode.PermanentRedirect:
                MessageBox.Show("You are accessing this bucket via the wrong region.");
                break;
            case S3ErrorCode.InvalidAccessKeyId:
            case S3ErrorCode.SignatureDoesNotMatch:
                MessageBox.Show("There was a problem using your credentials, please check them");
                break;
            default:
                MessageBox.Show("[" + e.ErrorCode + "]: " + e.Message);
                break;
        }
        return TestConnectivityResult.S3_EXCEPTION;
    }
    catch (Exception)
    {
        // Fixed: the caught exception variable was declared but never used (CS0168).
        return TestConnectivityResult.EXCEPTION;
    }
}
/// <summary>
/// Initializes the provider: copies recognized settings out of <paramref name="config"/>,
/// calls the base initializer, then builds the underlying <see cref="S3Service"/>.
/// </summary>
/// <param name="name">The friendly name of the provider.</param>
/// <param name="config">The provider configuration attributes.</param>
public override void Initialize(string name, NameValueCollection config)
{
    // Simplified: string.IsNullOrEmpty already returns true for null, so the original
    // "(x == null) || string.IsNullOrEmpty(x)" double-checks were redundant.
    if (!string.IsNullOrEmpty(config["accessKeyID"]))
    {
        this.AccessKeyID = config["accessKeyID"];
    }
    if (!string.IsNullOrEmpty(config["secretAccessKey"]))
    {
        this.SecretAccessKey = config["secretAccessKey"];
    }
    if (!string.IsNullOrEmpty(config["bucketName"]))
    {
        this.BucketName = config["bucketName"];
    }
    if (!string.IsNullOrEmpty(config["host"]))
    {
        this.Host = config["host"];
    }
    if (!string.IsNullOrEmpty(config["useSSL"]))
    {
        this.UseSSL = Boolean.Parse(config["useSSL"]);
    }
    if (!string.IsNullOrEmpty(config["useSubDomains"]))
    {
        this._useSubDomains = Boolean.Parse(config["useSubDomains"]);
    }
    if (!string.IsNullOrEmpty(config["expirationTimeSpan"]))
    {
        this.ExpirationTimeSpan = TimeSpan.Parse(config["expirationTimeSpan"]);
    }
    base.Initialize(name, config);

    // Build the S3 client from the settings gathered above.
    _Service = new S3Service();
    _Service.AccessKeyID = this._AccessKeyID;
    _Service.SecretAccessKey = this._SecretAccessKey;
    _Service.UseSsl = _useSSL;
    if (!String.IsNullOrEmpty(Host))
    {
        _Service.Host = Host;
    }
    _Service.UseSubdomains = _useSubDomains;
}
/// <summary>
/// Enumerates bucket entries (plus the "new bucket" placeholder) for a List operation,
/// an empty enumeration for Delete, and null for any other operation.
/// </summary>
public override IEnumerator<FindData> GetFiles()
{
    switch (Context.CurrentOperation)
    {
        case StatusOperation.List:
            var bucketEntries = S3Service
                .GetBuckets()
                .Select(b => new FindData(b.Key, FileAttributes.Directory, b.CreationDate));
            return bucketEntries
                .Union(new[] { new FindData(RS.NewBucket) })
                .GetEnumerator();
        case StatusOperation.Delete:
            return EmptyFindDataEnumerator;
        default:
            return null; // NOTE(review): callers apparently tolerate a null enumerator here
    }
}
/// <summary>
/// Prompts the user for a new bucket name/location and creates the bucket.
/// Returns false when the dialog is cancelled or the placeholder name is entered.
/// </summary>
public override bool CreateFolder()
{
    using (var form = new NewBucketForm(bucketName))
    {
        if (form.ShowDialog() != DialogResult.OK)
        {
            return false;
        }
        // The "new bucket" placeholder entry is not a valid bucket name.
        if (RS.NewBucket.Equals(form.BucketName, StringComparison.CurrentCultureIgnoreCase))
        {
            return false;
        }
        S3Service.CreateBucket(form.BucketName, form.BucketLocation);
        return true;
    }
}
/// <summary>
/// Copies this object to <paramref name="dest"/> (server-side), optionally as a move.
/// Progress is reported at 0%, 50% (on move, before the delete) and 100%; a progress
/// callback returning false aborts with UserAbort. Any exception is logged and
/// reported as WriteError.
/// </summary>
public override FileOperationResult CopyTo(S3CommanderFile dest, bool overwrite, bool move, RemoteInfo info)
{
    var entry = dest as Entry;
    if (entry == null)
    {
        // Only copies targeting another S3 entry are supported.
        return (FileOperationResult.NotSupported);
    }
    try
    {
        // Without overwrite, an existing target object is an error, not a replace.
        if (!overwrite && S3Service.ObjectExists(entry.bucketName, entry.key))
        {
            return (FileOperationResult.Exists);
        }
        var source = bucketName + "/" + key;
        var target = entry.bucketName + "/" + entry.key;
        if (SetProgress(source, target, 0, 100) == false)
        {
            return (FileOperationResult.UserAbort);
        }
        // Server-side copy: the data never flows through this machine.
        S3Service.CopyObject(bucketName, key, entry.bucketName, entry.key);
        if (move)
        {
            if (SetProgress(source, target, 50, 100) == false)
            {
                return (FileOperationResult.UserAbort);
            }
            // A "move" is copy-then-delete; the copy above has already succeeded.
            DeleteFile();
        }
        if (SetProgress(source, target, 100, 100) == false)
        {
            return (FileOperationResult.UserAbort);
        }
        return (FileOperationResult.OK);
    }
    catch (Exception ex)
    {
        Context.Log.Error(ex);
        return (FileOperationResult.WriteError);
    }
}
/// <summary>
/// Lists entries under the current folder key: a flat (no delimiter) listing of S3
/// entries for size/delete/multi-rename operations, otherwise a single level
/// delimited by "/" with empty keys filtered out.
/// </summary>
public override IEnumerator<FindData> GetFiles()
{
    var op = Context.CurrentOperation;
    bool needsFlatListing = op == StatusOperation.CalculateSize
        || op == StatusOperation.Delete
        || op == StatusOperation.RenameMoveMulti;

    if (needsFlatListing)
    {
        return S3Service
            .GetObjects(bucketName, FolderKey, "")
            .Where(o => o is S3Entry)
            .Select(ToFindData)
            .GetEnumerator();
    }

    return S3Service
        .GetObjects(bucketName, FolderKey, "/")
        .Where(o => !string.IsNullOrEmpty(o.Key))
        .Select(ToFindData)
        .GetEnumerator();
}
/// <summary>
/// Uploads the image referenced by <paramref name="obj"/> to S3 and returns a fresh
/// image record carrying the public URL (imageUrl stays null when the upload
/// reports failure).
/// </summary>
public async Task<IActionResult> getLink(image obj)
{
    // Removed: two unused locals ("image", "response") and a try/catch whose only
    // action was to rethrow — behavior is unchanged.
    var imageResponse = await S3Service.UploadObject(obj.imageUrl);
    var newImage = new image();
    if (imageResponse.Success)
    {
        newImage.imageUrl = $"https://pistis.s3.us-east-2.amazonaws.com/{imageResponse.FileName}";
    }
    return Ok(newImage);
}
/// <summary>
/// Creates or updates the single active newsletter image: uploads the image to S3
/// and stores its public URL together with the header name and description.
/// </summary>
public async Task<IActionResult> uploadImageN([FromBody] NewsletterImage model)
{
    try
    {
        // Fused Where(...).FirstOrDefault() into FirstOrDefault(predicate);
        // removed the unused JsonResult locals.
        var data = db.NewsletterImage.FirstOrDefault(x => x.IsActive == true);
        if (data != null)
        {
            data.IsActive = true;
            data.Description = model.Description;
            data.HeaderName = model.HeaderName;
            var imageResponse = await S3Service.UploadObject(model.Image);
            if (imageResponse.Success)
            {
                data.Image = $"https://pistis.s3.us-east-2.amazonaws.com/{imageResponse.FileName}";
            }
            db.SaveChanges();
        }
        else
        {
            var newsletter = new NewsletterImage();
            var imageResponse = await S3Service.UploadObject(model.Image);
            if (imageResponse.Success)
            {
                newsletter.Image = $"https://pistis.s3.us-east-2.amazonaws.com/{imageResponse.FileName}";
            }
            newsletter.IsActive = true;
            newsletter.Description = model.Description;
            newsletter.HeaderName = model.HeaderName;
            db.NewsletterImage.Add(newsletter);
            db.SaveChanges();
        }
    }
    catch (Exception)
    {
        // Fixed: "throw ex;" reset the stack trace — a bare "throw;" preserves it.
        throw;
    }
    return Ok();
}
/// <summary>
/// Ad-hoc LitS3 exercise code: creates a throwaway bucket in the EU region, lists it,
/// and deletes it again, routing all traffic through a local debugging proxy.
/// The large body of commented-out experiments that used to live here has been
/// removed (best practice: delete commented-out code; recover it from source control
/// if ever needed).
/// </summary>
static void RunS3Tests()
{
    var s3 = new S3Service
    {
        AccessKeyID = Settings.Default.AccessKeyID,
        SecretAccessKey = Settings.Default.SecretAccessKey
    };
    s3.UseSubdomains = true;
    s3.UseSsl = false; // plain HTTP so the debugging proxy below can observe traffic
    // Route every request through a local proxy for inspection.
    s3.BeforeAuthorize += (o, a) => { a.Request.Proxy = new WebProxy("http://192.168.104.1:7777"); };

    // Random suffix avoids name collisions with earlier runs of this test.
    string bucket = "lits3-fashionable" + new Random().Next();
    s3.CreateBucketInEurope(bucket);
    s3.ListAllObjects(bucket);
    s3.DeleteBucket(bucket);
}