/// <summary>
/// End-to-end S3 versioning walkthrough: creates a timestamped bucket, enables
/// versioning, uploads 3 objects with 3 revisions each (9 versions total),
/// pages through the versions, permanently deletes every version, and finally
/// deletes the bucket.
/// </summary>
public static void Main(string[] args)
{
    // create the AWS S3 client
    AmazonS3Client s3 = AWSS3Factory.getS3Client();
    String bucketName = String.Join("-", AWSS3Factory.S3_BUCKET, DateTime.Now.ToString("yyyyMMddHHmmss"));

    //********************//
    // 1. Create a bucket //
    //********************//
    Console.Write(string.Format(" [*] Creating bucket '{0}'... ", bucketName));
    PutBucketResponse pbRes = s3.PutBucket(bucketName);
    ExitUnless(pbRes.HttpStatusCode, System.Net.HttpStatusCode.OK);
    Console.WriteLine("done");

    //*****************************************//
    // 2. Get current bucket versioning status //
    //*****************************************//
    Console.Write(string.Format(" [*] Getting bucket versioning status for bucket '{0}'... ", bucketName));
    GetBucketVersioningRequest gvr = new GetBucketVersioningRequest()
    {
        BucketName = bucketName
    };
    GetBucketVersioningResponse gvrResponse = s3.GetBucketVersioning(gvr);
    Console.Write(string.Format("status: {0}", gvrResponse.VersioningConfig.Status));

    //*******************************************//
    // 3. Enable object versioning on the bucket //
    //*******************************************//
    // enable versioning only if not yet enabled
    if (gvrResponse.VersioningConfig.Status != VersionStatus.Enabled)
    {
        Console.Write(string.Format(" [*] Enabling bucket versioning for bucket '{0}'... ", bucketName));
        PutBucketVersioningRequest pvr = new PutBucketVersioningRequest()
        {
            BucketName = bucketName,
            VersioningConfig = new S3BucketVersioningConfig() { Status = VersionStatus.Enabled }
        };
        PutBucketVersioningResponse pvrResponse = s3.PutBucketVersioning(pvr);
        ExitUnless(pvrResponse.HttpStatusCode, System.Net.HttpStatusCode.OK);
        Console.WriteLine("done");
    }

    //*************************************************************************//
    // 4. Upload three objects with three versions each (total of 9 versions)  //
    //*************************************************************************//
    Console.Write(string.Format(" [*] Uploading 3 objects with 3 versions each to bucket '{0}'... ", bucketName));
    for (int i = 0; i < 3; i++)
    {
        string objectKey = String.Format("object-{0}", i);
        for (int j = 0; j < 3; j++)
        {
            string objectContent = String.Format("This is object {0}, revision {1}", i, j);
            PutObjectRequest poRequest = new PutObjectRequest()
            {
                BucketName = bucketName,
                Key = objectKey,
                ContentBody = objectContent
            };
            PutObjectResponse poResponse = s3.PutObject(poRequest);
            ExitUnless(poResponse.HttpStatusCode, System.Net.HttpStatusCode.OK);
            Console.Write(".");
        }
    }
    Console.WriteLine("done");

    //*******************************************//
    // 5. List the object versions in the bucket //
    //*******************************************//
    Console.WriteLine(" [*] Listing object versions...");
    ListVersionsRequest request = new ListVersionsRequest()
    {
        BucketName = bucketName,
        // You can optionally specify key name prefix in the request
        // if you want list of object versions of a specific object.
        // For this example we limit response to return list of 2 versions.
        MaxKeys = 2
    };
    bool moreRecords = true;
    while (moreRecords)
    {
        ListVersionsResponse response = s3.ListVersions(request);
        foreach (S3ObjectVersion version in response.Versions)
        {
            Console.WriteLine(string.Format(" [x] -> Object key: {0}", version.Key));
            Console.WriteLine(string.Format(" [x] VersionId: {0}", version.VersionId));
            Console.WriteLine(string.Format(" [x] IsDeleteMarker: {0}", version.IsDeleteMarker));
            Console.WriteLine(string.Format(" [x] LastModified: {0}", version.LastModified));
        }
        // If response is truncated, set the markers to request the next page of keys.
        if (response.IsTruncated)
        {
            request.KeyMarker = response.NextKeyMarker;
            request.VersionIdMarker = response.NextVersionIdMarker;
        }
        else
        {
            moreRecords = false;
        }
        Console.WriteLine(string.Format(" [x] More records? {0}", moreRecords));
    }

    //*******************************************//
    // 6. Permanently delete the object versions //
    //*******************************************//
    Console.Write(" [*] Permanently deleting all object versions... ");
    // NOTE(review): this single ListVersions call is not paged; with more than
    // one page of versions some would survive deletion — fine for this demo's
    // 9 versions, but verify before reusing this pattern.
    ListVersionsResponse lv2Response = s3.ListVersions(bucketName);
    ExitUnless(lv2Response.HttpStatusCode, System.Net.HttpStatusCode.OK);
    foreach (S3ObjectVersion version in lv2Response.Versions)
    {
        DeleteObjectRequest do2Request = new DeleteObjectRequest()
        {
            BucketName = bucketName,
            Key = version.Key,
            VersionId = version.VersionId
        };
        DeleteObjectResponse do2Response = s3.DeleteObject(do2Request);
        // A successful versioned delete returns 204 NoContent, not 200 OK.
        ExitUnless(do2Response.HttpStatusCode, System.Net.HttpStatusCode.NoContent);
    }
    Console.WriteLine("done");

    //***********************//
    // 7. Delete the bucket  //
    //***********************//
    Console.Write(String.Format(" [*] Deleting bucket '{0}' (sleeping 5 seconds)... ", bucketName));
    System.Threading.Thread.Sleep(5000);
    DeleteBucketResponse dbRes = s3.DeleteBucket(bucketName);
    ExitUnless(dbRes.HttpStatusCode, System.Net.HttpStatusCode.NoContent);
    Console.WriteLine("done");

    Console.WriteLine(" [*] Example is completed. Press any key to exit...");
    Console.ReadLine();
}

/// <summary>
/// Prints "fail", waits for a key press and terminates the process when an S3
/// call did not return the expected HTTP status. Extracted from six duplicated
/// inline fail/exit blocks in the original.
/// </summary>
private static void ExitUnless(System.Net.HttpStatusCode actual, System.Net.HttpStatusCode expected)
{
    if (actual != expected)
    {
        Console.WriteLine("fail");
        Console.ReadLine();
        System.Environment.Exit(1);
    }
}
/// <summary>
/// Receives a product image from the multipart form ("HelpSectionImages"),
/// watermarks it with the product SKU, uploads it to S3 with public-read ACL,
/// stores the resulting URL on the Product row, and warms the image cache.
/// Returns 200 on success, 400 when the product is missing or input is invalid.
/// </summary>
public HttpResponseMessage UploadProductImg()
{
    // Get the uploaded image from the Files collection
    if (System.Web.HttpContext.Current.Request.Files.AllKeys.Any())
    {
        // Get data from request file and form fields
        var httpPostedFile = System.Web.HttpContext.Current.Request.Files["HelpSectionImages"];
        var productId = int.Parse(System.Web.HttpContext.Current.Request.Form["ProductId"]);
        var hiddenImg = System.Web.HttpContext.Current.Request.Form["hiddenImg"];
        var isHiddenImg = string.IsNullOrEmpty(hiddenImg) ? false : bool.Parse(hiddenImg);
        if (httpPostedFile != null && productId > 0)
        {
            // FIX: parameterized query instead of string.Format-built SQL.
            // productId is an int here so it was not directly injectable, but
            // string-built SQL is a pattern that must not spread.
            var query = "SELECT Id,Sku,FriendlyUrl,ImgOriginal,Img100x100,Img300x300, Img500x500 FROM dbo.Product WHERE Id = @id";
            Product productImage;
            using (var cnn = SqlHelper.OpenConnection())
            {
                productImage = cnn.Query <Product>(query, new { id = productId }).FirstOrDefault();
            }
            if (productImage == null)
            {
                // "Product does not exist"
                return(Request.CreateResponse(HttpStatusCode.BadRequest, new { success = false, message = "Sản phẩm không tồn tại" }));
            }

            var client = new AmazonS3Client(accessKey, secretKey, Amazon.RegionEndpoint.USEast2);
            // NOTE(review): DateTime.Now.GetHashCode() is a weak uniqueness source;
            // kept for compatibility with existing key naming.
            var keyName = (isHiddenImg ? "be-cung-shop-" : "") + productImage.FriendlyUrl + DateTime.Now.GetHashCode().ToString("x") + ".jpg";
            // Watermark the uploaded image with the product SKU before upload.
            var newStream = Utils.Helpers.ImageHelper.DrawText(httpPostedFile.InputStream, productImage.Sku);
            PutObjectRequest request = new PutObjectRequest()
            {
                BucketName = bucketName,
                Key = keyName,
                InputStream = newStream,
                AutoCloseStream = true,
                CannedACL = S3CannedACL.PublicRead,
                StorageClass = S3StorageClass.ReducedRedundancy
            };
            PutObjectResponse response = client.PutObject(request);
            if (response.HttpStatusCode == HttpStatusCode.OK)
            {
                var imgOriginal = host + "/bcs-s3/products/" + keyName;
                using (var cnn = SqlHelper.OpenConnection())
                {
                    query = "Update Product set ImgOriginal = @imgOriginal Where Id = @id";
                    cnn.Execute(query, new { imgOriginal, id = productId });
                }
                // Fire-and-forget cache warm-up of the rendered product image.
                // NOTE(review): "https:" + host assumes host starts with "//" — confirm.
                Task.Factory.StartNew(() =>
                {
                    using (var wc = new WebClient())
                    {
                        wc.DownloadString("https:" + host + "/image/product/" + productId);
                    }
                });
                // "Image upload succeeded"
                return(Request.CreateResponse(HttpStatusCode.OK, new { success = true, message = "Upload ảnh thành công" }));
            }
        }
    }
    // "Unknown error, please contact the administrator"
    return(Request.CreateResponse(HttpStatusCode.BadRequest, new { success = false, message = "Lỗi không xác định, vui lòng liên hệ quản trị" }));
}
/// <summary>
/// Demonstrates the S3 object-tagging round trip: upload an object with an
/// initial tag set, read the tags back, replace the tag set, and read again.
/// S3 errors are reported separately from all other failures.
/// </summary>
static async Task PutObjectWithTagsTestAsync()
{
    try
    {
        // 1. Put an object with tags.
        var upload = new PutObjectRequest
        {
            BucketName = bucketName,
            Key = keyName,
            FilePath = filePath,
            TagSet = new List <Tag>
            {
                new Tag { Key = "Keyx1", Value = "Value1" },
                new Tag { Key = "Keyx2", Value = "Value2" }
            }
        };
        PutObjectResponse uploadResponse = await client.PutObjectAsync(upload);

        // 2. Retrieve the object's tags.
        var firstRead = new GetObjectTaggingRequest
        {
            BucketName = bucketName,
            Key = keyName
        };
        GetObjectTaggingResponse firstTags = await client.GetObjectTaggingAsync(firstRead);
        foreach (var tag in firstTags.Tagging)
        {
            Console.WriteLine("Key: {0}, Value: {1}", tag.Key, tag.Value);
        }

        // 3. Replace the tagset.
        var replacement = new Tagging
        {
            TagSet = new List <Tag>
            {
                new Tag { Key = "Key3", Value = "Value3" },
                new Tag { Key = "Key4", Value = "Value4" }
            }
        };
        var replaceRequest = new PutObjectTaggingRequest()
        {
            BucketName = bucketName,
            Key = keyName,
            Tagging = replacement
        };
        PutObjectTaggingResponse replaceResponse = await client.PutObjectTaggingAsync(replaceRequest);

        // 4. Retrieve the object's tags again to observe the replacement.
        var secondRead = new GetObjectTaggingRequest
        {
            BucketName = bucketName,
            Key = keyName
        };
        GetObjectTaggingResponse secondTags = await client.GetObjectTaggingAsync(secondRead);
        foreach (var tag in secondTags.Tagging)
        {
            Console.WriteLine("Key: {0}, Value: {1}", tag.Key, tag.Value);
        }
    }
    catch (AmazonS3Exception e)
    {
        Console.WriteLine(
            "Error encountered ***. Message:'{0}' when writing an object"
            , e.Message);
    }
    catch (Exception e)
    {
        Console.WriteLine(
            "Encountered an error. Message:'{0}' when writing an object"
            , e.Message);
    }
}
/// <summary>
/// Console entry point for aws_send2s3: loads parameters (from args or from
/// aws_send2s3.config when mode is 'A'/null), uploads the target file to the
/// configured S3 bucket via TransferUtility, and logs every step to
/// C:\TEMP\aws_send2s3.log.
/// Fixes: "/n" literal in user-visible output replaced with a real newline;
/// exception rethrow no longer resets the stack trace.
/// </summary>
static void Main(string[] args)
{
    file = new System.IO.StreamWriter(@"C:\TEMP\aws_send2s3.log", true);
    string Sintaxe = "";
    Sintaxe = " " + Environment.NewLine +
              "SINTAX:" + Environment.NewLine +
              " " + Environment.NewLine +
              " > aws_send2s3.exe <mode>,<bucket>,<key>,<secret>,<fullpath> " + Environment.NewLine +
              " " + Environment.NewLine +
              "PS.: IF <mode> = 'A' or null, aws_send2s3.config must have config." + Environment.NewLine +
              " " + Environment.NewLine;
    // NOTE(review): resposta is never assigned anywhere below, so the finally
    // block always reports "Answer was null." — kept for output compatibility.
    PutObjectResponse resposta = null;
    file.WriteLine("Loading configs...");
    Console.WriteLine("Loading configs...");

    // Load parameters, or show the syntax message.
    // NOTE(review): when parameter loading fails AND args were supplied, the
    // code falls through and attempts the upload anyway — confirm intent.
    if (!carregouParametros(args))
    {
        if (args.Length.Equals(0))
        {
            Console.WriteLine(Sintaxe);
            return;
        }
    }
    else
    {
        if (!MODO.Equals("A"))
        {
            Console.WriteLine(Sintaxe);
            return;
        }
    }

    if (!File.Exists(caminho_arquivo))
    {
        Console.WriteLine("File not found.");
        file.WriteLine("File not found.");
        return;
    }

    try
    {
        try
        {
            transfer = new TransferUtility(Chave, Segredo);
            file.WriteLine("Generating request to send to " + meuBucket + "...");
            Console.WriteLine("Generating request to send to " + meuBucket + "...");
            TransferUtilityUploadRequest request = new TransferUtilityUploadRequest()
            {
                BucketName = meuBucket,
                FilePath = caminho_arquivo
            };
            Console.WriteLine("Sending...");
            file.WriteLine("Sending...");
            transfer.Upload(request);
            Console.WriteLine("... Sent!");
            file.WriteLine("... Sent!");
        }
        catch (Exception)
        {
            // FIX: was `throw ex;`, which resets the stack trace.
            throw;
        }
        finally
        {
            transfer.Dispose();
        }
        // FIX: was "/nFile sent!" — a broken escape printed literally.
        Console.WriteLine("\nFile sent!");
        file.WriteLine("\nFile sent!");
    }
    catch (Exception ex)
    {
        string msgErro = "An error ocourred."
                         + Environment.NewLine + "[INTERNAL_MESSAGE=" + ex.Message.ToString() + Environment.NewLine;
        if (ex.InnerException != null)
        {
            msgErro += "] & [INNER_MESSAGE=" + ex.InnerException.Message.ToString() + Environment.NewLine;
        }
        if (ex.StackTrace != null)
        {
            msgErro += "] & [STACK_TRACE=" + ex.StackTrace.ToString() + "]";
        }
        if (ex.InnerException == null && ex.StackTrace == null)
        {
            msgErro += "]";
        }
        Console.WriteLine(msgErro);
        file.WriteLine(msgErro);
    }
    finally
    {
        Console.WriteLine("Answer from S3...");
        file.WriteLine("Answer from S3...");
        if (resposta == null)
        {
            Console.WriteLine("Answer was null.");
            file.WriteLine("Answer was null.");
        }
        else
        {
            Console.WriteLine(resposta.ResponseMetadata.ToString());
            file.WriteLine(resposta.ResponseMetadata.ToString());
        }
        file.Flush();
        file.Dispose();
    }
}
// Input is multiple links joined with '|'.
/// <summary>
/// Downloads each linked image, uploads it to the blog S3 bucket with
/// public-read ACL, pulls a resized copy into the local /images/blog/content
/// folder, and returns the list of resulting local URLs (empty string for a
/// link that failed; failures are logged).
/// Fix: the AmazonS3Client is now created once instead of once per link.
/// </summary>
public HttpResponseMessage DownloadAndUploadBlogImages([FromBody] string links)
{
    var arr = links.Split('|');
    var result = new List <string>();
    // FIX: hoisted out of the loop — the original constructed a new client
    // for every link.
    var client = new AmazonS3Client(accessKey, secretKey, Amazon.RegionEndpoint.APSoutheast1);
    using (var webClient = new WebClient())
    {
        webClient.Headers.Add("user-agent", "Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)");
        foreach (var link in arr)
        {
            try
            {
                // Already hosted on our CDN — pass through untouched.
                if (link.StartsWith("https://cdn.becungshop.vn/"))
                {
                    result.Add(link);
                    continue;
                }
                byte[] data = webClient.DownloadData(link);
                // NOTE(review): DateTime.Now.GetHashCode() is a weak uniqueness
                // source and the ".jpg" extension is forced regardless of the
                // actual image type — kept for compatibility.
                var fileName = DateTime.Now.GetHashCode().ToString("x") + ".jpg";
                PutObjectRequest request = new PutObjectRequest()
                {
                    BucketName = bucketBlogName,
                    Key = fileName,
                    InputStream = new MemoryStream(data),
                    AutoCloseStream = true,
                    CannedACL = S3CannedACL.PublicRead,
                    StorageClass = S3StorageClass.ReducedRedundancy
                };
                PutObjectResponse response = client.PutObject(request);
                if (response.HttpStatusCode == HttpStatusCode.OK)
                {
                    // Fetch a 500px-wide rendition from S3 into the local blog folder.
                    var s3Url = host + "/bcs-s3/blogs/" + fileName;
                    using (var wc = new WebClient())
                    {
                        var uri = new Uri(@"https:" + s3Url + "?w=500");
                        var loc = System.Web.Hosting.HostingEnvironment.MapPath("~") + @"/images/blog/content/" + fileName;
                        wc.DownloadFile(uri, loc);
                    }
                    result.Add(host + @"/images/blog/content/" + fileName);
                }
            }
            catch (Exception ex)
            {
                // Keep list positions aligned with the input links; log the failure.
                result.Add(string.Empty);
                _log.Error(ex);
            }
        }
    }
    return(Request.CreateResponse(HttpStatusCode.OK, new { success = true, data = result }));
}
/// <summary>
/// Uploads a base64 data-URI image ("mime/type;base64,&lt;data&gt;" split on ',')
/// to the configured S3 bucket under a GUID file name with public-read ACL.
/// Returns Success=true with the generated file name on HTTP 200, Success=false
/// otherwise. S3 credential problems are reported to the console and rethrown.
/// Fixes: removed the large commented-out duplicate of this method that
/// preceded it; rethrow no longer resets the stack trace.
/// </summary>
public static async Task <UploadPhotoModel> UploadObject(string file)
{
    try
    {
        // Derive the file extension from the data-URI header, e.g.
        // "data:image/png;base64" -> ".png".
        var fileExtension = ".";
        var FileData = file.Split(',');
        file = FileData[1];
        if (FileData[0] != null)
        {
            fileExtension += FileData[0].Split('/')[1].Split(';')[0];
        }
        var client = new AmazonS3Client(accessKey, accessSecret, Amazon.RegionEndpoint.USEast2);
        // Decode the payload into raw bytes.
        byte[] fileBytes = Convert.FromBase64String(file);
        // Unique object key.
        var fileName = Guid.NewGuid().ToString() + fileExtension;
        PutObjectResponse response = null;
        using (var stream = new MemoryStream(fileBytes))
        {
            var request = new PutObjectRequest
            {
                BucketName = bucket,
                Key = fileName,
                InputStream = stream,
                CannedACL = S3CannedACL.PublicRead,
            };
            response = await client.PutObjectAsync(request);
        }
        if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
        {
            return(new UploadPhotoModel
            {
                Success = true,
                FileName = fileName
            });
        }
        else
        {
            return(new UploadPhotoModel
            {
                Success = false,
                FileName = fileName
            });
        }
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        if (
            amazonS3Exception.ErrorCode != null &&
            (amazonS3Exception.ErrorCode.Equals("InvalidAccessKeyId") ||
             amazonS3Exception.ErrorCode.Equals("InvalidSecurity"))
            )
        {
            Console.WriteLine("Please check the provided AWS Credentials.");
            Console.WriteLine("If you haven't signed up for Amazon S3, please visit http://aws.amazon.com/s3");
        }
        else
        {
            Console.WriteLine("An Error, number {0}, occurred when listing buckets with the message '{1}", amazonS3Exception.ErrorCode, amazonS3Exception.Message);
        }
        // FIX: was `throw amazonS3Exception;`, which resets the stack trace.
        throw;
    }
}
/// <summary>
/// Decodes a base64 image, scales it proportionally to fit within 450x450,
/// re-encodes it as JPEG and uploads it to the 450px bucket under the given
/// file name with public-read ACL.
/// Fixes: Image/Bitmap/MemoryStream instances are now disposed (the original
/// leaked Oimg, stream, streamorignal and img); the image is decoded once
/// instead of twice; rethrow no longer resets the stack trace.
/// </summary>
public static async Task <UploadPhotoModel> updateUploadObject450(string file, string fileName)
{
    try
    {
        var client = new AmazonS3Client(accessKey, accessSecret, Amazon.RegionEndpoint.USEast2);
        byte[] fileBytes = Convert.FromBase64String(file);
        var givenWidth = 450;
        var givenHeight = 450;
        byte[] fileBytes450;
        // Decode once, resize to fit the 450x450 box, re-encode as JPEG.
        using (var stream = new MemoryStream(fileBytes))
        using (Image img = Image.FromStream(stream))
        {
            // Figure out the ratio and use whichever multiplier is smaller,
            // so the result fits inside the box while keeping aspect ratio.
            double ratioX = (double)givenWidth / (double)img.Width;
            double ratioY = (double)givenHeight / (double)img.Height;
            double ratio = ratioX < ratioY ? ratioX : ratioY;
            int h = Convert.ToInt32(img.Height * ratio);
            int w = Convert.ToInt32(img.Width * ratio);
            using (Bitmap b = new Bitmap(img, new Size(w, h)))
            using (MemoryStream ms2 = new MemoryStream())
            {
                b.Save(ms2, System.Drawing.Imaging.ImageFormat.Jpeg);
                fileBytes450 = ms2.ToArray();
            }
        }
        PutObjectResponse response;
        using (var stream450 = new MemoryStream(fileBytes450))
        {
            var request = new PutObjectRequest
            {
                BucketName = bucket450,
                Key = fileName,
                InputStream = stream450,
                CannedACL = S3CannedACL.PublicRead,
            };
            response = await client.PutObjectAsync(request);
        }
        if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
        {
            return(new UploadPhotoModel
            {
                Success = true,
                FileName = fileName
            });
        }
        else
        {
            return(new UploadPhotoModel
            {
                Success = false,
                FileName = fileName
            });
        }
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        if (
            amazonS3Exception.ErrorCode != null &&
            (amazonS3Exception.ErrorCode.Equals("InvalidAccessKeyId") ||
             amazonS3Exception.ErrorCode.Equals("InvalidSecurity"))
            )
        {
            Console.WriteLine("Please check the provided AWS Credentials.");
            Console.WriteLine("If you haven't signed up for Amazon S3, please visit http://aws.amazon.com/s3");
        }
        else
        {
            Console.WriteLine("An Error, number {0}, occurred when listing buckets with the message '{1}", amazonS3Exception.ErrorCode, amazonS3Exception.Message);
        }
        // FIX: was `throw amazonS3Exception;`, which resets the stack trace.
        throw;
    }
}
/// <summary>
/// Creates a blog post: optionally uploads the attached image to the
/// "srx-blog-images" S3 bucket under a GUID-prefixed key, builds the blog
/// entity from the view model (trimming comma-separated tags), persists it,
/// and redirects home. Invalid model state re-renders the form.
/// Fix: the upload stream's position is rewound after CopyTo — the AWS SDK
/// reads from the current position, so the original uploaded an empty body.
/// </summary>
public async Task <IActionResult> Create(BlogViewModel model)
{
    string BUCKET_NAME = "srx-blog-images";
    if (ModelState.IsValid)
    {
        UserContextEntity user = new UserContextEntity()
        {
            UserId = model.UserId,
            UserName = model.UserName,
            UserPicPath = model.UserPicPath
        };
        var blog = new Entities.BlogEntity();
        string uniqueFileName = null;
        if (model.ImageFile != null)
        {
            var response = new PutObjectResponse();
            var client = new AmazonS3Client(RegionEndpoint.EUWest1);
            uniqueFileName = Guid.NewGuid().ToString() + '_' + model.ImageFile.FileName;
            using (var stream = new MemoryStream())
            {
                model.ImageFile.CopyTo(stream);
                // FIX: rewind before handing the stream to the SDK; after
                // CopyTo the position is at the end and zero bytes would be sent.
                stream.Position = 0;
                var request = new PutObjectRequest
                {
                    BucketName = BUCKET_NAME,
                    Key = uniqueFileName,
                    InputStream = stream,
                    ContentType = model.ImageFile.ContentType,
                };
                response = await client.PutObjectAsync(request);
            }
        }
        blog.BlogImage = uniqueFileName;
        blog.IsDeleted = false;
        blog.Like = new List <UserContextEntity>();
        blog.DisLike = new List <UserContextEntity>();
        blog.Ratings = new List <string>();
        blog.SubTitle = model.SubTitle;
        blog.Title = model.Title;
        // Tags arrive comma-separated; store them trimmed.
        List <string> list = model.Tags.Split(",").ToList();
        blog.Tags = new List <string>();
        foreach (string str in list)
        {
            blog.Tags.Add(str.Trim());
        }
        blog.Category = model.Category;
        blog.CreatedBy = user;
        blog.CreatedOn = DateTime.Now;
        blog.Content = model.Content;
        await _blogService.Create(blog);
        return(RedirectToAction("Index", "Home"));
    }
    else
    {
        await model.Initialize(_categoryCollection, HttpContext);
        return(View(model));
    }
}
/// <summary>
/// Uploads the first image stream to the S3 bucket "innovationsummit20182"
/// under a timestamp-derived name, runs Amazon Rekognition label detection on
/// it, and returns the detected labels (null when the input list is empty).
/// NOTE(review): the original returns from inside the foreach, so only the
/// FIRST image is ever processed — preserved here, but almost certainly a bug.
/// NOTE(security): AWS credentials are hardcoded below; they must be revoked
/// and moved to configuration/IAM.
/// Fix: rethrow no longer resets the stack trace (was `throw ex;`).
/// </summary>
public async Task <List <Label> > AddImageToS3(List <Stream> pImageList)
{
    try
    {
        if (pImageList.Count > 0)
        {
            foreach (Stream mImage in pImageList)
            {
                AmazonS3Config mS3Config = new AmazonS3Config();
                IAmazonS3 mClient;
                List <Label> mLabelList = new List <Label>();
                // NOTE(review): the name omits the Hour component, so images
                // uploaded at the same minute/second of different hours collide.
                string mImageName = System.DateTime.Now.Day.ToString() +
                                    System.DateTime.Now.Month.ToString() +
                                    System.DateTime.Now.Year.ToString() +
                                    System.DateTime.Now.Minute.ToString() +
                                    System.DateTime.Now.Second.ToString() + ".jpg";
                PutObjectRequest request = new PutObjectRequest()
                {
                    BucketName = "innovationsummit20182",
                    Key = mImageName,
                    InputStream = mImage
                };
                mS3Config.RegionEndpoint = Amazon.RegionEndpoint.EUWest1;
                // SECURITY: hardcoded credentials — rotate and externalize.
                mClient = new AmazonS3Client("AKIAIHLRHZDRJ7VN25AA", "LA2U98sp68uZ4rlEqjq/Mb8B0oFFPIJPwyBT+y5z", mS3Config);
                PutObjectResponse mS3Response = await mClient.PutObjectAsync(request);
                if (mS3Response.HttpStatusCode == System.Net.HttpStatusCode.OK)
                {
                    mLabelList = await AnalizeImageAmazonRekognition(mImageName, "innovationsummit20182", Amazon.RegionEndpoint.EUWest1, "AKIAIHLRHZDRJ7VN25AA", "LA2U98sp68uZ4rlEqjq/Mb8B0oFFPIJPwyBT+y5z");
                }
                // Returns after the first image (see NOTE above).
                return(mLabelList);
            }
        }
        return(null);
    }
    catch (Exception)
    {
        // FIX: was `throw ex;`, which resets the stack trace.
        throw;
    }
}
[MultipleProviders(S3Provider.AmazonS3 | S3Provider.BackBlazeB2)] //This test returns the wrong IsLatest on Google
public async Task ListObjectVersions(S3Provider provider, string _, ISimpleClient client)
{
    await CreateTempBucketAsync(provider, client, async tempBucket =>
    {
        //Enable versioning on the bucket
        await client.PutBucketVersioningAsync(tempBucket, true);

        //Verify that we enabled bucket versioning
        GetBucketVersioningResponse getVerResp = await client.GetBucketVersioningAsync(tempBucket);
        Assert.True(getVerResp.Status);

        // Seed the bucket: three objects, one delete, one overwrite.
        PutObjectResponse putResp1 = await client.PutObjectStringAsync(tempBucket, "1", "a").ConfigureAwait(false);
        PutObjectResponse putResp2 = await client.PutObjectStringAsync(tempBucket, "2", "aa").ConfigureAwait(false);
        PutObjectResponse putResp3 = await client.PutObjectStringAsync(tempBucket, "3", "aaa").ConfigureAwait(false);
        DeleteObjectResponse putResp4 = await client.DeleteObjectAsync(tempBucket, "2");                             //Delete object 2
        PutObjectResponse putResp5 = await client.PutObjectStringAsync(tempBucket, "3", "aaaa").ConfigureAwait(false); //Overwrite object 3

        ListObjectVersionsResponse listResp = await client.ListObjectVersionsAsync(tempBucket);
        Assert.True(listResp.IsSuccess);
        Assert.Equal(4, listResp.Versions.Count);
        if (provider == S3Provider.AmazonS3)
        {
            Assert.Equal(1, listResp.DeleteMarkers.Count);
            Assert.Equal(1000, listResp.MaxKeys);
        }
        Assert.Equal(tempBucket, listResp.BucketName);
        Assert.False(listResp.IsTruncated);

        // Shared per-version checks (key/version/latest/etag/size plus the
        // provider-dependent storage-class and owner assertions).
        void AssertVersion(S3Version v, string key, string versionId, bool isLatest, string etag, int size)
        {
            Assert.Equal(key, v.ObjectKey);
            Assert.Equal(versionId, v.VersionId);
            Assert.Equal(isLatest, v.IsLatest);
            Assert.Equal(DateTimeOffset.UtcNow.DateTime, v.LastModified.DateTime, TimeSpan.FromMinutes(1));
            Assert.Equal(etag, v.Etag);
            Assert.Equal(size, v.Size);
            if (provider != S3Provider.GoogleCloudStorage)
            {
                Assert.Equal(StorageClass.Standard, v.StorageClass);
            }
            if (provider == S3Provider.AmazonS3)
            {
                Assert.Equal(TestConstants.TestUserId, v.Owner?.Id);
                Assert.Equal(TestConstants.TestUsername, v.Owner?.Name);
            }
        }

        AssertVersion(listResp.Versions[0], "1", putResp1.VersionId, true, "\"0cc175b9c0f1b6a831c399e269772661\"", 1);
        AssertVersion(listResp.Versions[1], "2", putResp2.VersionId, false, "\"4124bc0a9335c27f086f24ba207a4912\"", 2);

        //This is the latest version of object 3 and should be 4 in size
        AssertVersion(listResp.Versions[2], "3", putResp5.VersionId, true, "\"74b87337454200d4d33f80c4663dc5e5\"", 4);

        //This was the previous version of object 3, so it should not be the latest and have 3 in size
        AssertVersion(listResp.Versions[3], "3", putResp3.VersionId, false, "\"47bce5c74f589f4867dbd57e9ca9f808\"", 3);

        //This is the latest version of object 2, since it was deleted
        S3DeleteMarker delMarker = listResp.DeleteMarkers[0];
        Assert.True(delMarker.IsLatest);
        Assert.Equal("2", delMarker.ObjectKey);
        Assert.Equal(putResp4.VersionId, delMarker.VersionId);
        Assert.Equal(DateTimeOffset.UtcNow.DateTime, delMarker.LastModified.DateTime, TimeSpan.FromMinutes(1));
        if (provider == S3Provider.AmazonS3)
        {
            Assert.Equal(TestConstants.TestUserId, delMarker.Owner.Id);
            Assert.Equal(TestConstants.TestUsername, delMarker.Owner.Name);
        }
    }).ConfigureAwait(false);
}
/// <summary>
/// Uploads the first posted .doc/.docx/.pdf file to the configured S3 bucket
/// and returns a JSON result describing the outcome.
/// Fix: the extension check had an operator-precedence bug —
/// `a || b || c && d` parses as `a || b || (c && d)`, so .doc/.docx files with
/// zero length were accepted and the length guard only applied to .pdf.
/// NOTE(review): both branches return from inside the loop, so only the first
/// posted file is ever processed — preserved for compatibility.
/// </summary>
public ActionResult UploadFiles()
{
    // Checking no of files injected in Request object
    if (Request.Files.Count > 0)
    {
        try
        {
            // Get all files from Request object
            HttpFileCollectionBase files = Request.Files;
            for (int i = 0; i < files.Count; i++)
            {
                HttpPostedFileBase file = files[i];
                string fname = files[i].FileName;
                string ext = Path.GetExtension(fname);
                // FIX: parenthesized so the non-empty check applies to every extension.
                if ((ext == ".doc" || ext == ".docx" || ext == ".pdf") && files[i].ContentLength > 0)
                {
                    string accessKey = WebConfigurationManager.AppSettings["AWSAccessKey"];
                    string secreteKey = WebConfigurationManager.AppSettings["AWSSecretKey"];
                    string s3BucketName = WebConfigurationManager.AppSettings["S3UploadBucket"];
                    AmazonS3Client client = new AmazonS3Client(accessKey, secreteKey, RegionEndpoint.USEast1);
                    PutObjectRequest putRequest = new PutObjectRequest
                    {
                        BucketName = s3BucketName,
                        Key = fname,
                        InputStream = file.InputStream,
                        ContentType = file.ContentType,
                    };
                    PutObjectResponse response = client.PutObject(putRequest);
                    var result = new
                    {
                        IsSuccess = true,
                        Message = "File Uploaded Successfully!"
                    };
                    return(Json(result));
                }
                else
                {
                    var result = new
                    {
                        IsSuccess = false,
                        Message = "Currently this tool supports PDF..Coming soon!!"
                    };
                    return(Json(result));
                }
            }
            return(Json(" "));
        }
        catch (Exception ex)
        {
            var result = new
            {
                IsSuccess = false,
                Message = ex.Message
            };
            return(Json(result));
        }
    }
    else
    {
        var result = new
        {
            IsSuccess = false,
            Message = "No files selected."
        };
        return(Json(result));
    }
}
// Captures the outcome of a completed S3 PutObject call for the given key.
public AmazonS3WriterResult(string objectKey, PutObjectResponse result)
{
    ObjectKey = objectKey;
    ETag = result.ETag;
    // Expiration is null when no lifecycle/expiration rule applied to the object.
    Expiration = result.Expiration?.ExpiryDate;
}
/// <summary>
/// Obfuscates the file named in textBox1 with a repeating-key XOR (key from
/// textBox2), writes it to "&lt;file&gt;.enc", and uploads the result to the
/// "filestoragesoon" S3 bucket.
/// Fixes: streams are now wrapped in using blocks (the original leaked the
/// open input stream when the user declined the overwrite prompt, and leaked
/// both streams on any I/O exception); the dead errorflag variable is removed
/// (every failure path already returned).
/// NOTE(security): repeating-key XOR is trivially breakable obfuscation, not
/// encryption — do not rely on it for confidentiality.
/// </summary>
private void button4_Click(object sender, EventArgs e)
{
    // Validate inputs; every failure path returns immediately.
    if (textBox1.Text == "")
    {
        MessageBox.Show("Could not open source or destination file.");
        return;
    }
    if (textBox2.Text == "")
    {
        MessageBox.Show("Please enter a key.");
        return;
    }
    if (!File.Exists(textBox1.Text))
    {
        MessageBox.Show("Could not open source or destination file.");
        return;
    }

    // Ask before overwriting an existing output file. Checked BEFORE any
    // stream is opened, so declining no longer leaks an open input stream.
    if (File.Exists(textBox1.Text + ".enc"))
    {
        DialogResult dialogResult = MessageBox.Show("Output file Exists Overwrite?", "Error", MessageBoxButtons.YesNo);
        if (dialogResult == DialogResult.No)
        {
            return;
        }
    }

    byte[] infilebuff = new byte[512];
    byte[] encryptedbuff = new byte[512];
    string key = textBox2.Text;
    int keylength = key.Length;
    int keyposition = 0;
    using (FileStream infile = new FileStream(textBox1.Text, FileMode.Open, FileAccess.Read))
    using (FileStream outfile = new FileStream(textBox1.Text + ".enc", FileMode.Create, FileAccess.Write))
    {
        int readstatus;
        while ((readstatus = infile.Read(infilebuff, 0, 512)) > 0)
        {
            for (int i = 0; i < readstatus; i++)
            {
                // Wrap the key position so the key repeats over the input.
                if (keyposition > keylength - 1)
                {
                    keyposition = 0;
                }
                encryptedbuff[i] = (byte)(infilebuff[i] ^ key[keyposition]);
                keyposition++;
            }
            outfile.Write(encryptedbuff, 0, readstatus);
        }
    }
    MessageBox.Show("Operation completed successfully");

    // Upload the obfuscated file to S3 (credentials come from the default chain).
    FileInfo upfile = new FileInfo(textBox1.Text + ".enc");
    IAmazonS3 client = new AmazonS3Client(Amazon.RegionEndpoint.USEast1);
    PutObjectRequest request = new PutObjectRequest()
    {
        BucketName = "filestoragesoon",
        Key = upfile.Name,
        FilePath = textBox1.Text + ".enc"
    };
    PutObjectResponse response2 = client.PutObject(request);
    MessageBox.Show("Operation completed successfully");
}
/// <summary>
/// Uploads <paramref name="stream"/> to S3 under the given domain/path,
/// charges the tenant quota for the uploaded bytes, sets caching and
/// disposition headers, invalidates CloudFront for the key, and returns
/// the object's URI.
/// </summary>
/// <param name="domain">Storage domain used to build the S3 key and resolve ACLs.</param>
/// <param name="path">Relative path of the object within the domain.</param>
/// <param name="stream">Content to upload; fully buffered before sending.</param>
/// <param name="contentType">MIME type; when empty it is derived from the file name.</param>
/// <param name="contentDisposition">Optional Content-Disposition header value.</param>
/// <param name="acl">Requested ACL; ACL.Auto falls back to the domain's ACL.</param>
/// <param name="contentEncoding">Optional Content-Encoding header value.</param>
/// <param name="cacheDays">Days used for the Cache-Control/Expires headers.</param>
/// <returns>URI of the stored object.</returns>
/// <exception cref="TenantQuotaException">Tenant has no free space or the file
/// exceeds the allowed quota.</exception>
public Uri Save(string domain, string path, Stream stream, string contentType, string contentDisposition, ACL acl, string contentEncoding = null, int cacheDays = 5)
{
    bool postWriteCheck = false;
    if (QuotaController != null)
    {
        try
        {
            QuotaController.QuotaUsedAdd(_modulename, domain, _dataList.GetData(domain), stream.Length);
        }
        catch (TenantQuotaException)
        {
            //this exception occurs only if tenant has no free space
            //or if file size larger than allowed by quota
            //so we can exit this function without stream buffering etc
            throw;
        }
        catch (Exception)
        {
            // Quota accounting failed for some other reason; retry it later,
            // after the stream has been buffered (see postWriteCheck below).
            postWriteCheck = true;
        }
    }
    using (AmazonS3 client = GetClient())
    {
        var request = new PutObjectRequest();
        // Derive the MIME type from the file name when none was supplied.
        string mime = string.IsNullOrEmpty(contentType) ? MimeMapping.GetMimeMapping(Path.GetFileName(path)) : contentType;
        request.BucketName = _bucket;
        request.Key = MakePath(domain, path);
        request.CannedACL = acl == ACL.Auto ? GetDomainACL(domain) : GetS3Acl(acl);
        request.ContentType = mime;
        request.ServerSideEncryptionMethod = ServerSideEncryptionMethod.AES256;
        var requestHeaders = new NameValueCollection();
        // NOTE(review): "maxage" (without a dash) looks like a typo for the
        // standard "max-age" directive — confirm before changing the literal.
        requestHeaders.Add("Cache-Control", string.Format("public, maxage={0}", (int)TimeSpan.FromDays(cacheDays).TotalSeconds));
        requestHeaders.Add("Etag", (DateTime.UtcNow.Ticks).ToString(CultureInfo.InvariantCulture));
        requestHeaders.Add("Last-Modified", DateTime.UtcNow.ToString("R"));
        requestHeaders.Add("Expires", DateTime.UtcNow.Add(TimeSpan.FromDays(cacheDays)).ToString("R"));
        if (!string.IsNullOrEmpty(contentDisposition))
        {
            requestHeaders.Add("Content-Disposition", contentDisposition);
        }
        else if (mime == "application/octet-stream")
        {
            // Force download for generic binary content.
            requestHeaders.Add("Content-Disposition", "attachment");
        }
        if (!string.IsNullOrEmpty(contentEncoding))
        {
            requestHeaders.Add("Content-Encoding", contentEncoding);
        }
        request.AddHeaders(requestHeaders);
        //Send body
        var buffered = stream.GetBuffered();
        if (postWriteCheck)
        {
            // Deferred quota charge (first attempt failed above), now using
            // the buffered stream's length.
            QuotaController.QuotaUsedAdd(_modulename, domain, _dataList.GetData(domain), buffered.Length);
        }
        // Keep the buffered stream open so the caller/GC handles its lifetime.
        request.AutoCloseStream = false;
        request.InputStream = buffered;
        PutObjectResponse response = client.PutObject(request);
        var destinationObjectEncryptionStatus = response.ServerSideEncryptionMethod; //..ServerSideEncryptionMethod;
        InvalidateCloudFront(MakePath(domain, path));
        return(GetUri(domain, path));
    }
}
/// <summary>
/// Uploads a form file to S3 under key/subFolderName/&lt;guid&gt;-&lt;time&gt;.&lt;ext&gt;
/// with public-read ACL, creating the subfolder first if needed.
/// Returns a result model with the public photo URL on success.
/// </summary>
/// <param name="file">Incoming multipart form file.</param>
/// <param name="subFolderName">Subfolder (S3 key prefix segment) to store under.</param>
public async Task <ResponsePhotoUpload> UploadFileAsync(IFormFile file, string subFolderName)
{
    try
    {
        if (!await CreateSubfolderIfNotExistAsync(subFolderName))
        {
            return(new ResponsePhotoUpload()
            {
                Message = "Yükleme Başarısız. Subfolder oluşturulamadı.", Success = false, PhotoUrl = "", ThumbnailUrl = ""
            });
        }
        // create unique file name for prevent the mess
        string fileName = DateTime.Now.ToString("H.mm.ss") + Path.GetExtension(file.FileName);
        var keyName = key + "/" + subFolderName + "/" + Guid.NewGuid() + "-" + fileName;
        PutObjectResponse response = null;
        // BUG FIX: the original did a single Stream.Read into a byte[] —
        // Read does not guarantee filling the buffer, so large uploads could
        // be truncated. Copy the whole stream instead, and dispose it.
        using (var stream = new MemoryStream())
        {
            using (var readStream = file.OpenReadStream())
            {
                await readStream.CopyToAsync(stream);
            }
            stream.Position = 0; // rewind before handing to the SDK
            var request = new PutObjectRequest
            {
                BucketName = bucketName,
                Key = keyName,
                InputStream = stream,
                ContentType = file.ContentType,
                CannedACL = S3CannedACL.PublicRead
            };
            response = await s3Client.PutObjectAsync(request);
        }
        if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
        {
            return(new ResponsePhotoUpload()
            {
                Message = "Yükleme Başarılı",
                Success = true,
                PhotoUrl = "https://" + bucketName + ".s3." + bucketRegion.SystemName + "." + bucketRegion.PartitionDnsSuffix + "/" + keyName,
                ThumbnailUrl = "https://" + bucketName + ".s3." + bucketRegion.SystemName + "." + bucketRegion.PartitionDnsSuffix + "/" + keyName
            });
        }
        else
        {
            // this model is up to you, in my case I have to use it following;
            return(new ResponsePhotoUpload() { Message = "Yükleme Başarısız", Success = false, PhotoUrl = "", ThumbnailUrl = "" });
        }
    }
    catch (Exception)
    {
        return(new ResponsePhotoUpload() { Message = "Yükleme Başarısız. Hata oluştu!", Success = false, PhotoUrl = "", ThumbnailUrl = "" });
    }
}
/// <summary>
/// Reads a file of integers (one per line, each in 1..3999) from the input
/// bucket, converts each to a Roman numeral, and writes the joined result to
/// the same key in the output bucket. Out-of-range numbers are logged and
/// skipped; a non-numeric line aborts the whole run.
/// </summary>
/// <param name="context">Lambda context used for logging.</param>
async Task ProcessObjectDataAsync(ILambdaContext context)
{
    List <int> arabNums = new List <int> { };
    try
    {
        context.Logger.LogLine($"GetObjectAsync, key: {this.bktKey}, bucket: {inBucketName}.");
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = inBucketName,
            Key = this.bktKey,
        };
        using (GetObjectResponse response = (await s3Client.GetObjectAsync(request)))
        using (Stream responseStream = response.ResponseStream)
        using (StreamReader reader = new StreamReader(responseStream))
        {
            string line;
            while ((line = reader.ReadLine()) != null)
            {
                try
                {
                    int numToAdd = Int32.Parse(line);
                    if (numToAdd > 0 && numToAdd < 4000)
                    {
                        arabNums.Add(numToAdd);
                    }
                    else
                    {
                        context.Logger.LogLine("The input file contains number(s) out of range 0 - 4000 n:" + numToAdd);
                    }
                }
                catch (FormatException e)
                {
                    context.Logger.LogLine("The input file contains non integer-convertible line(s) {0}" + e.Message);
                    return;
                }
            }
            try
            {
                // BUG FIX: the request was previously constructed with
                // ContentBody = convertedNumStr BEFORE the conversion loop ran.
                // Strings are immutable, so the empty string was captured and an
                // empty object was always written. Build the body first.
                string convertedNumStr = string.Empty;
                foreach (int iia in arabNums)
                {
                    // NOTE(review): "\\n" emits a literal backslash-n separator,
                    // not a newline — confirm whether "\n" was intended.
                    convertedNumStr += ArabToRoman(iia) + "\\n";
                }
                PutObjectRequest pReq = new PutObjectRequest
                {
                    BucketName = outBucketName,
                    ContentBody = convertedNumStr,
                    Key = this.bktKey,
                };
                PutObjectResponse pRes = await s3Client.PutObjectAsync(pReq);
            }
            catch (Exception e)
            {
                context.Logger.LogLine("Error writing the output bucket file" + e.Message);
                return;
            }
        }
    }
    catch (AmazonS3Exception e)
    {
        context.Logger.LogLine("Error reading object. Message:" + e.Message);
        return;
    }
    catch (Exception e)
    {
        context.Logger.LogLine("Unknown error reading object. Message:" + e.Message);
        return;
    }
}
/// <summary>
/// Uploads an image to S3 (with width/height metadata and an MD5 digest),
/// verifies the returned ETag against the locally computed MD5, and
/// optionally archives the same bytes to a Glacier vault. Any failure is
/// captured on the returned model's Exception property rather than thrown.
/// </summary>
/// <param name="bucketName">Destination S3 bucket.</param>
/// <param name="bucketUrl">Base URL used to build the returned ObjectLocation.</param>
/// <param name="objectKey">Key to store the object under.</param>
/// <param name="storageClass">S3 storage class for the object.</param>
/// <param name="permissions">Canned ACL applied to the object.</param>
/// <param name="glacierVaultName">When non-blank, also archive to this Glacier vault.</param>
/// <param name="image">Image payload (stream, dimensions, MIME type).</param>
/// <returns>Model describing the upload; Exception is set on failure.</returns>
public async Task <ImageUploadedModel> UploadImage( string bucketName, string bucketUrl, string objectKey, S3StorageClass storageClass, S3CannedACL permissions, string glacierVaultName, ImageInfo image)
{
    ImageUploadedModel model = new ImageUploadedModel();
    try
    {
        PutObjectRequest putRequest = new PutObjectRequest
        {
            BucketName = bucketName,
            Key = objectKey,
            StorageClass = storageClass,
            CannedACL = permissions,
            ContentType = image.MimeType,
            // Keep the stream open: it is reused below for the Glacier upload.
            AutoCloseStream = false
        };
        putRequest.Metadata.Add("width", image.Width.ToString());
        putRequest.Metadata.Add("height", image.Height.ToString());
        putRequest.InputStream = image.Image;
        // MD5 is sent with the request so S3 can validate the payload in transit.
        byte[] md5Hash = image.Image.Md5Hash();
        putRequest.MD5Digest = md5Hash.ToBase64String();
        PutObjectResponse response = await S3Client.PutObjectAsync(putRequest);
        // S3 wraps the ETag in quotes; normalize before comparing.
        string eTag = response.ETag.Trim('"').ToLowerInvariant();
        string expectedETag = md5Hash.ToS3ETagString();
        if (eTag != expectedETag)
        {
            throw new Exception("The eTag received from S3 doesn't match the eTag computed before uploading. This usually indicates that the image has been corrupted in transit.");
        }
        // upload to Glacier if needed
        if (!string.IsNullOrWhiteSpace(glacierVaultName))
        {
            ArchiveDescription description = new ArchiveDescription
            {
                ObjectKey = objectKey,
                ContentType = image.MimeType,
                Width = image.Width,
                Height = image.Height
            };
            // reset stream position in image
            image.Image.Position = 0;
            UploadArchiveRequest glacierRequest = new UploadArchiveRequest
            {
                ArchiveDescription = JsonConvert.SerializeObject(description, Formatting.None),
                Body = image.Image,
                VaultName = glacierVaultName,
                // Glacier requires a SHA-256 tree hash of the payload.
                Checksum = TreeHashGenerator.CalculateTreeHash(image.Image)
            };
            UploadArchiveResponse glacierResponse = await GlacierClient.UploadArchiveAsync(glacierRequest);
            model.ArchiveId = glacierResponse.ArchiveId;
        }
        model.ObjectKey = objectKey;
        model.ETag = eTag;
        model.ObjectLocation = bucketUrl + objectKey;
        model.VersionId = response.VersionId;
    }
    catch (Exception ex)
    {
        // Deliberate best-effort contract: callers inspect model.Exception.
        model.Exception = ex;
    }
    return(model);
}
/// <summary>
/// Populates a <see cref="PutObjectResponse"/> from the HTTP response headers
/// of an S3 PutObject call. Every header is optional: a property is assigned
/// only when the corresponding header is present on the response.
/// </summary>
/// <param name="context">Unmarshaller context wrapping the web response data.</param>
/// <param name="response">Response object to populate in place.</param>
private static void UnmarshallResult(XmlUnmarshallerContext context, PutObjectResponse response)
{
    IWebResponseData responseData = context.ResponseData;
    // Lifecycle expiration info (rule id + expiry date).
    if (responseData.IsHeaderPresent("x-amz-expiration"))
    {
        response.Expiration = new Expiration(responseData.GetHeaderValue("x-amz-expiration"));
    }
    // Server-side encryption details (SSE-S3 / SSE-KMS / SSE-C).
    if (responseData.IsHeaderPresent("x-amz-server-side-encryption"))
    {
        response.ServerSideEncryptionMethod = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-server-side-encryption"));
    }
    if (responseData.IsHeaderPresent("x-amz-server-side-encryption-customer-algorithm"))
    {
        response.ServerSideEncryptionCustomerMethod = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-server-side-encryption-customer-algorithm"));
    }
    if (responseData.IsHeaderPresent("x-amz-server-side-encryption-customer-key-MD5"))
    {
        response.ServerSideEncryptionCustomerProvidedKeyMD5 = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-server-side-encryption-customer-key-MD5"));
    }
    if (responseData.IsHeaderPresent("ETag"))
    {
        response.ETag = S3Transforms.ToString(responseData.GetHeaderValue("ETag"));
    }
    if (responseData.IsHeaderPresent("x-amz-version-id"))
    {
        response.VersionId = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-version-id"));
    }
    if (responseData.IsHeaderPresent(HeaderKeys.XAmzServerSideEncryptionAwsKmsKeyIdHeader))
    {
        response.ServerSideEncryptionKeyManagementServiceKeyId = S3Transforms.ToString(responseData.GetHeaderValue(HeaderKeys.XAmzServerSideEncryptionAwsKmsKeyIdHeader));
    }
    if (responseData.IsHeaderPresent("x-amz-server-side-encryption-context"))
    {
        response.ServerSideEncryptionKeyManagementServiceEncryptionContext = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-server-side-encryption-context"));
    }
    if (responseData.IsHeaderPresent(S3Constants.AmzHeaderRequestCharged))
    {
        response.RequestCharged = RequestCharged.FindValue(responseData.GetHeaderValue(S3Constants.AmzHeaderRequestCharged));
    }
    if (responseData.IsHeaderPresent(S3Constants.AmzHeaderBucketKeyEnabled))
    {
        response.BucketKeyEnabled = S3Transforms.ToBool(responseData.GetHeaderValue(S3Constants.AmzHeaderBucketKeyEnabled));
    }
    // Integrity checksums echoed back by S3 (one per requested algorithm).
    if (responseData.IsHeaderPresent("x-amz-checksum-crc32"))
    {
        response.ChecksumCRC32 = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-checksum-crc32"));
    }
    if (responseData.IsHeaderPresent("x-amz-checksum-crc32c"))
    {
        response.ChecksumCRC32C = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-checksum-crc32c"));
    }
    if (responseData.IsHeaderPresent("x-amz-checksum-sha1"))
    {
        response.ChecksumSHA1 = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-checksum-sha1"));
    }
    if (responseData.IsHeaderPresent("x-amz-checksum-sha256"))
    {
        response.ChecksumSHA256 = S3Transforms.ToString(responseData.GetHeaderValue("x-amz-checksum-sha256"));
    }
    return;
}
/// <summary>
/// Decodes a base64 data-URI image, scales it down proportionally to fit
/// within 150x150, re-encodes it as JPEG, and uploads it (public-read) to
/// the 150px bucket under <paramref name="fileName"/>.
/// </summary>
/// <param name="file">Data URI: "mime;base64,&lt;payload&gt;" (split on the comma).</param>
/// <param name="fileName">S3 key for the thumbnail.</param>
/// <returns>Model with Success flag and the file name.</returns>
public static async Task <UploadPhotoModel> UploadObject150(string file, string fileName)
{
    // BUG FIX: the original wrapped everything in try/catch with "throw ex;",
    // which reset the stack trace while adding nothing — the wrapper is gone.
    // (The unused "fileExtension" local was removed as well.)
    var FileData = file.Split(',');
    file = FileData[1]; // base64 payload follows the comma

    var client = new AmazonS3Client(accessKey, accessSecret, Amazon.RegionEndpoint.USEast2);
    byte[] fileBytes = Convert.FromBase64String(file);
    PutObjectResponse response = null;
    byte[] fileBytes150;

    // BUG FIX: Image, Bitmap and MemoryStream instances were never disposed.
    using (MemoryStream streamorignal = new MemoryStream(fileBytes))
    using (Image img = Image.FromStream(streamorignal))
    {
        var width = img.Width;
        var height = img.Height;
        var givenWidth = 150;
        var givenHeight = 150;
        // Figure out the ratio
        double ratioX = (double)givenWidth / (double)width;
        double ratioY = (double)givenHeight / (double)height;
        // use whichever multiplier is smaller
        double ratio = ratioX < ratioY ? ratioX : ratioY;
        // now we can get the new height and width
        int h = Convert.ToInt32(height * ratio);
        int w = Convert.ToInt32(width * ratio);
        using (Bitmap b = new Bitmap(img, new Size(w, h)))
        using (MemoryStream ms2 = new MemoryStream())
        {
            b.Save(ms2, System.Drawing.Imaging.ImageFormat.Jpeg);
            fileBytes150 = ms2.ToArray();
        }
    }

    using (var stream = new MemoryStream(fileBytes150))
    {
        var request = new PutObjectRequest
        {
            BucketName = bucket150,
            Key = fileName,
            InputStream = stream,
            CannedACL = S3CannedACL.PublicRead,
        };
        response = await client.PutObjectAsync(request);
    }

    if (response.HttpStatusCode == System.Net.HttpStatusCode.OK)
    {
        return(new UploadPhotoModel { Success = true, FileName = fileName });
    }
    return(new UploadPhotoModel { Success = false, FileName = fileName });
}
/// <summary>
/// Exercises ECS conditional PUT semantics end-to-end on a single key:
/// If-Unmodified-Since / If-Modified-Since date preconditions, then
/// If-Match / If-None-Match with a real ETag, a mismatching ETag, and the
/// wildcard "*" (update-only / create-only). Each precondition that must
/// fail is asserted to raise a 412 Precondition Failed.
/// NOTE: the steps are strictly order-dependent — the first PUT establishes
/// the object and its ETag that every later precondition is checked against.
/// </summary>
public void TestConditionalObject()
{
    string key = "key-1";
    string content = "testing a conditional PUT";
    // Timestamps straddling "now" so date preconditions deterministically pass/fail.
    DateTime in_the_past = TimeZoneInfo.ConvertTimeToUtc(DateTime.Now.AddMinutes(-5));
    DateTime in_the_future = TimeZoneInfo.ConvertTimeToUtc(DateTime.Now.AddMinutes(10));
    PutObjectRequestECS por = new PutObjectRequestECS()
    {
        BucketName = temp_bucket, Key = key, ContentBody = content, ContentType = "text/plain"
    };
    PutObjectResponse response = client.PutObject(por);
    string eTag = response.ETag;
    // If-Unmodified-Since in the past must fail: the object was just modified.
    por.UnmodifiedSinceDate = in_the_past;
    try
    {
        client.PutObject(por);
        Assert.Fail("Expected 412 response code");
    }
    catch (AmazonS3Exception e)
    {
        Assert.AreEqual(System.Net.HttpStatusCode.PreconditionFailed, e.StatusCode);
    }
    // clear out unmodified
    por = new PutObjectRequestECS()
    {
        BucketName = temp_bucket, Key = key, ContentBody = content, ContentType = "text/plain"
    };
    // set same stamp as modified - test
    por.ModifiedSinceDate = in_the_past;
    client.PutObject(por);
    // If-Modified-Since in the future must fail.
    por.ModifiedSinceDate = in_the_future;
    try
    {
        client.PutObject(por);
        Assert.Fail("Expected 412 response code");
    }
    catch (AmazonS3Exception e)
    {
        Assert.AreEqual(System.Net.HttpStatusCode.PreconditionFailed, e.StatusCode);
    }
    // clear out modified
    por = new PutObjectRequestECS()
    {
        BucketName = temp_bucket, Key = key, ContentBody = content, ContentType = "text/plain"
    };
    // if etag match - pass
    por.EtagToMatch = eTag;
    client.PutObject(por);
    por.EtagToMatch = null;
    // if etag doesn't match - fail
    por.EtagToNotMatch = eTag;
    try
    {
        client.PutObject(por);
        Assert.Fail("Expected 412 response code");
    }
    catch (AmazonS3Exception e)
    {
        Assert.AreEqual(System.Net.HttpStatusCode.PreconditionFailed, e.StatusCode);
    }
    // An arbitrary ETag that cannot belong to the current object.
    eTag = "0f7373bfe4bda6531b15229e9b9e8f75";
    // if etag doesn't match - pass
    por.EtagToNotMatch = eTag;
    client.PutObject(por);
    por.EtagToNotMatch = null;
    // if etag to match - fail
    por.EtagToMatch = eTag;
    try
    {
        client.PutObject(por);
        Assert.Fail("Expected 412 response code");
    }
    catch (AmazonS3Exception e)
    {
        Assert.AreEqual(System.Net.HttpStatusCode.PreconditionFailed, e.StatusCode);
    }
    por.EtagToMatch = null;
    // if match * (if the key exists, i.e. update only) pass
    por.EtagToMatch = "*";
    client.PutObject(por);
    por.EtagToMatch = null;
    // test if-none-match * (if key is new, i.e. create only) fail
    por.EtagToNotMatch = "*";
    try
    {
        client.PutObject(por);
        Assert.Fail("Expected 412 response code");
    }
    catch (AmazonS3Exception e)
    {
        Assert.AreEqual(System.Net.HttpStatusCode.PreconditionFailed, e.StatusCode);
    }
    por.Key = "key-2";
    // if-non-match * (if the key is new i.e. create only) pass
    client.PutObject(por);
    por.EtagToNotMatch = null;
    por.Key = "key-3";
    // if-match * (if the key exists i.e. update only) fail
    por.EtagToMatch = "*";
    try
    {
        // Disabled pending STORAGE-14736 (server-side behavior under review).
        //client.PutObject(por); STORAGE - 14736
        //Assert.Fail("Expected 412 response code"); STORAGE - 14736
    }
    catch (AmazonS3Exception e)
    {
        Assert.AreEqual(System.Net.HttpStatusCode.PreconditionFailed, e.StatusCode);
    }
}
/// <summary>
/// Saves an uploaded jpg/png locally, classifies it with the NSFW API, and
/// uploads it to S3 only when the classification outcome is safe (high or
/// moderate probability). Unsafe/racy images are rejected; any API failure
/// falls back to the Index view.
/// </summary>
/// <param name="ImageFile">Posted image file (jpg or png).</param>
public ActionResult Test1(HttpPostedFileBase ImageFile)
{
    // Guard clauses replace the original nested if/else.
    if (ImageFile == null || ImageFile.ContentLength == 0)
    {
        //Show spinner
        ViewBag.Error = "Please select a file.<br>";
        return(View("Index"));
    }
    // Generalized: accept any casing of the extension (e.g. ".JPG").
    if (!ImageFile.FileName.EndsWith("jpg", StringComparison.OrdinalIgnoreCase) &&
        !ImageFile.FileName.EndsWith("png", StringComparison.OrdinalIgnoreCase))
    {
        ViewBag.Error = "File type is incorrect.<br>";
        return(View("Index"));
    }

    // Persist the upload locally so the NSFW API can read it from disk.
    string path = Server.MapPath("~/Doc/" + ImageFile.FileName);
    if (System.IO.File.Exists(path))
    {
        System.IO.File.Delete(path);
    }
    ImageFile.SaveAs(path);

    // Configure API key authorization: Apikey
    Configuration.Default.AddApiKey("Apikey", "XXX");
    var apiInstance = new NsfwApi();

    // Do not actually store your IAM credentials in code. EC2 Role is best
    var awsKey = "XXX";
    var awsSecretKey = "XXX";
    var bucketRegion = Amazon.RegionEndpoint.USEast1; // Your bucket region
    var s3 = new AmazonS3Client(awsKey, awsSecretKey, bucketRegion);
    var putRequest = new PutObjectRequest();
    string fileName = ImageFile.FileName;
    string imgURL = "csc-assignment-2-2020.s3-ap-southeast-1.amazonaws.com/" + fileName;

    // Upload the posted image to S3; shared by both "safe" outcomes
    // (the original duplicated this block in two switch cases).
    void UploadToS3()
    {
        putRequest.BucketName = "csc-assignment-2-2020"; // Your bucket name
        putRequest.ContentType = "image/jpeg";
        putRequest.InputStream = ImageFile.InputStream;
        // key will be the name of the image in your bucket
        putRequest.Key = ImageFile.FileName;
        PutObjectResponse putResponse = s3.PutObject(putRequest);
    }

    try
    {
        // BUG FIX: the classifier FileStream was never disposed; wrap it in
        // using so the handle is released on every return path.
        using (var imageFile = new System.IO.FileStream(path, System.IO.FileMode.Open))
        {
            // Classify the image in natural language
            NsfwResult result = apiInstance.NsfwClassify(imageFile);
            Debug.WriteLine(result);
            ViewBag.Score = result.Score;
            ViewBag.Outcome = result.ClassificationOutcome;
            //Hide spinner
            switch (result.ClassificationOutcome)
            {
                case "SafeContent_HighProbability":
                    UploadToS3();
                    return(View("Success", result));
                case "UnsafeContent_HighProbability":
                    return(View("Fail", result));
                case "RacyContent":
                    return(View("Fail", result));
                case "SafeContent_ModerateProbability":
                    UploadToS3();
                    return(View("Success", result));
                default:
                    return(View("Index"));
            }
            // (The original had an unreachable return(View("Success")) here.)
        }
    }
    catch (Exception e)
    {
        Debug.Print("Exception when calling RecognizeApi.RecognizeDescribe: " + e.Message);
        return(View("Index"));
    }
}//end of Upload & Save + NSFW
/// <summary>
/// Lambda entry point: parses the API event body (a Box webhook payload),
/// copies the referenced Box file into S3, starts (or resumes) an AWS
/// Transcribe job named after the Box file id, waits for completion, runs
/// the call-center skill over the transcript, deletes the temporary S3
/// object, and posts result cards back to Box.
/// </summary>
/// <param name="request">Raw API Gateway event stream (JSON).</param>
/// <param name="context">Lambda context used for logging.</param>
public async Task FunctionHandler(System.IO.Stream request, ILambdaContext context)
{
    Console.WriteLine("======== API Event =========");
    string requestStr;
    using (StreamReader reader = new StreamReader(request))
    {
        requestStr = reader.ReadToEnd();
    }
    // The event body is itself a JSON string; parse twice (envelope, then body).
    dynamic requestJson = JObject.Parse(requestStr);
    dynamic inputJson = JObject.Parse(requestJson.body.Value);
    Console.WriteLine("======== Request String =========");
    Console.WriteLine(requestStr);
    Console.WriteLine("======== Context =========");
    Console.WriteLine(JsonConvert.SerializeObject(context, Formatting.None));
    Console.WriteLine("======== Box Input (body) =========");
    Console.WriteLine(JsonConvert.SerializeObject(inputJson, Formatting.None));
    // Job name derives from the Box file id so re-runs find the same job.
    //var jobName = $"f{inputJson.source.id}_v{inputJson.source.file_version.id}";
    var jobName = $"f{inputJson.source.id}";
    // move file to S3 for processing (aws can not process using anything other than an S3 uir)
    var fileUrl = BoxHelper.getFileUrl(inputJson.source.id.Value, inputJson.token);
    Console.WriteLine($"FileUrl: {fileUrl}");
    string fileExt = Path.GetExtension(inputJson.source.name.Value).TrimStart('.');
    string fileName = $"{jobName}.{fileExt}";
    string mimeType = MimeMapping.GetMimeType(fileExt);
    PutObjectResponse response = await UploadBoxFileToS3(fileUrl, config.S3BucketName, mimeType, fileName);
    Console.WriteLine("======== Put Object Response =========");
    Console.WriteLine(JsonConvert.SerializeObject(response, Formatting.None));
    if (response.HttpStatusCode.CompareTo(HttpStatusCode.OK) != 0)
    {
        throw new Exception("Status code error");
    }
    Console.WriteLine("JobName: " + jobName);
    // Check for an existing job (maybe lambda was timed out and then re-run)
    var job = await GetTranscriptionJob(jobName);
    if (job == null || job?.TranscriptionJobStatus == null)
    {
        job = await StartTranscriptionJob(jobName, GetS3FileUrl(config.S3BucketName, fileName), fileExt);
    }
    switch (job?.TranscriptionJobStatus.Value)
    {
        case JobStatus.IN_PROGRESS:
            job = await WaitForCompletion(jobName);
            break;
        case JobStatus.FAILED:
            // Nothing to process; abort without touching Box.
            Console.WriteLine("AWS Transcription job failed. Aborting");
            return;
    }
    var result = await ProcessTranscriptionJob(job);
    CallCenterSkill.ProcessTranscriptionResults(ref result);
    // Clean up the temporary S3 copy of the Box file.
    // NOTE(review): .Wait() inside an async method blocks the thread — confirm
    // whether this should be awaited instead.
    DeleteObjectNonVersionedBucketAsync(fileName).Wait();
    await BoxHelper.GenerateCards(result, inputJson);
}
/// <summary>
/// Updates a carousel entry. When a new image GUID is supplied, uploads the
/// original image plus four pre-resized variants (570x240, 285x120, 114x48,
/// 57x24) from the temporary folder to S3 with public-read ACL; only if all
/// five uploads succeed is the database record updated. On a DB failure the
/// five uploaded objects are deleted again (manual rollback). Without a GUID
/// only the database record is updated.
/// </summary>
/// <param name="model">Carousel fields plus optional image GUID/extension.</param>
/// <returns>200 with status on success; 400 for unknown id; 500 on upload/DB failure.</returns>
public async Task <IActionResult> Update([FromBody] UpdateCarousel model)
{
    Carousel carousel = this.carouselService.Read(model.Id);
    if (carousel == null)
    {
        return(BadRequest());
    }
    if (!string.IsNullOrEmpty(model.GUID))
    {
        // One PutObjectRequest per variant; all read from the temp folder
        // where the resize step previously wrote the files.
        PutObjectRequest putRequest = new PutObjectRequest();
        putRequest.BucketName = bucket;
        putRequest.Key = folder + "/" + model.GUID + model.Extension;
        putRequest.FilePath = path + @"\temporary\" + model.GUID + model.Extension;
        putRequest.CannedACL = S3CannedACL.PublicRead;
        PutObjectRequest putRequest_570_240 = new PutObjectRequest();
        putRequest_570_240.BucketName = bucket;
        putRequest_570_240.Key = folder + "/" + model.GUID + "_570_240" + model.Extension;
        putRequest_570_240.FilePath = path + @"\temporary\" + model.GUID + "_570_240" + model.Extension;
        putRequest_570_240.CannedACL = S3CannedACL.PublicRead;
        PutObjectRequest putRequest_285_120 = new PutObjectRequest();
        putRequest_285_120.BucketName = bucket;
        putRequest_285_120.Key = folder + "/" + model.GUID + "_285_120" + model.Extension;
        putRequest_285_120.FilePath = path + @"\temporary\" + model.GUID + "_285_120" + model.Extension;
        putRequest_285_120.CannedACL = S3CannedACL.PublicRead;
        PutObjectRequest putRequest_114_48 = new PutObjectRequest();
        putRequest_114_48.BucketName = bucket;
        putRequest_114_48.Key = folder + "/" + model.GUID + "_114_48" + model.Extension;
        putRequest_114_48.FilePath = path + @"\temporary\" + model.GUID + "_114_48" + model.Extension;
        putRequest_114_48.CannedACL = S3CannedACL.PublicRead;
        PutObjectRequest putRequest_57_24 = new PutObjectRequest();
        putRequest_57_24.BucketName = bucket;
        putRequest_57_24.Key = folder + "/" + model.GUID + "_57_24" + model.Extension;
        putRequest_57_24.FilePath = path + @"\temporary\" + model.GUID + "_57_24" + model.Extension;
        putRequest_57_24.CannedACL = S3CannedACL.PublicRead;
        AmazonS3Client client = new AmazonS3Client(accessKey, secretKey, Amazon.RegionEndpoint.GetBySystemName(region));
        try
        {
            // Uploads run sequentially; any SDK exception jumps to catch below.
            PutObjectResponse putResponse = await client.PutObjectAsync(putRequest);
            PutObjectResponse putResponse_570_240 = await client.PutObjectAsync(putRequest_570_240);
            PutObjectResponse putResponse_285_120 = await client.PutObjectAsync(putRequest_285_120);
            PutObjectResponse putResponse_114_48 = await client.PutObjectAsync(putRequest_114_48);
            PutObjectResponse putResponse_57_24 = await client.PutObjectAsync(putRequest_57_24);
            if (putResponse.HttpStatusCode == HttpStatusCode.OK &&
                putResponse_570_240.HttpStatusCode == HttpStatusCode.OK &&
                putResponse_285_120.HttpStatusCode == HttpStatusCode.OK &&
                putResponse_114_48.HttpStatusCode == HttpStatusCode.OK &&
                putResponse_57_24.HttpStatusCode == HttpStatusCode.OK)
            {
                bool status = await Task.Run(() =>
                {
                    carousel.ImageURL = baseS3URL + "/" + model.GUID + model.Extension;
                    carousel.Location = model.Location;
                    carousel.Proverb = model.Proverb;
                    carousel.Source = model.Source;
                    carousel.PublishedDate = model.PublishedDate;
                    return(this.carouselService.Update(carousel));
                });
                if (status)
                {
                    // NOTE(review): this return path never calls client.Dispose()
                    // (only the fall-through below does) — confirm intended.
                    return(Ok(status));
                }
                else
                {
                    // DB update failed: roll back the five uploaded objects.
                    // NOTE(review): BucketName here is bucket + folder, unlike the
                    // uploads above which used bucket alone — confirm this matches
                    // the actual bucket layout.
                    DeleteObjectRequest deleteRequest = new DeleteObjectRequest()
                    {
                        BucketName = bucket + folder, Key = folder + "/" + model.GUID + model.Extension
                    };
                    DeleteObjectResponse deleteResponse = await client.DeleteObjectAsync(deleteRequest);
                    DeleteObjectRequest deleteRequest_570_240 = new DeleteObjectRequest()
                    {
                        BucketName = bucket + folder, Key = folder + "/" + model.GUID + "_570_240" + model.Extension
                    };
                    DeleteObjectResponse deleteResponse_570_240 = await client.DeleteObjectAsync(deleteRequest_570_240);
                    DeleteObjectRequest deleteRequest_285_120 = new DeleteObjectRequest()
                    {
                        BucketName = bucket + folder, Key = folder + "/" + model.GUID + "_285_120" + model.Extension
                    };
                    DeleteObjectResponse deleteResponse_285_120 = await client.DeleteObjectAsync(deleteRequest_285_120);
                    DeleteObjectRequest deleteRequest_114_48 = new DeleteObjectRequest()
                    {
                        BucketName = bucket + folder, Key = folder + "/" + model.GUID + "_114_48" + model.Extension
                    };
                    DeleteObjectResponse deleteResponse_114_48 = await client.DeleteObjectAsync(deleteRequest_114_48);
                    DeleteObjectRequest deleteRequest_57_24 = new DeleteObjectRequest()
                    {
                        BucketName = bucket + folder, Key = folder + "/" + model.GUID + "_57_24" + model.Extension
                    };
                    DeleteObjectResponse deleteResponse_57_24 = await client.DeleteObjectAsync(deleteRequest_57_24);
                    return(new StatusCodeResult(500));
                }
            }
        }
        catch (Exception ex)
        {
            // NOTE(review): the exception is captured into a local and otherwise
            // swallowed (falls through to 500) — confirm it should be logged.
            string errors = ex.GetBaseException().ToString();
        }
        client.Dispose();
        return(new StatusCodeResult(500));
    }
    else
    {
        // No new image: update only the DB fields, keeping the existing URL scheme.
        bool status = await Task.Run(() =>
        {
            carousel.ImageURL = baseS3URL + "/" + model.GUID + model.Extension;
            carousel.Location = model.Location;
            carousel.Proverb = model.Proverb;
            carousel.Source = model.Source;
            carousel.PublishedDate = model.PublishedDate;
            return(this.carouselService.Update(carousel));
        });
        if (status)
        {
            return(Ok(status));
        }
        else
        {
            return(BadRequest());
        }
    }
}
/// <summary>
/// Uploads an asset to S3 (from an in-memory buffer when provided, otherwise
/// from the file path) with the requested storage class and tag set, and
/// returns the generated object key. Returns an empty string on any error
/// (errors are logged, not thrown, except for credential problems).
/// </summary>
/// <param name="request">Upload parameters (bucket, tags, file path or bytes).</param>
/// <param name="storageClass">S3 storage class for the object.</param>
/// <param name="assetId">Asset id folded into the generated S3 key.</param>
/// <returns>The object key, or string.Empty on failure.</returns>
/// <exception cref="Exception">Thrown when AWS credentials are invalid.</exception>
public string UploadFile(UploadAssetRequestParameters request, S3StorageClass storageClass, long assetId)
{
    string objectKey = GetS3FileName(request.FilePath, assetId);
    try
    {
        // Translate the incoming key/value pairs into S3 tags.
        List <Amazon.S3.Model.Tag> tagSet = new List <Amazon.S3.Model.Tag>();
        if (request.Tags != null)
        {
            foreach (KeyValuePair <string, string> entry in request.Tags)
            {
                tagSet.Add(new Amazon.S3.Model.Tag { Key = entry.Key, Value = entry.Value });
            }
        }
        var awsRegion = Amazon.RegionEndpoint.GetBySystemName(ConfigurationManager.AppSettings["AWSRegion"].ToString());
        // BUG FIX: the client was never disposed; wrap it in using.
        using (IAmazonS3 client = new AmazonS3Client(awsRegion))
        {
            PutObjectRequest putRequest = new PutObjectRequest
            {
                BucketName = request.BucketName,
                Key = objectKey,
                TagSet = tagSet,
                StorageClass = storageClass
            };
            //upload by filepath or memorystream
            if (request.InputStream != null && request.InputStream.Length > 0)
            {
                putRequest.InputStream = new MemoryStream(request.InputStream);
            }
            else
            {
                putRequest.FilePath = request.FilePath;
            }
            PutObjectResponse response = client.PutObject(putRequest);
            return(objectKey);//to fetch the uploaded content append cloudfrontUrl + objectKey
        }
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        string errorMessage = String.Empty;
        if (amazonS3Exception.ErrorCode != null &&
            (amazonS3Exception.ErrorCode.Equals("InvalidAccessKeyId") ||
             amazonS3Exception.ErrorCode.Equals("InvalidSecurity")))
        {
            throw new Exception("Check the provided AWS Credentials.");
        }
        else
        {
            errorMessage = string.Format("Error occurred. Message:'{0}' when writing an object", amazonS3Exception.Message);
            ErrorUtility.LogError(amazonS3Exception, GartnerApplication.UtilityService, string.Format("Error: AWS: UploadFile: " + errorMessage));
        }
    }
    catch (Exception e)
    {
        ErrorUtility.LogError(e, GartnerApplication.Unknown, string.Format("Error: UploadFile: " + e.Message));
    }
    return(string.Empty);//empty string denote that process has got some error
}
/// <summary>
/// Handles a product-group image upload: validates the product exists,
/// inserts a ProductImage row, uploads the posted image to S3 (public-read,
/// reduced redundancy), stores the resulting URL on the row, and kicks off a
/// background request that triggers the resize pipeline.
/// </summary>
/// <returns>200 with success message, or 400 with a user-facing error.</returns>
public HttpResponseMessage UploadProductGroupImg()
{
    //Get the uploaded image from the Files collection
    if (System.Web.HttpContext.Current.Request.Files.AllKeys.Any())
    {
        //Get data from request file
        var httpPostedFile = System.Web.HttpContext.Current.Request.Files["HelpSectionImages"];
        var productId = int.Parse(System.Web.HttpContext.Current.Request.Form["ProductId"]);
        var id = 0;
        if (httpPostedFile != null && productId > 0)
        {
            // SECURITY FIX: the query previously formatted the id into the SQL
            // text while the parameter object passed to Dapper went unused.
            // Use the parameter placeholder instead of string.Format.
            var query = "SELECT * FROM dbo.Product WHERE Id = @id";
            Product product;
            using (var cnn = SqlHelper.OpenConnection())
            {
                product = cnn.Query <Product>(query, new { id = productId }).FirstOrDefault();
            }
            if (product == null)
            {
                return(Request.CreateResponse(HttpStatusCode.BadRequest, new { success = false, message = "Sản phẩm không tồn tại" }));
            }
            // Insert a placeholder row first so its identity can be used in the S3 key.
            query = @"INSERT INTO [dbo].[ProductImage] ([ProductId] ,[ImgOriginal] ,[Img500x500] ,[Img300x300] ,[Img100x100] ,[CreatedDate] ,[CreatedUser] ,[ModifiedDate] ,[ModifiedUser]) VALUES (@productId ,null ,null ,null ,null ,GETDATE() ,'admin' ,GETDATE() ,'admin') select SCOPE_IDENTITY();";
            using (var cnn = SqlHelper.OpenConnection())
            {
                id = cnn.ExecuteScalar <int>(query, new { productId });
            }
            // BUG FIX: the S3 client was never disposed.
            using (var client = new AmazonS3Client(accessKey, secretKey, Amazon.RegionEndpoint.APSoutheast1))
            {
                var keyName = product.FriendlyUrl + "-g" + id + ".jpg";
                PutObjectRequest request = new PutObjectRequest()
                {
                    BucketName = bucketName,
                    Key = keyName,
                    InputStream = httpPostedFile.InputStream,
                    AutoCloseStream = true,
                    CannedACL = S3CannedACL.PublicRead,
                    StorageClass = S3StorageClass.ReducedRedundancy
                };
                PutObjectResponse response = client.PutObject(request);
                if (response.HttpStatusCode == HttpStatusCode.OK)
                {
                    var imgOriginal = host + "/bcs-s3/products/" + keyName;
                    using (var cnn = SqlHelper.OpenConnection())
                    {
                        query = "Update ProductImage set ImgOriginal = @imgOriginal Where Id = @id";
                        cnn.Execute(query, new { imgOriginal, id });
                    }
                    // Fire-and-forget: ping the resize endpoint for this image.
                    Task.Factory.StartNew(() =>
                    {
                        using (var wc = new WebClient())
                        {
                            wc.DownloadString("https:" + host + "/image/product/group/" + id);
                        }
                    });
                    return(Request.CreateResponse(HttpStatusCode.OK, new { success = true, message = "Upload ảnh thành công" }));
                }
            }
        }
    }
    return(Request.CreateResponse(HttpStatusCode.BadRequest, new { success = false, message = "Lỗi không xác định, vui lòng liên hệ quản trị" }));
}
/// <summary>
/// Moves an image to Amazon S3 under a global lock: uploads via the prepared
/// request, then (on success) deletes the local JPEG, or converts a PDF to
/// the S3PDF placeholder format, and always updates the image's DB location.
/// </summary>
/// <param name="por">Prepared PutObject request; its InputStream is disposed here.</param>
/// <param name="mfbii">The image to move; mutated to reflect the new location/type.</param>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
/// <exception cref="MyFlightbookException">Wraps any S3 error.</exception>
public void MoveByRequest(PutObjectRequest por, MFBImageInfo mfbii)
{
    if (mfbii == null)
    {
        throw new ArgumentNullException(nameof(mfbii));
    }
    if (por == null)
    {
        throw new ArgumentNullException(nameof(por));
    }
    // Serialize all S3 moves; uploads must not interleave local file mutations.
    lock (lockObject)
    {
        try
        {
            using (IAmazonS3 s3 = AWSConfiguration.S3Client())
            {
                PutObjectResponse s3r = null;
                // Dispose the request's stream as soon as the upload completes.
                using (por.InputStream)
                {
                    s3r = s3.PutObject(por);
                }
                if (s3r != null)
                {
                    switch (mfbii.ImageType)
                    {
                        case MFBImageInfo.ImageFileType.JPEG:
                            // Full-size copy now lives in S3; drop the local one.
                            File.Delete(mfbii.PhysicalPathFull);
                            break;
                        case MFBImageInfo.ImageFileType.PDF:
                        {
                            try
                            {
                                if (String.IsNullOrEmpty(mfbii.Comment))
                                {
                                    mfbii.Comment = mfbii.ThumbnailFile;
                                }
                                // Local PDF becomes a small .s3pdf placeholder whose
                                // content is just the comment text.
                                mfbii.ImageType = MFBImageInfo.ImageFileType.S3PDF;
                                mfbii.RenameLocalFile(mfbii.ThumbnailFile.Replace(FileExtensions.PDF, FileExtensions.S3PDF));
                                // Write the comment to the resulting file.
                                using (FileStream fs = File.OpenWrite(mfbii.PhysicalPathThumbnail))
                                {
                                    fs.SetLength(0);
                                    byte[] rgBytes = Encoding.UTF8.GetBytes(mfbii.Comment.ToCharArray());
                                    fs.Write(rgBytes, 0, rgBytes.Length);
                                }
                            }
                            catch (Exception ex) when(ex is UnauthorizedAccessException || ex is FileNotFoundException || ex is IOException)
                            {
                                // Placeholder conversion failed; revert the type so the
                                // local PDF remains authoritative.
                                mfbii.ImageType = MFBImageInfo.ImageFileType.PDF;
                            }
                        }
                        break;
                        default:
                            break;
                    }
                    // ALWAYS update the db
                    mfbii.UpdateDBLocation(false);
                }
            }
        }
        catch (AmazonS3Exception ex)
        {
            throw new MyFlightbookException(WrapAmazonS3Exception(ex), ex);
        }
    }
}
/// <summary>
/// Handles a rating image upload: validates the rating exists, uploads the
/// posted image to S3 (public-read, reduced redundancy), writes the URL into
/// the Image_&lt;pos&gt; column of the rating, and pings the resize endpoint.
/// </summary>
/// <returns>200 with success message, or 400 with a user-facing error.</returns>
public HttpResponseMessage UploadRatingImg()
{
    //Get the uploaded image from the Files collection
    if (System.Web.HttpContext.Current.Request.Files.AllKeys.Any())
    {
        //Get data from request file
        var httpPostedFile = System.Web.HttpContext.Current.Request.Files["ImageUpload"];
        var id = int.Parse(System.Web.HttpContext.Current.Request.Form["Id"]);
        var pos = System.Web.HttpContext.Current.Request.Form["Pos"];
        // SECURITY FIX: 'pos' arrives from the request and is interpolated
        // into a COLUMN NAME below — that was a SQL injection hole. Restrict
        // it to an integer before using it.
        if (httpPostedFile != null && id > 0 && int.TryParse(pos, out _))
        {
            // SECURITY FIX: parameterize the id instead of string.Format.
            var query = "SELECT * FROM dbo.Rating (nolock) WHERE Id = @id";
            Rating ratingImage;
            using (var cnn = SqlHelper.OpenConnection())
            {
                ratingImage = cnn.Query <Rating>(query, new { id }).FirstOrDefault();
            }
            if (ratingImage == null)
            {
                return(Request.CreateResponse(HttpStatusCode.BadRequest, new { success = false, message = "Sản phẩm không tồn tại" }));
            }
            var client = new AmazonS3Client(accessKey, secretKey, Amazon.RegionEndpoint.APSoutheast1);
            // Key is a hex hash of the current time (uniqueness is best-effort).
            var keyName = DateTime.Now.GetHashCode().ToString("x") + ".jpg";
            PutObjectRequest request = new PutObjectRequest()
            {
                BucketName = bucketRatingName,
                Key = keyName,
                InputStream = httpPostedFile.InputStream,
                AutoCloseStream = true,
                CannedACL = S3CannedACL.PublicRead,
                StorageClass = S3StorageClass.ReducedRedundancy
            };
            PutObjectResponse response = client.PutObject(request);
            if (response.HttpStatusCode == HttpStatusCode.OK)
            {
                var imgOriginal = host + "/" + bucketRatingName + "/" + keyName;
                using (var cnn = SqlHelper.OpenConnection())
                {
                    // 'pos' is validated as an integer above, so interpolating it
                    // into the column name is now safe.
                    query = $"Update Rating set Image_{pos} = @imgOriginal Where Id = @id";
                    cnn.Execute(query, new { imgOriginal, id });
                }
                // Fire-and-forget: ping the resize endpoint for this image.
                Task.Factory.StartNew(() =>
                {
                    using (var wc = new WebClient())
                    {
                        wc.DownloadString("https:" + host + "/image/rating/" + id + "/" + pos); //Bug https
                    }
                });
                return(Request.CreateResponse(HttpStatusCode.OK, new { success = true, message = "Upload ảnh thành công" }));
            }
        }
    }
    return(Request.CreateResponse(HttpStatusCode.BadRequest, new { success = false, message = "Lỗi không xác định, vui lòng liên hệ quản trị" }));
}
/// <summary>
/// Resizes an asset's source image into small/medium/large thumbnails, uploads them to S3,
/// and updates the asset record with the new URLs and dimensions.
/// </summary>
/// <param name="asset">The asset to process; re-fetched by id before any work is done.</param>
/// <returns>The updated Asset on success; null when there is nothing to do or processing fails.</returns>
public virtual Asset ProcessPhoto(Asset asset)
{
    return(base.ExecuteFunction("ProcessPhoto", delegate()
    {
        try
        {
            // get recent, just in case
            asset = this.API.Direct.Assets.GetById(asset.asset_id);
            if (asset == null || !asset.resize_required)
            {
                return null; // short circuit
            }
            // Record the attempt up front so repeated failures remain visible in the DB.
            asset.resize_attempts++;
            asset.resize_attempt_utc = DateTime.UtcNow;
            this.API.Direct.Assets.UpdateResizeAttemptInfo(asset.asset_id, asset.resize_attempts, asset.resize_attempt_utc.GetValueOrDefault(), "Attempting to resize");
            System.Drawing.Image original = null;
            try
            {
                // Videos are thumbnailed from their large thumb; photos from the raw image.
                string raw_url = asset.raw_url;
                if (asset.type == AssetType.Video)
                {
                    raw_url = asset.thumb_large_url;
                }
                original = this.GetImageFromUrl(raw_url);
                if (original == null)
                {
                    return null;
                }
                // Derive the S3 key prefix from the raw URL, stripping a leading bucket name
                // or CloudFront host portion if present (case-insensitive match).
                int ix = raw_url.LastIndexOf('/');
                string prefix = asset.raw_url.Substring(0, ix).Trim('/') + "/";
                ix = prefix.ToLower().IndexOf(this.AmazonBucket.ToLower());
                if (ix > -1)
                {
                    prefix = prefix.Substring(ix + this.AmazonBucket.Length);
                }
                ix = prefix.ToLower().IndexOf(this.AmazonCloudFrontUrl.ToLower());
                if (ix > -1)
                {
                    prefix = prefix.Substring(ix + this.AmazonCloudFrontUrl.Length);
                }
                prefix = prefix.TrimStart('/');
                string extension = Path.GetExtension(asset.raw_url);
                // Default output: JPEG at quality 90, 24bpp, Fill resize mode.
                string imageCodecName = "jpeg";
                EncoderParameters encoderParameters = new EncoderParameters(1);
                encoderParameters.Param[0] = new EncoderParameter(System.Drawing.Imaging.Encoder.Quality, 90L);
                PixelFormat format = PixelFormat.Format24bppRgb;
                ResizeMode resizeMode = ResizeMode.Fill;
                if (!string.IsNullOrEmpty(asset.resize_mode))
                {
                    if (!Enum.TryParse<ResizeMode>(asset.resize_mode, true, out resizeMode))
                    {
                        resizeMode = ResizeMode.Fill; // fallback
                    }
                }
                if (resizeMode == ResizeMode.Fit) // we never want a backcolor
                {
                    // Fit mode needs transparency, so switch to PNG / 32bpp ARGB.
                    imageCodecName = "png";
                    extension = ".png";
                    format = PixelFormat.Format32bppArgb;
                }
                // Day-of-year + time prefix gives regenerated thumbs new keys (cache busting).
                string cacheBuster = string.Format("{0}{1:HHmmss}", DateTime.UtcNow.DayOfYear, DateTime.UtcNow);
                string destinationSmall = prefix + cacheBuster + Path.GetFileNameWithoutExtension(asset.raw_url) + "_sm" + extension;
                string destinationMedium = prefix + cacheBuster + Path.GetFileNameWithoutExtension(asset.raw_url) + "_md" + extension;
                string destinationLarge = prefix + cacheBuster + Path.GetFileNameWithoutExtension(asset.raw_url) + "_lg" + extension;
                // create thumb
                Image smallImage = null;
                Image mediumImage = null;
                Image largeImage = null;
                try
                {
                    // Each size is produced only when the asset carries parsable target dimensions.
                    Size dimensions = new Size();
                    if (TryParseDimensions(asset.thumb_small_dimensions, out dimensions))
                    {
                        smallImage = ImageFormatter.Resize(original, resizeMode, (AnchorStyles.Middle | AnchorStyles.Center), dimensions.Width, dimensions.Height, format, InterpolationMode.HighQualityBicubic, Color.Transparent);
                    }
                    if (TryParseDimensions(asset.thumb_medium_dimensions, out dimensions))
                    {
                        mediumImage = ImageFormatter.Resize(original, resizeMode, (AnchorStyles.Middle | AnchorStyles.Center), dimensions.Width, dimensions.Height, format, InterpolationMode.HighQualityBicubic, Color.Transparent);
                    }
                    if (TryParseDimensions(asset.thumb_large_dimensions, out dimensions))
                    {
                        largeImage = ImageFormatter.Resize(original, resizeMode, (AnchorStyles.Middle | AnchorStyles.Center), dimensions.Width, dimensions.Height, format, InterpolationMode.HighQualityBicubic, Color.Transparent);
                    }
                    using (AmazonS3Client client = new AmazonS3Client(this.AmazonKeyID, this.AmazonSecret, RegionEndpoint.USEast1))
                    {
                        // save small
                        if (smallImage != null)
                        {
                            this.FlipImageIfNeeded(original, smallImage);
                            using (System.IO.MemoryStream memoryStream = new System.IO.MemoryStream())
                            {
                                smallImage.Save(memoryStream, GetCodecInfo(imageCodecName), encoderParameters);
                                memoryStream.Position = 0; // rewind before handing the stream to S3
                                PutObjectRequest request = new PutObjectRequest() { BucketName = this.AmazonBucket, Key = destinationSmall, CannedACL = S3CannedACL.PublicRead, InputStream = memoryStream };
                                PutObjectResponse result = client.PutObject(request);
                                if (result == null || (result.HttpStatusCode != HttpStatusCode.OK && result.HttpStatusCode != HttpStatusCode.Created))
                                {
                                    throw new Exception("Error saving to amazon");
                                }
                            }
                        }
                        // save medium
                        if (mediumImage != null)
                        {
                            this.FlipImageIfNeeded(original, mediumImage);
                            using (System.IO.MemoryStream memoryStream = new System.IO.MemoryStream())
                            {
                                mediumImage.Save(memoryStream, GetCodecInfo(imageCodecName), encoderParameters);
                                memoryStream.Position = 0;
                                PutObjectRequest request = new PutObjectRequest() { BucketName = this.AmazonBucket, Key = destinationMedium, CannedACL = S3CannedACL.PublicRead, InputStream = memoryStream };
                                PutObjectResponse result = client.PutObject(request);
                                if (result == null || (result.HttpStatusCode != HttpStatusCode.OK && result.HttpStatusCode != HttpStatusCode.Created))
                                {
                                    throw new Exception("Error saving to amazon");
                                }
                            }
                        }
                        // save large
                        if (largeImage != null)
                        {
                            this.FlipImageIfNeeded(original, largeImage);
                            using (System.IO.MemoryStream memoryStream = new System.IO.MemoryStream())
                            {
                                largeImage.Save(memoryStream, GetCodecInfo(imageCodecName), encoderParameters);
                                memoryStream.Position = 0;
                                PutObjectRequest request = new PutObjectRequest() { BucketName = this.AmazonBucket, Key = destinationLarge, CannedACL = S3CannedACL.PublicRead, InputStream = memoryStream };
                                PutObjectResponse result = client.PutObject(request);
                                if (result == null || (result.HttpStatusCode != HttpStatusCode.OK && result.HttpStatusCode != HttpStatusCode.Created))
                                {
                                    throw new Exception("Error saving to amazon");
                                }
                            }
                        }
                        asset = this.API.Direct.Assets.GetById(asset.asset_id); // get recent
                        if (asset != null)
                        {
                            // Record the public URL and actual output dimensions for each size produced.
                            if (smallImage != null)
                            {
                                asset.thumb_small_url = AmazonUtility.ConstructAmazonUrl(this.AmazonCloudFrontUrl, this.AmazonPublicUrl, this.AmazonBucket, destinationSmall);
                                asset.thumb_small_dimensions = string.Format("{0}x{1}", smallImage.Width, smallImage.Height);
                            }
                            if (mediumImage != null)
                            {
                                asset.thumb_medium_url = AmazonUtility.ConstructAmazonUrl(this.AmazonCloudFrontUrl, this.AmazonPublicUrl, this.AmazonBucket, destinationMedium);
                                asset.thumb_medium_dimensions = string.Format("{0}x{1}", mediumImage.Width, mediumImage.Height);
                            }
                            if (largeImage != null)
                            {
                                asset.thumb_large_url = AmazonUtility.ConstructAmazonUrl(this.AmazonCloudFrontUrl, this.AmazonPublicUrl, this.AmazonBucket, destinationLarge);
                                asset.thumb_large_dimensions = string.Format("{0}x{1}", largeImage.Width, largeImage.Height);
                            }
                            // Mark the resize complete and persist.
                            asset.available = true;
                            asset.resize_processing = false;
                            asset.resize_required = false;
                            asset.resize_status = EncoderStatus.complete.ToString();
                            asset.resize_log += "Resize Completed Processing on " + DateTime.UtcNow.ToString();
                            this.API.Direct.Assets.Update(asset);
                            HealthReporter.Current.UpdateMetric(HealthTrackType.Each, HealthReporter.PHOTO_RESIZE_SUCCESS, 0, 1);
                        }
                        else
                        {
                            // Asset disappeared between uploads and the final re-fetch.
                            HealthReporter.Current.UpdateMetric(HealthTrackType.Each, HealthReporter.PHOTO_RESIZE_FAILED, 0, 1);
                        }
                        return asset;
                    }
                }
                finally
                {
                    // Always release GDI+ resources for the generated thumbnails.
                    if (smallImage != null)
                    {
                        smallImage.Dispose();
                    }
                    if (mediumImage != null)
                    {
                        mediumImage.Dispose();
                    }
                    if (largeImage != null)
                    {
                        largeImage.Dispose();
                    }
                }
            }
            finally
            {
                if (original != null)
                {
                    original.Dispose();
                }
            }
        }
        catch (Exception ex)
        {
            // On any failure: mark the asset unprocessed with the error details, then log and report.
            this.API.Direct.Assets.UpdateResizeInfo(asset.asset_id, false, EncoderStatus.not_processed.ToString(), CoreUtility.FormatException(ex));
            HealthReporter.Current.UpdateMetric(HealthTrackType.Each, HealthReporter.PHOTO_RESIZE_FAILED, 0, 1);
            this.IFoundation.LogError(ex, "PerformProcessPhoto");
            return null;
        }
    }));
}
/// <summary>
/// Demonstrates common Amazon S3 object operations: listing, getting, metadata,
/// putting (body/file/stream), deleting, batch deletes, copying, versioning,
/// and multipart upload.  Each sample is isolated in its own scope and #region.
/// </summary>
public void ObjectSamples()
{
    {
        #region ListObjects Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // List all objects
        ListObjectsRequest listRequest = new ListObjectsRequest
        {
            BucketName = "SampleBucket",
        };

        ListObjectsResponse listResponse;
        do
        {
            // Get a list of objects
            listResponse = client.ListObjects(listRequest);
            foreach (S3Object obj in listResponse.S3Objects)
            {
                Console.WriteLine("Object - " + obj.Key);
                Console.WriteLine(" Size - " + obj.Size);
                Console.WriteLine(" LastModified - " + obj.LastModified);
                Console.WriteLine(" Storage class - " + obj.StorageClass);
            }

            // Set the marker property
            // NOTE(review): NextMarker is only populated when a Delimiter is set;
            // otherwise the last returned key should be used as the marker — confirm.
            listRequest.Marker = listResponse.NextMarker;
        } while (listResponse.IsTruncated);
        #endregion
    }
    {
        #region GetObject Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetObject request
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request and remember to dispose of the response
        using (GetObjectResponse response = client.GetObject(request))
        {
            using (StreamReader reader = new StreamReader(response.ResponseStream))
            {
                string contents = reader.ReadToEnd();
                Console.WriteLine("Object - " + response.Key);
                Console.WriteLine(" Version Id - " + response.VersionId);
                Console.WriteLine(" Contents - " + contents);
            }
        }
        #endregion
    }
    {
        #region GetObjectMetadata Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetObjectMetadata request
        GetObjectMetadataRequest request = new GetObjectMetadataRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request and view the response
        GetObjectMetadataResponse response = client.GetObjectMetadata(request);
        Console.WriteLine("Content Length - " + response.ContentLength);
        Console.WriteLine("Content Type - " + response.Headers.ContentType);
        if (response.Expiration != null)
        {
            Console.WriteLine("Expiration Date - " + response.Expiration.ExpiryDate);
            Console.WriteLine("Expiration Rule Id - " + response.Expiration.RuleId);
        }
        #endregion
    }
    {
        #region PutObject Sample 1
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request with an inline string body
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            ContentBody = "This is sample content..."
        };

        // Put object
        PutObjectResponse response = client.PutObject(request);
        #endregion
    }
    {
        #region PutObject Sample 2
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request sourced from a file on disk
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            FilePath = "contents.txt"
        };

        // Put object
        PutObjectResponse response = client.PutObject(request);
        #endregion
    }
    {
        #region PutObject Sample 3
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request sourced from an open stream
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
        };

        using (FileStream stream = new FileStream("contents.txt", FileMode.Open))
        {
            request.InputStream = stream;

            // Put object
            PutObjectResponse response = client.PutObject(request);
        }
        #endregion
    }
    {
        #region DeleteObject Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a DeleteObject request
        DeleteObjectRequest request = new DeleteObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request
        client.DeleteObject(request);
        #endregion
    }
    {
        #region DeleteObjects Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a DeleteObjects request covering plain, versioned and nested keys
        DeleteObjectsRequest request = new DeleteObjectsRequest
        {
            BucketName = "SampleBucket",
            Objects = new List<KeyVersion>
            {
                new KeyVersion() { Key = "Item1" },
                // Versioned item
                new KeyVersion() { Key = "Item2", VersionId = "Rej8CiBxcZKVK81cLr39j27Y5FVXghDK", },
                // Item in subdirectory
                new KeyVersion() { Key = "Logs/error.txt" }
            }
        };

        try
        {
            // Issue request
            DeleteObjectsResponse response = client.DeleteObjects(request);
        }
        catch (DeleteObjectsException doe)
        {
            // Catch error and list error details
            DeleteObjectsResponse errorResponse = doe.Response;

            foreach (DeletedObject deletedObject in errorResponse.DeletedObjects)
            {
                Console.WriteLine("Deleted item " + deletedObject.Key);
            }
            foreach (DeleteError deleteError in errorResponse.DeleteErrors)
            {
                Console.WriteLine("Error deleting item " + deleteError.Key);
                Console.WriteLine(" Code - " + deleteError.Code);
                Console.WriteLine(" Message - " + deleteError.Message);
            }
        }
        #endregion
    }
    {
        #region CopyObject Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a CopyObject request
        CopyObjectRequest request = new CopyObjectRequest
        {
            SourceBucket = "SampleBucket",
            SourceKey = "Item1",
            DestinationBucket = "AnotherBucket",
            DestinationKey = "Copy1",
            CannedACL = S3CannedACL.PublicRead
        };

        // Issue request
        client.CopyObject(request);
        #endregion
    }
    {
        // NOTE(review): this region duplicates the CopyObject sample above; preserved as-is.
        #region CopyObject Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a CopyObject request
        CopyObjectRequest request = new CopyObjectRequest
        {
            SourceBucket = "SampleBucket",
            SourceKey = "Item1",
            DestinationBucket = "AnotherBucket",
            DestinationKey = "Copy1",
            CannedACL = S3CannedACL.PublicRead
        };

        // Issue request
        client.CopyObject(request);
        #endregion
    }
    {
        #region ListVersions Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Turn versioning on for a bucket.
        // FIX: valid versioning states are Enabled/Suspended; the previous literal
        // string "Enable" is not a recognized status (see VersionStatus usage above).
        client.PutBucketVersioning(new PutBucketVersioningRequest
        {
            BucketName = "SampleBucket",
            VersioningConfig = new S3BucketVersioningConfig { Status = VersionStatus.Enabled }
        });

        // Populate bucket with multiple items, each with multiple versions
        PopulateBucket(client, "SampleBucket");

        // Get versions
        ListVersionsRequest request = new ListVersionsRequest
        {
            BucketName = "SampleBucket"
        };

        // Make paged ListVersions calls
        ListVersionsResponse response;
        do
        {
            response = client.ListVersions(request);

            // View information about versions
            foreach (var version in response.Versions)
            {
                Console.WriteLine("Key = {0}, Version = {1}, IsLatest = {2}, LastModified = {3}, Size = {4}",
                    version.Key,
                    version.VersionId,
                    version.IsLatest,
                    version.LastModified,
                    version.Size);
            }

            // Both markers must be advanced to continue paging version listings
            request.KeyMarker = response.NextKeyMarker;
            request.VersionIdMarker = response.NextVersionIdMarker;
        } while (response.IsTruncated);
        #endregion
    }
    {
        #region Multipart Upload Sample
        int MB = (int)Math.Pow(2, 20);

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Define input stream
        Stream inputStream = Create13MBDataStream();

        // Initiate multipart upload
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };
        InitiateMultipartUploadResponse initResponse = client.InitiateMultipartUpload(initRequest);

        // Upload part 1
        UploadPartRequest uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MB,
            InputStream = inputStream
        };
        UploadPartResponse up1Response = client.UploadPart(uploadRequest);

        // Upload part 2
        uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MB,
            InputStream = inputStream
        };
        UploadPartResponse up2Response = client.UploadPart(uploadRequest);

        // Upload part 3 (final part: no PartSize, consumes the remainder of the stream)
        uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream
        };
        UploadPartResponse up3Response = client.UploadPart(uploadRequest);

        // List parts for current upload
        ListPartsRequest listPartRequest = new ListPartsRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse = client.ListParts(listPartRequest);
        Debug.Assert(listPartResponse.Parts.Count == 3);

        // Complete the multipart upload
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartETags = new List<PartETag>
            {
                new PartETag { ETag = up1Response.ETag, PartNumber = 1 },
                new PartETag { ETag = up2Response.ETag, PartNumber = 2 },
                new PartETag { ETag = up3Response.ETag, PartNumber = 3 }
            }
        };
        CompleteMultipartUploadResponse compResponse = client.CompleteMultipartUpload(compRequest);
        #endregion
    }
}
// Upload objects into the bucket: a plain text object, then a CSV object with
// an explicit content type and custom metadata.
static async Task WritingAnObjectAsync()
{
    // Key names for the two sample objects.
    string textKey = "Upload1.txt";
    string csvKey = "2021/03/Upload2.csv";
    try
    {
        // 1. Put object - specify only the key name for the new object.
        var textRequest = new PutObjectRequest
        {
            BucketName = bucketName,
            Key = textKey,
            ContentBody = "sample text 4444"
        };
        PutObjectResponse textResponse = await clientS3.PutObjectAsync(textRequest);
        if (textResponse.HttpStatusCode == System.Net.HttpStatusCode.OK)
        {
            Console.WriteLine("{0}上傳成功", textRequest.Key);
        }

        // Sample rows to serialize as CSV for the second upload.
        var sampleRows = new List<TestClass>
        {
            new TestClass() { Name = "a123", Company = "a456", Favorite_Color = "a789" },
            new TestClass() { Name = "b123", Company = "b456", Favorite_Color = "b789" },
            new TestClass() { Name = "c123", Company = "c456", Favorite_Color = "c789" },
            new TestClass() { Name = "d123", Company = "d456", Favorite_Color = "d789" }
        };

        // 2. Put the object - set ContentType and add metadata.
        var csvRequest = new PutObjectRequest
        {
            BucketName = bucketName,
            Key = csvKey,
            ContentBody = sampleRows.ToCsv(),
            ContentType = "text/csv"
        };
        csvRequest.Metadata.Add("x-amz-meta-title", "someTitle");
        PutObjectResponse csvResponse = await clientS3.PutObjectAsync(csvRequest);
        if (csvResponse.HttpStatusCode == System.Net.HttpStatusCode.OK)
        {
            Console.WriteLine("{0}上傳成功", csvRequest.Key);
        }
    }
    catch (AmazonS3Exception e)
    {
        Console.WriteLine(
            "Error encountered ***. Message:'{0}' when writing an object"
            , e.Message);
    }
    catch (Exception e)
    {
        Console.WriteLine(
            "Unknown encountered on server. Message:'{0}' when writing an object"
            , e.Message);
    }
}