/// <summary>
/// Startup hook: issues a ListObjectsV2 request against the configured bucket,
/// presumably to verify connectivity/permissions before real work begins — TODO confirm.
/// </summary>
/// <param name="arg">Startup arguments; not used by this implementation.</param>
public override async Task TryAwake(params object[] arg)
{
    // FIX: the response was stored in a local that was never read; discard it
    // explicitly — only the successful round-trip matters here.
    _ = await S3Client.ListObjectsV2Async(new ListObjectsV2Request { BucketName = BucketName });
}
/// <summary>
/// This method is called for every Lambda invocation. This method takes in an S3 event object and can be used
/// to respond to S3 notifications: it reads the object referenced by the first
/// event record and forwards its text content to the configured SQS queue.
/// </summary>
/// <param name="evnt">Incoming S3 event; only the first record is processed.</param>
/// <param name="context">Lambda execution context, used for logging.</param>
/// <returns>The SQS send response, or null when the event carries no S3 record.</returns>
public async Task <SendMessageResponse> FunctionHandler(S3Event evnt, ILambdaContext context)
{
    var record = evnt.Records?[0].S3;
    if (record == null)
    {
        return null;
    }

    try
    {
        using (var objectResponse = await S3Client.GetObjectAsync(record.Bucket.Name, record.Object.Key))
        using (Stream contentStream = objectResponse.ResponseStream)
        using (var contentReader = new StreamReader(contentStream))
        {
            var body = await contentReader.ReadToEndAsync();
            return await SqsClient.SendMessageAsync(_queueUrl, $"New Task Created:\n{body}");
        }
    }
    catch (Exception e)
    {
        context.Logger.LogLine($"Error getting object {record.Object.Key} from bucket {record.Bucket.Name}. Make sure they exist and your bucket is in the same region as this function.");
        context.Logger.LogLine(e.Message);
        context.Logger.LogLine(e.StackTrace);
        throw;
    }
}
/// <summary>
/// Applies the configured lifecycle rules (LifecycleConfiguration) to the
/// target bucket (AWSBucket) with a synchronous PutLifecycleConfiguration call.
/// </summary>
private void PutLifeCycleConfiguration()
{
    var request = new PutLifecycleConfigurationRequest
    {
        BucketName = AWSBucket,
        Configuration = LifecycleConfiguration
    };
    // FIX: the response was stored in a local that was never read; drop it.
    S3Client.PutLifecycleConfiguration(request);
}
/// <summary>
/// Deletes a page: removes its rendered HTML from the S3 content bucket,
/// deletes the database record, and refreshes the view model.
/// </summary>
public void OnPostDelete()
{
    StreamingLiveLib.Page page = StreamingLiveLib.Page.Load(PageId);
    // FIX: GetAwaiter().GetResult() instead of Wait() so a failure surfaces as
    // the original exception rather than an AggregateException. Still blocking —
    // the handler signature is synchronous, so we cannot await here.
    S3Client.DeleteAsync(CachedData.S3ContentBucket, $"data/{AppUser.CurrentSite.KeyName}/page{page.Id}.html", null).GetAwaiter().GetResult();
    StreamingLiveLib.Page.Delete(PageId);
    Populate();
}
/// <summary>
/// Disposes the owned SQS and S3 clients, then the base resources.
/// </summary>
public override void Dispose()
{
    // FIX: chain with try/finally so that an exception thrown while disposing
    // one client no longer prevents the remaining clients (and the base class)
    // from being disposed.
    try
    {
        SqsClient.Dispose();
    }
    finally
    {
        try
        {
            S3Client.Dispose();
        }
        finally
        {
            base.Dispose();
        }
    }
}
/// <summary>
/// Uploads the file at <paramref name="path"/> to S3 under the key
/// "case#{caseID}" and reloads the active Unity scene on success.
/// </summary>
/// <param name="path">Local path of the file to upload.</param>
/// <param name="caseID">Case identifier used to build the object key.</param>
public void PostObject(string path, string caseID)
{
    var stream = new FileStream(path, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite);
    PostObjectRequest request = new PostObjectRequest()
    {
        Bucket = bucketName,
        Key = "case#" + caseID,
        InputStream = stream,
        CannedACL = S3CannedACL.Private,
        Region = _S3Region
    };
    S3Client.PostObjectAsync(request, (responseObj) =>
    {
        // FIX: the FileStream was never closed, leaking the OS file handle.
        // Dispose it once the asynchronous upload has completed (success or not).
        stream.Dispose();
        if (responseObj.Exception == null)
        {
            Debug.Log("Successfuly posted to Bucket");
            SceneManager.LoadScene(SceneManager.GetActiveScene().name);
        }
        else
        {
            Debug.LogError("Exception occured during uploading: " + responseObj.Exception);
        }
    });
}
/// <summary>
/// Lambda entry point for SNS notifications referencing CloudTrail log files in
/// S3: downloads the referenced object and extracts its CloudTrail records.
/// </summary>
/// <param name="evnt">SNS event whose first record's message is an S3Data payload.</param>
/// <param name="context">Lambda execution context, used for logging.</param>
public async Task FunctionHandler(SNSEvent evnt, ILambdaContext context)
{
    // ### Level 1 - Create New Trail and Configure Lambda
    context.Logger.LogLine(JsonConvert.SerializeObject(evnt));

    // ### Level 2 - Retrieve Logs from S3
    var snsMessage = JsonConvert.DeserializeObject <S3Data>(evnt.Records[0].Sns.Message);
    String s3Bucket = snsMessage.s3Bucket;
    String s3ObjectKey = snsMessage.s3ObjectKey[0];
    context.Logger.LogLine(s3Bucket);
    context.Logger.LogLine(s3ObjectKey);

    GetObjectRequest request = new GetObjectRequest { BucketName = s3Bucket, Key = s3ObjectKey };
    // FIX: dispose the GetObjectResponse as well as its stream; previously the
    // response object was leaked, holding the underlying HTTP resources.
    using (var response = await S3Client.GetObjectAsync(request))
    using (Stream reader = response.ResponseStream)
    {
        var bytes = ReadStream(reader);
        await ExtractCloudTrailRecordsAsync(context.Logger, bytes);
    }

    // ### Level 3 - Filter for specific events and send alerts
    // ### Boss level - Take mitigating action
}
/// <summary>
/// Step-function task: appends a greeting to the state message and copies the
/// object described by <paramref name="state"/> to the target bucket. Failures
/// are logged and flagged via State.StateException rather than thrown.
/// </summary>
/// <param name="state">Carries source/target bucket, key, and status flags.</param>
/// <param name="context">Lambda execution context, used for logging.</param>
/// <returns>The (mutated) state object.</returns>
public State ReplicateObject(State state, ILambdaContext context)
{
    state.Message += ", Goodbye";
    if (!string.IsNullOrEmpty(state.Bucket))
    {
        state.Message += " " + state.Bucket;
    }
    try
    {
        // Create a CopyObject request
        CopyObjectRequest request = new CopyObjectRequest
        {
            SourceBucket = state.Bucket,
            SourceKey = state.Key,
            DestinationBucket = state.TargetBucket,
            DestinationKey = state.Key
        };
        // FIX: the copy was fire-and-forget, so failures never reached the catch
        // block and the method could return before the copy completed. Block on
        // the task (the method signature is synchronous).
        S3Client.CopyObjectAsync(request).GetAwaiter().GetResult();
    }
    catch (Exception e)
    {
        context.Logger.LogLine(e.Message);
        context.Logger.LogLine(e.StackTrace);
        state.StateException = true;
    }
    return state;
}
/// <summary>
/// Pulls up to <paramref name="CrashCount"/> new crash reports: dequeues
/// "bucket,key" records from SQS, downloads and GZip-decompresses each S3
/// object, and unpacks the contained delimited protocol-buffer records into
/// the landing zone. Stops early when the queue is empty. Per-record failures
/// are counted and logged but do not abort the loop.
/// </summary>
/// <param name="CrashCount">Upper bound on crashes to ingest in this pass.</param>
private void TryGetNewS3Crashes(int CrashCount)
{
    int NewCrashCount = 0;
    // NOTE: one crash file may unpack to several records, so the loop can
    // overshoot CrashCount on its final iteration.
    while (NewCrashCount < CrashCount)
    {
        // Kept outside the try so the catch below can log which record failed.
        string SQSRecord = "<unset>";
        try
        {
            if (!DequeueRecordSQS(out SQSRecord))
            {
                // Queue empty
                break;
            }
            // Expected message format: "<bucket>,<key>". Anything else is
            // counted as a read failure and skipped.
            var RecordPair = SQSRecord.Split(',');
            if (RecordPair.Length != 2)
            {
                CrashReporterProcessServicer.WriteFailure("TryGetNewS3Crashes: bad SQS message was " + SQSRecord);
                CrashReporterProcessServicer.StatusReporter.IncrementCount(StatusReportingEventNames.ReadS3FileFailedEvent);
                continue;
            }
            string S3BucketName = RecordPair[0];
            string S3Key = RecordPair[1];
            string ReadableRequestString = "Bucket=" + S3BucketName + " Key=" + S3Key;
            var ObjectRequest = new GetObjectRequest { BucketName = S3BucketName, Key = S3Key };
            // Decompress into memory first; the S3 response is fully consumed
            // and disposed before the records are unpacked.
            using (Stream ProtocolBufferStream = new MemoryStream())
            {
                using (GetObjectResponse ObjectResponse = S3Client.GetObject(ObjectRequest))
                {
                    using (Stream ResponseStream = ObjectResponse.ResponseStream)
                    {
                        if (!TryDecompResponseStream(ResponseStream, ProtocolBufferStream))
                        {
                            CrashReporterProcessServicer.WriteFailure("! GZip fail in DecompResponseStream(): " + ReadableRequestString);
                            CrashReporterProcessServicer.StatusReporter.IncrementCount(StatusReportingEventNames.ReadS3FileFailedEvent);
                            continue;
                        }
                    }
                }
                NewCrashCount += UnpackRecordsFromDelimitedProtocolBuffers(ProtocolBufferStream, LandingZone, ReadableRequestString);
            }
        }
        catch (Exception ex)
        {
            // Best-effort loop: record the failure and move on to the next
            // SQS message rather than aborting the whole pass.
            CrashReporterProcessServicer.StatusReporter.IncrementCount(StatusReportingEventNames.ReadS3FileFailedEvent);
            CrashReporterProcessServicer.WriteException("TryGetNewS3Crashes: failure during processing SQS record " + SQSRecord + "\n" + ex);
        }
    }
}
/// <summary>
/// Downloads an S3 object (optionally a specific version) into a seekable
/// MemoryStream. Returns null when the object cannot be retrieved.
/// </summary>
/// <param name="bucketName">Bucket containing the image.</param>
/// <param name="objectKey">Key of the image object.</param>
/// <param name="versionId">Optional version id; ignored when blank.</param>
/// <returns>A MemoryStream positioned at 0, or null on AmazonS3Exception.</returns>
public async Task <Stream> GetImage(string bucketName, string objectKey, string versionId)
{
    GetObjectRequest originalRequest = new GetObjectRequest { BucketName = bucketName, Key = objectKey };
    if (!string.IsNullOrWhiteSpace(versionId))
    {
        originalRequest.VersionId = versionId;
    }
    try
    {
        // FIX: dispose the response (it was leaked, holding the HTTP stream)
        // and copy asynchronously instead of blocking the thread.
        using (GetObjectResponse response = await S3Client.GetObjectAsync(originalRequest))
        {
            // AWS HashStream doesn't support seeking so we need to copy it back to a MemoryStream
            MemoryStream outputStream = new MemoryStream();
            await response.ResponseStream.CopyToAsync(outputStream);
            outputStream.Position = 0;
            return outputStream;
        }
    }
    catch (AmazonS3Exception)
    {
        // Not found if we get an exception
        return null;
    }
}
/// <summary>
/// Stores the posted message as a JSON object under "{UserId}/{model.Key}".
/// On S3 failure, mirrors the S3 status code and writes the error message to
/// the response body.
/// </summary>
/// <param name="key">Route key; overwritten by the user-scoped key below.</param>
/// <param name="model">Message payload, already bound from the request body.</param>
public async Task Put(string key, [FromBody] MessageModel model)
{
    key = $"{UserId}/{model.Key}";
    // FIX: the original additionally copied Request.Body into this stream, but
    // new MemoryStream(byte[]) is NOT resizable, so the copy overwrote the
    // serialized payload (and threw once the body exceeded the buffer). The
    // model is already bound from the body, so serialize it alone.
    var stream = new MemoryStream(ASCIIEncoding.Default.GetBytes(JsonSerializer.Serialize(model)));
    var putRequest = new PutObjectRequest { BucketName = BucketName, Key = key, InputStream = stream };
    try
    {
        var response = await S3Client.PutObjectAsync(putRequest);
        Logger.LogInformation($"Uploaded object {key} to bucket {BucketName}. Request Id: {response.ResponseMetadata.RequestId}");
    }
    catch (AmazonS3Exception e)
    {
        Response.StatusCode = (int)e.StatusCode;
        // FIX: dispose (and thereby flush) the writer so the error message
        // actually reaches the client; it was previously never flushed.
        using (var writer = new StreamWriter(Response.Body))
        {
            await writer.WriteAsync(e.Message);
        }
    }
}
/// <summary>
/// Updates an existing product's editable fields and, when new images were
/// posted, replaces the image list with the freshly uploaded files.
/// </summary>
public async Task <IActionResult> UpdateProduct(
    [FromForm] ProductForm form,
    [FromServices] GetProduct getProduct,
    [FromServices] UpdateProduct updateProduct,
    [FromServices] S3Client s3Client)
{
    var product = getProduct.Do(form.Id);
    product.Description = form.Description;
    product.Series = form.Series;
    product.StockDescription = form.StockDescription;

    if (form.Images != null && form.Images.Any())
    {
        // Uploads fully supersede the previous image set.
        product.Images = new List <Image>();
        var uploadedPaths = await Task.WhenAll(UploadFiles(s3Client, form.Images));
        product.Images.AddRange(uploadedPaths.Select((path, i) => new Image
        {
            Index = i,
            Url = path,
        }));
    }

    await updateProduct.Update(product);
    return Ok();
}
/// <summary>
/// Demonstrates uploading and then partially downloading a string object via
/// the fluent transfer API, printing progress to the console.
/// </summary>
private static async Task UploadDownloadWithFluent(S3Client client, string bucketName, string objectName)
{
    Console.WriteLine();
    Console.WriteLine("Using the fluent API");

    //Upload string: public-read-write ACL, no-cache, server-side encryption.
    Upload uploadRequest = client.Transfer.Upload(bucketName, objectName)
        .WithAccessControl(ObjectCannedAcl.PublicReadWrite)
        .WithCacheControl(CacheControlType.NoCache)
        .WithEncryption();
    PutObjectResponse uploadResponse = await uploadRequest.UploadStringAsync("Hello World!", Encoding.UTF8).ConfigureAwait(false);
    if (!uploadResponse.IsSuccess)
    {
        return;
    }
    Console.WriteLine("Successfully uploaded the object");

    //Download string
    Download downloadRequest = client.Transfer
        .Download(bucketName, objectName)
        .WithRange(0, 10); //Adjust this to return only part of the string
    GetObjectResponse downloadResponse = await downloadRequest.DownloadAsync().ConfigureAwait(false);
    if (downloadResponse.IsSuccess)
    {
        Console.WriteLine("Successfully downloaded the object");
        Console.WriteLine("The object contained: " + await downloadResponse.Content.AsStringAsync().ConfigureAwait(false));
    }
}
/// <summary>
/// Stretch-resizes the image to the configured tile size, encodes it as JPEG,
/// and uploads it under the original key with "Raw" replaced by "Tiles" and a
/// ".jpg" extension.
/// </summary>
/// <returns>The S3 key the tile was uploaded to.</returns>
private async Task <string> UploadTile(Image <Rgba32> image, string bucket, string originalKey, ILambdaContext context)
{
    var resizeOptions = new ResizeOptions
    {
        Size = new SixLabors.Primitives.Size { Width = this.TileSize, Height = this.TileSize },
        Mode = ResizeMode.Stretch
    };
    image.Mutate(x => x.Resize(resizeOptions));

    // FIX: dispose the buffer stream when done.
    using (var imageBuffer = new MemoryStream())
    {
        image.Save(imageBuffer, new SixLabors.ImageSharp.Formats.Jpeg.JpegEncoder());
        imageBuffer.Position = 0;

        var tileImageKey = originalKey.Replace("Raw", "Tiles");
        int pos = tileImageKey.LastIndexOf('.');
        // FIX: a key without an extension previously made Substring(0, -1)
        // throw; strip the old extension only when one is present.
        tileImageKey = (pos >= 0 ? tileImageKey.Substring(0, pos) : tileImageKey) + ".jpg";

        await S3Client.PutObjectAsync(new PutObjectRequest { BucketName = bucket, Key = tileImageKey, InputStream = imageBuffer });
        context.Logger.LogLine($"Tile uploaded to {tileImageKey}");
        return tileImageKey;
    }
}
/// <summary>
/// Asynchronously fetches the audio object "{themeName}/{stickerName}" from S3
/// and, when found, builds an AudioClip named after the sticker and hands it to
/// instantiateSounds.
/// </summary>
/// <param name="s">Sound descriptor providing theme and sticker names.</param>
public void GetObjects(Sound s)
{
    string file = s.themeName + "/" + s.stickerName;
    S3Client.GetObjectAsync(S3BucketName, file, (responseObj) =>
    {
        var response = responseObj.Response;
        if (response.ResponseStream != null)
        {
            AudioClip t = new AudioClip();
            byte[] imageData = new byte[response.ResponseStream.Length];
            // NOTE(review): a single Read() is not guaranteed to fill the buffer
            // for a network stream — confirm, or loop until Length bytes are read.
            response.ResponseStream.Read(imageData, 0, (int)imageData.Length);
            // NOTE(review): imageData is never passed to the clip; LoadAudioData()
            // is called on an empty AudioClip — verify this is intentional.
            t.LoadAudioData();
            t.name = s.stickerName;
            instantiateSounds(t, s);
            // NOTE(review): soundInstance is unused; presumably a leftover.
            AudioClip soundInstance = Instantiate(t) as AudioClip;
        }
        else
        {
            Debug.Log("Nothing found in Bucket");
        }
    });
}
/// <summary>
/// Gets the latest backup set item by last modified date.
/// </summary>
/// <returns>The latest backup set.</returns>
private S3Object GetLatestBackupItem()
{
    List <S3Object> objects = new List <S3Object>();
    string marker = String.Empty;

    // Page through the bucket listing until S3 reports the result is complete.
    while (true)
    {
        ListObjectsRequest request = new ListObjectsRequest()
            .WithBucketName(AwsConfig.BucketName)
            .WithPrefix(this.AwsPrefix)
            .WithMarker(marker);

        using (ListObjectsResponse response = S3Client.ListObjects(request))
        {
            objects.AddRange(response.S3Objects);
            if (!response.IsTruncated)
            {
                break;
            }
            // Continue the next page from the last key seen so far.
            marker = objects[objects.Count - 1].Key;
        }
    }

    if (objects.Count == 0)
    {
        throw new InvalidOperationException(String.Concat("There was no backup set found for catalog \"", this.Target.CatalogName, "\"."));
    }

    // LastModified is a string in this SDK version; parse before ordering.
    return objects.OrderByDescending(o => DateTime.Parse(o.LastModified, CultureInfo.InvariantCulture)).First();
}
/// <summary>
/// Resolves blob metadata with an S3 object-metadata (HEAD-style) request.
/// NotFound/Forbidden map to Exists=false; any other S3 error propagates.
/// </summary>
public override async Task <IBlobMetadata> FetchMetadataAsync(string virtualPath, NameValueCollection queryString)
{
    var path = ParseAndFilterPath(virtualPath);
    //Looks like we have to execute a head request
    var metadataRequest = new GetObjectMetadataRequest() { BucketName = path.Bucket, Key = path.Key };
    try
    {
        GetObjectMetadataResponse metadataResponse = await S3Client.GetObjectMetadataAsync(metadataRequest);
        return new BlobMetadata() { Exists = true, LastModifiedDateUtc = metadataResponse.LastModified };
    }
    catch (AmazonS3Exception s3e)
    {
        // Forbidden counts as "missing" too: buckets may deny HEAD on absent keys.
        bool treatAsMissing = s3e.StatusCode == System.Net.HttpStatusCode.NotFound ||
                              s3e.StatusCode == System.Net.HttpStatusCode.Forbidden;
        if (!treatAsMissing)
        {
            throw;
        }
        return new BlobMetadata() { Exists = false };
    }
}
/// <summary>
/// Uploads the raw request body to S3 under <paramref name="key"/>. On S3
/// failure, mirrors the S3 status code and writes the error message to the
/// response body.
/// </summary>
/// <param name="key">Object key to store the body under.</param>
public async Task Put(string key)
{
    // Copy the request body into a seekable stream required by the AWS SDK for .NET.
    var seekableStream = new MemoryStream();
    await Request.Body.CopyToAsync(seekableStream);
    seekableStream.Position = 0;

    var putRequest = new PutObjectRequest { BucketName = BucketName, Key = key, InputStream = seekableStream };
    try
    {
        PutObjectResponse response = await S3Client.PutObjectAsync(putRequest);
        Logger.LogInformation($"Uploaded object {key} to bucket {BucketName}. Request Id: {response.ResponseMetadata.RequestId}");
    }
    catch (AmazonS3Exception e)
    {
        Response.StatusCode = (int)e.StatusCode;
        // FIX: dispose (and thereby flush) the writer so the error message
        // actually reaches the client; it was previously never flushed.
        using (var writer = new StreamWriter(Response.Body))
        {
            await writer.WriteAsync(e.Message);
        }
    }
}
/// <summary>
/// Fetches the stored hash metadata of already-uploaded files, querying all
/// objects concurrently. (Translated from Japanese: "Get hash data of uploaded files.")
/// </summary>
/// <param name="s3Client">Client used for the metadata requests.</param>
/// <param name="s3Objects">Objects whose hash metadata should be read.</param>
/// <returns>Map from object key to the hash stored under MetaDataHashKey.</returns>
private async Task <Dictionary <string, string> > GetUploadedObjectHashTable(S3Client s3Client, S3Object[] s3Objects)
{
    var hashTable = new Dictionary <string, string>();
    var tasks = new List <Task>();
    foreach (var s3Object in s3Objects)
    {
        // FIX: call the async method directly instead of wrapping it in
        // Task.Run — the work is I/O-bound, so the thread-pool hop bought nothing.
        tasks.Add(FetchHashAsync(s3Object));
    }
    await Task.WhenAll(tasks.ToArray());
    return hashTable;

    async Task FetchHashAsync(S3Object s3Object)
    {
        var metaDataResponse = await s3Client.GetObjectMetaData(s3Object.Key);
        var fileHash = metaDataResponse.Metadata[MetaDataHashKey];
        // Dictionary is not thread-safe; serialize writes.
        lock (hashTable)
        {
            hashTable[s3Object.Key] = fileHash;
        }
    }
}
/// <summary>
/// Lists all S3 objects under the given key prefix (one "directory" level,
/// delimiter "/"), following pagination markers. Errors are logged and the
/// objects collected so far are returned.
/// </summary>
/// <param name="key">Key prefix to list under.</param>
public List <S3Object> ObjectsFromKey(string key)
{
    List <S3Object> matches = new List <S3Object>();
    try
    {
        var listRequest = new ListObjectsRequest { BucketName = AWSBucket, Prefix = key, Delimiter = @"/" };
        while (true)
        {
            var listResponse = S3Client.ListObjects(listRequest);
            matches.AddRange(listResponse.S3Objects);
            if (!listResponse.IsTruncated)
            {
                break;
            }
            // Continue from where the previous page stopped.
            listRequest.Marker = listResponse.NextMarker;
        }
    }
    catch (Exception e)
    {
        log.Error("ObjectsFromKey", e);
    }
    return matches;
}
/// <summary>
/// Downloads an S3 object and returns its body as a string, printing its title
/// metadata and content type. Errors are logged to the console and an empty
/// string is returned.
/// </summary>
/// <param name="bucket">Bucket containing the object.</param>
/// <param name="key">Key of the object to read.</param>
private async Task <string> GetS3FileContent(string bucket, string key)
{
    string responseBody = "";
    try
    {
        GetObjectRequest request = new GetObjectRequest { BucketName = bucket, Key = key };
        using (GetObjectResponse response = await S3Client.GetObjectAsync(request))
        using (Stream responseStream = response.ResponseStream)
        using (StreamReader reader = new StreamReader(responseStream))
        {
            string title = response.Metadata["x-amz-meta-title"]; // Assume you have "title" as medata added to the object.
            string contentType = response.Headers["Content-Type"];
            Console.WriteLine("Object metadata, Title: {0}", title);
            Console.WriteLine("Content type: {0}", contentType);
            // FIX: read asynchronously instead of blocking the thread with ReadToEnd().
            responseBody = await reader.ReadToEndAsync();
            // Now you process the response body.
        }
    }
    catch (AmazonS3Exception e)
    {
        Console.WriteLine("Error encountered ***. Message:'{0}' when writing an object", e.Message);
    }
    catch (Exception e)
    {
        Console.WriteLine("Unknown encountered on server. Message:'{0}' when writing an object", e.Message);
    }
    return responseBody;
}
/// <summary>
/// Stores the posted message as a timestamped JSON object under the user's
/// prefix. On S3 failure, mirrors the S3 status code and writes the error
/// message to the response body.
/// </summary>
/// <param name="model">Message payload, already bound from the request body.</param>
public async Task Post([FromBody] MessageModel model)
{
    // FIX: the original additionally copied Request.Body into this stream, but
    // new MemoryStream(byte[]) is NOT resizable, so the copy overwrote the
    // serialized payload (and threw once the body exceeded the buffer). The
    // model is already bound from the body, so serialize it alone.
    var stream = new MemoryStream(ASCIIEncoding.Default.GetBytes(JsonSerializer.Serialize(model)));
    try
    {
        var key = $"{UserId}/{DateTime.Now.Ticks}.json";
        await S3Client.UploadObjectFromStreamAsync(
            BucketName,
            key,
            stream,
            new Dictionary <string, object> { });
        Logger.LogInformation($"Uploaded object {key} to bucket {BucketName}");
    }
    catch (AmazonS3Exception e)
    {
        Response.StatusCode = (int)e.StatusCode;
        // FIX: dispose (and thereby flush) the writer so the error message
        // actually reaches the client; it was previously never flushed.
        using (var writer = new StreamWriter(Response.Body))
        {
            await writer.WriteAsync(e.Message);
        }
    }
}
/// <summary>
/// This method is called for every Lambda invocation. This method takes in an S3 event object and can be used
/// to respond to S3 notifications: it reads the metadata of the object named in
/// the first record, logs a size summary, and sends it as an SMS via SNS.
/// </summary>
/// <param name="evnt">Incoming S3 event; only the first record is processed.</param>
/// <param name="context">Lambda execution context, used for logging.</param>
/// <returns>The object's content type, or null when no S3 record is present.</returns>
public async Task <string> FunctionHandler(S3Event evnt, ILambdaContext context)
{
    var record = evnt.Records?[0].S3;
    if (record == null)
    {
        return null;
    }

    try
    {
        var metadata = await S3Client.GetObjectMetadataAsync(record.Bucket.Name, record.Object.Key);
        string message = $"{record.Object.Key} - {record.Object.Size} Bytes";
        context.Logger.LogLine(message);

        PublishRequest publishRequest = new PublishRequest { Message = message, PhoneNumber = "+34642375554" };
        var smsResponse = await SnsClient.PublishAsync(publishRequest);
        context.Logger.LogLine($"Response from SNS: {smsResponse.HttpStatusCode}");

        return metadata.Headers.ContentType;
    }
    catch (Exception e)
    {
        context.Logger.LogLine($"Error getting object {record.Object.Key} from bucket {record.Bucket.Name}. Make sure they exist and your bucket is in the same region as this function.");
        context.Logger.LogLine(e.Message);
        context.Logger.LogLine(e.StackTrace);
        throw;
    }
}
/// <summary>
/// Creates a new product from posted form data, uploading any attached images
/// to S3 and recording their URLs in posted order.
/// </summary>
public async Task <object> CreateProduct(
    [FromForm] ProductForm form,
    [FromServices] CreateProduct createProduct,
    [FromServices] S3Client s3Client)
{
    var product = new Product
    {
        Name = form.Name,
        // Slug derives from the name: spaces become dashes, lowercased.
        Slug = form.Name.Replace(" ", "-").ToLower(),
        Description = form.Description,
        Series = form.Series,
        StockDescription = form.StockDescription
    };

    if (form.Images != null)
    {
        var uploadedPaths = await Task.WhenAll(UploadFiles(s3Client, form.Images));
        product.Images.AddRange(uploadedPaths.Select((path, i) => new Image
        {
            Index = i,
            Url = path,
        }));
    }

    return await createProduct.Do(product);
}
/// <summary>
/// Creates a product from the posted form and uploads its images to S3 under
/// tick-stamped file names, recording the returned paths in posted order.
/// </summary>
public async Task <object> CreateProduct(
    [FromForm] TempForm form,
    [FromServices] CreateProduct createProduct,
    [FromServices] S3Client s3Client)
{
    // Iterator local function: upload tasks start lazily as Task.WhenAll enumerates.
    IEnumerable <Task <string> > StartUploads()
    {
        var sequence = 0;
        foreach (var image in form.Images)
        {
            var fileName = $"{DateTime.Now.Ticks}_{sequence++}{Path.GetExtension(image.FileName)}";
            yield return s3Client.SavePublicFile($"images/{fileName}", image.OpenReadStream());
        }
    }

    var product = new Product
    {
        Name = form.Name,
        Description = form.Description,
    };

    var uploadedPaths = await Task.WhenAll(StartUploads());
    product.Images.AddRange(uploadedPaths.Select((path, i) => new Image
    {
        Index = i,
        Path = path,
    }));

    return await createProduct.Do(product);
}
/// <summary>
/// Marks a user as having left the gym: decrements the facility occupancy
/// counters, clears the user's room-usage and access/scan flags, removes the
/// per-visit detail row, and deletes the user's detected face image from S3.
/// </summary>
private async Task LeaveGym(Facility facilityView, ApplicationUser user, Facility facility, List <UsersInGymDetail> facilityDetails, UsersInGymDetail currentFacilityDetailDb)
{
    user.IsInsideGym = false;

    // if it is not 0 then we can decrement to avoid negatives
    if (facility.NumberOfClientsInGym != 0)
    {
        facility.NumberOfClientsInGym--;
    }

    // adjust all variables to update the user to a left gym status
    // FIX: added the != 0 guard for consistency with the cardio/stretch rooms
    // so the weight-room counter can never go negative.
    if (user.WillUseWeightsRoom && facility.NumberOfClientsUsingWeightRoom != 0)
    {
        facility.NumberOfClientsUsingWeightRoom--;
        user.WillUseWeightsRoom = false;
    }
    if (user.WillUseCardioRoom && facility.NumberOfClientsUsingCardioRoom != 0)
    {
        facility.NumberOfClientsUsingCardioRoom--;
        user.WillUseCardioRoom = false;
    }
    if (user.WillUseStretchRoom && facility.NumberOfClientsUsingStretchRoom != 0)
    {
        facility.NumberOfClientsUsingStretchRoom--;
        // BUG FIX: this branch previously reset WillUseWeightsRoom (copy-paste
        // error), leaving WillUseStretchRoom stuck at true.
        user.WillUseStretchRoom = false;
    }

    // if there are entries for facilities, remove the entry which is stamped with the current user entry
    if (facilityDetails.Count() > 0)
    {
        _facilityContext.UsersInGymDetails.Remove(currentFacilityDetailDb);
    }

    facilityView.IsCameraScanSuccessful = false;
    user.IsWithin10m = false;
    user.IsCameraScanSuccessful = false;
    user.AccessGrantedToFacility = false;

    // delete detected image from S3 bucket
    try
    {
        string keyName = $"{user.FirstName}_{user.Id}.jpg";
        var deleteObjectRequest = new Amazon.S3.Model.DeleteObjectRequest { BucketName = bucket, Key = keyName };
        await S3Client.DeleteObjectAsync(deleteObjectRequest);
    }
    catch (AmazonS3Exception e)
    {
        _logger.LogInformation(e.Message);
    }
    catch (Exception e)
    {
        _logger.LogInformation(e.Message);
    }
}
/// <summary>
/// Entry point: constructs an S3 client against eu-west-1, then waits for the
/// user to press ENTER before exiting.
/// </summary>
public static void Main(string[] args)
{
    S3Client client = new S3Client(RegionEndpoint.EUWest1);
    Console.WriteLine("Press <ENTER> To Continue...");
    Console.ReadLine();
}
/// <summary>
/// CLI entry point: builds configuration and logging, wires up the S3 service,
/// then dispatches on the action verb in args[1] (LIST/UPLOAD/DOWNLOAD/
/// TRANSFER/COPY/DELETE/AI). args[0] selects the S3 profile/target.
/// </summary>
static void Main(string[] args)
{
    configuration = new ConfigurationBuilder()
        .AddJsonFile("appsettings.json", optional: true)
        .Build();
    CreateLogger();
    logger.Info("Process Started");

    var client = new S3Client(configuration, args[0]);
    var s3Service = new S3Service(client.s3Client, client.BucketName, client.BucketRegion, configuration, logger);

    switch (args[1].ToUpper())
    {
        case "LIST":
            var files = s3Service.ListFiles(args.Length > 2 ? args[2] : "");
            foreach (var f in files.OrderByDescending(x => x.LastModified))
            {
                Console.WriteLine($"File Name: {f.Key} Size: {f.Size / 1024 / 1024} Modified Date: {f.LastModified}");
            }
            Console.WriteLine($"Total Files:{files.Count}");
            break;
        case "UPLOAD":
            s3Service.UploadFiles(args);
            break;
        case "DOWNLOAD":
            s3Service.DownloadFiles(args[2], args.Length > 3 ? args[3] : "");
            break;
        case "TRANSFER":
            s3Service.TransferFilesBetweenS3(args);
            break;
        case "COPY":
            s3Service.CopyFilesBetweenFolders(args);
            break;
        case "DELETE":
            s3Service.DeleteFiles(args);
            break;
        case "AI":
            ProcessAIModelFiles(s3Service, args);
            break;
    }

    logger.Info("Process Ended.");
}
/// <summary>
/// Test-cleanup utility: configures a SimpleS3 client from Config.json
/// (creating a credentials profile interactively on first run), deletes every
/// bucket named "testbucket-*", and empties the main test bucket.
/// </summary>
private static async Task Main(string[] args)
{
    IConfigurationRoot configRoot = new ConfigurationBuilder()
        .AddJsonFile("Config.json", false)
        .Build();

    ServiceCollection services = new ServiceCollection();
    services.Configure <S3Config>(configRoot);
    IS3ClientBuilder clientBuilder = services.AddSimpleS3((s3Config, provider) => configRoot.Bind(s3Config));

    string profileName = configRoot["ProfileName"];
    clientBuilder.CoreBuilder.UseProfileManager()
        .BindConfigToProfile(profileName)
        .UseDataProtection();

    IConfigurationSection proxySection = configRoot.GetSection("Proxy");
    // FIX: GetSection never returns null, and indexing a missing key yields
    // null — proxySection["UseProxy"].Equals(...) could therefore throw a
    // NullReferenceException. string.Equals handles the null operand safely.
    if (string.Equals(proxySection["UseProxy"], "true", StringComparison.OrdinalIgnoreCase))
    {
        clientBuilder.HttpBuilder.WithProxy(proxySection["ProxyAddress"]);
    }

    using (ServiceProvider serviceProvider = services.BuildServiceProvider())
    {
        IProfileManager manager = serviceProvider.GetRequiredService <IProfileManager>();
        IProfile? profile = manager.GetProfile(profileName);

        //If profile is null, then we do not yet have a profile stored on disk. We use ConsoleSetup as an easy and secure way of asking for credentials
        if (profile == null)
        {
            ConsoleSetup.SetupProfile(manager, profileName);
        }

        S3Client client = serviceProvider.GetRequiredService <S3Client>();

        await foreach (S3Bucket bucket in client.ListAllBucketsAsync())
        {
            if (!bucket.Name.StartsWith("testbucket-", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            // Only remove the bucket itself once all of its objects are gone.
            DeleteAllObjectsStatus objDelResp = await client.DeleteAllObjectsAsync(bucket.Name).ConfigureAwait(false);
            if (objDelResp == DeleteAllObjectsStatus.Ok)
            {
                await client.DeleteBucketAsync(bucket.Name).ConfigureAwait(false);
            }
        }

        //Empty the main test bucket
        await client.DeleteAllObjectsAsync(configRoot["BucketName"]).ConfigureAwait(false);
    }
}
/// <summary>
/// Lists the deployable versions available for a package by scanning the S3
/// prefix (optionally narrowed by the package's relative path) and parsing
/// each object key; unparseable keys are skipped.
/// </summary>
public override IList <AsimovVersion> GetAvailableVersions(PackageInfo packageInfo)
{
    string prefix = packageInfo.SourceRelativePath == null
        ? Prefix
        : $"{Prefix}/{packageInfo.SourceRelativePath}";

    var listing = S3Client.ListObjects(Bucket, prefix);
    return listing.S3Objects
        .Select(entry => ParseVersion(entry.Key, entry.LastModified))
        .Where(version => version != null)
        .ToList();
}
/// <summary>
/// Renders the requested game (with both challengers loaded) as a PDF, stores
/// it in S3, and returns the stored-object descriptor as JSON.
/// </summary>
/// <param name="id">Database id of the game to render.</param>
public async Task<JsonResult> Index(long id)
{
    var game = await this.db.Games
        .Where(x => x.Id.Equals(id))
        .Include(x => x.ChallengerOne)
        .Include(x => x.ChallengerTwo)
        .FirstAsync();

    IS3Provider<S3ApiModel> amazon = new S3Client();
    Stream pdfStream = new MemoryStream();
    Pdf.CreateDocument(this.GetViewHtml(game, "_pdfPartial"), pdfStream);

    var stored = await amazon.CreateRequest(pdfStream, ".pdf").SaveObject();
    return Json(new ApiResponse(200, stored), JsonRequestBehavior.AllowGet);
}
/// <summary>
/// Test endpoint: uploads the bundled ~/test.pdf to S3 and returns the stored
/// object descriptor.
/// </summary>
public async Task<IHttpActionResult> S3UploadTest()
{
    IS3Provider<S3ApiModel> amazon = new S3Client();
    byte[] data = File.ReadAllBytes(HttpContext.Current.Server.MapPath("~/test.pdf"));
    // FIX: wrap the bytes directly. The original wrote into an empty
    // MemoryStream and never reset Position, so the provider would read from
    // the end of the stream; it also used LINQ Count() on an array (use Length).
    Stream testStream = new MemoryStream(data);
    var obj = await amazon.CreateRequest(testStream, ".pdf").SaveObject();
    return this.Ok(new ApiResponse(200, obj));
}