/// <summary>
/// Asynchronously downloads an audio object ("themeName/stickerName") from the S3 bucket and,
/// when found, builds an AudioClip named after the sticker and hands it to instantiateSounds.
/// Logs a message when the bucket returns no stream.
/// </summary>
/// <param name="s">Sound descriptor supplying themeName and stickerName.</param>
public void GetObjects(Sound s)
{
    string file = s.themeName + "/" + s.stickerName;
    S3Client.GetObjectAsync(S3BucketName, file, (responseObj) =>
    {
        var response = responseObj.Response;
        if (response.ResponseStream != null)
        {
            AudioClip t = new AudioClip();
            byte[] imageData = new byte[response.ResponseStream.Length];
            // Stream.Read may return fewer bytes than requested; loop until the whole payload
            // is buffered (the original single Read call could silently truncate the data).
            int offset = 0;
            while (offset < imageData.Length)
            {
                int read = response.ResponseStream.Read(imageData, offset, imageData.Length - offset);
                if (read <= 0)
                {
                    break;
                }
                offset += read;
            }
            // NOTE(review): imageData is never handed to the clip — LoadAudioData() operates on
            // the clip's own (empty) data. Confirm how the downloaded bytes are supposed to
            // reach the AudioClip (e.g. AudioClip.SetData after decoding).
            t.LoadAudioData();
            t.name = s.stickerName;
            instantiateSounds(t, s);
            // NOTE(review): this clone is never used; kept because Instantiate has side effects
            // in Unity — confirm whether it can be removed.
            AudioClip soundInstance = Instantiate(t) as AudioClip;
        }
        else
        {
            Debug.Log("Nothing found in Bucket");
        }
    });
}
/// <summary>
/// Lambda entry point for S3 notifications: reads the text content of the object named in the
/// first event record and forwards it to the configured SQS queue as a "New Task Created"
/// message. Errors are logged with bucket/key context and re-thrown.
/// </summary>
/// <param name="evnt">The S3 event that triggered this invocation.</param>
/// <param name="context">Lambda runtime context used for logging.</param>
/// <returns>The SQS send response, or null when the event carries no S3 record.</returns>
public async Task <SendMessageResponse> FunctionHandler(S3Event evnt, ILambdaContext context)
{
    var s3Event = evnt.Records?[0].S3;
    if (s3Event == null)
    {
        return null;
    }

    try
    {
        string body;
        using (var s3Response = await S3Client.GetObjectAsync(s3Event.Bucket.Name, s3Event.Object.Key))
        using (Stream payload = s3Response.ResponseStream)
        using (var contentReader = new StreamReader(payload))
        {
            body = await contentReader.ReadToEndAsync();
        }
        return await SqsClient.SendMessageAsync(_queueUrl, $"New Task Created:\n{body}");
    }
    catch (Exception e)
    {
        context.Logger.LogLine($"Error getting object {s3Event.Object.Key} from bucket {s3Event.Bucket.Name}. Make sure they exist and your bucket is in the same region as this function.");
        context.Logger.LogLine(e.Message);
        context.Logger.LogLine(e.StackTrace);
        throw;
    }
}
/// <summary>
/// SNS-triggered handler: pulls the CloudTrail log object named in the notification from S3
/// and extracts its records for processing.
/// </summary>
/// <param name="evnt">SNS notification whose message body is an S3Data JSON payload.</param>
/// <param name="context">Lambda runtime context used for logging.</param>
public async Task FunctionHandler(SNSEvent evnt, ILambdaContext context)
{
    // ### Level 1 - Create New Trail and Configure Lambda
    context.Logger.LogLine(JsonConvert.SerializeObject(evnt));
    // ### Level 2 - Retrieve Logs from S3
    var notification = JsonConvert.DeserializeObject <S3Data>(evnt.Records[0].Sns.Message);
    var bucket = notification.s3Bucket;
    // Only the first object key in the notification is processed.
    var objectKey = notification.s3ObjectKey[0];
    context.Logger.LogLine(bucket);
    context.Logger.LogLine(objectKey);
    var response = await S3Client.GetObjectAsync(new GetObjectRequest
    {
        BucketName = bucket,
        Key = objectKey
    });
    using (Stream logStream = response.ResponseStream)
    {
        await ExtractCloudTrailRecordsAsync(context.Logger, ReadStream(logStream));
    }
    // ### Level 3 - Filter for specific events and send alerts
    // ### Boss level - Take mitigating action
}
/// <summary>
/// Reads an S3 object as text, logging its title metadata and content type along the way.
/// Failures are written to the console and an empty string is returned instead of throwing.
/// </summary>
/// <param name="bucket">Bucket holding the object.</param>
/// <param name="key">Key of the object to read.</param>
/// <returns>The object's body as a string, or "" on error.</returns>
private async Task <string> GetS3FileContent(string bucket, string key)
{
    var responseBody = "";
    try
    {
        var request = new GetObjectRequest
        {
            BucketName = bucket,
            Key = key
        };
        using (var response = await S3Client.GetObjectAsync(request))
        using (var reader = new StreamReader(response.ResponseStream))
        {
            // "title" is expected to be attached to the object as x-amz-meta-title metadata.
            var title = response.Metadata["x-amz-meta-title"];
            var contentType = response.Headers["Content-Type"];
            Console.WriteLine("Object metadata, Title: {0}", title);
            Console.WriteLine("Content type: {0}", contentType);
            responseBody = reader.ReadToEnd();
        }
    }
    catch (AmazonS3Exception e)
    {
        Console.WriteLine("Error encountered ***. Message:'{0}' when writing an object", e.Message);
    }
    catch (Exception e)
    {
        Console.WriteLine("Unknown encountered on server. Message:'{0}' when writing an object", e.Message);
    }
    return responseBody;
}
/// <summary>
/// Fetches an image object (optionally a specific version) from S3 and returns it as a
/// seekable MemoryStream, since the SDK's response stream does not support seeking.
/// </summary>
/// <param name="bucketName">Bucket holding the image.</param>
/// <param name="objectKey">Key of the image object.</param>
/// <param name="versionId">Optional object version; ignored when null/whitespace.</param>
/// <returns>A MemoryStream positioned at 0, or null when S3 reports an error.</returns>
public async Task <Stream> GetImage(string bucketName, string objectKey, string versionId)
{
    GetObjectRequest originalRequest = new GetObjectRequest
    {
        BucketName = bucketName,
        Key = objectKey
    };
    if (!string.IsNullOrWhiteSpace(versionId))
    {
        originalRequest.VersionId = versionId;
    }
    try
    {
        // Dispose the response (and its network stream) once copied — the original leaked it.
        using (GetObjectResponse response = await S3Client.GetObjectAsync(originalRequest))
        {
            // AWS HashStream doesn't support seeking so we need to copy it back to a MemoryStream.
            // Copy asynchronously instead of blocking the thread with a synchronous CopyTo.
            MemoryStream outputStream = new MemoryStream();
            await response.ResponseStream.CopyToAsync(outputStream);
            outputStream.Position = 0;
            return outputStream;
        }
    }
    catch (AmazonS3Exception)
    {
        // Not found if we get an exception
        return null;
    }
}
/// <summary>
/// This method is called for every Lambda invocation. This method takes in an S3 event object and can be used
/// to respond to S3 notifications.
/// </summary>
/// <param name="evnt"></param>
/// <param name="context"></param>
/// <returns>"good" when the uploaded XML validates against the schema, "bad" when loading or
/// validation throws, null when the event carries no S3 record.</returns>
public async Task <string> FunctionHandler(S3Event evnt, ILambdaContext context)
{
    var s3Event = evnt.Records?[0].S3;
    if (s3Event == null) { return(null); }
    try
    {
        //var response = await this.S3Client.GetObjectMetadataAsync(s3Event.Bucket.Name, s3Event.Object.Key);
        // Download the uploaded XML document from the triggering bucket/key.
        var response = await S3Client.GetObjectAsync(s3Event.Bucket.Name, s3Event.Object.Key);
        using (var filereader = new StreamReader(response.ResponseStream))
        {
            String s3object = await filereader.ReadToEndAsync();
            XmlReaderSettings settings = new XmlReaderSettings();
            // Re-encode the text as ASCII bytes so the document can be re-read via XmlReader.
            byte[] byteArray = Encoding.ASCII.GetBytes(s3object);
            MemoryStream stream = new MemoryStream(byteArray);
            XmlReader xmlReaderS3object = XmlReader.Create(stream);
            // Materialize the embedded schema text to /tmp (the only writable path in Lambda).
            // stxsd is presumably a field holding the XSD source — TODO confirm.
            System.IO.File.WriteAllText("/tmp/books.xsd", stxsd);
            string curFile = "/tmp/books.xsd";
            //context.Logger.LogLine(File.Exists(curFile) ? "File exists." : "File does not exist.");
            context.Logger.LogLine("Validating " + s3Event.Object.Key);
            context.Logger.LogLine("Schema File Name: " + schemaName);
            //settings.Schemas.Add("urn:books", "/tmp/books.xsd");
            settings.Schemas.Add(Schema_Target, curFile);
            settings.CheckCharacters = true;
            settings.ValidationType = ValidationType.Schema;
            XmlReader reader = XmlReader.Create(xmlReaderS3object, settings);
            XmlDocument document = new XmlDocument();
            try
            {
                document.Load(reader);
                // Validate the loaded DOM; ValidationCallBack receives any schema violations.
                ValidationEventHandler eventHandler = new ValidationEventHandler(ValidationCallBack);
                document.Validate(eventHandler);
                return("good");
            }
            catch (Exception e)
            {
                context.Logger.LogLine(e.Message);
                return("bad");
            };
        }
    }
    catch (Exception e)
    {
        // Download/parse failures are logged and re-thrown so Lambda records the invocation error.
        context.Logger.LogLine(e.Message);
        context.Logger.LogLine(e.StackTrace);
        throw;
    }
}
/// <summary>
/// S3-triggered image pipeline: for each uploaded object with a recognized image extension,
/// runs a moderation check, computes corner-average colors, uploads a tile rendition, and
/// records the result against the gallery id embedded in the object key.
/// </summary>
/// <param name="evnt">The S3 event containing one or more object-created records.</param>
/// <param name="context">Lambda runtime context used for logging.</param>
public async Task FunctionHandler(S3Event evnt, ILambdaContext context)
{
    context.Logger.LogLine($"Received event with {evnt.Records.Count} records");
    foreach (var record in evnt.Records)
    {
        var bucket = record.S3.Bucket.Name;
        // Keys arrive URL-encoded in S3 events; decode before using as a path.
        var originalKey = System.Net.WebUtility.UrlDecode(record.S3.Object.Key);
        // One possible fix to filter out non-image file types, however relying on
        // extension is really not 100% accurate or future proof - we should instead
        // catch the exception during moderation and skip the file.
        var fileExt = Path.GetExtension(originalKey).TrimStart('.');
        if (!_validImageExtensions.Contains(fileExt))
        {
            context.Logger.LogLine($"File {originalKey} does not have a recognized image extension, skipping");
            continue;
        }
        context.Logger.LogLine($"Processing s3://{bucket}/{originalKey}");
        // Gallery id is the second-to-last path segment of the key — TODO confirm key layout.
        var tokens = originalKey.Split('/');
        var galleryId = tokens[tokens.Length - 2];
        context.Logger.LogLine($"GalleryId: {galleryId}");
        if (!(await IsImageSafe(bucket, originalKey, context)))
        {
            context.Logger.LogLine("Image suspected to be inappropriate and skipped.");
            continue;
        }
        context.Logger.LogLine("Image passed moderation test");
        // Lambda only allows writes under /tmp; clean up in finally regardless of outcome.
        var tmpPath = Path.Combine("/tmp/", Path.GetFileName(originalKey));
        try
        {
            context.Logger.LogLine("Saving image to tmp");
            using (var response = await S3Client.GetObjectAsync(bucket, originalKey))
            {
                await response.WriteResponseStreamToFileAsync(tmpPath, false, default(CancellationToken));
            }
            context.Logger.LogLine("Reading image");
            using (var sourceImage = Image.Load(tmpPath))
            {
                var imageInfo = GetAverageColor(sourceImage, context);
                context.Logger.LogLine($"Width: {sourceImage.Width}, Height: {sourceImage.Height} TL: {imageInfo.AverageTL}, TR: {imageInfo.AverageTR}, BL: {imageInfo.AverageBL}, BR: {imageInfo.AverageBR}");
                var tileKey = await UploadTile(sourceImage, bucket, originalKey, context);
                await SaveToTable(galleryId, tileKey, imageInfo);
            }
        }
        finally
        {
            File.Delete(tmpPath);
        }
    }
}
/// <summary>
/// Downloads an S3 object and returns its entire body as a string.
/// </summary>
/// <param name="bucket">Bucket holding the object.</param>
/// <param name="key">Key of the object to read.</param>
/// <returns>The object's content as text.</returns>
public async Task <string> GetObject(string bucket, string key)
{
    // Dispose the response so its underlying HTTP connection is released
    // (the original left the GetObjectResponse undisposed).
    using (var response = await S3Client.GetObjectAsync(bucket, key))
    using (var reader = new StreamReader(response.ResponseStream))
    {
        String s3object = await reader.ReadToEndAsync();
        return s3object;
    }
}
/// <summary>
/// Downloads an S3 object and returns its entire body as a string.
/// </summary>
/// <param name="bucket">Bucket holding the object.</param>
/// <param name="key">Key of the object to read.</param>
/// <returns>The object's content as text.</returns>
public async Task <string> GetObject(string bucket, string key)
{
    //var response = await S3Client.GetObjectAsync("siri-lambda-test", "books.xsd");
    // Dispose the response so its underlying HTTP connection is released
    // (the original left the GetObjectResponse undisposed).
    using (var response = await S3Client.GetObjectAsync(bucket, key))
    using (var reader = new StreamReader(response.ResponseStream))
    {
        String s3object = await reader.ReadToEndAsync();
        return s3object;
    }
}
/// <summary>
/// Downloads the source image named in the metadata, resizes it to a 120px-tall JPEG
/// thumbnail, uploads it to the hard-coded thumbnail bucket, and stores a pre-signed
/// (query-string-stripped) URL back on the metadata.
/// </summary>
/// <param name="metaData">Source image location plus content type; mutated in place.</param>
/// <param name="context">Lambda runtime context (unused here).</param>
/// <returns>The same metadata instance with its Thumbnail populated.</returns>
public async Task <MetaData> GenerateThumbnail(MetaData metaData, ILambdaContext context)
{
    metaData.Thumbnail = new Thumbnail
    {
        BucketName = "image-thumbnails-lab04",
        ImageName = "resized-" + metaData.ImageName,
        ContentType = metaData.ContentType
    };
    using (var response = await S3Client.GetObjectAsync(metaData.BucketName, metaData.ImageName))
    using (var responseStream = response.ResponseStream)
    using (var stream = new MemoryStream())
    {
        // Buffer the whole image into a seekable stream before decoding.
        responseStream.CopyTo(stream);
        stream.Position = 0;
        GcBitmap bitmap = new GcBitmap(stream);
        // Fixed thumbnail height; width is scaled to preserve the aspect ratio.
        var newHeight = 120;
        var newWidth = ImageUtilities.ScaleWidth((int)bitmap.Height, newHeight, (int)bitmap.Width);
        var resizedBitmap = bitmap.Resize(newWidth, newHeight, InterpolationMode.NearestNeighbor);
        // NOTE(review): bitmap/resizedBitmap/ms are never disposed, and ms is handed to
        // PutObject without resetting Position after SaveAsJpeg — confirm the SDK rewinds
        // the input stream, otherwise an empty object may be uploaded.
        var ms = new MemoryStream();
        resizedBitmap.SaveAsJpeg(ms);
        PutObjectRequest putRequest = new PutObjectRequest
        {
            BucketName = metaData.Thumbnail.BucketName,
            Key = metaData.Thumbnail.ImageName,
            InputStream = ms,
            ContentType = metaData.Thumbnail.ContentType,
        };
        // Put object
        PutObjectResponse putResponse = await S3Client.PutObjectAsync(putRequest);
    };
    // Get url Address
    GetPreSignedUrlRequest requestUrl = new GetPreSignedUrlRequest();
    requestUrl.BucketName = metaData.Thumbnail.BucketName;
    requestUrl.Key = metaData.Thumbnail.ImageName;
    // Pre-signed URL valid for one day.
    requestUrl.Expires = DateTime.Now.Add(new TimeSpan(1, 0, 0, 0));
    string Url = S3Client.GetPreSignedURL(requestUrl);
    // Strip the signature query string — only the plain object URL is stored.
    metaData.Thumbnail.Url = Url.Substring(0, Url.IndexOf('?'));
    return(metaData);
}
/// <summary>
/// Downloads an S3 object to a local file, creating the target directory as needed.
/// Refuses to overwrite an existing local file, and wraps any failure in an
/// S3BucketException carrying bucket/key/path context.
/// </summary>
/// <param name="fullFileName">S3 key of the file to download (adjusted via AdjustKey).</param>
/// <param name="localFileName">Local path to write; must not already exist.</param>
/// <param name="cancellationToken">Token to cancel the download.</param>
/// <returns>The local file name that was written.</returns>
public async Task <string> DownloadAsync(string fullFileName, string localFileName, CancellationToken cancellationToken = default)
{
    CheckDisposed();
    if (string.IsNullOrWhiteSpace(fullFileName))
    {
        throw new ArgumentNullException(nameof(fullFileName));
    }
    if (string.IsNullOrWhiteSpace(localFileName))
    {
        throw new ArgumentNullException(nameof(localFileName));
    }
    if (File.Exists(localFileName))
    {
        throw new S3BucketException(
            $"'{localFileName}' local file name defined for the requested '{fullFileName}' S3 file already exists.");
    }
    try
    {
        // Make sure the destination directory exists before the SDK tries to write into it.
        var targetDirectory = Path.GetDirectoryName(localFileName);
        if (targetDirectory != null)
        {
            Directory.CreateDirectory(targetDirectory);
        }
        fullFileName = AdjustKey(fullFileName);
        var getRequest = new GetObjectRequest
        {
            BucketName = BucketName,
            Key = fullFileName
        };
        using (var s3Response = await S3Client.GetObjectAsync(getRequest, cancellationToken).ConfigureAwait(false))
        {
            await s3Response.WriteResponseStreamToFileAsync(localFileName, false, cancellationToken)
                .ConfigureAwait(false);
            return localFileName;
        }
    }
    catch (Exception ex)
    {
        throw new S3BucketException(
            $"Exception during s3 download BucketName = {BucketName} S3 FileName = {fullFileName} Local FileName = {localFileName}", ex);
    }
}
/// <summary>
/// Streams an S3 object back to the caller, forwarding the object's response headers.
/// </summary>
/// <param name="item">Bucket name and key of the object to fetch.</param>
/// <returns>An HTTP 200 response whose content is the S3 object stream.</returns>
public async Task <HttpResponseMessage> Get([FromQuery] BucketItem item)
{
    GetObjectResponse response = await S3Client.GetObjectAsync(item.BucketName, item.Key);
    HttpResponseMessage responseMessage = new HttpResponseMessage(System.Net.HttpStatusCode.OK);
    // The response stream is handed to StreamContent, which disposes it when the message is disposed.
    responseMessage.Content = new StreamContent(response.ResponseStream);
    foreach (string headerName in response.Headers.Keys)
    {
        // Content-* headers (Content-Type, Content-Length, ...) belong to Content.Headers;
        // HttpResponseMessage.Headers.Add throws InvalidOperationException for them, which
        // made the original fail for every object. Try the message headers first, then fall
        // back to the content headers.
        if (!responseMessage.Headers.TryAddWithoutValidation(headerName, response.Headers[headerName]))
        {
            responseMessage.Content.Headers.TryAddWithoutValidation(headerName, response.Headers[headerName]);
        }
    }
    return responseMessage;
}
/// <summary>
/// Downloads an object from the configured bucket and returns its full content as bytes.
/// </summary>
/// <param name="key">Key of the object to fetch.</param>
/// <returns>The object's raw bytes.</returns>
public async Task <byte[]> GetFile(string key)
{
    var getRequest = new GetObjectRequest()
    {
        Key = key,
        BucketName = Config.BucketName
    };
    // Dispose the response after reading so the HTTP connection is returned to the pool
    // (the original never disposed it).
    using (var str = await S3Client.GetObjectAsync(getRequest))
    {
        return StreamUtilities.ReadFully(str.ResponseStream);
    }
}
/// <summary>
/// SNS-triggered CloudTrail processor: downloads every log object named in the notification,
/// collects the events, alerts on CreateUser events via SNS, and deletes the created users.
/// </summary>
/// <param name="snsEvent">SNS notification whose message is a CloudTrailModel JSON payload.</param>
/// <param name="context">Lambda runtime context used for logging.</param>
public async Task FunctionHandler(SNSEvent snsEvent, ILambdaContext context)
{
    // ### Level 1 - Create New Trail and Configure Lambda
    context.Logger.LogLine(JsonConvert.SerializeObject(snsEvent));
    // ### Level 2 - Retrieve Logs from S3
    var crMessage = JsonConvert.DeserializeObject <CloudTrailModel>(snsEvent.Records.First().Sns.Message);
    var bucketName = crMessage.s3Bucket;
    var fileNames = crMessage.s3ObjectKey;
    var records = new HashSet <CloudTrailEvent>();
    foreach (string fileName in fileNames)
    {
        // Await the calls instead of blocking on .Result — blocking inside an async Lambda
        // handler wastes the thread and risks thread-pool starvation/deadlock.
        using (var response = await S3Client.GetObjectAsync(
                   new GetObjectRequest { BucketName = bucketName, Key = fileName }))
        {
            var fileBytes = ReadStream(response.ResponseStream);
            var record = await ExtractCloudTrailRecordsAsync(context.Logger, fileBytes);
            foreach (var cloudTrailEvent in record.Records)
            {
                records.Add(cloudTrailEvent);
            }
            context.Logger.LogLine(record.Records.First().EventName);
        }
    }
    // ### Level 3 - Filter for specific events and send alerts
    // Materialize once — the deferred query was otherwise re-evaluated by each consumer below.
    var createUserEvents = records.Where(x => x.EventName == "CreateUser").ToList();
    await SnsClient.PublishAsync(new PublishRequest
    {
        Message = JsonConvert.SerializeObject(createUserEvents),
        TopicArn = AlertTopicArn
    });
    // ### Boss level - Take mitigating action
    foreach (var userEvent in createUserEvents)
    {
        var username = userEvent.RequestParameters["userName"] as String;
        context.Logger.LogLine($"Deleting user: {username}");
        await IamClient.DeleteUserAsync(new DeleteUserRequest { UserName = username });
    }
}
/// <summary>
/// SNS-triggered CloudTrail processor: downloads the first log object named in the
/// notification, extracts its records, and publishes an SNS alert for each CreateUser event.
/// </summary>
/// <param name="evnt">SNS notification whose message is a CloudTrailMessage JSON payload.</param>
/// <param name="context">Lambda runtime context used for logging.</param>
public async Task FunctionHandler(SNSEvent evnt, ILambdaContext context)
{
    // ### Level 1 - Create New Trail and Configure Lambda
    // context.Logger.LogLine(JsonConvert.SerializeObject(evnt));
    // ### Level 2 - Retrieve Logs from S3
    var trailMessage = JsonConvert.DeserializeObject <CloudTrailMessage>(evnt.Records[0].Sns.Message);
    context.Logger.LogLine($"Bucket: {trailMessage.S3Bucket} Object key: {trailMessage.S3ObjectKey[0]}");
    var getRequest = new GetObjectRequest
    {
        BucketName = trailMessage.S3Bucket,
        Key = trailMessage.S3ObjectKey[0]
    };
    // Buffer the (gzip) log object fully into memory before parsing.
    byte[] logBytes;
    using (GetObjectResponse getResponse = await S3Client.GetObjectAsync(getRequest))
    using (var buffer = new MemoryStream())
    {
        getResponse.ResponseStream.CopyTo(buffer);
        logBytes = buffer.ToArray();
    }
    var records = await ExtractCloudTrailRecordsAsync(context.Logger, logBytes);
    // ### Level 3 - Filter for specific events and send alerts]
    foreach (var r in records.Records)
    {
        if (r.EventName != "CreateUser")
        {
            continue;
        }
        context.Logger.LogLine($"Super cool event '{r.EventName}'");
        await SnsClient.PublishAsync(new PublishRequest
        {
            Subject = "Super cool event ALERT",
            Message = $"You have a super cool event \n {r.EventName} for user {r.RequestParameters["userName"]}",
            TopicArn = CreateUserTopicArn
        });
    }
    // ### Boss level - Take mitigating action
}
/// <summary>
/// Writes the content for the specified media content to the given stream.
/// </summary>
/// <param name="id">The unique id</param>
/// <param name="stream">The output stream</param>
/// <returns>If the media was found</returns>
public async Task <bool> GetAsync(string id, Stream stream)
{
    var objectKey = Url.Combine(StorageOptions.KeyPrefix, id);
    try
    {
        using (var getObjectResponse = await S3Client.GetObjectAsync(StorageOptions.BucketName, objectKey))
        using (var responseStream = getObjectResponse.ResponseStream)
        {
            // Copy asynchronously — the original blocked the thread with a synchronous CopyTo
            // inside an async method.
            await responseStream.CopyToAsync(stream);
        }
        return true;
    }
    catch (Exception)
    {
        // Any failure (missing key, access denied, network) is reported as "not found".
        return false;
    }
}
/// <summary>
/// SNS-triggered CloudTrail processor: downloads every log object named in the notification,
/// extracts the records, and publishes an SNS alert for each CreateUser event.
/// </summary>
/// <param name="evnt">SNS notification whose message is a CloudTrailMessage JSON payload.</param>
/// <param name="context">Lambda runtime context used for logging.</param>
public async Task FunctionHandler(SNSEvent evnt, ILambdaContext context)
{
    // ### Level 1 - Create New Trail and Configure Lambda
    context.Logger.LogLine(JsonConvert.SerializeObject(evnt));
    // ### Level 2 - Retrieve Logs from S3
    var msg = JsonConvert.DeserializeObject <CloudTrailMessage>(evnt.Records[0].Sns.Message);
    // A single notification may reference several log objects; process each one.
    foreach (var objectKey in msg.S3ObjectKey)
    {
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = msg.S3Bucket,
            Key = objectKey
        };
        using (GetObjectResponse response = await S3Client.GetObjectAsync(request))
        using (MemoryStream data = new MemoryStream())
        {
            // Buffer the log object fully, then hand the bytes to the extractor.
            await response.ResponseStream.CopyToAsync(data);
            var records = await ExtractCloudTrailRecordsAsync(context.Logger, data.ToArray());
            context.Logger.LogLine(JsonConvert.SerializeObject(records));
            foreach (var record in records.Records)
            {
                if (record.EventName == "CreateUser")
                {
                    // NOTE(review): topic ARN is hard-coded to a specific account/region —
                    // consider moving it to configuration.
                    PublishRequest req = new PublishRequest
                    {
                        Message = JsonConvert.SerializeObject(record),
                        Subject = ("Suspicious Alert"),
                        TopicArn = ("arn:aws:sns:us-west-2:068486113320:suspicious")
                    };
                    PublishResponse result = await SnsClient.PublishAsync(req);
                }
            }
        }
    }
    // ### Level 3 - Filter for specific events and send alerts
    // ### Boss level - Take mitigating action
}
/// <summary>
/// Streams an S3 object directly into the HTTP response body, propagating the object's
/// content type. On an S3 error the S3 status code and error message are returned instead.
/// </summary>
/// <param name="key">Key of the object to fetch from the configured bucket.</param>
public async Task Get(string key)
{
    try
    {
        // Dispose the response once its stream has been copied to the client.
        using (GetObjectResponse getResponse = await S3Client.GetObjectAsync(
                   new GetObjectRequest { BucketName = BucketName, Key = key }))
        {
            Response.ContentType = getResponse.Headers.ContentType;
            getResponse.ResponseStream.CopyTo(Response.Body);
        }
    }
    catch (AmazonS3Exception e)
    {
        Response.StatusCode = (int)e.StatusCode;
        var writer = new StreamWriter(Response.Body);
        writer.Write(e.Message);
        // Flush explicitly — the original never flushed, so the buffered error text
        // could be dropped and the client would receive an empty body.
        writer.Flush();
    }
}
/// <summary>
/// Re-processes a stored classification: backfills the Nudity flag from moderation labels,
/// measures the image's dimensions/orientation, and writes the updated record to
/// Elasticsearch and DynamoDB. Zero-length images are routed to manual review instead.
/// </summary>
/// <param name="classification">Record to update; mutated in place.</param>
private async Task Process(ClassificationModel classification)
{
    Console.WriteLine($"Re-processing source: {classification.Source} page id: {classification.PageId}");
    if (classification.ModerationLabels != null && !classification.Nudity.HasValue)
    {
        // Derive Nudity from any moderation label whose name or parent mentions "nudity".
        classification.Nudity = classification.ModerationLabels.Any(x =>
            x.Name.Contains("nudity", StringComparison.OrdinalIgnoreCase) ||
            x.ParentName.Contains("nudity", StringComparison.OrdinalIgnoreCase)
        );
    }
    classification.S3Bucket = Constants.IMAGES_BUCKET;
    // Await the download instead of blocking on .Result — blocking inside an async method
    // wastes a thread-pool thread and risks deadlock.
    var objectImage = await S3Client.GetObjectAsync(Constants.IMAGES_BUCKET, $"{classification.S3Path}");
    if (objectImage.ContentLength == 0)
    {
        // Empty object: hand off for manual review and stop processing.
        new ReviewProcess().MoveForReview(DbClient, ElasticSearchClient, S3Client, classification);
        return;
    }
    byte[] imageBytes;
    await using (var stream = objectImage.ResponseStream)
    await using (var memoryStream = new MemoryStream())
    {
        await stream.CopyToAsync(memoryStream);
        imageBytes = memoryStream.ToArray();
    }
    using var image = Image.Load(imageBytes);
    classification.Height = image.Height;
    classification.Width = image.Width;
    // Square images count as portrait.
    classification.Orientation = image.Height >= image.Width
        ? Constants.ORIENTATION_PORTRAIT
        : Constants.ORIENTATION_LANDSCAPE;
    var json = JObject.FromObject(classification, new JsonSerializer { NullValueHandling = NullValueHandling.Ignore });
    await ElasticSearchClient.SendToElasticSearch(classification);
    await DbClient.PutItemAsync(
        new ClassificationModel().GetTable(),
        Document.FromJson(json.ToString()).ToAttributeMap()
    );
}
/// <summary>
/// Opens an S3 object identified by a virtual path and returns its content as a seekable
/// in-memory stream. S3 "not found"/"access denied" errors are mapped to
/// FileNotFoundException; read time and bytes fetched are always reported.
/// </summary>
/// <param name="virtualPath">Virtual path parsed into bucket and key.</param>
/// <param name="queryString">Query-string parameters (not used here).</param>
/// <returns>A memory-backed copy of the object's content.</returns>
public override async Task <Stream> OpenAsync(string virtualPath, NameValueCollection queryString)
{
    var path = ParseAndFilterPath(virtualPath);
    var time = Stopwatch.StartNew();
    long bytesFetched = 0;
    //Synchronously download to memory stream
    try
    {
        var req = new Amazon.S3.Model.GetObjectRequest() { BucketName = path.Bucket, Key = path.Key };
        using (var s = await S3Client.GetObjectAsync(req)){
            using (var stream = s.ResponseStream)
            {
                // Copy to memory so callers get a seekable stream independent of the HTTP connection.
                var copy = (Stream)await stream.CopyToMemoryStreamAsync();
                bytesFetched = copy.Length;
                return(copy);
            }
        }
    }
    catch (AmazonS3Exception se)
    {
        // Map both the HTTP status and the S3 error code — depending on bucket policy,
        // a missing key can surface as 404/NoSuchKey or as 403/AccessDenied.
        if (se.StatusCode == System.Net.HttpStatusCode.NotFound || "NoSuchKey".Equals(se.ErrorCode, StringComparison.OrdinalIgnoreCase))
        {
            throw new FileNotFoundException("Amazon S3 file not found", se);
        }
        else if (se.StatusCode == System.Net.HttpStatusCode.Forbidden || "AccessDenied".Equals(se.ErrorCode, StringComparison.OrdinalIgnoreCase))
        {
            throw new FileNotFoundException("Amazon S3 access denied - file may not exist", se);
        }
        else
        {
            throw;
        }
    }
    finally
    {
        // Runs on success and failure alike so timing is always recorded.
        time.Stop();
        this.ReportReadTicks(time.ElapsedTicks, bytesFetched);
    }
}
/// <summary>
/// S3-triggered XML validator: streams the uploaded object through an XmlReader configured
/// with the embedded XSD schema (written to /tmp) and reports whether it parses cleanly.
/// </summary>
/// <param name="evnt">The S3 event that triggered this invocation.</param>
/// <param name="context">Lambda runtime context used for logging.</param>
/// <returns>"good right?" when the document reads without error, "bad" on any failure,
/// null when the event carries no S3 record.</returns>
public async Task <string> FunctionHandler(S3Event evnt, ILambdaContext context)
{
    var s3Event = evnt.Records?[0].S3;
    if (s3Event == null) { return(null); }
    // Materialize the embedded schema text (stxsd — presumably a field holding the XSD;
    // confirm) to /tmp, the only writable path inside Lambda.
    string curFile = "/tmp/schema.xsd";
    System.IO.File.WriteAllText(curFile, stxsd);
    var response = await S3Client.GetObjectAsync(s3Event.Bucket.Name, s3Event.Object.Key);
    using (var filereader = new StreamReader(response.ResponseStream))
    {
        string s3object = await filereader.ReadToEndAsync();
        XmlReaderSettings settings = new XmlReaderSettings();
        // Re-encode the document as ASCII bytes so it can be re-read through XmlReader.
        byte[] byteArray = Encoding.ASCII.GetBytes(s3object);
        MemoryStream stream = new MemoryStream(byteArray);
        XmlReader xmlReaderS3Object = XmlReader.Create(stream);
        context.Logger.LogLine("Validating " + s3Event.Object.Key);
        context.Logger.LogLine("Schema File Name: " + SchemaName);
        settings.Schemas.Add(Schema_Target, curFile);
        settings.CheckCharacters = true;
        settings.ValidationType = ValidationType.Schema;
        //settings.ValidationEventHandler += new ValidationEventHandler(booksSettingsValidationEventHandler);
        try
        {
            // Pull the whole document through the validating reader; schema violations throw.
            XmlReader reader = XmlReader.Create(xmlReaderS3Object, settings);
            while (reader.Read())
            {
            }
            return("good right?");
        }
        catch (Exception e)
        {
            context.Logger.LogLine(e.Message);
            return("bad");
        }
    }
}
/// <summary>
/// Fetches an S3 object and returns its body as text. S3 and unexpected failures are
/// logged to the console and an empty string is returned instead of throwing.
/// </summary>
/// <param name="bucket">Bucket holding the object.</param>
/// <param name="key">Key of the object to read.</param>
/// <returns>The object's body as a string, or "" on error.</returns>
private async Task <string> GetS3FileContent(string bucket, string key)
{
    var content = "";
    try
    {
        var request = new GetObjectRequest
        {
            BucketName = bucket,
            Key = key
        };
        using (var response = await S3Client.GetObjectAsync(request))
        using (var reader = new StreamReader(response.ResponseStream))
        {
            content = reader.ReadToEnd();
        }
    }
    catch (AmazonS3Exception e)
    {
        Console.WriteLine("S3 Exception encountered when writing object: {0}", e.Message);
    }
    catch (Exception e)
    {
        Console.WriteLine("Unknown Exception encountered when writing an object: {0}", e.Message);
    }
    return content;
}
/// <summary>
/// Loads a JSON document from the configured bucket and deserializes it into a dictionary.
/// Returns null when the object is missing (S3 exception) or the response is not 200 OK.
/// </summary>
/// <param name="s3Key">Key of the JSON object to load.</param>
/// <returns>The deserialized dictionary, or null.</returns>
private async Task <Dictionary <string, object> > GetFromS3(string s3Key)
{
    try
    {
        var response = await S3Client.GetObjectAsync(BucketName, s3Key);
        if (response.HttpStatusCode != HttpStatusCode.OK)
        {
            return null;
        }
        using (var streamReader = new StreamReader(response.ResponseStream))
        using (var jsonReader = new JsonTextReader(streamReader))
        {
            return Serializer.Deserialize <Dictionary <string, object> >(jsonReader);
        }
    }
    catch (AmazonS3Exception)
    {
        return null;
    }
}
/// <summary>
/// SNS-triggered CloudTrail processor: downloads the first log object named in the
/// notification, logs every record, alerts on CreateUser events whose userName starts with
/// "foo", and triggers mitigation via BossAsync.
/// </summary>
/// <param name="evnt">SNS notification whose message is a CloudTrailMessage JSON payload.</param>
/// <param name="context">Lambda runtime context used for logging.</param>
public async Task FunctionHandler(SNSEvent evnt, ILambdaContext context)
{
    // ### Level 1 - Create New Trail and Configure Lambda
    context.Logger.LogLine(JsonConvert.SerializeObject(evnt));
    // ### Level 2 - Retrieve Logs from S3
    var message = JsonConvert.DeserializeObject <CloudTrailMessage>(evnt.Records.First().Sns.Message);
    LambdaLogger.Log($"S3 Bucket: {message.S3Bucket}");
    LambdaLogger.Log($"S3 Object Key: {message.S3ObjectKey.First()}");
    var objectResponse = await S3Client.GetObjectAsync(message.S3Bucket, message.S3ObjectKey.First());
    var data = ReadFully(objectResponse.ResponseStream);
    var records = await ExtractCloudTrailRecordsAsync(context.Logger, data);
    foreach (var record in records.Records)
    {
        LambdaLogger.Log("Another FooBar record");
        LambdaLogger.Log(JsonConvert.SerializeObject(record));
        //LambdaLogger.Log($"{record.EventName}: {record.EventTime} - {record.SourceIpAddress}");
    }
    // ### Level 3 - Filter for specific events and send alerts
    // Materialize once — the deferred query was previously re-evaluated by both Any() and
    // BossAsync(alerts), running the filter twice.
    var alerts = records.Records.Where(r => r.EventName == "CreateUser" &&
        r.RequestParameters.Any(p => p.Key == "userName" &&
            p.Value.ToString().StartsWith("foo", StringComparison.OrdinalIgnoreCase))).ToList();
    if (alerts.Any())
    {
        var response = await SnsClient.PublishAsync("arn:aws:sns:us-west-2:481999251613:FooBar", "People be creating some Foo users...");
        LambdaLogger.Log("Send Sns Message");
        // ### Boss level - Take mitigating action
        await BossAsync(alerts);
    }
    else
    {
        LambdaLogger.Log("No alerts were found this time... that was close...");
    }
}
/// <summary>
/// Asynchronously downloads a sticker texture ("themeName/stickerName") from the S3 bucket
/// and, when found, decodes it into a Texture2D named after the sticker and hands it to
/// instantiateTexture. Logs a message when the bucket returns no stream.
/// </summary>
/// <param name="s">Sticker descriptor supplying themeName and stickerName.</param>
public void GetObjects(Sticker s)
{
    string file = s.themeName + "/" + s.stickerName;
    S3Client.GetObjectAsync(S3BucketName, file, (responseObj) =>
    {
        var response = responseObj.Response;
        if (response.ResponseStream != null)
        {
            // Placeholder size; LoadImage replaces the texture's dimensions with the image's.
            Texture2D t = new Texture2D(4, 4);
            byte[] imageData = new byte[response.ResponseStream.Length];
            // Stream.Read may return fewer bytes than requested; loop until the whole payload
            // is buffered (the original single Read call could truncate the image).
            int offset = 0;
            while (offset < imageData.Length)
            {
                int read = response.ResponseStream.Read(imageData, offset, imageData.Length - offset);
                if (read <= 0)
                {
                    break;
                }
                offset += read;
            }
            t.LoadImage(imageData);
            t.name = s.stickerName;
            instantiateTexture(t, s);
        }
        else
        {
            Debug.Log("Nothing found in Bucket");
        }
    });
}
/// <summary>
/// Lists all of the current user's objects in the bucket, downloads them in parallel, and
/// returns them as a JSON array of MessageModel (Key set to the final key segment).
/// S3 errors are returned with their status code and message.
/// </summary>
/// <returns>A JsonResult with the user's messages, or the S3 error message on failure.</returns>
public async Task <JsonResult> Get()
{
    var keys = await S3Client.GetAllObjectKeysAsync(BucketName, UserId, new Dictionary <string, object> { });
    try
    {
        // Start all downloads concurrently.
        var tasks = new List <Task <GetObjectResponse> >();
        foreach (var key in keys)
        {
            tasks.Add(S3Client.GetObjectAsync(new GetObjectRequest { BucketName = BucketName, Key = key }));
        }
        // Await instead of Task.WaitAll — blocking inside an async action ties up the
        // request thread and risks deadlock.
        await Task.WhenAll(tasks);
        var models = new List <MessageModel> { };
        foreach (var task in tasks)
        {
            // Dispose the reader (and stream) for each downloaded object.
            using (StreamReader reader = new StreamReader(task.Result.ResponseStream))
            {
                string rawJson = reader.ReadToEnd();
                var model = JsonSerializer.Deserialize <MessageModel>(rawJson);
                var k = task.Result.Key.Split('/');
                model.Key = k[k.Length - 1];
                models.Add(model);
            }
        }
        Response.ContentType = "text/json";
        return new JsonResult(models);
    }
    catch (AmazonS3Exception e)
    {
        Response.StatusCode = (int)e.StatusCode;
        return new JsonResult(e.Message);
    }
}
/// <summary>
/// Base S3 Retrieval method; returns the contents of the file in string format.
/// </summary>
/// <param name="request">A GetObjectRequest containing BucketName and Key values</param>
/// <returns>The object's body as text, or null when any exception occurs (logged).</returns>
public virtual async Task <string> GetObject(GetObjectRequest request)
{
    string content = string.Empty;
    try
    {
        using (GetObjectResponse response = await S3Client.GetObjectAsync(request))
        {
            using (Stream responseStream = response.ResponseStream)
            {
                using (var reader = new StreamReader(response.ResponseStream))
                {
                    // Non-OK responses are currently read anyway; only this placeholder marks them.
                    if (response.HttpStatusCode != HttpStatusCode.OK)
                    {
                        // do what here? log warning once I get CloudWatch or equivalent set up
                    }
                    content = await reader.ReadToEndAsync();
                }
            }
        }
    }
    catch (AmazonS3Exception s3Ex)
    {
        // S3-specific failures are logged with the request for context.
        Logger.Log(ExceptionLogFormatter.FormatExceptionLogMessage(request, s3Ex));
        content = null;
    }
    catch (Exception ex)
    {
        Logger.Log(ExceptionLogFormatter.FormatExceptionLogMessage(ex));
        content = null;
    }
    return content;
}
/// <summary>
/// Begins downloading a single object from the given bucket.
/// </summary>
/// <param name="client">Client used to issue the request.</param>
/// <param name="bucketName">Bucket that holds the object.</param>
/// <param name="objectName">Key of the object to fetch.</param>
/// <returns>The in-flight download task.</returns>
private static Task <GetObjectResponse> DownloadObject(S3Client client, string bucketName, string objectName)
    => client.GetObjectAsync(bucketName, objectName);
/// <summary>
/// This method is called for every Lambda invocation. This method takes in an S3 event object and can be used
/// to respond to S3 notifications.
/// </summary>
/// <param name="evnt"></param>
/// <param name="context"></param>
/// <returns></returns>
public async Task FunctionHandler(S3Event evnt, ILambdaContext context)
{
    // Stage the bundled ffmpeg binary into /tmp (the only writable, executable path) once
    // per warm container.
    if (!File.Exists("/tmp/ffmpeg"))
    {
        ExecuteCommand("cp /var/task/ffmpeg /tmp/.; chmod 755 /tmp/ffmpeg;");
    }
    foreach (var record in evnt.Records)
    {
        Console.WriteLine($"New S3 Object {record.S3.Bucket.Name}:{record.S3.Object.Key}");
        // Skip the download if a previous invocation in this container already fetched it.
        // NOTE(review): the key is used as a file name without URL-decoding — keys with
        // spaces/special characters arrive encoded in S3 events; confirm.
        if (!File.Exists($"/tmp/{record.S3.Object.Key}"))
        {
            var request = new GetObjectRequest
            {
                BucketName = record.S3.Bucket.Name,
                Key = record.S3.Object.Key
            };
            using (var response = await S3Client.GetObjectAsync(request))
            {
                await response.WriteResponseStreamToFileAsync($"/tmp/{record.S3.Object.Key}", false, CancellationToken.None);
            }
        }
        try
        {
            ExecuteCommand($"{PreCmd}");
            Console.WriteLine("Transcoding...");
            // Assemble the ffmpeg invocation from the configured filter/encoder/options fields.
            var arg = $"-y -i /tmp/{record.S3.Object.Key} " +
                $"{FilterPara} " +
                $"{VideoEncPara} " +
                $"{AudioEncPara} " +
                $"{OptsPara} " +
                $"/tmp/{record.S3.Object.Key}.mp4";
            Console.WriteLine($"ffmpeg {arg}");
            var ffmpegProc = new Process
            {
                StartInfo = new ProcessStartInfo
                {
                    FileName = "/tmp/ffmpeg",
                    Arguments = arg,
                    RedirectStandardOutput = true,
                    RedirectStandardError = true,
                    RedirectStandardInput = true,
                    CreateNoWindow = true
                },
                EnableRaisingEvents = true
            };
            // see below for output handler
            ffmpegProc.ErrorDataReceived += StandardIOHandler;
            ffmpegProc.OutputDataReceived += StandardIOHandler;
            ffmpegProc.Start();
            ffmpegProc.BeginErrorReadLine();
            ffmpegProc.BeginOutputReadLine();
            // Block until the transcode finishes; Lambda timeout bounds the wait.
            ffmpegProc.WaitForExit();
            // NOTE(review): the exit code is never checked — "Successfully" is logged and the
            // upload attempted even if ffmpeg failed; confirm intended behavior.
            Console.WriteLine("Transcode Successfully");
            Console.WriteLine("Upload to S3");
            await S3Client.PutObjectAsync(new PutObjectRequest
            {
                BucketName = "com.moiamond.ffpoc",
                Key = $"{record.S3.Object.Key}.mp4",
                FilePath = $"/tmp/{record.S3.Object.Key}.mp4"
            });
            // Free /tmp space for subsequent records/invocations.
            File.Delete($"/tmp/{record.S3.Object.Key}");
            File.Delete($"/tmp/{record.S3.Object.Key}.mp4");
            ExecuteCommand($"{PostCmd}");
        }
        catch (Exception e)
        {
            // Best-effort per record: log and continue with the next one.
            Console.WriteLine(e.Message);
        }
        finally
        {
            Console.WriteLine("final");
        }
    }
}
/// <summary>
/// Looks for a case file named "case#&lt;caseNumber&gt;" in the bucket; when present, downloads
/// and deserializes it into a Case, stores it on UIManager.Instance.activeCase, and invokes
/// the optional completion callback. Missing files and list errors are only logged.
/// </summary>
/// <param name="caseNumber">Case number used to build the object key.</param>
/// <param name="onComplete">Optional callback invoked after the case is loaded.</param>
public void GetList(string caseNumber, Action onComplete = null)
{
    Debug.Log("AWSMANAGER::GetList()");
    string target = "case#" + caseNumber;
    var request = new ListObjectsRequest()
    {
        BucketName = bucketName
    };
    S3Client.ListObjectsAsync(request, (response) =>
    {
        if (response.Exception == null)
        {
            // Verify the key exists before issuing the download.
            bool casefound = response.Response.S3Objects.Any(obj => obj.Key == target);
            if (casefound == true)
            {
                Debug.Log("Found Case File");
                S3Client.GetObjectAsync(bucketName, target, (responseObj) =>
                {
                    if (responseObj.Response.ResponseStream != null)
                    {
                        byte[] data = null;
                        // Drain the response stream into memory in 512-byte chunks; the loop
                        // handles partial reads correctly by reading until 0 bytes remain.
                        using (StreamReader reader = new StreamReader(responseObj.Response.ResponseStream))
                        {
                            using (MemoryStream memory = new MemoryStream())
                            {
                                var buffer = new byte[512];
                                var bytesRead = default(int);
                                while ((bytesRead = reader.BaseStream.Read(buffer, 0, buffer.Length)) > 0)
                                {
                                    memory.Write(buffer, 0, bytesRead);
                                }
                                data = memory.ToArray();
                            }
                        }
                        using (MemoryStream memory = new MemoryStream(data))
                        {
                            // SECURITY NOTE(review): BinaryFormatter deserialization of
                            // bucket-sourced data is unsafe (arbitrary code execution risk)
                            // and removed in .NET 9 — consider a safe serializer.
                            BinaryFormatter bf = new BinaryFormatter();
                            Case downloadedCase = bf.Deserialize(memory) as Case;
                            UIManager.Instance.activeCase = downloadedCase;
                            if (onComplete != null)
                            {
                                onComplete();
                            }
                        }
                    }
                });
            }
            else
            {
                Debug.Log("Case File Not Found");
            }
        }
        else
        {
            Debug.LogError("Exception occured during getting list: " + response.Exception);
        }
    });
}