/// <summary>
/// Logs the incoming scheduled CloudWatch event as JSON.
/// </summary>
/// <param name="evnt">The triggering CloudWatch event; the detail payload is untyped.</param>
/// <param name="context">The Lambda runtime context providing the logger.</param>
/// <returns>The literal string "Done".</returns>
public string FunctionHandler(CloudWatchEvent<dynamic> evnt, ILambdaContext context)
{
    // Anything written through the context logger lands in CloudWatch Logs by default.
    var serializedEvent = JsonSerializer.Serialize(evnt);
    context.Logger.LogLine(serializedEvent);
    return "Done";
}
/// <summary>
/// Scheduled entry point for the dispatch run. Decides whether this invocation is
/// late relative to its five-minute schedule, registers vendor factories, and
/// delegates processing to the dispatch handler.
/// </summary>
/// <param name="cloudWatchEvent">The scheduled CloudWatch event; only its timestamp is used.</param>
/// <param name="context">The Lambda runtime context (unused).</param>
public async Task RunAsync(CloudWatchEvent<object> cloudWatchEvent, ILambdaContext context)
{
    // Schedule is (0/5 * * * * *); allow a 10-second buffer past the 5-minute
    // boundary before treating the invocation as late.
    bool firedOnSchedule = cloudWatchEvent.Time.Minute % 5 == 0
                           && cloudWatchEvent.Time.Second < 11;
    bool isLate = !firedOnSchedule;

    // Register additional vendor integration factory methods here.
    var additionalDispatchCreatorStrategies = new Dictionary<string, Func<IDispatchVendor>>
    {
        { "SampleBulkSendVendor", () => new SampleBulkSendVendor() }
    };

    // Run-time settings for the dispatch pipeline.
    var dispatchHandler = new DispatchHandler(
        _dbConnectionString,
        _dbName,
        5,
        additionalDispatchCreatorStrategies,
        "SparkPost",
        10000);
    await dispatchHandler.ProcessMultipleMessage(isLate);
}
/// <summary>
/// Logs the received event together with a local timestamp and acknowledges it.
/// </summary>
/// <param name="ev">The CloudWatch event to log.</param>
/// <param name="context">The Lambda runtime context (unused).</param>
/// <returns>The literal string "ok".</returns>
public string FunctionHandler(CloudWatchEvent<Object> ev, ILambdaContext context)
{
    string eventReceived = JsonConvert.SerializeObject(ev);
    // NOTE(review): DateTime.Now is the host's local time — confirm local time is intended here.
    string datahora = DateTime.Now.ToString("dd'/'MM'/'yyyy HH:mm:ss");
    LambdaLogger.Log($"Yes, logget at: {datahora}! Event received:{eventReceived}");
    return "ok";
}
/// <summary>
/// Verifies that a terminate-failure auto-scaling event deserializes and is
/// classified as a failure.
/// </summary>
public void TestCloudWatchTerminateFailure()
{
    // ARRANGE: collapse the fixture JSON onto a single line.
    string json = instanceTerminateFailure
        .Trim()
        .Replace("\r", "")
        .Replace("\n", "")
        .Replace("\t", "");

    // ACT
    var evt = JsonConvert.DeserializeObject<CloudWatchEvent<AutoScalingInstanceStateEvent>>(json);

    // ASSERT
    Assert.True(evt.Detail.IsFailure());
}
/// <summary>
/// Diagnostic handler: dumps the environment variables, the Lambda context, and
/// the incoming event to CloudWatch Logs.
/// </summary>
/// <param name="invocationEvent">The CloudWatch event that triggered this invocation.</param>
/// <param name="context">The Lambda runtime context.</param>
/// <returns>A completed task; the handler does no asynchronous work.</returns>
public Task FunctionHandler(CloudWatchEvent<EventDetails> invocationEvent, ILambdaContext context)
{
    LambdaLogger.Log("ENVIRONMENT VARIABLES: " + JsonSerializer.Serialize(System.Environment.GetEnvironmentVariables()));
    LambdaLogger.Log("CONTEXT: " + JsonSerializer.Serialize(context));
    LambdaLogger.Log("EVENT: " + JsonSerializer.Serialize(invocationEvent));
    //Do what is required for processing
    // Fix: the original was declared 'async' with no awaits (compiler warning
    // CS1998); returning a completed task avoids the needless async state machine
    // while keeping the Task-returning signature callers rely on.
    return Task.CompletedTask;
}
/// <summary>
/// Detects labels in an S3-hosted image with Rekognition, stores up to 10 of them
/// in DynamoDB, and applies them as S3 object tags.
/// </summary>
/// <param name="parameter">CloudWatch event whose detail carries the S3 bucket name and object key.</param>
/// <param name="context">The Lambda runtime context (unused).</param>
public async Task FaceDetection(CloudWatchEvent<RequestParameters> parameter, ILambdaContext context)
{
    var bucketName = parameter.Detail.requestParameters["bucketName"];
    var key = parameter.Detail.requestParameters["key"];

    if (!SupportedImageTypes.Contains(Path.GetExtension(key)))
    {
        Console.WriteLine($"Object {bucketName}:{key} is not a supported image type");
        // Fix: stop here. The original fell through and sent unsupported objects
        // to Rekognition anyway, defeating the extension check.
        return;
    }

    Console.WriteLine($"Looking for labels in image {bucketName}:{key}");
    var detectResponses = await this.RekognitionClient.DetectLabelsAsync(new DetectLabelsRequest
    {
        MinConfidence = DEFAULT_MIN_CONFIDENCE,
        Image = new Image
        {
            S3Object = new Amazon.Rekognition.Model.S3Object
            {
                Bucket = bucketName,
                Name = key
            }
        }
    });

    // S3 allows at most 10 tags per object, so only the first 10 labels are kept.
    var tags = new List<Tag>();
    foreach (var label in detectResponses.Labels)
    {
        if (tags.Count < 10)
        {
            Console.WriteLine($"\tFound Label {label.Name} with confidence {label.Confidence}");
            tags.Add(new Tag { Key = label.Name, Value = label.Confidence.ToString() });
            await dynamoDB.InsertLabelAsync(key, label.Name, label.Confidence);
        }
        else
        {
            Console.WriteLine($"\tSkipped label {label.Name} with confidence {label.Confidence} because the maximum number of tags has been reached");
        }
    }

    await this.S3Client.PutObjectTaggingAsync(new PutObjectTaggingRequest
    {
        BucketName = bucketName,
        Key = key,
        Tagging = new Tagging { TagSet = tags }
    });
}
/// <summary>
/// Verifies that a launch-failure auto-scaling event deserializes, is classified
/// as a failure, and carries the expected activity id.
/// </summary>
public void TestCloudWatchEventLaunchFailure()
{
    // ARRANGE: collapse the fixture JSON onto a single line.
    string json = instanceLaunchFailure
        .Trim()
        .Replace("\r", "")
        .Replace("\n", "")
        .Replace("\t", "");

    // ACT
    var evt = JsonConvert.DeserializeObject<CloudWatchEvent<AutoScalingInstanceStateEvent>>(json);

    // ASSERT
    Assert.True(evt.Detail.IsFailure());
    Assert.Equal(Guid.Parse("87654321-4321-4321-4321-210987654321"), evt.Detail.ActivityId);
}
/// <summary>
/// Verifies that a lifecycle-terminate event deserializes with the expected
/// instance id and lifecycle transition.
/// </summary>
public void TestCloudWatchEventLifecycleTerminate()
{
    // ARRANGE: collapse the fixture JSON onto a single line.
    string json = lifecycleInstanceTerminateEvent
        .Trim()
        .Replace("\r", "")
        .Replace("\n", "")
        .Replace("\t", "");

    // ACT
    var msg = JsonConvert.DeserializeObject<CloudWatchEvent<AutoScalingLifecycleEvent>>(json);

    // ASSERT
    Assert.Equal("i-1234567890abcdef0", msg.Detail.EC2InstanceId);
    Assert.Equal("autoscaling:EC2_INSTANCE_TERMINATING", msg.Detail.LifecycleTransition);
}
/// <summary>
/// If the S3 object is an image, downloads it, produces a thumbnail via
/// <c>Thumbnail.GetConvertedImage</c>, and writes the result back to the same key.
/// </summary>
/// <param name="parameter">CloudWatch event whose detail carries the S3 bucket name and object key.</param>
/// <param name="context">The Lambda runtime context (unused).</param>
/// <returns>The content type reported by the object's metadata.</returns>
public async Task<string> FunctionHandler(CloudWatchEvent<RequestParameters> parameter, ILambdaContext context)
{
    var bucketName = parameter.Detail.requestParameters["bucketName"];
    var key = parameter.Detail.requestParameters["key"];

    // Fix: removed a no-op `catch (Exception e) { throw; }` wrapper (the rethrow
    // added nothing and `e` was unused), dropped the redundant StreamReader around
    // the binary response stream, and replaced the manual 512-byte copy loop with
    // Stream.CopyToAsync — same bytes, less code, no sync-over-async read.
    var rs = await this.S3Client.GetObjectMetadataAsync(bucketName, key);

    if (rs.Headers.ContentType.StartsWith("image/"))
    {
        using (GetObjectResponse response = await S3Client.GetObjectAsync(bucketName, key))
        using (Stream responseStream = response.ResponseStream)
        using (var memstream = new MemoryStream())
        {
            await responseStream.CopyToAsync(memstream);

            // Perform image manipulation, then overwrite the original object.
            // NOTE(review): writing back to the same key replaces the source image
            // (and will re-trigger any event hooked to this key) — confirm intended.
            var transformedImage = Thumbnail.GetConvertedImage(memstream.ToArray());
            PutObjectRequest putRequest = new PutObjectRequest()
            {
                BucketName = bucketName,
                Key = key,
                ContentType = rs.Headers.ContentType,
                ContentBody = transformedImage
            };
            await S3Client.PutObjectAsync(putRequest);
        }
    }
    return rs.Headers.ContentType;
}
/// <summary>
/// Verifies that a launch-success auto-scaling event deserializes, round-trips
/// through serialization, and is classified as a successful instance launch.
/// </summary>
public void TestCloudWatchEventLaunchSuccess()
{
    // ARRANGE: collapse the fixture JSON onto a single line.
    string json = instanceLaunchSuccess.Trim().Replace("\r", "").Replace("\n", "").Replace("\t", "");

    // ACT
    CloudWatchEvent<AutoScalingInstanceStateEvent> evt =
        JsonConvert.DeserializeObject<CloudWatchEvent<AutoScalingInstanceStateEvent>>(json);
    string content = JsonConvert.SerializeObject(evt, Formatting.None);

    // ASSERT
    // Fix: `content` was computed but never checked; assert the round-trip
    // serialization actually produces output instead of leaving a dead local.
    Assert.False(string.IsNullOrEmpty(content));
    Assert.True(evt.Detail.IsSuccess());
    Assert.True(evt.Detail.IsInstanceLaunchSuccess());
}
//--- Methods ---

/// <summary>
/// Prints every envelope field of the CloudWatch event to the console.
/// </summary>
/// <param name="request">The CloudWatch event whose fields are dumped.</param>
/// <param name="context">The Lambda runtime context (unused).</param>
/// <returns>The literal string "Okay".</returns>
public string FunctionHandler(CloudWatchEvent<EventDetails> request, ILambdaContext context)
{
    // One line per envelope field, in the same order as the original dump.
    var fields = new[]
    {
        $"Version = {request.Version}",
        $"Account = {request.Account}",
        $"Region = {request.Region}",
        $"Detail = {JsonConvert.SerializeObject(request.Detail)}",
        $"DetailType = {request.DetailType}",
        $"Source = {request.Source}",
        $"Time = {request.Time}",
        $"Id = {request.Id}",
        $"Resources = [{string.Join(",", request.Resources ?? Enumerable.Empty<string>())}]"
    };
    foreach (var field in fields)
    {
        Console.WriteLine(field);
    }
    return "Okay";
}
/// <summary>
/// Logs every envelope field of the received CloudWatch event, plus the delivery
/// latency, and returns an empty response.
/// </summary>
/// <param name="request">The CloudWatch event to inspect.</param>
/// <returns>A new, empty <c>FunctionResponse</c>.</returns>
public override async Task<FunctionResponse> ProcessMessageAsync(CloudWatchEvent<EventDetails> request)
{
    LogInfo($"Version = {request.Version}");
    LogInfo($"Account = {request.Account}");
    LogInfo($"Region = {request.Region}");
    LogInfo($"Detail = {LambdaSerializer.Serialize(request.Detail)}");
    LogInfo($"DetailType = {request.DetailType}");
    LogInfo($"Source = {request.Source}");
    LogInfo($"Time = {request.Time}");
    LogInfo($"Id = {request.Id}");
    LogInfo($"Resources = [{string.Join(",", request.Resources ?? Enumerable.Empty<string>())}]");
    // Latency: wall-clock delay between the event's timestamp and now.
    LogInfo($"Latency = {DateTime.UtcNow - request.Time}");
    return new FunctionResponse();
}
/// <summary>
/// Invokes the function handler with a canned scheduled event and confirms it
/// acknowledges with "Done".
/// </summary>
public void TestToUpperFunction()
{
    // ARRANGE: a representative scheduled-event payload.
    var sut = new Function();
    var lambdaContext = new TestLambdaContext();
    var eventPayload = new CloudWatchEvent<dynamic>
    {
        Id = "cdc73f9d-aea9-11e3-9d5a-835b769c0d9c",
        DetailType = "Scheduled Event",
        Source = "aws.events",
        Account = "",
        Time = Convert.ToDateTime("1970-01-01T00:00:00Z"),
        Region = "us-west-2",
        Resources = new List<string> { "arn:aws:events:us-west-2:123456789012:rule/ExampleRule" },
        Detail = new { }
    };

    // ACT
    var functionResult = sut.FunctionHandler(eventPayload, lambdaContext);

    // ASSERT
    Assert.Equal("Done", functionResult);
}
/// <summary>
/// Scheduled entry point: fetches recent earthquakes from the Kandilli service,
/// filters the ones newer than the persisted bookmark, queues Telegram messages
/// for matching subscribers and for the public channel, then advances the bookmark.
/// </summary>
/// <param name="cloudWatchLogsEvent">The scheduled CloudWatch event (payload unused).</param>
/// <param name="context">The Lambda runtime context (unused).</param>
public async Task FunctionHandler(CloudWatchEvent<object> cloudWatchLogsEvent, ILambdaContext context)
{
    LambdaLogger.Log("Start\n");

    // Build a fresh DI container per invocation and resolve the services used below.
    var serviceCollection = new ServiceCollection();
    ConfigureServices(serviceCollection);
    var serviceProvider = serviceCollection.BuildServiceProvider();
    var bookmarkService = serviceProvider.GetService<IBookmarkService>();
    var kandilliService = serviceProvider.GetService<IKandilliService>();
    var environmentService = serviceProvider.GetService<IEnvironmentService>();
    var queueService = serviceProvider.GetService<IQueueService>();
    var subscriptionService = serviceProvider.GetService<ISubscribtionService>();

    // The last processed timestamp is persisted as a bookmark string. A missing or
    // unparsable value leaves lastFetchDate at default(DateTime), so every fetched
    // earthquake counts as "new" on the first run.
    var lastFetch = await bookmarkService.GetBookmark("last_fetch_date");
    var channelName = environmentService.GetEnvironmentValue(TELEGRAM_CHANNEL_NAME);
    DateTime.TryParse(lastFetch, out DateTime lastFetchDate);

    var earthquakes = await kandilliService.GetEarthquakes();
    // Keep at most the 10 newest unseen earthquakes, then process them oldest-first
    // so the bookmark advances in chronological order.
    var newEarthquakes = earthquakes
        .Where(q => q.Date > lastFetchDate)
        .OrderByDescending(q => q.Date)
        .Take(10)
        .OrderBy(q => q.Date);
    if (newEarthquakes.Count() == 0)
    {
        LambdaLogger.Log("No new earthquakes\nFinish\n");
        return;
    }
    LambdaLogger.Log(newEarthquakes.Count() + " new earthquakes\n");

    var lastNotifiedEarthQuake = lastFetchDate;
    int searchRadius = 200000; //in meters, 200km
    foreach (var newEarthQuake in newEarthquakes)
    {
        // Subscribers matching this quake's magnitude within the search radius.
        var subscribers = await subscriptionService.GetAsync(newEarthQuake.Magnitude, newEarthQuake.Latitude, newEarthQuake.Longitude, searchRadius);

        // Split subscriber ids into batches of 10 so each enqueued message list stays small.
        int i = 0;
        int chunkSize = 10;
        int[][] chunks = subscribers.GroupBy(s => i++ / chunkSize).Select(g => g.ToArray()).ToArray();

        //Send to subscribers
        foreach (var chunk in chunks)
        {
            await queueService.Enqueue(chunk.Select(c => new TelegramMessage
            {
                ChatId = c.ToString(),
                ParseMode = "markdown",
                DisableWebPagePreview = true,
                Text = newEarthQuake.ToTelegramMessage()
            }));
        }

        //Send to channel
        await queueService.Enqueue(new List<TelegramMessage>
        {
            new TelegramMessage
            {
                ChatId = $"@{channelName}",
                // Minor quakes (< 4) go out as silent notifications.
                DisableNotification = newEarthQuake.Magnitude < 4,
                ParseMode = "markdown",
                DisableWebPagePreview = true,
                Text = newEarthQuake.ToTelegramMessage()
            }
        });
        // Track the newest quake actually dispatched so the bookmark only advances
        // past work that was queued.
        lastNotifiedEarthQuake = newEarthQuake.Date;
    }
    await bookmarkService.SetBookmark("last_fetch_date", lastNotifiedEarthQuake.ToString());
    LambdaLogger.Log("Finish\n");
}
/// <summary>
/// Logs the received CloudWatch event as JSON to the Lambda logger.
/// </summary>
/// <param name="logsEvent">The CloudWatch event to log.</param>
/// <param name="context">The Lambda runtime context providing the logger.</param>
public void ConsumerCase3Handler(CloudWatchEvent<EventDetail> logsEvent, ILambdaContext context)
{
    var serializedEvent = JsonConvert.SerializeObject(logsEvent);
    context.Logger.LogLine(serializedEvent);
}
/// <summary>
/// Initializes the wrapper, forwarding the raw CloudWatch event to the base class.
/// </summary>
/// <param name="input">The CloudWatch event that triggered the function.</param>
public CloudWatchEventsLambda(CloudWatchEvent<object> input) : base(input) { }
/// <summary>
/// Logs the incoming CloudWatch event as JSON, prefixed for easy grepping.
/// </summary>
/// <param name="evnt">The CloudWatch event to log.</param>
/// <param name="context">The Lambda runtime context providing the logger.</param>
/// <returns>A completed task; the handler does no asynchronous work.</returns>
public Task Handler(CloudWatchEvent<object> evnt, ILambdaContext context)
{
    context.Logger.LogLine("LOGGING FROM APPLICATION: " + JsonSerializer.Serialize(evnt));
    // Fix: the original was declared 'async' with no awaits (compiler warning
    // CS1998); returning Task.CompletedTask keeps the Task signature without the
    // needless async state machine.
    return Task.CompletedTask;
}
/// <summary>
/// Minimal scheduled handler: simulates 100 ms of asynchronous work and completes.
/// </summary>
/// <param name="inputData">The triggering CloudWatch event (unused).</param>
public async Task FunctionHandler(CloudWatchEvent<string> inputData)
{
    await Task.Delay(TimeSpan.FromMilliseconds(100));
}
/// <summary>
/// The <see cref="ProcessMessageStreamAsync(Stream)"/> method is overridden to
/// provide specific behavior for this base class: it deserializes the CloudWatch
/// event envelope, re-deserializes the detail payload with the custom serializer,
/// dispatches it to <c>ProcessEventAsync</c>, and records success/failure metrics.
/// </summary>
/// <remarks>
/// This method cannot be overridden.
/// </remarks>
/// <param name="stream">The stream with the request payload.</param>
/// <returns>The task object representing the asynchronous operation.</returns>
public override sealed async Task<Stream> ProcessMessageStreamAsync(Stream stream)
{
    // read stream into memory
    LogInfo("reading stream body");
    string cloudWatchEventBody;
    using (var reader = new StreamReader(stream))
    {
        cloudWatchEventBody = reader.ReadToEnd();
    }
    var stopwatch = Stopwatch.StartNew();
    var metrics = new List<LambdaMetric>();

    // process received event (there is only ever one)
    try
    {
        // cloudwatch event deserialization
        LogInfo("deserializing CloudWatch event");
        try
        {
            // deserialize using JsonElement as type for 'Detail' property; this allows us to re-deserialize it with the custom lambda serializer
            var cloudWatchEvent = LambdaSerializerSettings.LambdaSharpSerializer.Deserialize<CloudWatchEvent<JsonElement>>(cloudWatchEventBody);

            // message deserialization
            LogInfo("deserializing event detail");
            var detail = Deserialize(cloudWatchEvent.DetailType, cloudWatchEvent.Detail.GetRawText());

            // process event; _currentEvent is rebuilt field-by-field so Detail
            // carries the strongly-typed TMessage instead of the raw JsonElement
            LogInfo("processing event");
            _currentEvent = new CloudWatchEvent<TMessage>
            {
                Account = cloudWatchEvent.Account,
                Detail = detail,
                DetailType = cloudWatchEvent.DetailType,
                Id = cloudWatchEvent.Id,
                Region = cloudWatchEvent.Region,
                Resources = cloudWatchEvent.Resources,
                Source = cloudWatchEvent.Source,
                Time = cloudWatchEvent.Time,
                Version = cloudWatchEvent.Version
            };
            await ProcessEventAsync(_currentEvent.Detail);

            // record successful processing metrics; Lifespan measures how long the
            // event existed before processing completed (now minus event timestamp)
            stopwatch.Stop();
            var now = DateTimeOffset.UtcNow;
            metrics.Add(("MessageSuccess.Count", 1, LambdaMetricUnit.Count));
            metrics.Add(("MessageSuccess.Latency", stopwatch.Elapsed.TotalMilliseconds, LambdaMetricUnit.Milliseconds));
            metrics.Add(("MessageSuccess.Lifespan", (now - CurrentEvent.Time).TotalSeconds, LambdaMetricUnit.Seconds));
            return "Ok".ToStream();
        }
        catch (Exception e)
        {
            LogError(e);
            try
            {
                // attempt to send failed event to the dead-letter queue
                await RecordFailedMessageAsync(LambdaLogLevel.ERROR, FailedMessageOrigin.CloudWatch, LambdaSerializer.Serialize(cloudWatchEventBody), e);

                // record failed processing metrics
                metrics.Add(("MessageDead.Count", 1, LambdaMetricUnit.Count));
            }
            catch
            {
                // NOTE (2020-04-22, bjorg): since the event could not be sent to the dead-letter queue,
                //  the next best action is to let Lambda retry it; unfortunately, there is no way
                //  of knowing how many attempts have occurred already.

                // unable to forward event to dead-letter queue; report failure to lambda so it can retry
                metrics.Add(("MessageFailed.Count", 1, LambdaMetricUnit.Count));
                throw;
            }
            // dead-letter queue accepted the event, so report success to Lambda
            // (no retry) while surfacing the error text in the response body
            return $"ERROR: {e.Message}".ToStream();
        }
    }
    finally
    {
        // always clear the per-invocation event and flush whatever metrics accumulated
        _currentEvent = null;
        LogMetric(metrics);
    }
}
/// <summary>
/// Writes the serialized input event to the console.
/// </summary>
/// <param name="input">The CloudWatch event to dump.</param>
/// <param name="context">The Lambda runtime context (unused).</param>
public void FunctionHandler(CloudWatchEvent<object> input, ILambdaContext context)
{
    string serializedInput = JsonSerializer.Serialize(input);
    Console.WriteLine($"Input:..{serializedInput}");
}