public async Task Handler(JToken @event, ILambdaContext context)
{
    // try to extract the failure cause from the workflow error output
    string statusMessage;
    try
    {
        statusMessage = @event["error"]["Cause"].ToString();
    }
    catch
    {
        statusMessage = "Unknown. Failed to parse error message.";
    }

    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    // notify the job processor that the workflow failed
    try
    {
        var jobData = new JobBase
        {
            Status = "FAILED",
            StatusMessage = statusMessage
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }
}
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    // send a progress update; a failure to notify should not fail the step
    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 18 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // create a BMContent from the descriptive metadata in the workflow input and register it
    var metadata = @event["input"]["metadata"].ToMcmaObject<DescriptiveMetadata>();

    var bmc = CreateBmContent(metadata.Name, metadata.Description);

    bmc = await resourceManager.CreateAsync(bmc);
    if (bmc.Id == null)
    {
        throw new Exception("Failed to register BMContent");
    }

    return bmc.Id;
}
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 81 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // update the BMEssence so its location points at the file in the website bucket
    var bme = await resourceManager.ResolveAsync<BMEssence>(@event["data"]["bmEssence"]?.ToString());

    bme.Locations = new Locator[] { @event["data"]["websiteFile"]?.ToMcmaObject<S3Locator>() };

    bme = await resourceManager.UpdateAsync(bme);

    return bme.Id;
}
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 9 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    var inputFile = @event["input"]["inputFile"].ToMcmaObject<S3Locator>();

    // build a destination key in the repository bucket, preserving the original file extension
    var s3Bucket = REPOSITORY_BUCKET;
    var s3Key = yyyymmdd() + "/" + Guid.NewGuid();

    var idxLastDot = inputFile.AwsS3Key.LastIndexOf(".");
    if (idxLastDot > 0)
    {
        s3Key += inputFile.AwsS3Key.Substring(idxLastDot);
    }

    try
    {
        // copy the input file using an S3 client bound to the destination bucket's region
        var s3Client = new AmazonS3Client();
        var destBucketLocation = await s3Client.GetBucketLocationAsync(s3Bucket);
        var regionEndpoint = RegionEndpoint.GetBySystemName(!string.IsNullOrWhiteSpace(destBucketLocation.Location) ? (string)destBucketLocation.Location : "us-east-1");
        var copyClient = new AmazonS3Client(regionEndpoint);
        await copyClient.CopyObjectAsync(new CopyObjectRequest
        {
            SourceBucket = inputFile.AwsS3Bucket,
            SourceKey = inputFile.AwsS3Key,
            DestinationBucket = s3Bucket,
            DestinationKey = s3Key
        });
    }
    catch (Exception error)
    {
        throw new Exception("Unable to read input file in bucket '" + inputFile.AwsS3Bucket + "' with key '" + inputFile.AwsS3Key + "' due to error: " + error);
    }

    return new S3Locator { AwsS3Bucket = s3Bucket, AwsS3Key = s3Key }.ToMcmaJson();
}
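The yyyymmdd() helper used above for the key prefix is not part of this excerpt. A minimal sketch, assuming it simply formats the current UTC date so repository objects are grouped by ingest day:

// Hypothetical helper (assumption, not shown in the excerpt above): returns the
// current UTC date as a compact "yyyyMMdd" string used as an S3 key prefix.
private static string yyyymmdd() => DateTime.UtcNow.ToString("yyyyMMdd");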
internal static async Task DeleteJobProcessAsync(JobRepositoryWorkerRequest @event)
{
    var jobProcessId = @event.JobProcessId;

    // deleting the JobProcess is best effort; log and continue on failure
    try
    {
        var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();
        await resourceManager.DeleteAsync<JobProcess>(jobProcessId);
    }
    catch (Exception error)
    {
        Logger.Exception(error);
    }
}
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 90 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // look up the 'AiWorkflow' job profile
    var jobProfiles = await resourceManager.GetAsync<JobProfile>(("name", "AiWorkflow"));

    var jobProfileId = jobProfiles?.FirstOrDefault()?.Id;
    if (jobProfileId == null)
    {
        throw new Exception("JobProfile 'AiWorkflow' not found");
    }

    // start the AI workflow for the registered content and essence
    var workflowJob = new WorkflowJob
    {
        JobProfile = jobProfileId,
        JobInput = new JobParameterBag
        {
            ["bmContent"] = @event["data"]["bmContent"],
            ["bmEssence"] = @event["data"]["bmEssence"]
        }
    };

    workflowJob = await resourceManager.CreateAsync(workflowJob);

    return JToken.FromObject(new
    {
        aiWorkflow = workflowJob.Id,
        bmContent = @event["data"]["bmContent"],
        websiteMediaFile = @event["data"]["websiteFile"]
    });
}
public async Task Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    // report workflow completion, passing through the workflow output
    try
    {
        var jobData = new JobBase
        {
            Status = JobStatus.Completed,
            Progress = 100,
            JobOutput = @event["output"]?.ToMcmaObject<JobParameterBag>()
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }
}
public async Task Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 54 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // claim the step function activity task; its token is needed for the job completion callback
    var stepFunction = new AmazonStepFunctionsClient();
    var data = await stepFunction.GetActivityTaskAsync(new GetActivityTaskRequest { ActivityArn = ACTIVITY_ARN });

    var taskToken = data.TaskToken;
    if (taskToken == null)
    {
        throw new Exception("Failed to obtain activity task");
    }

    // the actual workflow input is carried in the activity task input
    @event = JToken.Parse(data.Input);

    var jobProfiles = await resourceManager.GetAsync<JobProfile>(("name", JOB_PROFILE_NAME));

    var jobProfileId = jobProfiles?.FirstOrDefault()?.Id;
    if (jobProfileId == null)
    {
        throw new Exception($"JobProfile '{JOB_PROFILE_NAME}' not found");
    }

    // create the transform job that generates the proxy; the service calls back with the task token when done
    var createProxyJob = new TransformJob
    {
        JobProfile = jobProfileId,
        JobInput = new JobParameterBag
        {
            ["inputFile"] = @event["data"]["repositoryFile"],
            ["outputLocation"] = new S3Locator
            {
                AwsS3Bucket = REPOSITORY_BUCKET,
                AwsS3KeyPrefix = "TransformJobResults/"
            }
        },
        NotificationEndpoint = new NotificationEndpoint
        {
            HttpEndpoint = ACTIVITY_CALLBACK_URL + "?taskToken=" + Uri.EscapeDataString(taskToken)
        }
    };

    createProxyJob = await resourceManager.CreateAsync(createProxyJob);
}
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 36 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // resolve the AME job and retrieve the media info file it produced
    var ameJobId = GetAmeJobId(@event);
    if (ameJobId == null)
    {
        throw new Exception("Failed to obtain AmeJob ID");
    }
    Logger.Debug("[AmeJobID]: " + ameJobId);

    var ameJob = await resourceManager.ResolveAsync<AmeJob>(ameJobId);

    S3Locator outputFile;
    if (!ameJob.JobOutput.TryGet<S3Locator>(nameof(outputFile), false, out outputFile))
    {
        throw new Exception("Unable to get outputFile from AmeJob output.");
    }

    var s3Bucket = outputFile.AwsS3Bucket;
    var s3Key = outputFile.AwsS3Key;

    GetObjectResponse s3Object;
    try
    {
        var s3 = await outputFile.GetClientAsync();
        s3Object = await s3.GetObjectAsync(new GetObjectRequest
        {
            BucketName = s3Bucket,
            Key = s3Key
        });
    }
    catch (Exception error)
    {
        throw new Exception("Unable to get media info file in bucket '" + s3Bucket + "' with key '" + s3Key + "' due to error: " + error);
    }

    var mediaInfo = JToken.Parse(await new StreamReader(s3Object.ResponseStream).ReadToEndAsync());

    // create the BMEssence carrying the technical metadata and attach it to the BMContent
    var bmc = await resourceManager.ResolveAsync<BMContent>(@event["data"]["bmContent"].ToString());

    Logger.Debug("[BMContent]: " + bmc.ToMcmaJson());
    Logger.Debug("[@event]:" + @event.ToMcmaJson().ToString());
    Logger.Debug("[mediaInfo]:" + mediaInfo.ToMcmaJson().ToString());

    var bme = CreateBmEssence(bmc, @event["data"]["repositoryFile"].ToMcmaObject<S3Locator>(), mediaInfo);

    Logger.Debug("Serializing essence...");
    Logger.Debug("[bme]:" + bme.ToMcmaJson().ToString());

    Logger.Debug("Creating essence...");
    bme = await resourceManager.CreateAsync(bme);
    if (bme.Id == null)
    {
        throw new Exception("Failed to register BMEssence");
    }
    Logger.Debug("[BMEssence ID]: " + bme.Id);

    bmc.BmEssences.Add(bme.Id);

    bmc = await resourceManager.UpdateAsync<BMContent>(bmc);

    return bme.Id;
}
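GetAmeJobId (and the analogous GetTransformJobId used in the steps below) is not shown in this excerpt. A plausible sketch, assuming it follows the same pattern used elsewhere in these handlers of taking the first non-null entry from an array of job IDs in the event data; the "ameJobId" property name is an assumption:

// Hypothetical helper (assumption): returns the first non-null AME job ID from the
// array carried in the event data. The "ameJobId" property name is assumed.
private static string GetAmeJobId(JToken @event)
    => @event["data"]["ameJobId"]?.FirstOrDefault(id => id != null)?.Value<string>();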
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 63 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // get transform job id
    var transformJobId = GetTransformJobId(@event);

    // in case we did not do a transcode, just return the existing essence ID
    if (transformJobId == null)
    {
        return @event["data"]["bmEssence"].Value<string>();
    }

    var transformJob = await resourceManager.ResolveAsync<TransformJob>(transformJobId);

    S3Locator outputFile;
    if (!transformJob.JobOutput.TryGet<S3Locator>(nameof(outputFile), false, out outputFile))
    {
        throw new Exception("Unable to get outputFile from TransformJob output.");
    }

    var s3Bucket = outputFile.AwsS3Bucket;
    var s3Key = outputFile.AwsS3Key;

    // register the transcoded proxy as a new BMEssence and attach it to the BMContent
    var bmc = await resourceManager.ResolveAsync<BMContent>(@event["data"]["bmContent"]?.ToString());

    var locator = new S3Locator
    {
        AwsS3Bucket = s3Bucket,
        AwsS3Key = s3Key
    };

    var bme = CreateBmEssence(bmc, locator);

    bme = await resourceManager.CreateAsync(bme);
    if (bme?.Id == null)
    {
        throw new Exception("Failed to register BMEssence.");
    }

    bmc.BmEssences.Add(bme.Id);

    bmc = await resourceManager.UpdateAsync(bmc);

    return bme.Id;
}
public async Task Handler(JToken @event, ILambdaContext context)
{
    if (@event == null)
    {
        throw new Exception("Missing workflow input");
    }

    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            ParallelProgress = { ["detect-celebrities-aws"] = 80 }
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // get ai job id (first non-null entry in array)
    var jobId = @event["data"]["awsCelebritiesJobId"]?.FirstOrDefault(id => id != null)?.Value<string>();
    if (jobId == null)
    {
        throw new Exception("Failed to obtain awsCelebritiesJobId");
    }
    Logger.Debug("[awsCelebritiesJobId]:", jobId);

    // get result of ai job
    var job = await resourceManager.ResolveAsync<AIJob>(jobId);

    S3Locator outputFile;
    if (!job.JobOutput.TryGet<S3Locator>(nameof(outputFile), false, out outputFile))
    {
        throw new Exception($"AI job '{jobId}' does not specify an output file.");
    }

    // get the response from Rekognition, stored as a file on S3
    var s3Bucket = outputFile.AwsS3Bucket;
    var s3Key = outputFile.AwsS3Key;

    GetObjectResponse s3Object;
    try
    {
        var s3Client = new AmazonS3Client();
        s3Object = await s3Client.GetObjectAsync(new GetObjectRequest
        {
            BucketName = s3Bucket,
            Key = s3Key
        });
    }
    catch (Exception error)
    {
        throw new Exception("Unable to get celebrities info file in bucket '" + s3Bucket + "' with key '" + s3Key + "'", error);
    }

    // read the result from the file in S3 as a Rekognition response object
    var celebritiesResult = (await s3Object.ResponseStream.ReadJsonFromStreamAsync()).ToMcmaObject<GetCelebrityRecognitionResponse>();

    var celebrityRecognitionList = new List<CelebrityRecognition>();
    var lastRecognitions = new Dictionary<string, long>();

    foreach (var celebrity in celebritiesResult.Celebrities)
    {
        // get the timestamp of the last time we hit a recognition for this celebrity (if any)
        var lastRecognized = lastRecognitions.ContainsKey(celebrity.Celebrity.Name) ? lastRecognitions[celebrity.Celebrity.Name] : default(long?);

        // we only want recognitions at 3 second intervals, and only when the confidence is at least 50%
        if ((!lastRecognized.HasValue || celebrity.Timestamp - lastRecognized.Value > 3000) && celebrity.Celebrity.Confidence > 50)
        {
            // mark the timestamp of the last recognition for this celebrity
            lastRecognitions[celebrity.Celebrity.Name] = celebrity.Timestamp;

            // add to the list that we actually want to store
            celebrityRecognitionList.Add(celebrity);
        }
    }

    // store the filtered results back on the original object
    celebritiesResult.Celebrities = celebrityRecognitionList;

    Logger.Debug("AWS Celebrities result", celebritiesResult.ToMcmaJson().ToString());

    var bmContent = await resourceManager.ResolveAsync<BMContent>(@event["input"]["bmContent"].Value<string>());

    // store the celebrity data on the awsAiMetadata property of the BMContent, either using the existing object or creating a new one
    bmContent
        .GetOrAdd<McmaExpandoObject>("awsAiMetadata")
        .Set("celebrities", celebritiesResult.ToMcmaJson(true));

    await resourceManager.UpdateAsync(bmContent);

    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            ParallelProgress = { ["detect-celebrities-aws"] = 100 }
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }
}
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    if (@event == null)
    {
        throw new Exception("Missing workflow input");
    }

    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            ParallelProgress = { ["speech-text-translate"] = 60 }
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // claim the step function activity task; its token is needed for the job completion callback
    var stepFunction = new AmazonStepFunctionsClient();
    var data = await stepFunction.GetActivityTaskAsync(new GetActivityTaskRequest { ActivityArn = ACTIVITY_ARN });

    var taskToken = data.TaskToken;
    if (taskToken == null)
    {
        throw new Exception("Failed to obtain activity task");
    }

    @event = JToken.Parse(data.Input);

    var jobProfiles = await resourceManager.GetAsync<JobProfile>(("name", JOB_PROFILE_NAME));

    var jobProfileId = jobProfiles?.FirstOrDefault()?.Id;
    if (jobProfileId == null)
    {
        throw new Exception($"JobProfile '{JOB_PROFILE_NAME}' not found");
    }

    // write the speech transcription to a text file in the temp bucket
    var bmContent = await resourceManager.ResolveAsync<BMContent>(@event["input"]["bmContent"].Value<string>());

    // get the transcript from the BMContent
    var transcript = bmContent.Get<McmaExpandoObject>("awsAiMetadata")
        ?.Get<McmaExpandoObject>("transcription")
        ?.Get<string>("original");

    if (transcript == null)
    {
        throw new Exception("Missing transcription on BMContent");
    }

    var s3Params = new PutObjectRequest
    {
        BucketName = TEMP_BUCKET,
        Key = "AiInput/" + Guid.NewGuid() + ".txt",
        ContentBody = transcript
    };

    var s3Client = new AmazonS3Client();
    await s3Client.PutObjectAsync(s3Params);

    // create the translation job, using the uploaded transcript as input
    var job = new AIJob
    {
        JobProfile = jobProfileId,
        JobInput = new JobParameterBag
        {
            ["inputFile"] = new S3Locator
            {
                AwsS3Bucket = s3Params.BucketName,
                AwsS3Key = s3Params.Key
            },
            ["targetLanguageCode"] = "ja",
            ["outputLocation"] = new S3Locator
            {
                AwsS3Bucket = TEMP_BUCKET,
                AwsS3Key = JOB_RESULTS_PREFIX
            }
        },
        NotificationEndpoint = new NotificationEndpoint
        {
            HttpEndpoint = ACTIVITY_CALLBACK_URL + "?taskToken=" + Uri.EscapeDataString(taskToken)
        }
    };

    job = await resourceManager.CreateAsync(job);

    return job.Id;
}
public async Task Handler(JToken @event, ILambdaContext context)
{
    if (@event == null)
    {
        throw new Exception("Missing workflow input");
    }

    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            ParallelProgress = { ["speech-text-translate"] = 40 }
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // get ai job id (first non-null entry in array)
    var jobId = @event["data"]["transcribeJobId"]?.FirstOrDefault(id => id != null)?.Value<string>();
    if (jobId == null)
    {
        throw new Exception("Failed to obtain TranscribeJobId");
    }
    Logger.Debug("[TranscribeJobId]:", jobId);

    // get result of ai job
    var job = await resourceManager.ResolveAsync<AIJob>(jobId);

    S3Locator outputFile;
    if (!job.JobOutput.TryGet<S3Locator>(nameof(outputFile), false, out outputFile))
    {
        throw new Exception($"AI job '{jobId}' does not specify an output file.");
    }

    // get the transcription output file from S3
    var s3Bucket = outputFile.AwsS3Bucket;
    var s3Key = outputFile.AwsS3Key;

    GetObjectResponse s3Object;
    try
    {
        var s3Client = new AmazonS3Client();
        s3Object = await s3Client.GetObjectAsync(new GetObjectRequest
        {
            BucketName = s3Bucket,
            Key = s3Key
        });
    }
    catch (Exception error)
    {
        throw new Exception("Unable to get transcription output file in bucket '" + s3Bucket + "' with key '" + s3Key + "'", error);
    }

    var transcriptionResult = await s3Object.ResponseStream.ReadJsonFromStreamAsync();
    Logger.Debug("Transcription result: {0}", transcriptionResult.ToString(Formatting.Indented));

    // concatenate all transcript fragments into a single string
    var transcripts = transcriptionResult["results"]["transcripts"];
    Logger.Debug(transcripts.ToString(Formatting.Indented));

    var transcript = string.Empty;
    foreach (var ts in transcripts)
    {
        transcript += ts["transcript"].Value<string>();
    }

    // store the transcript on the awsAiMetadata property of the BMContent
    var bmContent = await resourceManager.ResolveAsync<BMContent>(@event["input"]["bmContent"].Value<string>());

    bmContent
        .GetOrAdd<McmaExpandoObject>("awsAiMetadata")
        .GetOrAdd<McmaExpandoObject>("transcription")
        .Set("original", transcript);

    await resourceManager.UpdateAsync(bmContent);
}
public async Task<S3Locator> Handler(JToken @event, ILambdaContext context)
{
    if (@event == null)
    {
        throw new Exception("Missing workflow input");
    }

    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 0 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // check the input and return the mediaFileLocator which serves as input for the AI workflows
    if (@event["input"] == null)
    {
        throw new Exception("Missing workflow input");
    }
    var input = @event["input"];

    if (input["bmContent"] == null)
    {
        throw new Exception("Missing input.bmContent");
    }
    if (input["bmEssence"] == null)
    {
        throw new Exception("Missing input.bmEssence");
    }

    var bmContent = await resourceManager.ResolveAsync<BMContent>(input["bmContent"].Value<string>());
    var bmEssence = await resourceManager.ResolveAsync<BMEssence>(input["bmEssence"].Value<string>());

    Logger.Debug(bmContent.ToMcmaJson().ToString());
    Logger.Debug(bmEssence.ToMcmaJson().ToString());

    // find the media locator in the website bucket with a public httpEndpoint
    var mediaFileLocator = bmEssence.Locations.OfType<S3Locator>().FirstOrDefault(l => l.AwsS3Bucket == WEBSITE_BUCKET);
    if (mediaFileLocator == null)
    {
        throw new Exception("No suitable Locator found on bmEssence");
    }
    if (string.IsNullOrWhiteSpace(mediaFileLocator.HttpEndpoint))
    {
        throw new Exception("Media file Locator does not have an httpEndpoint");
    }

    return mediaFileLocator;
}
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    if (@event == null)
    {
        throw new Exception("Missing workflow input");
    }

    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            ParallelProgress = { ["speech-text-translate"] = 20 }
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // claim the step function activity task; its token is needed for the job completion callback
    var stepFunction = new AmazonStepFunctionsClient();
    var data = await stepFunction.GetActivityTaskAsync(new GetActivityTaskRequest { ActivityArn = ACTIVITY_ARN });

    var taskToken = data.TaskToken;
    if (taskToken == null)
    {
        throw new Exception("Failed to obtain activity task");
    }

    @event = JToken.Parse(data.Input);

    var jobProfiles = await resourceManager.GetAsync<JobProfile>(("name", JOB_PROFILE_NAME));

    var jobProfileId = jobProfiles?.FirstOrDefault()?.Id;
    if (jobProfileId == null)
    {
        throw new Exception($"JobProfile '{JOB_PROFILE_NAME}' not found");
    }

    // create the AI job against the public media file, with results written to the temp bucket
    var job = new AIJob
    {
        JobProfile = jobProfileId,
        JobInput = new JobParameterBag
        {
            ["inputFile"] = @event["data"]["mediaFileLocator"],
            ["outputLocation"] = new S3Locator
            {
                AwsS3Bucket = TEMP_BUCKET,
                AwsS3KeyPrefix = JOB_RESULTS_PREFIX
            }
        },
        NotificationEndpoint = new NotificationEndpoint
        {
            HttpEndpoint = ACTIVITY_CALLBACK_URL + "?taskToken=" + Uri.EscapeDataString(taskToken)
        }
    };

    job = await resourceManager.CreateAsync(job);

    return job.Id;
}
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    if (@event == null)
    {
        throw new Exception("Missing workflow input");
    }

    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 0 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // validate the workflow input
    var input = @event["input"];
    if (input == null)
    {
        throw new Exception("Missing workflow input");
    }

    var metadata = input["metadata"]?.ToMcmaObject<DescriptiveMetadata>();
    if (metadata == null)
    {
        throw new Exception("Missing input.metadata");
    }
    if (metadata.Name == null)
    {
        throw new Exception("Missing input.metadata.name");
    }
    if (metadata.Description == null)
    {
        throw new Exception("Missing input.metadata.description");
    }

    var inputFile = input["inputFile"]?.ToMcmaObject<S3Locator>();
    if (inputFile == null)
    {
        throw new Exception("Missing input.inputFile");
    }

    // verify that the input file exists by reading its object metadata
    var s3Bucket = inputFile.AwsS3Bucket;
    var s3Key = inputFile.AwsS3Key;

    var client = new AmazonS3Client();

    GetObjectMetadataResponse data;
    try
    {
        data = await client.GetObjectMetadataAsync(new GetObjectMetadataRequest
        {
            BucketName = s3Bucket,
            Key = s3Key
        });
    }
    catch (Exception error)
    {
        throw new Exception("Unable to read input file in bucket '" + s3Bucket + "' with key '" + s3Key + "'.", error);
    }

    return JObject.FromObject(data.Metadata.Keys.ToDictionary(k => k, k => data.Metadata[k]));
}
public async Task Handler(JToken @event, ILambdaContext context)
{
    if (@event == null)
    {
        throw new Exception("Missing workflow input");
    }

    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            ParallelProgress = { ["detect-celebrities-azure"] = 80 }
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // get ai job id (first non-null entry in array)
    var jobId = @event["data"]["azureCelebritiesJobId"]?.FirstOrDefault(id => id != null)?.Value<string>();
    if (jobId == null)
    {
        throw new Exception("Failed to obtain azureCelebritiesJobId");
    }
    Logger.Debug("[azureCelebritiesJobId]:", jobId);

    // get result of ai job
    var job = await resourceManager.ResolveAsync<AIJob>(jobId);

    S3Locator outputFile;
    if (!job.JobOutput.TryGet<S3Locator>(nameof(outputFile), false, out outputFile))
    {
        throw new Exception($"AI job '{jobId}' does not specify an output file.");
    }

    // get the Azure result file from S3
    var s3Bucket = outputFile.AwsS3Bucket;
    var s3Key = outputFile.AwsS3Key;

    GetObjectResponse s3Object;
    try
    {
        var s3Client = new AmazonS3Client();
        s3Object = await s3Client.GetObjectAsync(new GetObjectRequest
        {
            BucketName = s3Bucket,
            Key = s3Key
        });
    }
    catch (Exception error)
    {
        throw new Exception("Unable to get data file in bucket '" + s3Bucket + "' with key '" + s3Key + "'", error);
    }

    var azureResult = await s3Object.ResponseStream.ReadJsonFromStreamAsync();
    Logger.Debug("AzureResult: {0}", azureResult.ToString(Formatting.Indented));

    var bmContent = await resourceManager.ResolveAsync<BMContent>(@event["input"]["bmContent"].Value<string>());

    // set the response on the azureAiMetadata property of the BMContent
    bmContent["azureAiMetadata"] = azureResult.ToMcmaObject<McmaExpandoObject>();

    await resourceManager.UpdateAsync(bmContent);

    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            ParallelProgress = { ["detect-celebrities-azure"] = 100 }
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }
}
public async Task<string> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 27 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    var stepFunction = new AmazonStepFunctionsClient();

    Logger.Debug($"Getting Activity Task with ARN {ACTIVITY_ARN}...");
    var data = await stepFunction.GetActivityTaskAsync(new GetActivityTaskRequest { ActivityArn = ACTIVITY_ARN });

    var taskToken = data.TaskToken;
    if (taskToken == null)
    {
        throw new Exception("Failed to obtain activity task");
    }
    Logger.Debug($"Activity Task token is {taskToken}");

    @event = JToken.Parse(data.Input);

    Logger.Debug("Getting job profile 'ExtractTechnicalMetadata'...");
    var jobProfiles = await resourceManager.GetAsync<JobProfile>(("name", "ExtractTechnicalMetadata"));

    var jobProfileId = jobProfiles?.FirstOrDefault()?.Id;
    if (jobProfileId == null)
    {
        throw new Exception("JobProfile 'ExtractTechnicalMetadata' not found");
    }

    // create the AME job; the service calls back with the task token when done
    var ameJob = new AmeJob
    {
        JobProfile = jobProfileId,
        JobInput = new JobParameterBag
        {
            ["inputFile"] = @event["data"]["repositoryFile"],
            ["outputLocation"] = new S3Locator
            {
                AwsS3Bucket = TEMP_BUCKET,
                AwsS3KeyPrefix = "AmeJobResults/"
            }
        },
        NotificationEndpoint = new NotificationEndpoint
        {
            HttpEndpoint = ACTIVITY_CALLBACK_URL + "?taskToken=" + Uri.EscapeDataString(taskToken)
        }
    };

    Logger.Debug("Submitting AME job...");
    ameJob = await resourceManager.CreateAsync(ameJob);
    Logger.Debug($"Successfully created AME job {ameJob.Id}.");

    return ameJob.Id;
}
public async Task<S3Locator> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 72 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // determine the file to publish: the transcoded proxy, or the original essence if no transcode was done
    var transformJobId = GetTransformJobId(@event);

    S3Locator outputFile;
    if (transformJobId == null)
    {
        Logger.Debug("Transform job ID is null. Transform was not done. Using original essence as proxy.");
        var bme = await resourceManager.ResolveAsync<BMEssence>(@event["data"]["bmEssence"]?.ToString());
        outputFile = (S3Locator)bme.Locations[0];
    }
    else
    {
        Logger.Debug($"Getting proxy location from transform job {transformJobId}.");
        var transformJob = await resourceManager.ResolveAsync<TransformJob>(transformJobId);
        outputFile = transformJob.JobOutput.Get<S3Locator>(nameof(outputFile));
    }

    // build a destination key in the website bucket, preserving the file extension
    var s3Bucket = WEBSITE_BUCKET;
    var s3Key = "media/" + Guid.NewGuid();

    var idxLastDot = outputFile.AwsS3Key.LastIndexOf(".");
    if (idxLastDot > 0)
    {
        s3Key += outputFile.AwsS3Key.Substring(idxLastDot);
    }

    var s3 = new AmazonS3Client();
    var data = await s3.GetBucketLocationAsync(s3Bucket);

    try
    {
        // copy the proxy using an S3 client bound to the website bucket's region
        var copyParams = new CopyObjectRequest
        {
            SourceBucket = outputFile.AwsS3Bucket,
            SourceKey = outputFile.AwsS3Key,
            DestinationBucket = s3Bucket,
            DestinationKey = s3Key
        };
        var regionEndpoint = RegionEndpoint.GetBySystemName(!string.IsNullOrWhiteSpace(data.Location) ? (string)data.Location : "us-east-1");
        var destS3 = new AmazonS3Client(regionEndpoint);
        await destS3.CopyObjectAsync(copyParams);
    }
    catch (Exception error)
    {
        throw new Exception("Unable to copy file to bucket '" + s3Bucket + "' with key '" + s3Key + "' due to error: " + error);
    }

    // build the public HTTP endpoint for the copied file
    var s3SubDomain = !string.IsNullOrWhiteSpace(data.Location) ? $"s3-{data.Location}" : "s3";
    var httpEndpoint = "https://" + s3SubDomain + ".amazonaws.com/" + s3Bucket + "/" + s3Key;

    return new S3Locator
    {
        AwsS3Bucket = s3Bucket,
        AwsS3Key = s3Key,
        HttpEndpoint = httpEndpoint
    };
}
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    Logger.Debug(@event.ToMcmaJson().ToString());

    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    try
    {
        var jobData = new JobBase { Status = "RUNNING", Progress = 45 };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // pull codec, bitrate, format and duration out of the EBUCore technical metadata on the essence
    var bme = await resourceManager.ResolveAsync<BMEssence>(@event["data"]["bmEssence"].ToString());

    var technicalMetadata = bme.Get<object>("technicalMetadata").ToMcmaJson();

    var ebuCoreMain = technicalMetadata["ebucore:ebuCoreMain"];
    var coreMetadata = ebuCoreMain["ebucore:coreMetadata"]?.FirstOrDefault();
    var containerFormat = coreMetadata["ebucore:format"]?.FirstOrDefault()?["ebucore:containerFormat"]?.FirstOrDefault();
    var duration = coreMetadata["ebucore:format"]?.FirstOrDefault()?["ebucore:duration"]?.FirstOrDefault();

    var video = new
    {
        Codec = containerFormat["ebucore:codec"]?.FirstOrDefault()?["ebucore:codecIdentifier"]?.FirstOrDefault()?["dc:identifier"]?.FirstOrDefault()?["#value"],
        BitRate = coreMetadata["ebucore:format"]?.FirstOrDefault()?["ebucore:videoFormat"]?.FirstOrDefault()?["ebucore:bitRate"]?.FirstOrDefault()?["#value"],
        Format = coreMetadata["ebucore:format"]?.FirstOrDefault()?["ebucore:videoFormat"]?.FirstOrDefault()?["@videoFormatName"],
        NormalPlayTime = duration["ebucore:normalPlayTime"]?.FirstOrDefault()?["#value"]
    };

    var codec = video.Codec?.ToString();
    var format = video.Format?.ToString();

    // the bitrate may be missing; guard against a null token before parsing
    var parsedBitRate = double.TryParse(video.BitRate?.ToString(), out var bitRate);
    var mbyte = parsedBitRate ? (bitRate / 8) / (1024 * 1024) : default(double?);

    // no transcode needed if the essence is already in the desired codec, format and bitrate
    if ((codec == VIDEO_CODEC || codec == VIDEO_CODEC_ISOM) && format == VIDEO_FORMAT && mbyte.HasValue && mbyte.Value <= VIDEO_BITRATE_MB)
    {
        return "none";
    }

    // otherwise parse the normal play time to decide between a short and a long transcode
    var normalPlayTime = video.NormalPlayTime?.ToString() ?? string.Empty;

    double totalSeconds;

    var ptSeconds = Regex.Match(normalPlayTime, "PT([0-9\\.]+)S");
    if (ptSeconds.Success)
    {
        totalSeconds = double.Parse(ptSeconds.Groups[1].Captures[0].Value);
    }
    else
    {
        var hour = Regex.Match(normalPlayTime, "(\\d*)H");
        var min = Regex.Match(normalPlayTime, "(\\d*)M");
        var sec = Regex.Match(normalPlayTime, "(\\d*)S");
        if (!sec.Success)
        {
            throw new Exception($"Invalid play time in technical metadata: {normalPlayTime}");
        }

        totalSeconds = CalcSeconds(
            hour.Success ? int.Parse(hour.Groups[1].Captures[0].Value) : 0,
            min.Success ? int.Parse(min.Groups[1].Captures[0].Value) : 0,
            double.Parse(sec.Groups[1].Captures[0].Value));
    }

    Logger.Debug("[Total Seconds]: " + totalSeconds);

    return totalSeconds <= THRESHOLD_SECONDS ? "short" : "long";
}
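CalcSeconds is a helper not shown in this excerpt; a minimal sketch, assuming it simply converts the parsed hours, minutes and seconds into a total number of seconds:

// Hypothetical helper (assumption): converts an hours/minutes/seconds triple into total seconds.
private static double CalcSeconds(int hours, int minutes, double seconds)
    => hours * 3600 + minutes * 60 + seconds;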