/// <summary>
/// Adds a processing task to the given AMS job, using a preset file in which a
/// placeholder token is replaced by <paramref name="value"/>.
/// </summary>
/// <param name="job">Job the task is added to.</param>
/// <param name="sourceAsset">Asset used as the task input.</param>
/// <param name="value">Replacement for the placeholder token; when null, no task is added.</param>
/// <param name="processor">Media processor name (e.g. "Media Encoder Standard").</param>
/// <param name="presetfilename">File name of the preset (configuration) to load.</param>
/// <param name="stringtoreplace">Placeholder token inside the preset file.</param>
/// <param name="taskindex">Running task index; incremented after a task is added.</param>
/// <param name="priority">Task priority (default 10).</param>
/// <returns>The task index before increment, or -1 when <paramref name="value"/> is null.</returns>
public static int AddTask(IJob job, IAsset sourceAsset, string value, string processor, string presetfilename, string stringtoreplace, ref int taskindex, int priority = 10)
{
    // Guard clause: nothing to do without a replacement value.
    if (value == null)
    {
        return -1;
    }

    // Get a media processor reference, and pass to it the name of the
    // processor to use for the specific task.
    IMediaProcessor mediaProcessor = MediaServicesHelper.GetLatestMediaProcessorByName(processor);

    // BUGFIX: GetEnvironmentVariable returns null (not String.Empty) when the
    // variable is unset, so the original `homePath == String.Empty` comparison
    // never selected the local-fallback path; IsNullOrEmpty covers both cases.
    string homePath = Environment.GetEnvironmentVariable("HOME", EnvironmentVariableTarget.Process);
    string presetPath;
    if (string.IsNullOrEmpty(homePath))
    {
        presetPath = @"../presets/" + presetfilename;
    }
    else
    {
        // TODO: update pathing to make dynamic or config based
        presetPath = Path.Combine(homePath, @"site\repository\media-functions-for-logic-app\presets\" + presetfilename);
    }

    // Load the preset and substitute the placeholder with the supplied value.
    string configuration = File.ReadAllText(presetPath).Replace(stringtoreplace, value);

    // Create a task with the encoding details, using a string preset.
    var task = job.Tasks.AddNew(processor + " task", mediaProcessor, configuration, TaskOptions.None);
    task.Priority = priority;

    // Specify the input asset to be processed.
    task.InputAssets.Add(sourceAsset);

    // Add an (unencrypted) output asset to contain the results of the job.
    task.OutputAssets.AddNew(sourceAsset.Name + " " + processor + " Output", AssetCreationOptions.None);

    // Return the caller's current index, then advance it for the next task.
    return taskindex++;
}
/// <summary>
/// Function AMSInputQueueHandler: start of the pipeline. Selects the input blob
/// by the manifest's declared origin, creates an AMS asset from it, moves the
/// source blob and its JSON manifest to the "to delete" container, then submits
/// an MES encoding job. When the job completes asynchronously, AMS calls the
/// notification webhook, which drives the next stage of the pipeline.
/// </summary>
public static async Task Run(
    [QueueTrigger("%InputQueue%", Connection = "AzureWebJobsStorage")] VippyProcessingState manifest,
    [Blob("%AmsBlobInputContainer%/{BlobName}", FileAccess.ReadWrite)] CloudBlockBlob videoBlobTriggered,
    [Blob("%ExistingAmsBlobInputContainer%/{BlobName}", FileAccess.ReadWrite)] CloudBlockBlob videoBlobExisting,
    TraceWriter log)
{
    // Pick the blob binding that matches the declared origin of this message.
    CloudBlockBlob videoBlob = null;
    if (manifest.Origin == Enums.OriginEnum.Existing)
    {
        videoBlob = videoBlobExisting;
    }
    else if (manifest.Origin == Enums.OriginEnum.Trigger)
    {
        videoBlob = videoBlobTriggered;
    }

    if (videoBlob == null)
    {
        // BUGFIX: reworded the original garbled log message.
        log.Error("Video blob not initialized: manifest origin is neither Existing nor Trigger, or the blob is null");
        return;
    }

    var context = MediaServicesHelper.Context;
    var cosmosHelper = new CosmosHelper(log);
    var toDeleteContainerName = Environment.GetEnvironmentVariable("AmsBlobToDeleteContainer");

    // Only set the start time if it wasn't already set in the blob-watcher
    // function (that way it works if the job is initiated via this queue directly).
    if (manifest.StartTime == null)
    {
        manifest.StartTime = DateTime.Now;
    }

    var videofileName = videoBlob.Name;

    // Get a new asset from the blob, using the file name as the asset name.
    IAsset newAsset;
    try
    {
        // BUGFIX: await instead of blocking with GetAwaiter().GetResult()
        // inside an async method.
        newAsset = await BlobHelper.CreateAssetFromBlob(videoBlob, videofileName, log);
    }
    catch (Exception e)
    {
        // BUGFIX: the original message used "/r/n" (literal slashes) instead of
        // the "\r\n" newline escape; also preserve the inner exception.
        throw new ApplicationException($"Error occurred creating asset from Blob:\r\n{e.Message}", e);
    }

    // If an internal_id was passed in the metadata, use it within AMS (AlternateId)
    // and Cosmos (Id - main document id) for correlation. If the same id is ever
    // reprocessed, all stored metadata will be overwritten.
    newAsset.AlternateId = manifest.AlternateId;
    newAsset.Update();
    manifest.AmsAssetId = newAsset.Id;

    log.Info($"Deleting the file {videoBlob.Name} from the container ");
    // Move the video to the to-delete container.
    await BlobHelper.Move(videoBlob.Container.Name, toDeleteContainerName, videofileName, log);

    // Move the manifest (same base name, .json extension) to the to-delete container.
    string manifestName = videofileName.Remove(videofileName.IndexOf('.')) + ".json";
    log.Info($"Deleting the file {manifestName} from the container ");
    await BlobHelper.Move(videoBlob.Container.Name, toDeleteContainerName, manifestName, log);

    // Create the encoding job.
    var job = context.Jobs.Create("MES encode from input container - ABR streaming");

    // Get a media processor reference, and pass to it the name of the
    // processor to use for the specific task.
    var processor = MediaServicesHelper.GetLatestMediaProcessorByName("Media Encoder Standard");
    var task = job.Tasks.AddNew("encoding task", processor, "Content Adaptive Multiple Bitrate MP4", TaskOptions.None);
    task.Priority = 100;
    task.InputAssets.Add(newAsset);

    // Set up the webhook notification.
    var keyBytes = new byte[32];

    // NOTE(review): this lookup is currently unused because endpoint reuse is
    // commented out below — every invocation creates a new "FunctionWebHook"
    // endpoint. Confirm whether reuse should be re-enabled (the sibling
    // direct-queue handler does reuse it).
    var existingEndpoint = context.NotificationEndPoints.Where(e => e.Name == "FunctionWebHook").FirstOrDefault();
    INotificationEndPoint endpoint;
    //if (existingEndpoint != null)
    //{
    //    endpoint = existingEndpoint;
    //}
    //else
    try
    {
        endpoint = context.NotificationEndPoints.Create("FunctionWebHook",
            NotificationEndPointType.WebHook, WebHookEndpoint, keyBytes);
    }
    catch (Exception e)
    {
        // BUGFIX: "endpoing" typo in the message; preserve the inner exception.
        throw new ApplicationException($"The endpoint address specified - '{WebHookEndpoint}' is not valid.", e);
    }

    task.TaskNotificationSubscriptions.AddNew(NotificationJobState.FinalStatesOnly, endpoint, false);

    cosmosHelper.LogMessage($"Add an output asset to contain the results of the job");

    // Add an output asset to contain the results of the job. This output is
    // specified as AssetCreationOptions.None, i.e. the output asset is not encrypted.
    task.OutputAssets.AddNew(videofileName, AssetCreationOptions.None);

    // Start the job in AMS; AMS will notify the webhook when it completes.
    job.Submit();

    cosmosHelper.LogMessage($"Saving on cosmos DB");

    // Update processing progress with id and metadata payload.
    await cosmosHelper.StoreProcessingStateRecordInCosmosAsync(manifest);

    cosmosHelper.LogMessage($"AMS encoding job submitted for {videofileName}");
}
/// <summary>
/// Function AMSInputQueueHandler (direct-queue variant): creates an AMS asset
/// from the input blob, deletes the source blob from the watch folder, then
/// submits an MES encoding job. When the job completes asynchronously, AMS
/// calls the notification webhook, which drives the next stage of the pipeline.
/// </summary>
public static async Task Run(
    [QueueTrigger("ams-input", Connection = "AzureWebJobsStorage")] VippyProcessingState manifest,
    [Blob("%amsBlobInputContainer%/{BlobName}", FileAccess.ReadWrite)] CloudBlockBlob videoBlob,
    TraceWriter log)
{
    var context = MediaServicesHelper.Context;

    // Only set the start time if it wasn't already set in the blob-watcher
    // function (that way it works if the job is initiated via this queue directly).
    if (manifest.StartTime == null)
    {
        manifest.StartTime = DateTime.Now;
    }

    var videofileName = videoBlob.Name;
    // Use the file name when no video title attribute was passed.
    var videoTitle = manifest.videoTitle ?? videofileName;

    // Get a new asset from the blob.
    IAsset newAsset;
    try
    {
        // BUGFIX: await instead of blocking with GetAwaiter().GetResult()
        // inside an async method.
        newAsset = await CopyBlobHelper.CreateAssetFromBlob(videoBlob, videoTitle, log);
    }
    catch (Exception e)
    {
        // BUGFIX: the original message used "/r/n" (literal slashes) instead of
        // the "\r\n" newline escape; also preserve the inner exception.
        throw new ApplicationException($"Error occurred creating asset from Blob:\r\n{e.Message}", e);
    }

    // If an internal_id was passed in the metadata, use it within AMS (AlternateId)
    // and Cosmos (Id - main document id) for correlation. If the same id is ever
    // reprocessed, all stored metadata will be overwritten.
    newAsset.AlternateId = manifest.AlternateId;
    newAsset.Update();
    manifest.AmsAssetId = newAsset.Id;

    // Delete the source input from the watch folder.
    videoBlob.DeleteIfExists();

    // Create the encoding job.
    var job = context.Jobs.Create("MES encode from input container - ABR streaming");

    // Get a media processor reference, and pass to it the name of the
    // processor to use for the specific task.
    var processor = MediaServicesHelper.GetLatestMediaProcessorByName("Media Encoder Standard");
    var task = job.Tasks.AddNew("encoding task", processor, "Content Adaptive Multiple Bitrate MP4", TaskOptions.None);
    task.Priority = 100;
    task.InputAssets.Add(newAsset);

    // Set up the webhook notification, reusing the "FunctionWebHook"
    // notification endpoint when it already exists.
    var keyBytes = new byte[32];
    var existingEndpoint = context.NotificationEndPoints.Where(e => e.Name == "FunctionWebHook").FirstOrDefault();
    INotificationEndPoint endpoint;
    if (existingEndpoint != null)
    {
        endpoint = existingEndpoint;
    }
    else
    {
        try
        {
            endpoint = context.NotificationEndPoints.Create("FunctionWebHook",
                NotificationEndPointType.WebHook, WebHookEndpoint, keyBytes);
        }
        catch (Exception e)
        {
            // BUGFIX: "endpoing" typo in the message; preserve the inner exception.
            throw new ApplicationException($"The endpoint address specified - '{WebHookEndpoint}' is not valid.", e);
        }
    }

    task.TaskNotificationSubscriptions.AddNew(NotificationJobState.FinalStatesOnly, endpoint, false);

    // Add an output asset to contain the results of the job. This output is
    // specified as AssetCreationOptions.None, i.e. the output asset is not encrypted.
    task.OutputAssets.AddNew(videofileName, AssetCreationOptions.None);

    // BUGFIX: the original comment "Starts the job in AMS. AMS will notify the
    // webhook when it completes" was split across a physical line break, leaving
    // its second half as bare, non-compiling text before job.Submit().
    // Start the job in AMS; AMS will notify the webhook when it completes.
    job.Submit();

    // Update processing progress with id and metadata payload.
    await Globals.StoreProcessingStateRecordInCosmosAsync(manifest);

    Globals.LogMessage(log, $"AMS encoding job submitted for {videofileName}");
}