        public static async Task<object> Run([HttpTrigger(WebHookType = "genericJson")] HttpRequestMessage req, TraceWriter log, Microsoft.Azure.WebJobs.ExecutionContext execContext)
        {
            int    taskindex = 0;
            bool   useEncoderOutputForAnalytics = false;
            IAsset outputEncoding = null;

            log.Info($"Webhook was triggered!");
            string triggerStart = DateTime.UtcNow.ToString("o");

            string jsonContent = await req.Content.ReadAsStringAsync();

            dynamic data = JsonConvert.DeserializeObject(jsonContent);

            log.Info(jsonContent);

            log.Info($"asset id : {data.assetId}");

            if (data.assetId == null)
            {
                // for test
                // data.assetId = "nb:cid:UUID:2d0d78a2-685a-4b14-9cf0-9afb0bb5dbfc";

                return(req.CreateResponse(HttpStatusCode.BadRequest, new
                {
                    error = "Please pass asset ID in the input object (assetId)"
                }));
            }


            IJob  job          = null;
            ITask taskEncoding = null;

            int OutputMES             = -1;
            int OutputMEPW            = -1;
            int OutputIndex1          = -1;
            int OutputIndex2          = -1;
            int OutputOCR             = -1;
            int OutputFaceDetection   = -1;
            int OutputMotion          = -1;
            int OutputSummarization   = -1;
            int OutputFaceRedaction   = -1;
            int OutputMesThumbnails   = -1;
            int OutputVideoAnnotation = -1;
            int NumberJobsQueue       = 0;

            MediaServicesCredentials amsCredentials = new MediaServicesCredentials();

            log.Info($"Using Azure Media Service Rest API Endpoint : {amsCredentials.AmsRestApiEndpoint}");

            try
            {
                AzureAdTokenCredentials tokenCredentials = new AzureAdTokenCredentials(amsCredentials.AmsAadTenantDomain,
                                                                                       new AzureAdClientSymmetricKey(amsCredentials.AmsClientId, amsCredentials.AmsClientSecret),
                                                                                       AzureEnvironments.AzureCloudEnvironment);

                AzureAdTokenProvider tokenProvider = new AzureAdTokenProvider(tokenCredentials);

                _context = new CloudMediaContext(amsCredentials.AmsRestApiEndpoint, tokenProvider);


                // find the Asset
                string assetid = (string)data.assetId;
                IAsset asset   = _context.Assets.Where(a => a.Id == assetid).FirstOrDefault();

                if (asset == null)
                {
                    log.Info($"Asset not found {assetid}");
                    return(req.CreateResponse(HttpStatusCode.BadRequest, new
                    {
                        error = "Asset not found"
                    }));
                }

                if (data.useEncoderOutputForAnalytics != null && ((bool)data.useEncoderOutputForAnalytics) && (data.mesPreset != null || data.mes != null))  // User wants to use encoder output for media analytics
                {
                    useEncoderOutputForAnalytics = (bool)data.useEncoderOutputForAnalytics;
                }


                // Declare a new encoding job with the Standard encoder
                int priority = 10;
                if (data.priority != null)
                {
                    priority = (int)data.priority;
                }
                job = _context.Jobs.Create(((string)data.jobName) ?? "Azure Functions Job", priority);

                if (data.mes != null || data.mesPreset != null)  // MES Task
                {
                    // Get a media processor reference, and pass to it the name of the
                    // processor to use for the specific task.
                    IMediaProcessor processorMES = MediaServicesHelper.GetLatestMediaProcessorByName(_context, "Media Encoder Standard");

                    string preset = null;
                    if (data.mes != null)
                    {
                        preset = (string)data.mes.preset;
                    }
                    else
                    {
                        preset = (string)data.mesPreset; // Compatibility mode
                    }
                    if (preset == null)
                    {
                        preset = "Content Adaptive Multiple Bitrate MP4";  // the default preset
                    }

                    if (preset.ToUpper().EndsWith(".JSON"))
                    {
                        // Build the folder path to the preset
                        string presetPath = Path.Combine(System.IO.Directory.GetParent(execContext.FunctionDirectory).FullName, "presets", preset);
                        log.Info("presetPath= " + presetPath);
                        preset = File.ReadAllText(presetPath);
                    }

                    // Create a task with the encoding details, using a string preset.
                    // If no preset was specified in the input, the "Content Adaptive Multiple Bitrate MP4" system preset is used.
                    taskEncoding = job.Tasks.AddNew("MES encoding task",
                                                    processorMES,
                                                    preset,
                                                    TaskOptions.None);

                    // Specify the input asset to be encoded.
                    taskEncoding.InputAssets.Add(asset);
                    OutputMES = taskindex++;

                    // Add an output asset to contain the results of the job.
                    // This output is specified as AssetCreationOptions.None, which
                    // means the output asset is not encrypted.
                    outputEncoding = taskEncoding.OutputAssets.AddNew(asset.Name + " MES encoded", JobHelpers.OutputStorageFromParam(data.mes), AssetCreationOptions.None);
                }

                if (data.mepw != null || data.workflowAssetId != null) // Premium Encoder Task
                {
                    //find the workflow asset
                    string workflowassetid = null;
                    if (data.mepw != null)
                    {
                        workflowassetid = (string)data.mepw.workflowAssetId;
                    }
                    else
                    {
                        workflowassetid = (string)data.workflowAssetId; // compatibility mode
                    }

                    IAsset workflowAsset = _context.Assets.Where(a => a.Id == workflowassetid).FirstOrDefault();

                    if (workflowAsset == null)
                    {
                        log.Info($"Workflow not found {workflowassetid}");
                        return(req.CreateResponse(HttpStatusCode.BadRequest, new
                        {
                            error = "Workflow not found"
                        }));
                    }

                    // Get a media processor reference, and pass to it the name of the
                    // processor to use for the specific task.
                    IMediaProcessor processorMEPW = MediaServicesHelper.GetLatestMediaProcessorByName(_context, "Media Encoder Premium Workflow");

                    string premiumConfiguration = "";
                    if (data.mepw != null && data.mepw.workflowConfig != null)
                    {
                        premiumConfiguration = (string)data.mepw.workflowConfig;
                    }
                    else if (data.workflowConfig != null)
                    {
                        premiumConfiguration = (string)data.workflowConfig; // compatibility mode
                    }

                    // In some cases, a configuration can be loaded and passed to the task to tune the workflow, for example:
                    // premiumConfiguration=File.ReadAllText(Path.Combine(System.IO.Directory.GetParent(execContext.FunctionDirectory).FullName, "presets", "SetRuntime.xml")).Replace("VideoFileName", VideoFile.Name).Replace("AudioFileName", AudioFile.Name);

                    // Create a task
                    taskEncoding = job.Tasks.AddNew("Premium Workflow encoding task",
                                                    processorMEPW,
                                                    premiumConfiguration,
                                                    TaskOptions.None);

                    log.Info("task created");

                    // Specify the input asset to be encoded.
                    taskEncoding.InputAssets.Add(workflowAsset); // first add the Workflow
                    taskEncoding.InputAssets.Add(asset);         // Then add the video asset
                    OutputMEPW = taskindex++;

                    // Add an output asset to contain the results of the job.
                    // This output is specified as AssetCreationOptions.None, which
                    // means the output asset is not encrypted.
                    outputEncoding = taskEncoding.OutputAssets.AddNew(asset.Name + " Premium encoded", JobHelpers.OutputStorageFromParam(data.mepw), AssetCreationOptions.None);
                }

                IAsset an_asset = useEncoderOutputForAnalytics ? outputEncoding : asset;

                // Media Analytics
                OutputIndex1          = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.indexV1 == null) ? (string)data.indexV1Language : ((string)data.indexV1.language ?? "English"), "Azure Media Indexer", "IndexerV1.xml", "English", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.indexV1));
                OutputIndex2          = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.indexV2 == null) ? (string)data.indexV2Language : ((string)data.indexV2.language ?? "EnUs"), "Azure Media Indexer 2 Preview", "IndexerV2.json", "EnUs", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.indexV2));
                OutputOCR             = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.ocr == null) ? (string)data.ocrLanguage : ((string)data.ocr.language ?? "AutoDetect"), "Azure Media OCR", "OCR.json", "AutoDetect", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.ocr));
                OutputFaceDetection   = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.faceDetection == null) ? (string)data.faceDetectionMode : ((string)data.faceDetection.mode ?? "PerFaceEmotion"), "Azure Media Face Detector", "FaceDetection.json", "PerFaceEmotion", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.faceDetection));
                OutputFaceRedaction   = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.faceRedaction == null) ? (string)data.faceRedactionMode : ((string)data.faceRedaction.mode ?? "combined"), "Azure Media Redactor", "FaceRedaction.json", "combined", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.faceRedaction));
                OutputMotion          = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.motionDetection == null) ? (string)data.motionDetectionLevel : ((string)data.motionDetection.level ?? "medium"), "Azure Media Motion Detector", "MotionDetection.json", "medium", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.motionDetection));
                OutputSummarization   = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.summarization == null) ? (string)data.summarizationDuration : ((string)data.summarization.duration ?? "0.0"), "Azure Media Video Thumbnails", "Summarization.json", "0.0", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.summarization));
                OutputVideoAnnotation = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.videoAnnotation != null) ? "1.0" : null, "Azure Media Video Annotator", "VideoAnnotation.json", "1.0", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.videoAnnotation));

                // MES Thumbnails
                OutputMesThumbnails = JobHelpers.AddTask(execContext, _context, job, asset, (data.mesThumbnails != null) ? ((string)data.mesThumbnails.Start ?? "{Best}") : null, "Media Encoder Standard", "MesThumbnails.json", "{Best}", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.mesThumbnails));

                job.Submit();
                log.Info("Job Submitted");
                NumberJobsQueue = _context.Jobs.Where(j => j.State == JobState.Queued).Count();
            }
            catch (Exception ex)
            {
                string message = ex.Message + ((ex.InnerException != null) ? Environment.NewLine + MediaServicesHelper.GetErrorMessage(ex) : "");
                log.Info($"ERROR: Exception {message}");
                return(req.CreateResponse(HttpStatusCode.InternalServerError, new { error = message }));
            }

            job = _context.Jobs.Where(j => j.Id == job.Id).FirstOrDefault(); // Let's refresh the job

            log.Info("Job Id: " + job.Id);
            log.Info("OutputAssetMESId: " + JobHelpers.ReturnId(job, OutputMES));
            log.Info("OutputAssetMEPWId: " + JobHelpers.ReturnId(job, OutputMEPW));
            log.Info("OutputAssetIndexV1Id: " + JobHelpers.ReturnId(job, OutputIndex1));
            log.Info("OutputAssetIndexV2Id: " + JobHelpers.ReturnId(job, OutputIndex2));
            log.Info("OutputAssetOCRId: " + JobHelpers.ReturnId(job, OutputOCR));
            log.Info("OutputAssetFaceDetectionId: " + JobHelpers.ReturnId(job, OutputFaceDetection));
            log.Info("OutputAssetFaceRedactionId: " + JobHelpers.ReturnId(job, OutputFaceRedaction));
            log.Info("OutputAssetMotionDetectionId: " + JobHelpers.ReturnId(job, OutputMotion));
            log.Info("OutputAssetSummarizationId: " + JobHelpers.ReturnId(job, OutputSummarization));
            log.Info("OutputMesThumbnailsId: " + JobHelpers.ReturnId(job, OutputMesThumbnails));
            log.Info("OutputAssetVideoAnnotationId: " + JobHelpers.ReturnId(job, OutputVideoAnnotation));

            return(req.CreateResponse(HttpStatusCode.OK, new
            {
                jobId = job.Id,
                otherJobsQueue = NumberJobsQueue,
                mes = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputMES),
                    taskId = JobHelpers.ReturnTaskId(job, OutputMES)
                },
                mepw = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputMEPW),
                    taskId = JobHelpers.ReturnTaskId(job, OutputMEPW)
                },
                indexV1 = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputIndex1),
                    taskId = JobHelpers.ReturnTaskId(job, OutputIndex1),
                    language = (string)data.indexV1Language
                },
                indexV2 = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputIndex2),
                    taskId = JobHelpers.ReturnTaskId(job, OutputIndex2),
                    language = (string)data.indexV2Language
                },
                ocr = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputOCR),
                    taskId = JobHelpers.ReturnTaskId(job, OutputOCR)
                },
                faceDetection = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputFaceDetection),
                    taskId = JobHelpers.ReturnTaskId(job, OutputFaceDetection)
                },
                faceRedaction = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputFaceRedaction),
                    taskId = JobHelpers.ReturnTaskId(job, OutputFaceRedaction)
                },
                motionDetection = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputMotion),
                    taskId = JobHelpers.ReturnTaskId(job, OutputMotion)
                },
                summarization = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputSummarization),
                    taskId = JobHelpers.ReturnTaskId(job, OutputSummarization)
                },
                mesThumbnails = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputMesThumbnails),
                    taskId = JobHelpers.ReturnTaskId(job, OutputMesThumbnails)
                },
                videoAnnotation = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputVideoAnnotation),
                    taskId = JobHelpers.ReturnTaskId(job, OutputVideoAnnotation)
                }
            }));
        }
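
For reference, here is a minimal sketch of how this first function might be invoked. "functionUrl" is a placeholder for the deployed function endpoint (including its key), and the request body uses the property names the function reads above (assetId, mes.preset, useEncoderOutputForAnalytics, priority); all values are illustrative only.

        public static async Task CallSubmitJobExample(string functionUrl)
        {
            var body = new
            {
                assetId = "nb:cid:UUID:2d0d78a2-685a-4b14-9cf0-9afb0bb5dbfc", // illustrative id, same format as the commented test value above
                mes = new { preset = "Content Adaptive Multiple Bitrate MP4" },
                useEncoderOutputForAnalytics = true,
                priority = 10
            };

            using (var client = new System.Net.Http.HttpClient())
            {
                var content = new System.Net.Http.StringContent(
                    JsonConvert.SerializeObject(body),
                    System.Text.Encoding.UTF8,
                    "application/json");

                // The function responds with the job id and the output asset/task ids of each enabled task.
                var response = await client.PostAsync(functionUrl, content);
            }
        }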
Example #2
        public static async Task <object> Run([HttpTrigger(WebHookType = "genericJson")] HttpRequestMessage req, TraceWriter log, Microsoft.Azure.WebJobs.ExecutionContext execContext)
        {
            // Variables
            int taskindex               = 0;
            int OutputMES               = -1;
            int OutputPremium           = -1;
            int OutputIndex1            = -1;
            int OutputIndex2            = -1;
            int OutputOCR               = -1;
            int OutputFaceDetection     = -1;
            int OutputFaceRedaction     = -1;
            int OutputMotion            = -1;
            int OutputSummarization     = -1;
            int OutputHyperlapse        = -1;
            int OutputMesThumbnails     = -1;
            int OutputVideoAnnotation   = -1;
            int OutputContentModeration = -1;

            int    id               = 0;
            string programid        = "";
            string programName      = "";
            string channelName      = "";
            string programUrl       = "";
            string programState     = "";
            string lastProgramState = "";

            IJob  job             = null;
            ITask taskEncoding    = null;
            int   NumberJobsQueue = 0;

            int intervalsec = 60; // Interval for each subclip job (sec). Default is 60

            TimeSpan starttime = TimeSpan.FromSeconds(0);
            TimeSpan duration  = TimeSpan.FromSeconds(intervalsec);

            log.Info($"Webhook was triggered!");
            string triggerStart = DateTime.UtcNow.ToString("o");

            string jsonContent = await req.Content.ReadAsStringAsync();

            dynamic data = JsonConvert.DeserializeObject(jsonContent);

            log.Info(jsonContent);

            if (data.channelName == null || data.programName == null)
            {
                return(req.CreateResponse(HttpStatusCode.BadRequest, new
                {
                    error = "Please pass channel name and program name in the input object (channelName, programName)"
                }));
            }

            if (data.intervalSec != null)
            {
                intervalsec = (int)data.intervalSec;
            }

            MediaServicesCredentials amsCredentials = new MediaServicesCredentials();

            log.Info($"Using Azure Media Service Rest API Endpoint : {amsCredentials.AmsRestApiEndpoint}");

            try
            {
                AzureAdTokenCredentials tokenCredentials = new AzureAdTokenCredentials(amsCredentials.AmsAadTenantDomain,
                                                                                       new AzureAdClientSymmetricKey(amsCredentials.AmsClientId, amsCredentials.AmsClientSecret),
                                                                                       AzureEnvironments.AzureCloudEnvironment);

                AzureAdTokenProvider tokenProvider = new AzureAdTokenProvider(tokenCredentials);

                _context = new CloudMediaContext(amsCredentials.AmsRestApiEndpoint, tokenProvider);

                // find the Channel, Program and Asset
                channelName = (string)data.channelName;
                var channel = _context.Channels.Where(c => c.Name == channelName).FirstOrDefault();
                if (channel == null)
                {
                    log.Info("Channel not found");
                    return(req.CreateResponse(HttpStatusCode.BadRequest, new
                    {
                        error = "Channel not found"
                    }));
                }

                programName = (string)data.programName;
                var program = channel.Programs.Where(p => p.Name == programName).FirstOrDefault();
                if (program == null)
                {
                    log.Info("Program not found");
                    return(req.CreateResponse(HttpStatusCode.BadRequest, new
                    {
                        error = "Program not found"
                    }));
                }

                programState = program.State.ToString();
                programid    = program.Id;
                var asset = ManifestHelpers.GetAssetFromProgram(_context, programid);

                if (asset == null)
                {
                    log.Info($"Asset not found for program {programid}");
                    return(req.CreateResponse(HttpStatusCode.BadRequest, new
                    {
                        error = "Asset not found"
                    }));
                }

                log.Info($"Using asset Id : {asset.Id}");

                // Table storage is used to store and read the last timestamp processed.
                // Retrieve the storage account from the connection string.
                CloudStorageAccount storageAccount = new CloudStorageAccount(new StorageCredentials(amsCredentials.StorageAccountName, amsCredentials.StorageAccountKey), true);

                // Create the table client.
                CloudTableClient tableClient = storageAccount.CreateCloudTableClient();

                // Retrieve a reference to the table.
                CloudTable table = tableClient.GetTableReference("liveanalytics");

                // Create the table if it doesn't exist.

                if (!table.CreateIfNotExists())
                {
                    log.Info($"Table {table.Name} already exists");
                }
                else
                {
                    log.Info($"Table {table.Name} created");
                }

                var lastendtimeInTable = ManifestHelpers.RetrieveLastEndTime(table, programid);

                // Get the manifest data (timestamps)
                var assetmanifestdata = ManifestHelpers.GetManifestTimingData(_context, asset, log);

                log.Info("Timestamps: " + string.Join(",", assetmanifestdata.TimestampList.Select(n => n.ToString()).ToArray()));

                var livetime = TimeSpan.FromSeconds((double)assetmanifestdata.TimestampEndLastChunk / (double)assetmanifestdata.TimeScale);

                log.Info($"Livetime: {livetime}");

                starttime = ManifestHelpers.ReturnTimeSpanOnGOP(assetmanifestdata, livetime.Subtract(TimeSpan.FromSeconds(intervalsec)));
                log.Info($"Value starttime : {starttime}");

                if (lastendtimeInTable != null)
                {
                    lastProgramState = lastendtimeInTable.ProgramState;
                    log.Info($"Value ProgramState retrieved : {lastProgramState}");

                    var lastendtimeInTableValue = TimeSpan.Parse(lastendtimeInTable.LastEndTime);
                    log.Info($"Value lastendtimeInTable retrieved : {lastendtimeInTableValue}");

                    id = int.Parse(lastendtimeInTable.Id);
                    log.Info($"Value id retrieved : {id}");

                    if (lastendtimeInTableValue != null)
                    {
                        var delta = (livetime - lastendtimeInTableValue - TimeSpan.FromSeconds(intervalsec)).Duration();
                        log.Info($"Delta: {delta}");

                        //if (delta < (new TimeSpan(0, 0, 3*intervalsec))) // less than 3 times the normal duration (3*60s)
                        if (delta < (TimeSpan.FromSeconds(3 * intervalsec))) // less than 3 times the normal duration (3*60s)
                        {
                            starttime = lastendtimeInTableValue;
                            log.Info($"Value new starttime : {starttime}");
                        }
                    }
                }

                duration = livetime - starttime;
                log.Info($"Value duration: {duration}");

                // D:\home\site\wwwroot\Presets\LiveSubclip.json
                string ConfigurationSubclip = File.ReadAllText(Path.Combine(System.IO.Directory.GetParent(execContext.FunctionDirectory).FullName, "presets", "LiveSubclip.json")).Replace("0:00:00.000000", starttime.Subtract(TimeSpan.FromMilliseconds(100)).ToString()).Replace("0:00:30.000000", duration.Add(TimeSpan.FromMilliseconds(200)).ToString());

                int priority = 10;
                if (data.priority != null)
                {
                    priority = (int)data.priority;
                }

                // MES Subclipping TASK
                // Declare a new encoding job with the Standard encoder
                job = _context.Jobs.Create("Azure Function - Job for Live Analytics - " + programName, priority);
                // Get a media processor reference, and pass to it the name of the
                // processor to use for the specific task.
                IMediaProcessor processor = MediaServicesHelper.GetLatestMediaProcessorByName(_context, "Media Encoder Standard");

                // Change or modify the custom preset JSON used here.
                // string preset = File.ReadAllText("D:\home\site\wwwroot\Presets\H264 Multiple Bitrate 720p.json");

                // Create a task with the subclipping configuration built above
                // (LiveSubclip.json with the computed start time and duration substituted in).
                taskEncoding = job.Tasks.AddNew("Subclipping task",
                                                processor,
                                                ConfigurationSubclip,
                                                TaskOptions.None);

                // Specify the input asset to be encoded.
                taskEncoding.InputAssets.Add(asset);
                OutputMES = taskindex++;

                // Add an output asset to contain the results of the job.
                // This output is specified as AssetCreationOptions.None, which
                // means the output asset is not encrypted.
                var subclipasset = taskEncoding.OutputAssets.AddNew(asset.Name + " subclipped " + triggerStart, JobHelpers.OutputStorageFromParam(data.mesSubclip), AssetCreationOptions.None);

                log.Info($"Adding media analytics tasks");

                /*
                 *      // Media Analytics
                 *      OutputIndex1 = JobHelpers.AddTask(job, subclipasset, (string)data.indexV1Language, "Azure Media Indexer", "IndexerV1.xml", "English", ref taskindex);
                 *      OutputIndex2 = JobHelpers.AddTask(job, subclipasset, (string)data.indexV2Language, "Azure Media Indexer 2 Preview", "IndexerV2.json", "EnUs", ref taskindex);
                 *      OutputOCR = JobHelpers.AddTask(job, subclipasset, (string)data.ocrLanguage, "Azure Media OCR", "OCR.json", "AutoDetect", ref taskindex);
                 *      OutputFaceDetection = JobHelpers.AddTask(job, subclipasset, (string)data.faceDetectionMode, "Azure Media Face Detector", "FaceDetection.json", "PerFaceEmotion", ref taskindex);
                 *      OutputFaceRedaction = JobHelpers.AddTask(job, subclipasset, (string)data.faceRedactionMode, "Azure Media Redactor", "FaceRedaction.json", "combined", ref taskindex, priority - 1);
                 *      OutputMotion = JobHelpers.AddTask(job, subclipasset, (string)data.motionDetectionLevel, "Azure Media Motion Detector", "MotionDetection.json", "medium", ref taskindex, priority - 1);
                 *      OutputSummarization = JobHelpers.AddTask(job, subclipasset, (string)data.summarizationDuration, "Azure Media Video Thumbnails", "Summarization.json", "0.0", ref taskindex);
                 *      OutputHyperlapse = JobHelpers.AddTask(job, subclipasset, (string)data.hyperlapseSpeed, "Azure Media Hyperlapse", "Hyperlapse.json", "8", ref taskindex);
                 *      OutputMesThumbnails = JobHelpers.AddTask(job, subclipasset, (string)data.mesThumbnailsStart, "Media Encoder Standard", "MesThumbnails.json", "{Best}", ref taskindex);
                 */

                //new
                OutputIndex1            = JobHelpers.AddTask(execContext, _context, job, subclipasset, (data.indexV1 == null) ? (string)data.indexV1Language : ((string)data.indexV1.language ?? "English"), "Azure Media Indexer", "IndexerV1.xml", "English", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.indexV1));
                OutputIndex2            = JobHelpers.AddTask(execContext, _context, job, subclipasset, (data.indexV2 == null) ? (string)data.indexV2Language : ((string)data.indexV2.language ?? "EnUs"), "Azure Media Indexer 2 Preview", "IndexerV2.json", "EnUs", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.indexV2));
                OutputOCR               = JobHelpers.AddTask(execContext, _context, job, subclipasset, (data.ocr == null) ? (string)data.ocrLanguage : ((string)data.ocr.language ?? "AutoDetect"), "Azure Media OCR", "OCR.json", "AutoDetect", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.ocr));
                OutputFaceDetection     = JobHelpers.AddTask(execContext, _context, job, subclipasset, (data.faceDetection == null) ? (string)data.faceDetectionMode : ((string)data.faceDetection.mode ?? "PerFaceEmotion"), "Azure Media Face Detector", "FaceDetection.json", "PerFaceEmotion", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.faceDetection));
                OutputFaceRedaction     = JobHelpers.AddTask(execContext, _context, job, subclipasset, (data.faceRedaction == null) ? (string)data.faceRedactionMode : ((string)data.faceRedaction.mode ?? "combined"), "Azure Media Redactor", "FaceRedaction.json", "combined", ref taskindex, priority - 1, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.faceRedaction));
                OutputMotion            = JobHelpers.AddTask(execContext, _context, job, subclipasset, (data.motionDetection == null) ? (string)data.motionDetectionLevel : ((string)data.motionDetection.level ?? "medium"), "Azure Media Motion Detector", "MotionDetection.json", "medium", ref taskindex, priority - 1, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.motionDetection));
                OutputSummarization     = JobHelpers.AddTask(execContext, _context, job, subclipasset, (data.summarization == null) ? (string)data.summarizationDuration : ((string)data.summarization.duration ?? "0.0"), "Azure Media Video Thumbnails", "Summarization.json", "0.0", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.summarization));
                OutputVideoAnnotation   = JobHelpers.AddTask(execContext, _context, job, subclipasset, (data.videoAnnotation != null) ? "1.0" : null, "Azure Media Video Annotator", "VideoAnnotation.json", "1.0", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.videoAnnotation));
                OutputContentModeration = JobHelpers.AddTask(execContext, _context, job, subclipasset, (data.contentModeration != null) ? "2.0" : null, "Azure Media Content Moderator", "ContentModeration.json", "2.0", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.contentModeration));

                // MES Thumbnails
                OutputMesThumbnails = JobHelpers.AddTask(execContext, _context, job, subclipasset, (data.mesThumbnails != null) ? ((string)data.mesThumbnails.Start ?? "{Best}") : null, "Media Encoder Standard", "MesThumbnails.json", "{Best}", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.mesThumbnails));

                // Hyperlapse
                OutputHyperlapse = JobHelpers.AddTask(execContext, _context, job, subclipasset, (data.hyperlapse == null) ? (string)data.hyperlapseSpeed : ((string)data.hyperlapse.speed ?? "8"), "Azure Media Hyperlapse", "Hyperlapse.json", "8", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.hyperlapse));

                job.Submit();
                log.Info("Job Submitted");

                id++;
                ManifestHelpers.UpdateLastEndTime(table, starttime + duration, programid, id, program.State);

                log.Info($"Output MES index {OutputMES}");

                // Let's store some data in the AlternateId of the subclipped asset
                var sid = JobHelpers.ReturnId(job, OutputMES);
                log.Info($"SID {sid}");
                var subclipassetrefreshed = _context.Assets.Where(a => a.Id == sid).FirstOrDefault();
                log.Info($"subclipassetrefreshed ID {subclipassetrefreshed.Id}");
                subclipassetrefreshed.AlternateId = JsonConvert.SerializeObject(new ManifestHelpers.SubclipInfo()
                {
                    programId = programid, subclipStart = starttime, subclipDuration = duration
                });
                subclipassetrefreshed.Update();

                // Let's store some data in the AlternateId of the index assets
                var index1sid = JobHelpers.ReturnId(job, OutputIndex1);
                if (index1sid != null)
                {
                    var index1assetrefreshed = _context.Assets.Where(a => a.Id == index1sid).FirstOrDefault();
                    log.Info($"index1assetrefreshed ID {index1assetrefreshed.Id}");
                    index1assetrefreshed.AlternateId = JsonConvert.SerializeObject(new ManifestHelpers.SubclipInfo()
                    {
                        programId = programid, subclipStart = starttime, subclipDuration = duration
                    });
                    index1assetrefreshed.Update();
                }

                var index2sid = JobHelpers.ReturnId(job, OutputIndex2);
                if (index2sid != null)
                {
                    var index2assetrefreshed = _context.Assets.Where(a => a.Id == index2sid).FirstOrDefault();
                    log.Info($"index2assetrefreshed ID {index2assetrefreshed.Id}");
                    index2assetrefreshed.AlternateId = JsonConvert.SerializeObject(new ManifestHelpers.SubclipInfo()
                    {
                        programId = programid, subclipStart = starttime, subclipDuration = duration
                    });
                    index2assetrefreshed.Update();
                }

                // Get program URL
                var publishurlsmooth = MediaServicesHelper.GetValidOnDemandURI(_context, asset);

                if (publishurlsmooth != null)
                {
                    programUrl = publishurlsmooth.ToString();
                }

                NumberJobsQueue = _context.Jobs.Where(j => j.State == JobState.Queued).Count();
            }
            catch (Exception ex)
            {
                string message = ex.Message + ((ex.InnerException != null) ? Environment.NewLine + MediaServicesHelper.GetErrorMessage(ex) : "");
                log.Info($"ERROR: Exception {message}");
                return(req.CreateResponse(HttpStatusCode.InternalServerError, new { error = message }));
            }

            log.Info("Job Id: " + job.Id);
            log.Info("Output asset Id: " + ((OutputMES > -1) ? JobHelpers.ReturnId(job, OutputMES) : JobHelpers.ReturnId(job, OutputPremium)));

            return(req.CreateResponse(HttpStatusCode.OK, new
            {
                triggerStart = triggerStart,
                jobId = job.Id,
                subclip = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputMES),
                    taskId = JobHelpers.ReturnTaskId(job, OutputMES),
                    start = starttime,
                    duration = duration,
                },
                mesThumbnails = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputMesThumbnails),
                    taskId = JobHelpers.ReturnTaskId(job, OutputMesThumbnails)
                },
                indexV1 = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputIndex1),
                    taskId = JobHelpers.ReturnTaskId(job, OutputIndex1),
                    language = (string)data.indexV1Language
                },
                indexV2 = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputIndex2),
                    taskId = JobHelpers.ReturnTaskId(job, OutputIndex2),
                    language = (string)data.indexV2Language,
                },
                ocr = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputOCR),
                    taskId = JobHelpers.ReturnTaskId(job, OutputOCR)
                },
                faceDetection = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputFaceDetection),
                    taskId = JobHelpers.ReturnTaskId(job, OutputFaceDetection)
                },
                faceRedaction = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputFaceRedaction),
                    taskId = JobHelpers.ReturnTaskId(job, OutputFaceRedaction)
                },
                motionDetection = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputMotion),
                    taskId = JobHelpers.ReturnTaskId(job, OutputMotion)
                },
                summarization = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputSummarization),
                    taskId = JobHelpers.ReturnTaskId(job, OutputSummarization)
                },
                hyperlapse = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputHyperlapse),
                    taskId = JobHelpers.ReturnTaskId(job, OutputHyperlapse)
                },
                videoAnnotation = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputVideoAnnotation),
                    taskId = JobHelpers.ReturnTaskId(job, OutputVideoAnnotation)
                },
                contentModeration = new
                {
                    assetId = JobHelpers.ReturnId(job, OutputContentModeration),
                    taskId = JobHelpers.ReturnTaskId(job, OutputContentModeration)
                },

                channelName = channelName,
                programName = programName,
                programId = programid,
                programUrl = programUrl,
                programState = programState,
                programStateChanged = (lastProgramState != programState).ToString(),
                otherJobsQueue = NumberJobsQueue
            }));
        }
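
Similarly, a minimal sketch of a request body for the live-subclipping function above; the property names match what the function reads (channelName, programName, intervalSec) and the values are only illustrative placeholders.

        // Illustrative only: "channel1" and "program1" are placeholder names.
        var liveAnalyticsBody = new
        {
            channelName = "channel1",
            programName = "program1",
            intervalSec = 60 // optional; the function defaults to 60 seconds
        };
        string liveAnalyticsJson = JsonConvert.SerializeObject(liveAnalyticsBody);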