public AssetInformation(Mainform mainform, CloudMediaContext context) { InitializeComponent(); this.Icon = Bitmaps.Azure_Explorer_ico; myMainForm = mainform; myContext = context; }
public Subclipping(CloudMediaContext context, MediaServiceContextForDynManifest contextdynmanifest, List<IAsset> assetlist, Mainform mainform) { InitializeComponent(); buttonJobOptions.Initialize(context); this.Icon = Bitmaps.Azure_Explorer_ico; _context = context; _contextdynmanifest = contextdynmanifest; _parentassetmanifestdata = new ManifestTimingData(); _selectedAssets = assetlist; _mainform = mainform; if (_selectedAssets.Count == 1 && _selectedAssets.FirstOrDefault() != null) // one asset only { var myAsset = assetlist.FirstOrDefault(); textBoxAssetName.Text = myAsset.Name; // let's try to read asset timing _parentassetmanifestdata = AssetInfo.GetManifestTimingData(myAsset); if (!_parentassetmanifestdata.Error) // we were able to read asset timings and not live { _timescale = timeControlStart.TimeScale = timeControlEnd.TimeScale = _parentassetmanifestdata.TimeScale; timeControlStart.ScaledFirstTimestampOffset = timeControlEnd.ScaledFirstTimestampOffset = _parentassetmanifestdata.TimestampOffset; textBoxOffset.Text = _parentassetmanifestdata.TimestampOffset.ToString(); labelOffset.Visible = textBoxOffset.Visible = true; textBoxFilterTimeScale.Text = _timescale.ToString(); textBoxFilterTimeScale.Visible = labelAssetTimescale.Visible = true; timeControlStart.Max = timeControlEnd.Max = new TimeSpan(AssetInfo.ReturnTimestampInTicks(_parentassetmanifestdata.AssetDuration, _parentassetmanifestdata.TimeScale)); labelassetduration.Visible = textBoxAssetDuration.Visible = true; textBoxAssetDuration.Text = timeControlStart.Max.ToString(@"d\.hh\:mm\:ss") + (_parentassetmanifestdata.IsLive ? " (LIVE)" : ""); // let set duration and active track bat timeControlStart.ScaledTotalDuration = timeControlEnd.ScaledTotalDuration = _parentassetmanifestdata.AssetDuration; timeControlStart.DisplayTrackBar = true; timeControlEnd.DisplayTrackBar = true; timeControlEnd.SetTimeStamp(timeControlEnd.Max); } else // one asset but not able to read asset timings { timeControlStart.DisplayTrackBar = timeControlEnd.DisplayTrackBar = false; timeControlStart.TimeScale = timeControlEnd.TimeScale = _timescale; timeControlStart.Max = timeControlEnd.Max = TimeSpan.MaxValue; //timeControlEnd.SetTimeStamp(timeControlEnd.Max); } } else // several assets { groupBoxTrimming.Enabled = panelAssetInfo.Visible = false; // no trimming and no asset info radioButtonAssetFilter.Enabled = false; // no asset filter option timeControlStart.DisplayTrackBar = timeControlEnd.DisplayTrackBar = false; timeControlStart.TimeScale = timeControlEnd.TimeScale = _timescale; timeControlStart.Max = timeControlEnd.Max = TimeSpan.MaxValue; //timeControlEnd.SetTimeStamp(timeControlEnd.Max); } }
public BulkContainerInfo(Mainform mainform, CloudMediaContext context, IIngestManifest manifest) { InitializeComponent(); this.Icon = Bitmaps.Azure_Explorer_ico; MyMainForm = mainform; MyContext = context; _manifest = manifest; }
public ProgramInformation(Mainform mainform, CloudMediaContext context, MediaServiceContextForDynManifest contextdynman) { InitializeComponent(); this.Icon = Bitmaps.Azure_Explorer_ico; MyMainForm = mainform; MyContext = context; MyDynManifestContext = contextdynman; }
public ChannelAdSlateControl(Mainform mainform) { InitializeComponent(); this.Icon = Bitmaps.Azure_Explorer_ico; MyMainForm = mainform; labelSlatePreviewInfoText = labelSlatePreviewInfo.Text; labelSlatePreviewInfo.Text = ""; }
private void WorkerUpdateIngestManifest_DoWork(object sender, DoWorkEventArgs e)
{
    Debug.WriteLine("WorkerUpdateIngestManifest_DoWork");
    BackgroundWorker worker = sender as BackgroundWorker;
    Mainform myform = (Mainform)this.FindForm();

    while (true)
    {
        // fetch the manifests once per pass and reuse the list (no need to query the context a second time)
        var manifestsupdated = _context.IngestManifests.ToList();
        foreach (var im in manifestsupdated)
        {
            var img = _MyObservIngestManifest.Where(i => i.Id == im.Id).FirstOrDefault();
            if (img != null)
            {
                if (im.Statistics.PendingFilesCount == 0 && img.PendingFiles != im.Statistics.PendingFilesCount)
                {
                    // Notify if upload completed for one bulk ingest container
                    myform.Notify(string.Format("Bulk ingest completed with {0} error(s)", im.Statistics.ErrorFilesCount), string.Format("Container '{0}'", im.Name), im.Statistics.ErrorFilesCount > 0);
                    myform.TextBoxLogWriteLine(string.Format("Bulk ingest on container '{0}' completed with {1} error(s)", im.Name, im.Statistics.ErrorFilesCount), im.Statistics.ErrorFilesCount > 0);
                    myform.DoRefreshGridAssetV(false);
                }
                img.State = im.State;
                img.LastModified = im.LastModified.ToLocalTime().ToString("G");
                img.PendingFiles = im.Statistics.PendingFilesCount;
                img.FinishedFiles = im.Statistics.FinishedFilesCount;
                if (im.Statistics.FinishedFilesCount + im.Statistics.PendingFilesCount == 0)
                {
                    img.Progress = 101; // sentinel value above 100: nothing to count, so the progress bar is not displayed
                }
                else
                {
                    img.Progress = (float)im.Statistics.FinishedFilesCount / (float)(im.Statistics.FinishedFilesCount + im.Statistics.PendingFilesCount) * 100;
                }
            }
        }
        System.Threading.Thread.Sleep(10000); // 10 s between two refresh passes
        if (worker.CancellationPending == true)
        {
            e.Cancel = true;
            return;
        }
    }
}
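// A minimal sketch of how a polling worker like the one above is typically wired up and torn down.
// The worker variable name is an assumption for illustration; only the standard System.ComponentModel.BackgroundWorker API is used.
BackgroundWorker workerUpdateIngestManifest = new BackgroundWorker { WorkerSupportsCancellation = true }; // required for CancelAsync/CancellationPending to have any effect
workerUpdateIngestManifest.DoWork += WorkerUpdateIngestManifest_DoWork;
workerUpdateIngestManifest.RunWorkerAsync(); // starts the 10 s polling loop on a thread-pool thread

// later, for example when the grid is disposed:
workerUpdateIngestManifest.CancelAsync(); // sets CancellationPending, which the loop checks after each pass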
public static async Task<IOperation> ChannelExecuteOperationAsync(Func<TimeSpan, string, Task<IOperation>> fCall, TimeSpan ts, string s, IChannel channel, string strStatusSuccess, CloudMediaContext _context, Mainform mainform, DataGridViewLiveChannel dataGridViewChannelsV = null) //used for all except creation { IOperation operation = null; try { var state = channel.State; var STask = fCall(ts, s); operation = await STask; while (operation.State == OperationState.InProgress) { //refresh the operation operation = _context.Operations.GetOperation(operation.Id); // refresh the channel IChannel channelR = _context.Channels.Where(c => c.Id == channel.Id).FirstOrDefault(); if (channelR != null && state != channelR.State) { state = channelR.State; if (dataGridViewChannelsV != null) dataGridViewChannelsV.BeginInvoke(new Action(() => dataGridViewChannelsV.RefreshChannel(channelR)), null); } System.Threading.Thread.Sleep(1000); } if (operation.State == OperationState.Succeeded) { mainform.TextBoxLogWriteLine("Channel '{0}' {1}.", channel.Name, strStatusSuccess); } else { mainform.TextBoxLogWriteLine("Channel '{0}' NOT {1}. (Error {2})", channel.Name, strStatusSuccess, operation.ErrorCode, true); mainform.TextBoxLogWriteLine("Error message : {0}", operation.ErrorMessage, true); } if (dataGridViewChannelsV != null) dataGridViewChannelsV.BeginInvoke(new Action(() => dataGridViewChannelsV.RefreshChannel(channel)), null); } catch (Exception ex) { mainform.TextBoxLogWriteLine("Error with channel '{0}' : {1}", channel.Name, Program.GetErrorMessage(ex), true); } return operation; }
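// Hypothetical call site for the overload above, inside an async method. It assumes the v2 SDK exposes
// IChannel.SendShowSlateOperationAsync(TimeSpan, string), which matches the Func<TimeSpan, string, Task<IOperation>> shape;
// 'slateAssetId' is an illustrative variable, not taken from this repo.
IOperation op = await ChannelExecuteOperationAsync(
    channel.SendShowSlateOperationAsync, // fCall: the long-running operation to start (assumed SDK overload)
    TimeSpan.FromSeconds(30),            // ts: slate duration
    slateAssetId,                        // s: Id of the asset used as the slate
    channel, "slate shown", _context, mainform, dataGridViewChannelsV);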
public ChannelInformation(Mainform mainform) { InitializeComponent(); this.Icon = Bitmaps.Azure_Explorer_ico; MyMainForm = mainform; }
public static string DoPlayBackWithStreamingEndpoint(PlayerType typeplayer, string Urlstr, CloudMediaContext context, Mainform mainForm, IAsset myasset = null, bool DoNotRewriteURL = false, string filter = null, AssetProtectionType keytype = AssetProtectionType.None, AzureMediaPlayerFormats formatamp = AzureMediaPlayerFormats.Auto, AzureMediaPlayerTechnologies technology = AzureMediaPlayerTechnologies.Auto, bool launchbrowser = true, bool UISelectSEFiltersAndProtocols = true) { string FullPlayBackLink = null; if (!string.IsNullOrEmpty(Urlstr)) { IStreamingEndpoint choosenSE = AssetInfo.GetBestStreamingEndpoint(context); string selectedBrowser = string.Empty; // Let's ask for SE if several SEs or Custom Host Names or Filters if (!DoNotRewriteURL) { if ( (myasset != null && UISelectSEFiltersAndProtocols) && (context.StreamingEndpoints.Count() > 1 || (context.StreamingEndpoints.FirstOrDefault() != null && context.StreamingEndpoints.FirstOrDefault().CustomHostNames.Count > 0) || context.Filters.Count() > 0 || (myasset.AssetFilters.Count() > 0)) ) { var form = new ChooseStreamingEndpoint(context, myasset, Urlstr, filter, typeplayer, true); if (form.ShowDialog() == DialogResult.OK) { Urlstr = AssetInfo.RW(new Uri(Urlstr), form.SelectStreamingEndpoint, form.SelectedFilters, form.ReturnHttps, form.ReturnSelectCustomHostName, form.ReturnStreamingProtocol, form.ReturnHLSAudioTrackName, form.ReturnHLSNoAudioOnlyMode).ToString(); choosenSE = form.SelectStreamingEndpoint; selectedBrowser = form.ReturnSelectedBrowser; } else { return string.Empty; } } else // no UI but let's rw for filter { if (typeplayer == PlayerType.DASHIFRefPlayer || typeplayer == PlayerType.DASHLiveAzure) { Urlstr = AssetInfo.RW(new Uri(Urlstr), choosenSE, filter, false, null, AMSOutputProtocols.Dash).ToString(); } else { Urlstr = RW(Urlstr, choosenSE, filter); } } } DynamicEncryption.TokenResult tokenresult = new DynamicEncryption.TokenResult(); if (myasset != null) { keytype = AssetInfo.GetAssetProtection(myasset, context); // let's save the protection scheme (use by azure player): AES, PlayReady, Widevine or PlayReadyAndWidevine if (DynamicEncryption.IsAssetHasAuthorizationPolicyWithToken(myasset, context)) // dynamic encryption with token { // user wants perhaps to play an asset with a token, so let's try to generate it switch (typeplayer) { case PlayerType.SilverlightPlayReadyToken: tokenresult = DynamicEncryption.GetTestToken(myasset, context, ContentKeyType.CommonEncryption); if (!string.IsNullOrEmpty(tokenresult.TokenString)) { tokenresult.TokenString = HttpUtility.UrlEncode(Constants.Bearer + tokenresult.TokenString); keytype = AssetProtectionType.PlayReady; } break; case PlayerType.FlashAESToken: tokenresult = DynamicEncryption.GetTestToken(myasset, context, ContentKeyType.EnvelopeEncryption); if (!string.IsNullOrEmpty(tokenresult.TokenString)) { tokenresult.TokenString = HttpUtility.UrlEncode(Constants.Bearer + tokenresult.TokenString); keytype = AssetProtectionType.AES; } break; case PlayerType.AzureMediaPlayer: case PlayerType.AzureMediaPlayerFrame: case PlayerType.CustomPlayer: switch (keytype) { case AssetProtectionType.None: break; case AssetProtectionType.AES: case AssetProtectionType.PlayReady: case AssetProtectionType.Widevine: case AssetProtectionType.PlayReadyAndWidevine: tokenresult = DynamicEncryption.GetTestToken(myasset, context, displayUI: true); if (!string.IsNullOrEmpty(tokenresult.TokenString)) { tokenresult.TokenString = HttpUtility.UrlEncode(Constants.Bearer + tokenresult.TokenString); 
//tokenresult.TokenString = Constants.Bearer + tokenresult.TokenString; } break; } break; default: // no token enabled player break; } } } // let's launch the player switch (typeplayer) { case PlayerType.AzureMediaPlayer: case PlayerType.AzureMediaPlayerFrame: /* string playerurl = typeplayer == PlayerType.AzureMediaPlayer ? Constants.PlayerAMPToLaunch : Constants.PlayerAMPIFrameToLaunch; */ string playerurl = ""; if (keytype != AssetProtectionType.None) { bool insertoken = !string.IsNullOrEmpty(tokenresult.TokenString); if (insertoken) // token. Let's analyse the token to find the drm technology used { switch (tokenresult.ContentKeyDeliveryType) { case ContentKeyDeliveryType.BaselineHttp: playerurl += string.Format(Constants.AMPAes, true.ToString()); playerurl += string.Format(Constants.AMPAesToken, tokenresult.TokenString); break; case ContentKeyDeliveryType.PlayReadyLicense: playerurl += string.Format(Constants.AMPPlayReady, true.ToString()); playerurl += string.Format(Constants.AMPPlayReadyToken, tokenresult.TokenString); break; case ContentKeyDeliveryType.Widevine: playerurl += string.Format(Constants.AMPWidevine, true.ToString()); playerurl += string.Format(Constants.AMPWidevineToken, tokenresult.TokenString); break; default: break; } } else // No token. Open mode. Let's look to the key to know the drm technology { switch (keytype) { case AssetProtectionType.AES: playerurl += string.Format(Constants.AMPAes, true.ToString()); break; case AssetProtectionType.PlayReady: playerurl += string.Format(Constants.AMPPlayReady, true.ToString()); break; case AssetProtectionType.Widevine: playerurl += string.Format(Constants.AMPWidevine, true.ToString()); break; case AssetProtectionType.PlayReadyAndWidevine: playerurl += string.Format(Constants.AMPPlayReady, true.ToString()); playerurl += string.Format(Constants.AMPWidevine, true.ToString()); break; default: break; } } } if (formatamp != AzureMediaPlayerFormats.Auto) { switch (formatamp) { case AzureMediaPlayerFormats.Dash: playerurl += string.Format(Constants.AMPformatsyntax, "dash"); break; case AzureMediaPlayerFormats.Smooth: playerurl += string.Format(Constants.AMPformatsyntax, "smooth"); break; case AzureMediaPlayerFormats.HLS: playerurl += string.Format(Constants.AMPformatsyntax, "hls"); break; case AzureMediaPlayerFormats.VideoMP4: playerurl += string.Format(Constants.AMPformatsyntax, "video/mp4"); break; default: // auto or other break; } if (tokenresult.TokenString != null) { playerurl += string.Format(Constants.AMPtokensyntax, tokenresult); } } else // format auto. 
If 0 Reserved Unit, and asset is smooth, let's force to smooth (player cannot get the dash stream for example) { if (choosenSE.ScaleUnits == 0 && myasset != null && myasset.AssetType == AssetType.SmoothStreaming) playerurl += string.Format(Constants.AMPformatsyntax, "smooth"); } if (technology != AzureMediaPlayerTechnologies.Auto) { switch (technology) { case AzureMediaPlayerTechnologies.Flash: playerurl += string.Format(Constants.AMPtechsyntax, "flash"); break; case AzureMediaPlayerTechnologies.JavaScript: playerurl += string.Format(Constants.AMPtechsyntax, "js"); break; case AzureMediaPlayerTechnologies.NativeHTML5: playerurl += string.Format(Constants.AMPtechsyntax, "html5"); break; case AzureMediaPlayerTechnologies.Silverlight: playerurl += string.Format(Constants.AMPtechsyntax, "silverlight"); break; default: // auto or other break; } } //FullPlayBackLink = string.Format(playerurl, HttpUtility.UrlEncode(Urlstr)); //FullPlayBackLink = HttpUtility.UrlEncode(string.Format(playerurl, Urlstr)); string playerurlbase = typeplayer == PlayerType.AzureMediaPlayer ? Constants.PlayerAMPToLaunch : Constants.PlayerAMPIFrameToLaunch; FullPlayBackLink = string.Format(playerurlbase, HttpUtility.UrlEncode(Urlstr)) + playerurl; break; case PlayerType.SilverlightMonitoring: FullPlayBackLink = string.Format(Constants.LinkSMFHealthToLaunch, HttpUtility.UrlEncode(Urlstr)); break; case PlayerType.SilverlightPlayReadyToken: FullPlayBackLink = string.Format(Constants.PlayerSLTokenToLaunch, HttpUtility.UrlEncode(Urlstr), tokenresult); break; case PlayerType.DASHIFRefPlayer: if (!Urlstr.Contains(string.Format(AssetInfo.format_url, AssetInfo.format_dash))) { Urlstr = AssetInfo.AddParameterToUrlString(Urlstr, string.Format(AssetInfo.format_url, AssetInfo.format_dash)); } FullPlayBackLink = string.Format(Constants.PlayerDASHIFToLaunch, Urlstr); break; case PlayerType.DASHAzurePage: FullPlayBackLink = string.Format(Constants.PlayerDASHAzurePage, HttpUtility.UrlEncode(Urlstr)); break; case PlayerType.DASHLiveAzure: if (!Urlstr.Contains(string.Format(AssetInfo.format_url, AssetInfo.format_dash))) { Urlstr = AssetInfo.AddParameterToUrlString(Urlstr, string.Format(AssetInfo.format_url, AssetInfo.format_dash)); } FullPlayBackLink = string.Format(Constants.PlayerDASHAzureToLaunch, Urlstr); break; case PlayerType.FlashAzurePage: FullPlayBackLink = string.Format(Constants.PlayerFlashAzurePage, HttpUtility.UrlEncode(Urlstr)); break; case PlayerType.FlashAESToken: FullPlayBackLink = string.Format(Constants.PlayerFlashAESToken, HttpUtility.UrlEncode(Urlstr), tokenresult); break; case PlayerType.MP4AzurePage: FullPlayBackLink = string.Format(Constants.PlayerMP4AzurePage, HttpUtility.UrlEncode(Urlstr)); break; case PlayerType.CustomPlayer: string myurl = Properties.Settings.Default.CustomPlayerUrl; FullPlayBackLink = myurl.Replace(Constants.NameconvManifestURL, HttpUtility.UrlEncode(Urlstr)).Replace(Constants.NameconvToken, tokenresult.TokenString); break; } if (FullPlayBackLink != null && launchbrowser) { try { if (string.IsNullOrEmpty(selectedBrowser)) { Process.Start(FullPlayBackLink); } else { if (selectedBrowser.Contains("edge")) { Process.Start(selectedBrowser + FullPlayBackLink); } else { Process.Start(selectedBrowser, FullPlayBackLink); } } } catch { mainForm.TextBoxLogWriteLine("Error when launching the browser.", true); } } } return FullPlayBackLink; }
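// Hypothetical call site for DoPlayBackWithStreamingEndpoint: play an asset's locator URL in Azure Media Player,
// letting the method rewrite the URL, request a test token if the asset is protected, and launch the browser.
// 'locatorUrl' and 'myAsset' are illustrative variables, not taken from this repo.
string playbackLink = DoPlayBackWithStreamingEndpoint(
    PlayerType.AzureMediaPlayer,
    locatorUrl,
    context,
    mainform,
    myasset: myAsset,
    formatamp: AzureMediaPlayerFormats.Auto,
    technology: AzureMediaPlayerTechnologies.Auto,
    launchbrowser: true);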
public Subclipping(AMSClientV3 context, List <Asset> assetlist, Mainform mainform) { InitializeComponent(); this.Icon = Bitmaps.Azure_Explorer_ico; _amsClientV3 = context; _parentassetmanifestdata = new ManifestTimingData(); _selectedAssets = assetlist; _mainform = mainform; buttonShowEDL.Initialize(); buttonShowEDL.EDLChanged += ButtonShowEDL_EDLChanged; buttonShowEDL.Offset = new TimeSpan(0); // temp locator creation if (_selectedAssets.Count == 1 && MessageBox.Show("A temporary clear locator of 1 hour is going to be created to access content timing information. It will be deleted when you close the subclipping window.", "Locator creation", MessageBoxButtons.OKCancel, MessageBoxIcon.Information) == DialogResult.OK) { try { _tempStreamingLocator = Task.Run(() => AssetInfo.CreateTemporaryOnDemandLocatorAsync(_selectedAssets.First(), _amsClientV3)).GetAwaiter().GetResult(); } catch { } } if (_selectedAssets.Count == 1 && _selectedAssets.FirstOrDefault() != null) // one asset only { var myAsset = assetlist.FirstOrDefault(); textBoxAssetName.Text = myAsset.Name; // let's try to read asset timing _parentassetmanifestdata = Task.Run(() => AssetInfo.GetManifestTimingDataAsync(myAsset, _amsClientV3, _tempStreamingLocator?.Name)).GetAwaiter().GetResult(); labelDiscountinuity.Visible = _parentassetmanifestdata.DiscontinuityDetected; if (!_parentassetmanifestdata.Error) // we were able to read asset timings and not live { _timescale = timeControlStart.TimeScale = timeControlEnd.TimeScale = _parentassetmanifestdata.TimeScale; timeControlStart.ScaledFirstTimestampOffset = timeControlEnd.ScaledFirstTimestampOffset = _parentassetmanifestdata.TimestampOffset; buttonShowEDL.Offset = timeControlStart.GetOffSetAsTimeSpan(); textBoxOffset.Text = _parentassetmanifestdata.TimestampOffset.ToString(); labelOffset.Visible = textBoxOffset.Visible = true; textBoxFilterTimeScale.Text = _timescale.ToString(); textBoxFilterTimeScale.Visible = labelAssetTimescale.Visible = true; timeControlStart.Max = timeControlEnd.Max = _parentassetmanifestdata.AssetDuration; labelassetduration.Visible = textBoxAssetDuration.Visible = true; textBoxAssetDuration.Text = timeControlStart.Max.ToString(@"d\.hh\:mm\:ss") + (_parentassetmanifestdata.IsLive ? " (LIVE)" : ""); // let set duration and active track bat timeControlStart.TotalDuration = timeControlEnd.TotalDuration = _parentassetmanifestdata.AssetDuration; timeControlStart.DisplayTrackBar = true; timeControlEnd.DisplayTrackBar = true; timeControlEnd.SetTimeStamp(timeControlEnd.Max); } else // one asset but not able to read asset timings { timeControlStart.DisplayTrackBar = timeControlEnd.DisplayTrackBar = false; timeControlStart.TimeScale = timeControlEnd.TimeScale = _timescale; timeControlStart.Max = timeControlEnd.Max = TimeSpan.MaxValue; //timeControlEnd.SetTimeStamp(timeControlEnd.Max); } } else // several assets { groupBoxTrimming.Enabled = panelAssetInfo.Visible = false; // no trimming and no asset info radioButtonAssetFilter.Enabled = false; // no asset filter option timeControlStart.DisplayTrackBar = timeControlEnd.DisplayTrackBar = false; timeControlStart.TimeScale = timeControlEnd.TimeScale = _timescale; timeControlStart.Max = timeControlEnd.Max = TimeSpan.MaxValue; //timeControlEnd.SetTimeStamp(timeControlEnd.Max); } }
private void DoSubClip()
{
    var subclipConfig = this.GetSubclippingConfiguration();

    if (subclipConfig.Reencode) // reencode the clip
    {
        List<IMediaProcessor> Procs = Mainform.GetMediaProcessorsByName(Constants.AzureMediaEncoderStandard);
        EncodingAMEStandard form2 = new EncodingAMEStandard(_context, _selectedAssets.Count, subclipConfig)
        {
            EncodingLabel = (_selectedAssets.Count > 1) ?
                string.Format("{0} asset{1} selected. You are going to submit {0} job{1} with 1 task.", _selectedAssets.Count, Program.ReturnS(_selectedAssets.Count), _selectedAssets.Count)
                : "Asset '" + _selectedAssets.FirstOrDefault().Name + "' will be encoded (1 job with 1 task).",
            EncodingProcessorsList = Procs,
            EncodingJobName = "Subclipping with reencoding of " + Constants.NameconvInputasset,
            EncodingOutputAssetName = Constants.NameconvInputasset + "- Subclipped with reencoding",
            EncodingAMEStdPresetJSONFilesUserFolder = Properties.Settings.Default.AMEStandardPresetXMLFilesCurrentFolder,
            EncodingAMEStdPresetJSONFilesFolder = Application.StartupPath + Constants.PathAMEStdFiles,
            SelectedAssets = _selectedAssets
        };
        if (form2.ShowDialog() == System.Windows.Forms.DialogResult.OK)
        {
            string taskname = "Subclipping with reencoding of " + Constants.NameconvInputasset + " with " + Constants.NameconvEncodername;
            _mainform.LaunchJobs(form2.EncodingProcessorSelected, _selectedAssets, form2.EncodingJobName, form2.JobOptions.Priority, taskname, form2.EncodingOutputAssetName,
                new List<string>() { form2.EncodingConfiguration }, form2.JobOptions.OutputAssetsCreationOptions, form2.JobOptions.TasksOptionsSetting, form2.JobOptions.StorageSelected);
        }
    }
    else if (subclipConfig.CreateAssetFilter) // create an asset filter
    {
        IAsset selasset = _selectedAssets.FirstOrDefault();
        DynManifestFilter formAF = new DynManifestFilter(_context, null, selasset, subclipConfig);
        if (formAF.ShowDialog() == DialogResult.OK)
        {
            FilterCreationInfo filterinfo = null;
            try
            {
                filterinfo = formAF.GetFilterInfo;
                selasset.AssetFilters.Create(filterinfo.Name, filterinfo.Presentationtimerange, filterinfo.Trackconditions);
                _mainform.TextBoxLogWriteLine("Asset filter '{0}' created.", filterinfo.Name);
            }
            catch (Exception ex)
            {
                _mainform.TextBoxLogWriteLine("Error when creating filter '{0}'.", (filterinfo != null && filterinfo.Name != null) ? filterinfo.Name : "unknown name", true);
                _mainform.TextBoxLogWriteLine(ex);
            }
            _mainform.DoRefreshGridFiltersV(false);
        }
    }
    else // no reencoding and no asset filter: stream copy (archive extraction)
    {
        string taskname = "Subclipping (archive extraction) of " + Constants.NameconvInputasset;
        IMediaProcessor Proc = Mainform.GetLatestMediaProcessorByName(Constants.AzureMediaEncoderStandard);
        _mainform.LaunchJobs(Proc, _selectedAssets, this.EncodingJobName, this.JobOptions.Priority, taskname, this.EncodingOutputAssetName,
            new List<string>() { this.GetSubclippingConfiguration().Configuration }, this.JobOptions.OutputAssetsCreationOptions, this.JobOptions.TasksOptionsSetting, this.JobOptions.StorageSelected);
        MessageBox.Show("Subclipping job(s) submitted", "Subclipping", MessageBoxButtons.OK, MessageBoxIcon.Information);
    }
}
public static async Task<IOperation> ChannelExecuteOperationAsync(Func<Task<IOperation>> fCall, IChannel channel, string strStatusSuccess, CloudMediaContext _context, Mainform mainform, DataGridViewLiveChannel dataGridViewChannelsV = null) //used for all except creation { IOperation operation = null; if (channel != null) { try { var state = channel.State; var STask = fCall(); operation = await STask; while (operation.State == OperationState.InProgress) { //refresh the operation operation = _context.Operations.GetOperation(operation.Id); // refresh the channel IChannel channelR = _context.Channels.Where(c => c.Id == channel.Id).FirstOrDefault(); if (channelR != null && state != channelR.State) { state = channelR.State; if (dataGridViewChannelsV != null) dataGridViewChannelsV.BeginInvoke(new Action(() => dataGridViewChannelsV.RefreshChannel(channelR)), null); } System.Threading.Thread.Sleep(1000); } if (operation.State == OperationState.Succeeded) { mainform.TextBoxLogWriteLine("Channel '{0}' : {1}.", channel.Name, strStatusSuccess); IChannel channelR = _context.Channels.Where(c => c.Id == channel.Id).FirstOrDefault(); // we display a notification is taskbar for channel started or reset if (channelR != null && (strStatusSuccess == "started" || strStatusSuccess == "reset")) { mainform.BeginInvoke(new Action(() => { mainform.Notify("Channel " + strStatusSuccess, string.Format("{0}", channelR.Name), false); })); } } else { mainform.TextBoxLogWriteLine("Channel '{0}' NOT {1}. (Error {2})", channel.Name, strStatusSuccess, operation.ErrorCode, true); mainform.TextBoxLogWriteLine("Error message : {0}", operation.ErrorMessage, true); } if (dataGridViewChannelsV != null) dataGridViewChannelsV.BeginInvoke(new Action(() => dataGridViewChannelsV.RefreshChannel(channel)), null); } catch (Exception ex) { mainform.TextBoxLogWriteLine("Channel '{0}' : Error! {1}", channel.Name, Program.GetErrorMessage(ex), true); } } return operation; }
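// Hypothetical call site for the parameterless-Func overload above, inside an async method. It assumes the v2 SDK's
// IChannel.SendStartOperationAsync(), which returns Task<IOperation>. Passing "started" as the status string also
// triggers the taskbar notification handled inside the helper.
IOperation op = await ChannelExecuteOperationAsync(
    channel.SendStartOperationAsync, // fCall: Func<Task<IOperation>> (assumed SDK method)
    channel, "started", _context, mainform, dataGridViewChannelsV);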
public Subclipping(CloudMediaContext context, List <IAsset> assetlist, Mainform mainform) { InitializeComponent(); buttonJobOptions.Initialize(context); this.Icon = Bitmaps.Azure_Explorer_ico; _context = context; _parentassetmanifestdata = new ManifestTimingData(); _selectedAssets = assetlist; _mainform = mainform; buttonShowEDL.Initialize(); buttonShowEDL.EDLChanged += ButtonShowEDL_EDLChanged; buttonShowEDL.Offset = new TimeSpan(0); if (_selectedAssets.Count == 1 && _selectedAssets.FirstOrDefault() != null) // one asset only { var myAsset = assetlist.FirstOrDefault(); textBoxAssetName.Text = myAsset.Name; // let's try to read asset timing _parentassetmanifestdata = AssetInfo.GetManifestTimingData(myAsset); labelDiscountinuity.Visible = _parentassetmanifestdata.DiscontinuityDetected; if (!_parentassetmanifestdata.Error) // we were able to read asset timings and not live { _timescale = timeControlStart.TimeScale = timeControlEnd.TimeScale = _parentassetmanifestdata.TimeScale; timeControlStart.ScaledFirstTimestampOffset = timeControlEnd.ScaledFirstTimestampOffset = _parentassetmanifestdata.TimestampOffset; buttonShowEDL.Offset = timeControlStart.GetOffSetAsTimeSpan(); textBoxOffset.Text = _parentassetmanifestdata.TimestampOffset.ToString(); labelOffset.Visible = textBoxOffset.Visible = true; textBoxFilterTimeScale.Text = _timescale.ToString(); textBoxFilterTimeScale.Visible = labelAssetTimescale.Visible = true; timeControlStart.Max = timeControlEnd.Max = _parentassetmanifestdata.AssetDuration; labelassetduration.Visible = textBoxAssetDuration.Visible = true; textBoxAssetDuration.Text = timeControlStart.Max.ToString(@"d\.hh\:mm\:ss") + (_parentassetmanifestdata.IsLive ? " (LIVE)" : ""); // let set duration and active track bat timeControlStart.TotalDuration = timeControlEnd.TotalDuration = _parentassetmanifestdata.AssetDuration; timeControlStart.DisplayTrackBar = true; timeControlEnd.DisplayTrackBar = true; timeControlEnd.SetTimeStamp(timeControlEnd.Max); } else // one asset but not able to read asset timings { timeControlStart.DisplayTrackBar = timeControlEnd.DisplayTrackBar = false; timeControlStart.TimeScale = timeControlEnd.TimeScale = _timescale; timeControlStart.Max = timeControlEnd.Max = TimeSpan.MaxValue; //timeControlEnd.SetTimeStamp(timeControlEnd.Max); } } else // several assets { groupBoxTrimming.Enabled = panelAssetInfo.Visible = false; // no trimming and no asset info radioButtonAssetFilter.Enabled = false; // no asset filter option timeControlStart.DisplayTrackBar = timeControlEnd.DisplayTrackBar = false; timeControlStart.TimeScale = timeControlEnd.TimeScale = _timescale; timeControlStart.Max = timeControlEnd.Max = TimeSpan.MaxValue; //timeControlEnd.SetTimeStamp(timeControlEnd.Max); } }
private void DoSubClip()
{
    var subclipConfig = this.GetSubclippingConfiguration();

    if (subclipConfig.Reencode) // reencode the clip
    {
        List<IMediaProcessor> Procs = Mainform.GetMediaProcessorsByName(Constants.AzureMediaEncoderStandard);
        EncodingAMEStandard form2 = new EncodingAMEStandard(_context, subclipConfig)
        {
            EncodingLabel = (_selectedAssets.Count > 1) ?
                _selectedAssets.Count + " assets have been selected. " + _selectedAssets.Count + " jobs will be submitted."
                : "Asset '" + _selectedAssets.FirstOrDefault().Name + "' will be encoded.",
            EncodingProcessorsList = Procs,
            EncodingJobName = "Subclipping with reencoding of " + Constants.NameconvInputasset,
            EncodingOutputAssetName = Constants.NameconvInputasset + "- Subclipped with reencoding",
            EncodingAMEStdPresetJSONFilesUserFolder = Properties.Settings.Default.AMEStandardPresetXMLFilesCurrentFolder,
            EncodingAMEStdPresetJSONFilesFolder = Application.StartupPath + Constants.PathAMEStdFiles,
            SelectedAssets = _selectedAssets
        };
        if (form2.ShowDialog() == System.Windows.Forms.DialogResult.OK)
        {
            string taskname = "Subclipping with reencoding of " + Constants.NameconvInputasset + " with " + Constants.NameconvEncodername;
            _mainform.LaunchJobs(form2.EncodingProcessorSelected, _selectedAssets, form2.EncodingJobName, form2.JobOptions.Priority, taskname, form2.EncodingOutputAssetName,
                new List<string>() { form2.EncodingConfiguration }, form2.JobOptions.OutputAssetsCreationOptions, form2.JobOptions.TasksOptionsSetting, form2.JobOptions.StorageSelected);
        }
    }
    else if (subclipConfig.CreateAssetFilter) // create an asset filter
    {
        IAsset selasset = _selectedAssets.FirstOrDefault();
        DynManifestFilter formAF = new DynManifestFilter(_contextdynmanifest, _context, null, selasset, subclipConfig);
        if (formAF.ShowDialog() == DialogResult.OK)
        {
            AssetFilter myassetfilter = new AssetFilter(selasset);
            Filter filter = formAF.GetFilter;
            myassetfilter.Name = filter.Name;
            myassetfilter.PresentationTimeRange = filter.PresentationTimeRange;
            myassetfilter.Tracks = filter.Tracks;
            myassetfilter._context = filter._context;
            try
            {
                myassetfilter.Create();
                _mainform.TextBoxLogWriteLine("Asset filter '{0}' created.", myassetfilter.Name);
            }
            catch (Exception ex)
            {
                _mainform.TextBoxLogWriteLine("Error when creating filter '{0}'.", myassetfilter.Name, true);
                _mainform.TextBoxLogWriteLine(ex);
            }
            _mainform.DoRefreshGridFiltersV(false);
        }
    }
    else // no reencoding and no asset filter: stream copy
    {
        string taskname = "Subclipping of " + Constants.NameconvInputasset + " with " + Constants.NameconvEncodername;
        IMediaProcessor Proc = Mainform.GetLatestMediaProcessorByName(Constants.AzureMediaEncoderStandard);
        _mainform.LaunchJobs(Proc, _selectedAssets, this.EncodingJobName, this.JobOptions.Priority, taskname, this.EncodingOutputAssetName,
            new List<string>() { this.GetSubclippingConfiguration().Configuration }, this.JobOptions.OutputAssetsCreationOptions, this.JobOptions.TasksOptionsSetting, this.JobOptions.StorageSelected);
        MessageBox.Show("Subclipping job(s) submitted", "Subclipping", MessageBoxButtons.OK, MessageBoxIcon.Information);
    }
}
public static async Task <IOperation> ChannelExecuteOperationAsync(Func <TimeSpan, string, Task <IOperation> > fCall, TimeSpan ts, string s, IChannel channel, string strStatusSuccess, CloudMediaContext _context, Mainform mainform, DataGridViewLiveChannel dataGridViewChannelsV = null) //used for all except creation { IOperation operation = null; try { var state = channel.State; var STask = fCall(ts, s); operation = await STask; while (operation.State == OperationState.InProgress) { //refresh the operation operation = _context.Operations.GetOperation(operation.Id); // refresh the channel IChannel channelR = _context.Channels.Where(c => c.Id == channel.Id).FirstOrDefault(); if (channelR != null && state != channelR.State) { state = channelR.State; if (dataGridViewChannelsV != null) { dataGridViewChannelsV.BeginInvoke(new Action(() => dataGridViewChannelsV.RefreshChannel(channelR)), null); } } System.Threading.Thread.Sleep(1000); } if (operation.State == OperationState.Succeeded) { mainform.TextBoxLogWriteLine("Channel '{0}' : {1}.", channel.Name, strStatusSuccess); } else { mainform.TextBoxLogWriteLine("Channel '{0}' NOT {1}. (Error {2})", channel.Name, strStatusSuccess, operation.ErrorCode, true); mainform.TextBoxLogWriteLine("Error message : {0}", operation.ErrorMessage, true); } if (dataGridViewChannelsV != null) { dataGridViewChannelsV.BeginInvoke(new Action(() => dataGridViewChannelsV.RefreshChannel(channel)), null); } } catch (Exception ex) { mainform.TextBoxLogWriteLine("Channel '{0}' : Error! {1}", channel.Name, Program.GetErrorMessage(ex), true); } return(operation); }
public void Initialize(IAsset asset, Mainform main, bool polygonsEnabled, int nbOfRegionsMax, bool croppingMode, string title = null) { myRegionEditor = new RegionEditor(asset, polygonsEnabled, nbOfRegionsMax, croppingMode, title); _asset = asset; _main = main; }
public MediaAnalyticsVideoOCR(CloudMediaContext context, string version, IAsset firstAsset, Mainform main) { InitializeComponent(); this.Icon = Bitmaps.Azure_Explorer_ico; _context = context; _version = version; _firstAsset = firstAsset; buttonRegionEditor.Initialize(_firstAsset, main, false, 8, false); buttonRegionEditor.RegionsChanged += buttonRegionEditor_RegionsChanged; buttonJobOptions.Initialize(_context); }
public void DoJobProgress(JobExtension job) { var tokenSource = new CancellationTokenSource(); var token = tokenSource.Token; _MyListJobsMonitored.Add(job.Job.Name, tokenSource); // to track the task and be able to cancel it later Debug.WriteLine("launch job monitor : " + job.Job.Name); _client.RefreshTokenIfNeeded(); Task.Run(() => { try { Job myJob = null; do { myJob = _client.AMSclient.Jobs.Get(_client.credentialsEntry.ResourceGroup, _client.credentialsEntry.AccountName, job.TransformName, job.Job.Name); if (token.IsCancellationRequested == true) { return; } int index = -1; foreach (JobEntryV3 je in _MyObservJobV3) // let's search for index { if (je.Name == myJob.Name) { index = _MyObservJobV3.IndexOf(je); break; } } if (index >= 0) // we found it { // we update the observation collection var progress = ReturnProgressJob(myJob); _MyObservJobV3[index].Progress = progress.progress; _MyObservJobV3[index].Priority = myJob.Priority; //_MyObservJobV3[index].StartTime = myJob...StartTime.HasValue ? ((DateTime)myJob.StartTime).ToLocalTime().ToString("G") : null; //_MyObservJobV3[index].EndTime = myJob.EndTime.HasValue ? ((DateTime)myJob.EndTime).ToLocalTime().ToString("G") : null; _MyObservJobV3[index].State = myJob.State; /* * // let's calculate the estipated time * string ETAstr = "", Durationstr = ""; * if (progress > 3) * { * DateTime startlocaltime = ((DateTime)myJob.StartTime).ToLocalTime(); * TimeSpan interval = (TimeSpan)(DateTime.Now - startlocaltime); * DateTime ETA = DateTime.Now.AddSeconds((100d / progress - 1d) * interval.TotalSeconds); * TimeSpan estimatedduration = (TimeSpan)(ETA - startlocaltime); * * ETAstr = "Estimated: " + ETA.ToString("G"); * Durationstr = "Estimated: " + estimatedduration.ToString(@"d\.hh\:mm\:ss"); * _MyObservJobV3[index].EndTime = ETA.ToString(@"G") + " ?"; * _MyObservJobV3[index].Duration = myJob.EndTime.HasValue ? 
* ((TimeSpan)((DateTime)myJob.EndTime - (DateTime)myJob.StartTime)).ToString(@"d\.hh\:mm\:ss") * : estimatedduration.ToString(@"d\.hh\:mm\:ss") + " ?"; * } */ int indexdisplayed = -1; foreach (JobEntryV3 je in _MyObservJobV3) // let's search for index in the page { if (je.Name == myJob.Name) { indexdisplayed = _MyObservJobV3.IndexOf(je); try { this.BeginInvoke(new Action(() => { this.Rows[indexdisplayed].Cells[this.Columns["Progress"].Index].ToolTipText = progress.sb.ToString(); // mouse hover info if (progress.progress != 0) { // this.Rows[indexdisplayed].Cells[this.Columns["EndTime"].Index].ToolTipText = ETAstr;// mouse hover info // this.Rows[indexdisplayed].Cells[this.Columns["Duration"].Index].ToolTipText = Durationstr;// mouse hover info } this.Refresh(); })); } catch { } break; } } } if (myJob != null && myJob.State != Microsoft.Azure.Management.Media.Models.JobState.Finished && myJob.State != Microsoft.Azure.Management.Media.Models.JobState.Error && myJob.State != Microsoft.Azure.Management.Media.Models.JobState.Canceled) { Debug.WriteLine("wait for status : " + myJob.Name); Task.Delay(JobRefreshIntervalInMilliseconds).Wait(); } else { break; } }while (myJob.State != Microsoft.Azure.Management.Media.Models.JobState.Finished && myJob.State != Microsoft.Azure.Management.Media.Models.JobState.Error && myJob.State != Microsoft.Azure.Management.Media.Models.JobState.Canceled); // job finished _client.RefreshTokenIfNeeded(); myJob = _client.AMSclient.Jobs.Get(_client.credentialsEntry.ResourceGroup, _client.credentialsEntry.AccountName, job.TransformName, job.Job.Name); int index2 = -1; foreach (JobEntryV3 je in _MyObservJobV3) // let's search for index { if (je.Name == myJob.Name) { index2 = _MyObservJobV3.IndexOf(je); break; } } if (index2 >= 0) // we found it { // we update the observation collection StringBuilder sb2 = new StringBuilder(); // display percentage for each task for mouse hover (tooltiptext) double progress2 = 0; for (int i = 0; i < myJob.Outputs.Count; i++) { JobOutput output = myJob.Outputs[i]; if (output.State == Microsoft.Azure.Management.Media.Models.JobState.Processing) { progress2 += output.Progress; sb2.AppendLine(string.Format("{0} % ({1})", Convert.ToInt32(output.Progress).ToString(), output.Label)); } } if (myJob.Outputs.Count > 0) { progress2 = progress2 / myJob.Outputs.Count; } _MyObservJobV3[index2].Progress = 101d; // progress; we don't want the progress bar to be displayed _MyObservJobV3[index2].Priority = myJob.Priority; _MyObservJobV3[index2].State = myJob.State; if (_MyListJobsMonitored.ContainsKey(myJob.Name)) // we want to display only one time { _MyListJobsMonitored.Remove(myJob.Name); // let's remove from the list of monitored jobs Mainform myform = (Mainform)this.FindForm(); // string status = Enum.GetName(typeof(Microsoft.Azure.Management.Media.Models.JobState), myJob.State).ToLower(); string status = myJob.State.ToString(); myform.BeginInvoke(new Action(() => { myform.Notify(string.Format("Job {0}", status), string.Format("Job {0}", _MyObservJobV3[index2].Name), myJob.State == Microsoft.Azure.Management.Media.Models.JobState.Error); myform.TextBoxLogWriteLine(string.Format("Job '{0}' : {1}.", _MyObservJobV3[index2].Name, status), myJob.State == Microsoft.Azure.Management.Media.Models.JobState.Error); if (myJob.State == Microsoft.Azure.Management.Media.Models.JobState.Error) { foreach (var output in myJob.Outputs) { if (output.Error != null && output.Error.Details != null) { for (int i = 0; i < output.Error.Details.Count(); i++) { 
myform.TextBoxLogWriteLine(string.Format("Output '{0}', Error : {1}", output.Label, output.Error + " : " + output.Error.Message), true); } } } } myform.DoRefreshGridAssetV(false); })); this.BeginInvoke(new Action(() => { this.Refresh(); })); } } } catch (Exception ex) { //MessageBox.Show(Program.GetErrorMessage(e), "Job Monitoring Error"); Debug.WriteLine("error job monitor : " + Program.GetErrorMessage(ex)); } }, token); }