/// <summary>
/// Handles the MediaChanging event of the MediaControl.
/// Cycles the stream of type <c>StreamCycleMediaType</c> to the next available one.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The <see cref="MediaOpeningEventArgs"/> instance containing the event data.</param>
private void OnMediaChanging(object sender, MediaOpeningEventArgs e)
{
    // Collect every stream of the media type we are cycling.
    var candidates = e.Info.Streams
        .Where(kvp => kvp.Value.CodecType == (AVMediaType)StreamCycleMediaType)
        .Select(kvp => kvp.Value)
        .ToList();

    if (candidates.Count <= 0)
    {
        return;
    }

    // A null entry means "no stream selected" (i.e. remove the stream).
    // Video always needs a stream, so it never cycles through null.
    if (StreamCycleMediaType != MediaType.Video)
    {
        candidates.Add(null);
    }

    // Locate the currently selected stream within the candidates.
    var selectedIndex = -1;
    switch (StreamCycleMediaType)
    {
        case MediaType.Audio:
            selectedIndex = candidates.IndexOf(e.Options.AudioStream);
            break;
        case MediaType.Video:
            selectedIndex = candidates.IndexOf(e.Options.VideoStream);
            break;
        case MediaType.Subtitle:
            selectedIndex = candidates.IndexOf(e.Options.SubtitleStream);
            break;
    }

    // Advance to the next candidate, wrapping around to the first one.
    // (IndexOf returns -1 when nothing matched, which advances to index 0.)
    selectedIndex = (selectedIndex + 1) % candidates.Count;
    var nextStream = candidates[selectedIndex];

    switch (StreamCycleMediaType)
    {
        case MediaType.Audio:
            e.Options.AudioStream = nextStream;
            break;
        case MediaType.Video:
            e.Options.VideoStream = nextStream;
            break;
        case MediaType.Subtitle:
            e.Options.SubtitleStream = nextStream;
            break;
    }
}
/// <summary>
/// Switches the audio stream of the opening media to the one whose language
/// matches the currently selected language, if it is not already selected.
/// </summary>
/// <param name="e">The <see cref="MediaOpeningEventArgs"/> instance containing the event data.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when no audio stream exists for the selected language abbreviation.
/// </exception>
private void HandleChangeAudioLanguage(MediaOpeningEventArgs e)
{
    // GetSelectedAudio() may yield null (no audio selected yet), in which case
    // the null-conditional leaves currentLang null. The previous code then
    // dereferenced it with ToLower(), throwing a NullReferenceException.
    string currentLang = player.GetSelectedAudio()?.Language;

    // Null-safe, ordinal case-insensitive comparison instead of ToLower():
    // avoids the NRE above and culture-dependent casing surprises.
    if (string.Equals(currentLang, selectedlang, StringComparison.OrdinalIgnoreCase))
    {
        return;
    }

    string langAbbreviation = episode.GetLanguageAbbreviation(selectedlang);
    var newStream = e.Info.Streams.FirstOrDefault(stream => stream.Value.Language == langAbbreviation);

    // Streams is a key/value collection; FirstOrDefault on no match yields a
    // default pair whose Value is null.
    if (newStream.Value == null)
    {
        // A specific exception type is more useful than the base Exception;
        // callers catching Exception still work.
        throw new InvalidOperationException("failed to find audio stream for the language: " + langAbbreviation);
    }

    e.Options.AudioStream = newStream.Value;
}
/// <summary>
/// Handles the MediaOpening event of the Media control.
/// Side-loads SRT subtitles, prefers English audio/subtitle streams, loads a
/// video seek index when available, and configures hardware acceleration and
/// video filters for the selected video stream.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="MediaOpeningEventArgs"/> instance containing the event data.</param>
private void OnMediaOpening(object sender, MediaOpeningEventArgs e)
{
    const string SideLoadAspect = "Client.SideLoad";

    // You can start off by adjusting subtitles delay
    // e.Options.SubtitlesDelay = TimeSpan.FromSeconds(7); // See issue #216

    // Get the local file path from the URL (if possible)
    var mediaFilePath = string.Empty;
    try
    {
        var url = new Uri(e.Info.InputUrl);
        mediaFilePath = url.IsFile || url.IsUnc ? Path.GetFullPath(url.LocalPath) : string.Empty;
    }
    catch { /* Ignore Exceptions */ }

    // Example of automatically side-loading SRT subs
    if (!string.IsNullOrWhiteSpace(mediaFilePath))
    {
        var srtFilePath = Path.ChangeExtension(mediaFilePath, "srt");
        if (File.Exists(srtFilePath))
        {
            e.Options.SubtitlesUrl = srtFilePath;
        }
    }

    // You can force video FPS if necessary
    // see: https://github.com/unosquare/ffmediaelement/issues/212
    // e.Options.VideoForcedFps = 25;

    // An example of specifically selecting a subtitle stream.
    // StartsWith with an explicit ordinal StringComparison (CA1310) replaces the
    // ToLowerInvariant() + culture-sensitive StartsWith combination.
    var subtitleStreams = e.Info.Streams
        .Where(kvp => kvp.Value.CodecType == AVMediaType.AVMEDIA_TYPE_SUBTITLE)
        .Select(kvp => kvp.Value);
    var englishSubtitleStream = subtitleStreams.FirstOrDefault(
        s => s.Language != null && s.Language.StartsWith("en", StringComparison.OrdinalIgnoreCase));
    if (englishSubtitleStream != null)
    {
        e.Options.SubtitleStream = englishSubtitleStream;
    }

    // An example of specifically selecting an audio stream
    var audioStreams = e.Info.Streams
        .Where(kvp => kvp.Value.CodecType == AVMediaType.AVMEDIA_TYPE_AUDIO)
        .Select(kvp => kvp.Value);
    var englishAudioStream = audioStreams.FirstOrDefault(
        s => s.Language != null && s.Language.StartsWith("en", StringComparison.OrdinalIgnoreCase));
    if (englishAudioStream != null)
    {
        e.Options.AudioStream = englishAudioStream;
    }

    // Setting Advanced Video Stream Options is also possible
    // ReSharper disable once InvertIf
    if (e.Options.VideoStream is StreamInfo videoStream)
    {
        // If we have a valid seek index let's use it!
        if (!string.IsNullOrWhiteSpace(mediaFilePath))
        {
            try
            {
                // Try to Create or Load a Seek Index
                var durationSeconds = e.Info.Duration.TotalSeconds > 0 ? e.Info.Duration.TotalSeconds : 0;
                var seekIndex = LoadOrCreateVideoSeekIndex(mediaFilePath, videoStream.StreamIndex, durationSeconds);

                // Make sure the seek index belongs to the media file path
                if (seekIndex != null &&
                    !string.IsNullOrWhiteSpace(seekIndex.SourceUrl) &&
                    seekIndex.SourceUrl.Equals(mediaFilePath) &&
                    seekIndex.StreamIndex == videoStream.StreamIndex)
                {
                    // Set the index on the options object.
                    e.Options.VideoSeekIndex = seekIndex;
                }
            }
            catch (Exception ex)
            {
                // Log the exception, and ignore it. Continue execution.
                Media?.LogError(SideLoadAspect, "Error loading seek index data.", ex);
            }
        }

        // Hardware device priorities
        var deviceCandidates = new[]
        {
            AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
            AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA,
            AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2
        };

        // Hardware device selection
        if (videoStream.FPS <= 30)
        {
            foreach (var deviceType in deviceCandidates)
            {
                var accelerator = videoStream.HardwareDevices.FirstOrDefault(d => d.DeviceType == deviceType);
                if (accelerator == null)
                {
                    continue;
                }

                if (GuiContext.Current.IsInDebugMode)
                {
                    e.Options.VideoHardwareDevice = accelerator;
                }

                break;
            }
        }

        // Start building a video filter
        var videoFilter = new StringBuilder();

        // The yadif filter de-interlaces the video; we check the field order if we need
        // to de-interlace the video automatically
        if (videoStream.IsInterlaced)
        {
            videoFilter.Append("yadif,");
        }

        // Scale down to maximum 1080p screen resolution.
        if (videoStream.PixelHeight > 1080)
        {
            // e.Options.VideoHardwareDevice = null;
            videoFilter.Append("scale=-1:1080,");
        }

        e.Options.VideoFilter = videoFilter.ToString().TrimEnd(',');

        // Since the MediaElement control belongs to a different thread
        // we have to set properties on its UI thread.
        GuiContext.Current.EnqueueInvoke(() =>
        {
            Media.ClosedCaptionsChannel = videoStream.HasClosedCaptions ? CaptionsChannel.CC1 : CaptionsChannel.CCP;
        });
    }

    // e.Options.AudioFilter = "aecho=0.8:0.9:1000:0.3";
    // e.Options.AudioFilter = "chorus=0.5:0.9:50|60|40:0.4|0.32|0.3:0.25|0.4|0.3:2|2.3|1.3";
}
/// <summary>
/// Handles the MediaOpening event of the Media control.
/// Configures playback/renderer options (time sync, buffering, audio sync),
/// side-loads SRT subtitles, prefers English audio/subtitle streams, loads a
/// video seek index when available, and sets up hardware acceleration and
/// video filters for the selected video stream.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="MediaOpeningEventArgs"/> instance containing the event data.</param>
private void OnMediaOpening(object sender, MediaOpeningEventArgs e)
{
    const string SideLoadAspect = "Client.SideLoad";

    // You can start off by adjusting subtitles delay
    // This defaults to 0 but you can delay (or advance with a negative delay)
    // the subtitle timestamps.
    e.Options.SubtitlesDelay = TimeSpan.Zero; // See issue #216

    // You can render audio and video as it becomes available but the downside of disabling time
    // synchronization is that video and audio will run on their own independent clocks.
    // Do not disable Time Sync for streams that need synchronized audio and video.
    // NOTE: URL prefixes are non-linguistic, so compare ordinally (CA1310) instead of
    // relying on the culture-sensitive StartsWith(string) overload.
    e.Options.IsTimeSyncDisabled =
        e.Info.Format == "libndi_newtek" ||
        e.Info.InputUrl.StartsWith("rtsp://uno", StringComparison.Ordinal);

    // You can disable the requirement of buffering packets by setting the playback
    // buffer percent to 0. Values of less than 0.5 for live or network streams are not recommended.
    e.Options.MinimumPlaybackBufferPercent = e.Info.Format == "libndi_newtek" ? 0 : 0.5;

    // File extensions are case-insensitive on Windows; compute the flag once and
    // match ".WMV" as well (the culture-sensitive EndsWith(".wmv") missed it).
    var isWmv = e.Info.InputUrl.EndsWith(".wmv", StringComparison.OrdinalIgnoreCase);

    // The audio renderer will try to keep the audio hardware synchronized
    // to the playback position by default.
    // A few WMV files I have tested don't have continuous enough audio packets to support
    // perfect synchronization between audio and video so we simply disable it.
    // Also if time synchronization is disabled, the recommendation is to also disable audio synchronization.
    Media.RendererOptions.AudioDisableSync = e.Options.IsTimeSyncDisabled || isWmv;

    // Legacy audio out is the use of the WinMM api as opposed to using DirectSound
    // Enable legacy audio out if you are having issues with the DirectSound driver.
    Media.RendererOptions.UseLegacyAudioOut = isWmv;

    // You can limit how often the video renderer updates the picture.
    // We keep it as 0 to refresh the video according to the native stream specification.
    Media.RendererOptions.VideoRefreshRateLimit = 0;

    // Get the local file path from the URL (if possible)
    var mediaFilePath = string.Empty;
    try
    {
        var url = new Uri(e.Info.InputUrl);
        mediaFilePath = url.IsFile || url.IsUnc ? Path.GetFullPath(url.LocalPath) : string.Empty;
    }
    catch { /* Ignore Exceptions */ }

    // Example of automatically side-loading SRT subs
    if (!string.IsNullOrWhiteSpace(mediaFilePath))
    {
        var srtFilePath = Path.ChangeExtension(mediaFilePath, "srt");
        if (File.Exists(srtFilePath))
        {
            e.Options.SubtitlesUrl = srtFilePath;
        }
    }

    // You can also force video FPS if necessary
    // see: https://github.com/unosquare/ffmediaelement/issues/212
    // e.Options.VideoForcedFps = 25;

    // An example of selecting a specific subtitle stream.
    // Ordinal case-insensitive StartsWith replaces ToLowerInvariant() + StartsWith.
    var subtitleStreams = e.Info.Streams
        .Where(kvp => kvp.Value.CodecType == AVMediaType.AVMEDIA_TYPE_SUBTITLE)
        .Select(kvp => kvp.Value);
    var englishSubtitleStream = subtitleStreams.FirstOrDefault(
        s => s.Language != null && s.Language.StartsWith("en", StringComparison.OrdinalIgnoreCase));
    if (englishSubtitleStream != null)
    {
        e.Options.SubtitleStream = englishSubtitleStream;
    }

    // An example of selecting a specific audio stream
    var audioStreams = e.Info.Streams
        .Where(kvp => kvp.Value.CodecType == AVMediaType.AVMEDIA_TYPE_AUDIO)
        .Select(kvp => kvp.Value);
    var englishAudioStream = audioStreams.FirstOrDefault(
        s => s.Language != null && s.Language.StartsWith("en", StringComparison.OrdinalIgnoreCase));
    if (englishAudioStream != null)
    {
        e.Options.AudioStream = englishAudioStream;
    }

    // Setting Advanced Video Stream Options is also possible
    // ReSharper disable once InvertIf
    if (e.Options.VideoStream is StreamInfo videoStream)
    {
        // If we have a valid seek index let's use it!
        if (!string.IsNullOrWhiteSpace(mediaFilePath))
        {
            try
            {
                // Try to Create or Load a Seek Index
                var durationSeconds = e.Info.Duration.TotalSeconds > 0 ? e.Info.Duration.TotalSeconds : 0;
                var seekIndex = LoadOrCreateVideoSeekIndex(mediaFilePath, videoStream.StreamIndex, durationSeconds);

                // Make sure the seek index belongs to the media file path
                if (seekIndex != null &&
                    !string.IsNullOrWhiteSpace(seekIndex.SourceUrl) &&
                    seekIndex.SourceUrl.Equals(mediaFilePath) &&
                    seekIndex.StreamIndex == videoStream.StreamIndex)
                {
                    // Set the index on the options object.
                    e.Options.VideoSeekIndex = seekIndex;
                }
            }
            catch (Exception ex)
            {
                // Log the exception, and ignore it. Continue execution.
                Media?.LogError(SideLoadAspect, "Error loading seek index data.", ex);
            }
        }

        // Hardware device priorities
        var deviceCandidates = new[]
        {
            AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
            AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA,
            AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2
        };

        // Hardware device selection
        if (videoStream.FPS <= 30)
        {
            foreach (var deviceType in deviceCandidates)
            {
                var accelerator = videoStream.HardwareDevices.FirstOrDefault(d => d.DeviceType == deviceType);
                if (accelerator == null)
                {
                    continue;
                }

                // "== true" was redundant; IsInDebugMode is already a boolean.
                if (GuiContext.Current.IsInDebugMode)
                {
                    e.Options.VideoHardwareDevice = accelerator;
                }

                break;
            }
        }

        // Start building a video filter
        var videoFilter = new StringBuilder();

        // The yadif filter de-interlaces the video; we check the field order if we need
        // to de-interlace the video automatically
        if (videoStream.IsInterlaced)
        {
            videoFilter.Append("yadif,");
        }

        // Scale down to maximum 1080p screen resolution.
        if (videoStream.PixelHeight > 1080)
        {
            // e.Options.VideoHardwareDevice = null;
            videoFilter.Append("scale=-1:1080,");
        }

        // Example of fisheye correction filter:
        // videoFilter.Append("lenscorrection=cx=0.5:cy=0.5:k1=-0.85:k2=0.25,")
        e.Options.VideoFilter = videoFilter.ToString().TrimEnd(',');

        // Since the MediaElement control belongs to the GUI thread
        // and the closed captions channel property is a dependency
        // property, we need to set it on the GUI thread.
        GuiContext.Current.EnqueueInvoke(() =>
        {
            Media.ClosedCaptionsChannel = videoStream.HasClosedCaptions ? CaptionsChannel.CC1 : CaptionsChannel.CCP;
        });
    }

    // Examples of setting audio filters.
    // e.Options.AudioFilter = "aecho=0.8:0.9:1000:0.3";
    // e.Options.AudioFilter = "chorus=0.5:0.9:50|60|40:0.4|0.32|0.3:0.25|0.4|0.3:2|2.3|1.3";
    // e.Options.AudioFilter = "aphaser";
}
/// <summary>
/// Handles the MediaOpening event of the Media control.
/// Side-loads SRT subtitles from the input URL, prefers English audio and
/// subtitle streams, and configures hardware acceleration and video filters
/// for the selected video stream.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="MediaOpeningEventArgs"/> instance containing the event data.</param>
private void OnMediaOpening(object sender, MediaOpeningEventArgs e)
{
    // You can start off by adjusting subtitles delay
    // e.Options.SubtitlesDelay = TimeSpan.FromSeconds(7); // See issue #216

    // Example of automatically side-loading SRT subs
    try
    {
        var inputUrl = e.Info.InputUrl;
        var url = new Uri(inputUrl);
        if (url.IsFile || url.IsUnc)
        {
            inputUrl = System.IO.Path.ChangeExtension(url.LocalPath, "srt");
            if (System.IO.File.Exists(inputUrl))
            {
                e.Options.SubtitlesUrl = inputUrl;
            }
        }
    }
    catch { /* Side-loading subtitles is best-effort; ignore bad URLs. */ }

    // You can force video FPS if necessary
    // see: https://github.com/unosquare/ffmediaelement/issues/212
    // e.Options.VideoForcedFps = 25;

    // An example of specifically selecting a subtitle stream
    var subtitleStreams = e.Info.Streams
        .Where(kvp => kvp.Value.CodecType == AVMediaType.AVMEDIA_TYPE_SUBTITLE)
        .Select(kvp => kvp.Value);
    var englishSubtitleStream = subtitleStreams.FirstOrDefault(
        s => s.Language != null && s.Language.ToLowerInvariant().StartsWith("en"));
    if (englishSubtitleStream != null)
    {
        e.Options.SubtitleStream = englishSubtitleStream;
    }

    // An example of specifically selecting an audio stream
    var audioStreams = e.Info.Streams
        .Where(kvp => kvp.Value.CodecType == AVMediaType.AVMEDIA_TYPE_AUDIO)
        .Select(kvp => kvp.Value);
    var englishAudioStream = audioStreams.FirstOrDefault(
        s => s.Language != null && s.Language.ToLowerInvariant().StartsWith("en"));
    if (englishAudioStream != null)
    {
        e.Options.AudioStream = englishAudioStream;
    }

    // Setting Advanced Video Stream Options is also possible
    var videoStream = e.Options.VideoStream;
    if (videoStream != null)
    {
        // Check if the video requires deinterlacing: any known field order
        // other than progressive means the content is interlaced.
        var requiresDeinterlace = videoStream.FieldOrder != AVFieldOrder.AV_FIELD_PROGRESSIVE &&
                                  videoStream.FieldOrder != AVFieldOrder.AV_FIELD_UNKNOWN;

        // Hardware device priorities
        var deviceCandidates = new[]
        {
            AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
            AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA,
            AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2
        };

        // Hardware device selection
        if (videoStream.FPS <= 30)
        {
            foreach (var deviceType in deviceCandidates)
            {
                var accelerator = videoStream.HardwareDevices.FirstOrDefault(d => d.DeviceType == deviceType);
                if (accelerator == null)
                {
                    continue;
                }

                if (GuiContext.Current.IsInDebugMode)
                {
                    e.Options.VideoHardwareDevice = accelerator;
                }

                break;
            }
        }

        // Start building a video filter
        var videoFilter = new StringBuilder();

        // The yadif filter deinterlaces the video; we check the field order if we need
        // to deinterlace the video automatically
        if (requiresDeinterlace)
        {
            videoFilter.Append("yadif,");
        }

        // Scale down to maximum 1080p screen resolution.
        if (videoStream.PixelHeight > 1080)
        {
            // e.Options.VideoHardwareDevice = null;
            videoFilter.Append("scale=-1:1080,");
        }

        e.Options.VideoFilter = videoFilter.ToString().TrimEnd(',');

        // Since the MediaElement control belongs to a different thread
        // we have to set properties on its UI thread.
        GuiContext.Current.EnqueueInvoke(() =>
        {
            Media.ClosedCaptionsChannel = videoStream.HasClosedCaptions ? CaptionsChannel.CC1 : CaptionsChannel.CCP;
        });
    }

    // e.Options.AudioFilter = "aecho=0.8:0.9:1000:0.3";
    // e.Options.AudioFilter = "chorus=0.5:0.9:50|60|40:0.4|0.32|0.3:0.25|0.4|0.3:2|2.3|1.3";
}
/// <summary>
/// Handles the MediaChanging event of the media player by delegating
/// to <see cref="HandleChangeAudioLanguage"/>.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="MediaOpeningEventArgs"/> instance containing the event data.</param>
private void MediaPlayer_MediaChanging(object sender, MediaOpeningEventArgs e) =>
    HandleChangeAudioLanguage(e);
/// <summary>
/// Handles the MediaChanging event of the MediaControl.
/// Cycles the stream of type <c>StreamCycleMediaType</c> to the next available one.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The <see cref="MediaOpeningEventArgs"/> instance containing the event data.</param>
private void OnMediaChanging(object sender, MediaOpeningEventArgs e)
{
    ViewModel.NotificationMessage = "Media is updating . . .";

    var availableStreams = e.Info.Streams
        .Where(s => s.Value.CodecType == (AVMediaType)StreamCycleMediaType)
        .Select(x => x.Value)
        .ToList();

    if (availableStreams.Count <= 0)
    {
        return;
    }

    // Allow cycling through a null stream (means removing the stream)
    // Except for video streams.
    if (StreamCycleMediaType != MediaType.Video)
    {
        availableStreams.Add(null);
    }

    // Find the index of the currently selected stream (-1 when none matches).
    int currentIndex;
    switch (StreamCycleMediaType)
    {
        case MediaType.Audio:
            currentIndex = availableStreams.IndexOf(e.Options.AudioStream);
            break;
        case MediaType.Video:
            currentIndex = availableStreams.IndexOf(e.Options.VideoStream);
            break;
        case MediaType.Subtitle:
            currentIndex = availableStreams.IndexOf(e.Options.SubtitleStream);
            break;
        default:
            return;
    }

    // Advance to the next stream, wrapping to the first.
    // BUG FIX: the Audio case previously wrapped at (Count - 1), which made the
    // trailing null entry unreachable, so the audio stream could never be removed
    // despite the comment above. All cases now use the same wrap bound.
    currentIndex += 1;
    if (currentIndex >= availableStreams.Count)
    {
        currentIndex = 0;
    }

    var newStream = availableStreams[currentIndex];
    switch (StreamCycleMediaType)
    {
        case MediaType.Audio:
            e.Options.AudioStream = newStream;
            break;
        case MediaType.Video:
            e.Options.VideoStream = newStream;
            break;
        case MediaType.Subtitle:
            e.Options.SubtitleStream = newStream;
            break;
    }
}