private void SetupDownloadedSubtitleAndIMDbInfo(Uri uri, string resultSub, IMDb imdb, object param2)
{
    OnlineSubtitleChoices.Clear();
    if (HasVideo == false)
    {
        return;
    }

    IMDb = imdb;
    if (param2 is List<SubtitleMatch>)
    {
        foreach (var st in (List<SubtitleMatch>)param2)
        {
            OnlineSubtitleChoices.Add(st);
        }
        Main.ShowOsdMessage(string.Format("{0} subtitles found.", OnlineSubtitleChoices.Count));
    }

    FillSubs(uri);

    if (SubtitleStreams.Any(s => s.Path.ToLowerInvariant() == resultSub.ToLowerInvariant()) == false)
    {
        SubtitleStreams.Add(new SubtitleItem(SubtitleItem.SubtitleType.File, SubtitleItem.SubtitleSubType.Srt, resultSub, "Subtitle"));
    }

    var loadSub = (DownloadedSubtitle = SubtitleStreams.FirstOrDefault(s => s.Path.ToLowerInvariant() == resultSub.ToLowerInvariant()));
    ServiceLocator.GetService<IMainView>().DelayedInvoke(() => { SelectedSubtitle = loadSub; }, 200);
}
public override void OnVideoOpened()
{
    foreach (var stream in Player.decoder.demuxer.streams)
    {
        if (stream.Type == FFmpeg.AutoGen.AVMediaType.AVMEDIA_TYPE_AUDIO)
        {
            AudioStreams.Add(new AudioStream()
            {
                DecoderInput = new DecoderInput() { StreamIndex = stream.StreamIndex },
                BitRate = stream.BitRate,
                Language = Language.Get(stream.Language),
                SampleFormat = stream.SampleFormatStr,
                SampleRate = stream.SampleRate,
                Channels = stream.Channels,
                Bits = stream.Bits
            });
        }
        else if (stream.Type == FFmpeg.AutoGen.AVMediaType.AVMEDIA_TYPE_VIDEO)
        {
            VideoStream videoStream = new VideoStream();

            // Reuse the pre-created default entry when this is the stream the decoder has already selected.
            VideoStream ptrVideoStream = stream.StreamIndex == Player.decoder.vDecoder.st->index
                ? defaultVideo
                : videoStream;

            ptrVideoStream.DecoderInput = new DecoderInput() { StreamIndex = stream.StreamIndex };
            ptrVideoStream.BitRate = stream.BitRate;
            ptrVideoStream.Language = Language.Get(stream.Language);
            ptrVideoStream.PixelFormat = stream.PixelFormatStr;
            ptrVideoStream.Width = stream.Width;
            ptrVideoStream.Height = stream.Height;
            ptrVideoStream.FPS = stream.FPS;

            VideoStreams.Add(ptrVideoStream);
        }
        else if (stream.Type == FFmpeg.AutoGen.AVMediaType.AVMEDIA_TYPE_SUBTITLE)
        {
            SubtitleStreams.Add(new SubtitleStream()
            {
                DecoderInput = new DecoderInput() { StreamIndex = stream.StreamIndex },
                BitRate = stream.BitRate,
                Language = Language.Get(stream.Language),
                Downloaded = true,
                Converted = true
            });
        }
    }

    defaultVideo.InUse = true;
}
private SubtitleItem FillSubs(Uri video)
{
    List<SubtitleItem> subs = new List<SubtitleItem>();
    SubtitleStreams.Clear();
    var scMgr = new Subtitles.Subtitles();

    SubtitleStreams.Add(new SubtitleItem(SubtitleItem.SubtitleType.None, SubtitleItem.SubtitleSubType.None, "", "<No Subtitles>"));

    // embedded
    //long wouldLikeToLoadEmbedded = -1;
    //bool loadedEmbeddedSub = (video.LocalPath.ToLowerInvariant().EndsWith("mkv") ||
    //                          video.LocalPath.ToLowerInvariant().EndsWith("mp4")) &&
    //                         (wouldLikeToLoadEmbedded = scMgr.ListEmbeddedSubtitles(video.LocalPath, out subs)) >= 0;
    //foreach (var embeddedSubtitleStream in subs)
    //{
    //    SubtitleStreams.Add(embeddedSubtitleStream);
    //}
    //subs.Clear();

    // TODO would like to load embedded
    if (EmbeddedSubtitleStreams != null && EmbeddedSubtitleStreams.Count > 0)
    {
        ReinsertEmbeddedSubtitlesIntoSubtitleStreams(EmbeddedSubtitleStreams);
    }

    // files
    string wouldLikeToLoadFile = scMgr.LoadSubtitles(video.LocalPath, out subs, null);
    foreach (var fileSub in subs)
    {
        SubtitleStreams.Add(fileSub);
    }

    if (!string.IsNullOrEmpty(wouldLikeToLoadFile))
    {
        return SubtitleStreams.First(f => f.Path == wouldLikeToLoadFile);
    }

    //if (wouldLikeToLoadEmbedded >= 0 && loadedEmbeddedSub)
    //{
    //    return SubtitleStreams.First(e => e.Path == wouldLikeToLoadEmbedded.ToString());
    //}

    return null;
}
/// <summary>
/// Formats the value of the current instance using the specified format.
/// </summary>
/// <returns>
/// The value of the current instance in the specified format.
/// </returns>
/// <param name="format">The format to use, or a null reference (Nothing in Visual Basic) to use
/// the default format defined for the type of the <see cref="T:System.IFormattable"/> implementation.</param>
/// <param name="formatProvider">The provider to use to format the value, or a null reference (Nothing in Visual Basic)
/// to obtain the numeric format information from the current locale setting of the operating system.</param>
/// <filterpriority>2</filterpriority>
public string ToString(string format, IFormatProvider formatProvider)
{
    var result = string.Empty;

    result += $"JobName: {JobName} {Environment.NewLine}";
    result += $"BaseName: {BaseName} {Environment.NewLine}";
    result += $"InputFile: {InputFile} {Environment.NewLine}";
    result += $"InputType: {Input} {Environment.NewLine}";
    result += $"OutputFile: {OutputFile} {Environment.NewLine}";
    result += Environment.NewLine;

    result += $"AudioStreams: {Environment.NewLine}";
    result = AudioStreams.Aggregate(result, (current, item) => current + $"{item} {Environment.NewLine}");
    result += Environment.NewLine;

    result += $"SubtitleStreams: {Environment.NewLine}";
    result = SubtitleStreams.Aggregate(result, (current, item) => current + $"{item} {Environment.NewLine}");
    result += Environment.NewLine;

    var list = new List<string>();
    foreach (var item in Chapters)
    {
        var dt = DateTime.MinValue.Add(item);
        list.Add(dt.ToString("H:mm:ss.fff"));
    }

    result += $"Chapters: {string.Join(",", list.ToArray())} {Environment.NewLine}";
    result += $"NextStep: {NextStep} {Environment.NewLine}";
    result += $"CompletedStep: {CompletedStep} {Environment.NewLine}";
    result += Environment.NewLine;

    result += $"VideoStream: {Environment.NewLine}";
    result += $"{VideoStream} {Environment.NewLine}";
    result += Environment.NewLine;

    result += $"StreamID: {StreamId:0} {Environment.NewLine}";
    result += $"TrackID: {TrackId:0} {Environment.NewLine}";
    result += $"TempInput: {TempInput} {Environment.NewLine}";
    result += $"TempOutput: {TempOutput} {Environment.NewLine}";
    result += $"DumpOutput: {DumpOutput} {Environment.NewLine}";
    result += $"SelectedDVDChapters:{SelectedDvdChapters} {Environment.NewLine}";
    result += $"TempFiles: {string.Join(",", TempFiles.ToArray())} {Environment.NewLine}";
    result += $"ReturnValue: {ExitCode:0} {Environment.NewLine}";

    return result;
}
public void LoadSelectedOnlineSubtitle()
{
    ShowOnlineSubtitles = false;

    BackgroundWorker b = new BackgroundWorker();
    b.DoWork += (sender, args) =>
    {
        DateTime start = DateTime.Now;
        Monitor.Enter(_subtitleSearchLocker);
        try
        {
            args.Result = SubtitleUtil.DownloadSubtitle(SelectedOnlineSubtitle, Source.LocalPath);
        }
        catch (WebException)
        {
            Main.ShowOsdMessage("Failed to download subtitle: Internet connection unavailable");
        }
        catch (Exception)
        {
            Main.ShowOsdMessage("Failed to download subtitle from '" + SelectedOnlineSubtitle.Service + "'");
        }
        finally
        {
            Monitor.Exit(_subtitleSearchLocker);

            // Treat downloads that took longer than 15 seconds as stale and discard the result.
            if (DateTime.Now - start > TimeSpan.FromSeconds(15))
            {
                args.Cancel = true;
            }
        }
    };

    b.RunWorkerCompleted += (sender, args) =>
    {
        if (!args.Cancelled && args.Error == null && args.Result is string && Source != null)
        {
            FillSubs(Source);

            string resultSub = (string)args.Result;
            if (SubtitleStreams.Any(s => s.Path.ToLowerInvariant() == resultSub.ToLowerInvariant()) == false)
            {
                SubtitleStreams.Add(new SubtitleItem(SubtitleItem.SubtitleType.File, SubtitleItem.SubtitleSubType.Srt, resultSub, "Subtitle"));
            }

            var loadSub = (DownloadedSubtitle = SubtitleStreams.FirstOrDefault(s => s.Path.ToLowerInvariant() == resultSub.ToLowerInvariant()));
            ServiceLocator.GetService<IMainView>().DelayedInvoke(() => { SelectedSubtitle = loadSub; }, 200);
        }
    };

    b.RunWorkerAsync();
}
private void ReinsertEmbeddedSubtitlesIntoSubtitleStreams(ObservableCollection<string> value)
{
    for (int i = SubtitleStreams.Count - 1; i >= 0; i--)
    {
        if (SubtitleStreams[i].Type == SubtitleItem.SubtitleType.Embedded)
        {
            SubtitleStreams.RemoveAt(i);
        }
    }

    for (int i = 0; i < value.Count; i++)
    {
        if (value[i].ToLowerInvariant() == "s: no subtitles")
        {
            continue;
        }

        string name = "Embedded: " + value[i].Substring(2);
        SubtitleStreams.Insert(i + 1, new SubtitleItem(SubtitleItem.SubtitleType.Embedded, SubtitleItem.SubtitleSubType.None, value[i], name));
    }
}
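// Illustrative note (not part of the original source): the method above assumes the embedded-subtitle
// descriptions delivered by the player backend start with a two-character type prefix, since it skips the
// literal "s: no subtitles" placeholder and strips the first two characters of every other entry to build
// the display name. A hypothetical input might look like this, with the placeholder dropped and the
// remaining entries inserted as Embedded SubtitleItems near the top of SubtitleStreams:
//
//   ReinsertEmbeddedSubtitlesIntoSubtitleStreams(new ObservableCollection<string>
//   {
//       "S: No subtitles",   // placeholder, skipped
//       "S: English"         // kept, prefix stripped for the display name
//   });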
private void SetBestSubtitles()
{
    if (!Monitor.TryEnter(_subtitleSearchLocker, 0))
    {
        // no point in doing this if we are already downloading subtitles, which will do the same thing effectively
        return;
    }

    SubtitleItem bestSubMatch = null;
    try
    {
        if (!_subtitleIsDownloading && Source != null &&
            SubtitleStreams.Any(s => s.Type == SubtitleItem.SubtitleType.File && File.Exists(s.Path)))
        {
            // if nothing is downloading and we have file subs in the folder
            string lcode = Main.SubtitleLanguages.Count > 0 ? Main.SubtitleLanguages[0].Id : "";
            for (int i = 0; i < Main.SubtitleLanguages.Count; i++)
            {
                // TODO this should care also for EMBEDDED subs
                // Prefer file subtitles named after the video and, when a language is configured,
                // ending with the preferred language code.
                bestSubMatch = SubtitleStreams
                    .Where(s => s.Type == SubtitleItem.SubtitleType.File && File.Exists(s.Path))
                    .OrderBy(f => Path.GetFileNameWithoutExtension(f.Path).ToLowerInvariant().StartsWith(Path.GetFileNameWithoutExtension(Source.LocalPath.ToLowerInvariant())) &&
                                  (lcode == "" || Path.GetFileNameWithoutExtension(f.Path).EndsWith(lcode))
                        ? 0
                        : 1)
                    .FirstOrDefault();

                if (bestSubMatch != null)
                {
                    break;
                }
            }

            ServiceLocator.GetService<IMainView>().DelayedInvoke(() => { SelectedSubtitle = bestSubMatch; }, 200);
            return;
        }
    }
    finally
    {
        Monitor.Exit(_subtitleSearchLocker);
    }

    if (bestSubMatch == null)
    {
        DownloadSubtitleForUriAndQueryIMDB(Source);
    }
}
public SubtitleStream OpenSubtitles(string url)
{
    foreach (var stream in SubtitleStreams)
    {
        if (stream.DecoderInput.Url == url || stream.Tag.ToString() == url)
        {
            return stream;
        }
    }

    SubtitleStreams.Add(new SubtitleStream()
    {
        DecoderInput = new DecoderInput() { Url = url },
        Downloaded = true,
        Tag = url // Use it here because of possible convert to Utf8 and rename
    });

    return SubtitleStreams[SubtitleStreams.Count - 1];
}
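// Illustrative usage (assumed caller code, not part of the original source): opening the same URL a second
// time returns the existing entry rather than adding a duplicate, because the lookup above matches either
// DecoderInput.Url or the Tag stored at creation time (the Tag keeps the original URL even if the file is
// later converted to UTF-8 and renamed). The URL below is hypothetical.
//
//   var first  = OpenSubtitles("https://example.org/movie.en.srt");
//   var second = OpenSubtitles("https://example.org/movie.en.srt");
//   System.Diagnostics.Debug.Assert(ReferenceEquals(first, second));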
public void Search(string filename, string hash, long length, List<Language> Languages)
{
    // Guard against a null result so the AddRange calls below cannot throw.
    List<OpenSubtitlesJson> subs = SearchByHash(hash, length) ?? new List<OpenSubtitlesJson>();

    bool imdbExists = subs.Count > 0 && subs[0].IDMovieImdb != null && subs[0].IDMovieImdb.Trim() != "";
    bool isEpisode = imdbExists &&
                     subs[0].SeriesSeason != null && subs[0].SeriesSeason.Trim() != "" && subs[0].SeriesSeason.Trim() != "0" &&
                     subs[0].SeriesEpisode != null && subs[0].SeriesEpisode.Trim() != "" && subs[0].SeriesEpisode.Trim() != "0";

    foreach (Language lang in Languages)
    {
        if (imdbExists)
        {
            if (isEpisode)
            {
                subs.AddRange(SearchByIMDB(subs[0].IDMovieImdb, lang, subs[0].SeriesSeason, subs[0].SeriesEpisode));
            }
            else
            {
                subs.AddRange(SearchByIMDB(subs[0].IDMovieImdb, lang));
            }
        }

        subs.AddRange(SearchByName(filename, lang));
    }

    // Unique by SubHashes (if any): when two results share a SubHash, keep the one that has a download location.
    List<OpenSubtitlesJson> uniqueList = new List<OpenSubtitlesJson>();
    List<int> removeIds = new List<int>();
    for (int i = 0; i < subs.Count - 1; i++)
    {
        if (removeIds.Contains(i)) { continue; }

        for (int l = i + 1; l < subs.Count; l++)
        {
            if (removeIds.Contains(l)) { continue; }

            if (subs[l].SubHash == subs[i].SubHash)
            {
                if (subs[l].AvailableAt == null)
                {
                    removeIds.Add(l);
                }
                else
                {
                    removeIds.Add(i);
                    break;
                }
            }
        }
    }

    for (int i = 0; i < subs.Count; i++)
    {
        if (!removeIds.Contains(i))
        {
            uniqueList.Add(subs[i]);
        }
    }

    subs.Clear();

    foreach (Language lang in Languages)
    {
        IEnumerable<OpenSubtitlesJson> movieHashRating =
            from sub in uniqueList
            where sub.ISO639 != null && sub.ISO639 == lang.ISO639 && sub.MatchedBy.ToLower() == "moviehash"
            orderby float.Parse(sub.SubRating) descending
            select sub;

        IEnumerable<OpenSubtitlesJson> rating =
            from sub in uniqueList
            where sub.ISO639 != null && sub.ISO639 == lang.ISO639 && sub.MatchedBy.ToLower() != "moviehash"
            orderby float.Parse(sub.SubRating) descending
            select sub;

        foreach (var t1 in movieHashRating) { subs.Add(t1); }
        foreach (var t1 in rating) { subs.Add(t1); }
    }

    foreach (var sub in subs)
    {
        SubtitleStreams.Add(new SubtitleStream()
        {
            UrlName = sub.SubFileName,
            Rating = sub.SubRating,
            Language = Language.Get(sub.ISO639),
            Tag = sub
        });
    }
}
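// A minimal sketch (illustrative and simplified, not behaviour-identical to the loops above) of the
// selection rule Search implements: entries sharing a SubHash collapse to one, preferring an entry with a
// download location (AvailableAt), and the survivors are emitted per requested language with "moviehash"
// matches first, each group sorted by rating descending.
//
//   var unique = subs.GroupBy(s => s.SubHash)
//                    .Select(g => g.FirstOrDefault(s => s.AvailableAt != null) ?? g.First());
//
//   var ordered = Languages.SelectMany(lang =>
//       unique.Where(s => s.ISO639 != null && s.ISO639 == lang.ISO639)
//             .OrderBy(s => s.MatchedBy.ToLower() == "moviehash" ? 0 : 1)
//             .ThenByDescending(s => float.Parse(s.SubRating)));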
/// <summary>
/// Formats the value of the current instance using the specified format.
/// </summary>
/// <returns>
/// The value of the current instance in the specified format.
/// </returns>
/// <param name="format">The format to use, or a null reference (Nothing in Visual Basic) to use
/// the default format defined for the type of the <see cref="T:System.IFormattable"/> implementation.</param>
/// <param name="formatProvider">The provider to use to format the value, or a null reference (Nothing in Visual Basic)
/// to obtain the numeric format information from the current locale setting of the operating system.</param>
/// <filterpriority>2</filterpriority>
public string ToString(string format, IFormatProvider formatProvider)
{
    string result = string.Empty;

    result += string.Format(AppSettings.CInfo, "JobName: {0:s} {1:s}", JobName, Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "BaseName: {0:s} {1:s}", BaseName, Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "InputFile: {0:s} {1:s}", InputFile, Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "InputType: {0:s} {1:s}", Input.ToString(), Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "OutputFile: {0:s} {1:s}", OutputFile, Environment.NewLine);
    result += Environment.NewLine;

    result += string.Format(AppSettings.CInfo, "AudioStreams: {0:s}", Environment.NewLine);
    result = AudioStreams.Aggregate(result, (current, item) => current + string.Format(AppSettings.CInfo, "{0:s} {1:s}", item, Environment.NewLine));
    result += Environment.NewLine;

    result += string.Format(AppSettings.CInfo, "SubtitleStreams: {0:s}", Environment.NewLine);
    result = SubtitleStreams.Aggregate(result, (current, item) => current + string.Format(AppSettings.CInfo, "{0:s} {1:s}", item, Environment.NewLine));
    result += Environment.NewLine;

    result += string.Format(AppSettings.CInfo, "Chapters: {0:s} {1:s}",
                            string.Join(",", (from item in Chapters
                                              select DateTime.MinValue.Add(item) into dt
                                              select dt.ToString("H:mm:ss.fff")).ToArray()),
                            Environment.NewLine);

    result += string.Format(AppSettings.CInfo, "NextStep: {0:s} {1:s}", NextStep.ToString(), Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "CompletedStep: {0:s} {1:s}", CompletedStep.ToString(), Environment.NewLine);
    result += Environment.NewLine;

    result += string.Format(AppSettings.CInfo, "VideoStream: {0:s}", Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "{0:s} {1:s}", VideoStream, Environment.NewLine);
    result += Environment.NewLine;

    result += string.Format(AppSettings.CInfo, "StreamID: {0:g} {1:s}", StreamId, Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "TrackID: {0:g} {1:s}", TrackId, Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "TempInput: {0:s} {1:s}", TempInput, Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "TempOutput: {0:s} {1:s}", TempOutput, Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "DumpOutput: {0:s} {1:s}", DumpOutput, Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "SelectedDVDChapters:{0:s} {1:s}", SelectedDvdChapters, Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "TempFiles: {0:s} {1:s}", string.Join(",", TempFiles.ToArray()), Environment.NewLine);
    result += string.Format(AppSettings.CInfo, "ReturnValue: {0:g} {1:s}", ExitCode, Environment.NewLine);

    return result;
}
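// Illustrative usage (assumed caller code, not part of the original source). This overload satisfies
// IFormattable, but the summary is always built with AppSettings.CInfo regardless of the format and
// formatProvider arguments that are passed in. The variable name below is hypothetical.
//
//   string summary = encodeJob.ToString(null, System.Globalization.CultureInfo.InvariantCulture);
//   Console.WriteLine(summary);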
/// <summary>
/// Opens the media by initializing the DirectShow graph. The graph is wired as follows:
/// the LAV Splitter Source feeds its video pin into LAV Video, whose output goes through the
/// VobSub filter (VID_IN) and on to the EVR video renderer with the custom presenter; the
/// splitter's subtitle pin is connected to VobSub's TXT_IN pin; the audio pin is decoded by
/// LAV Audio and rendered by the selected DirectShow audio output device.
/// </summary>
protected virtual void OpenSource()
{
    _eqEnabled = false;

    //if (m_graph != null)
    //{
    //    //RemoveAllFilters(m_graph);
    //    Marshal.ReleaseComObject(m_graph);
    //}

    /* Make sure we clean up any remaining mess */
    FreeResources();

    if (m_sourceUri == null) { return; }

    string fileSource = m_sourceUri.OriginalString;
    if (string.IsNullOrEmpty(fileSource)) { return; }

    try
    {
        int hr = 0;

        /* Creates the GraphBuilder COM object */
        m_graph = new FilterGraphNoThread() as IGraphBuilder;

        if (_displayControl != null)
        {
            Marshal.ReleaseComObject(_displayControl);
            _displayControl = null;
        }
        if (_displayControlVMR != null)
        {
            Marshal.ReleaseComObject(_displayControlVMR);
            _displayControlVMR = null;
        }

        if (m_graph == null) { throw new Exception("Could not create a graph"); }

        var filterGraph = m_graph as IFilterGraph2;
        var flt = EnumAllFilters(m_graph).ToList();
        if (filterGraph == null) { throw new Exception("Could not QueryInterface for the IFilterGraph2"); }

        /* Add our preferred audio renderer */
        var audioRenderer = InsertAudioRenderer(AudioRenderer);
        if (audioRenderer != null)
        {
            if (_audioRenderer != null) { Marshal.ReleaseComObject(_audioRenderer); }
            _audioRenderer = audioRenderer;
        }

        // The EVR is not available on NT 5.x (2000/XP/2003), so fall back to VMR9 there.
        if ((System.Environment.OSVersion.Platform == PlatformID.Win32NT &&
             (System.Environment.OSVersion.Version.Major == 5)))
        {
            VideoRenderer = VideoRendererType.VideoMixingRenderer9;
        }

        if (_presenterSettings != null) { Marshal.ReleaseComObject(_presenterSettings); }
        if (_renderer != null) { Marshal.ReleaseComObject(_renderer); }

        IBaseFilter renderer = InsertVideoRenderer(VideoRenderer, m_graph, 1);
        _renderer = renderer;

        ILAVAudioSettings lavAudioSettings;
        ILAVAudioStatus lavStatus;
        IBaseFilter audioDecoder = FilterProvider.GetAudioFilter(out lavAudioSettings, out lavStatus);
        if (audioDecoder != null)
        {
            if (_audio != null) { Marshal.ReleaseComObject(_audio); }
            _audio = audioDecoder;
            _audioStatus = lavStatus;
            _audioSettings = lavAudioSettings;

            hr = (int)lavAudioSettings.SetRuntimeConfig(true);
            hr = m_graph.AddFilter((IBaseFilter)audioDecoder, "LavAudio");
            DsError.ThrowExceptionForHR(hr);
#if DEBUG
            hr = (int)lavAudioSettings.SetTrayIcon(true);
#endif
        }

        ILAVSplitterSettings splitterSettings;
        IFileSourceFilter splitter = FilterProvider.GetSplitterSource(out splitterSettings);
        if (splitter != null)
        {
            if (_splitter != null) { Marshal.ReleaseComObject(_splitter); }
            _splitter = splitter;
            _splitterSettings = (ILAVSplitterSettings)splitterSettings;

            hr = splitterSettings.SetRuntimeConfig(true);
            hr = splitter.Load(fileSource, null);
            if (hr != 0) { throw new Exception("Playback of this file is not supported!"); }

            hr = m_graph.AddFilter((IBaseFilter)splitter, "LavSplitter");
            DsError.ThrowExceptionForHR(hr);
        }

        IEnumPins pinEnum;
        hr = ((IBaseFilter)splitter).EnumPins(out pinEnum);
        DsError.ThrowExceptionForHR(hr);

        IntPtr fetched = IntPtr.Zero;
        IPin[] pins = { null };

        if (VideoRenderer == VideoRendererType.VideoMixingRenderer9)
        {
            var mixer = _renderer as IVMRMixerControl9;
            if (mixer != null)
            {
                VMR9MixerPrefs dwPrefs;
                mixer.GetMixingPrefs(out dwPrefs);
                dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                dwPrefs |= VMR9MixerPrefs.RenderTargetRGB;
                mixer.SetMixingPrefs(dwPrefs);
            }
        }

        ILAVVideoSettings lavVideoSettings;
        IBaseFilter lavVideo = FilterProvider.GetVideoFilter(out lavVideoSettings);
        if (lavVideo != null)
        {
            if (_video != null) { Marshal.ReleaseComObject(_video); }
            _video = lavVideo;

            if (lavVideoSettings != null)
            {
                _videoSettings = lavVideoSettings;
                lavVideoSettings.SetRuntimeConfig(true);
                hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_None);

                // check for best acceleration available
                //if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_CUDA) != 0)
                //{
                //    hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_CUDA);
                //    hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                //}
                //else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_QuickSync) != 0)
                //{
                //    hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_QuickSync);
                //    hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                //}
                //else if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2Native) != 0)
                {
                    hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2Native);
                    hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                }
                //else
                //if (lavVideoSettings.CheckHWAccelSupport(LAVHWAccel.HWAccel_DXVA2CopyBack) != 0)
                //{
                //    hr = lavVideoSettings.SetHWAccel(LAVHWAccel.HWAccel_DXVA2CopyBack);
                //    hr = lavVideoSettings.SetHWAccelResolutionFlags(LAVHWResFlag.SD | LAVHWResFlag.HD | LAVHWResFlag.UHD);
                //}
#if DEBUG
                hr = lavVideoSettings.SetTrayIcon(true);
#endif
            }

            hr = m_graph.AddFilter(_video, "LavVideo");
            DsError.ThrowExceptionForHR(hr);
        }

        IBaseFilter vobSub = FilterProvider.GetVobSubFilter();
        if (vobSub != null)
        {
            try
            {
                hr = m_graph.AddFilter(vobSub, "VobSub");
                DsError.ThrowExceptionForHR(hr);

                IDirectVobSub vss = vobSub as IDirectVobSub;
                if (_vobsub != null) { Marshal.ReleaseComObject(_vobsub); }
                _vobsub = vss;
                InitSubSettings();
            }
            catch { }
        }

        hr = m_graph.Connect(DsFindPin.ByName((IBaseFilter)splitter, "Audio"),
                             DsFindPin.ByDirection(_audio, PinDirection.Input, 0));
        HasAudio = hr == 0;

        IBaseFilter dcDsp = FilterProvider.GetDCDSPFilter();
        if (dcDsp != null)
        {
            if (_dspFilter != null) { Marshal.ReleaseComObject(_dspFilter); }
            _dspFilter = (IDCDSPFilterInterface)dcDsp;

            if (HasAudio)
            {
                hr = m_graph.AddFilter((IBaseFilter)_dspFilter, "AudioProcessor");
                hr = _dspFilter.set_EnableBitrateConversionBeforeDSP(true);
                hr = ((IDCDSPFilterVisualInterface)_dspFilter).set_VISafterDSP(true);

                hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_audio, PinDirection.Output, 0),
                                     DsFindPin.ByDirection(_dspFilter, PinDirection.Input, 0));
                DsError.ThrowExceptionForHR(hr);
                hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_dspFilter, PinDirection.Output, 0),
                                     DsFindPin.ByDirection(_audioRenderer, PinDirection.Input, 0));

                var cb = new AudioCallback(this);
                hr = _dspFilter.set_CallBackPCM(cb);

                object intf = null;
                hr = _dspFilter.set_AddFilter(0, TDCFilterType.ftEqualizer);
                hr = _dspFilter.get_FilterInterface(0, out intf);
                _equalizer = (IDCEqualizer)intf;
                _equalizer.set_Seperate(false);
            }
        }
        else
        {
            if (HasAudio)
            {
                hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_audio, PinDirection.Output, 0),
                                     DsFindPin.ByDirection(_audioRenderer, PinDirection.Input, 0));
            }
        }

        bool subconnected = false;

        hr = m_graph.Connect(DsFindPin.ByName((IBaseFilter)_splitter, "Video"),
                             DsFindPin.ByDirection(_video, PinDirection.Input, 0));
        HasVideo = hr == 0;

        if (HasVideo)
        {
            hr = m_graph.Connect(DsFindPin.ByDirection((IBaseFilter)_video, PinDirection.Output, 0),
                                 DsFindPin.ByDirection(vobSub, PinDirection.Input, 0));
            DsError.ThrowExceptionForHR(hr);
            if (hr == 0)
            {
                int lc;
                ((IDirectVobSub)vobSub).get_LanguageCount(out lc);
                subconnected = (lc != 0);

                IPin pn = DsFindPin.ByName((IBaseFilter)splitter, "Subtitle");
                if (pn != null)
                {
                    hr = m_graph.Connect(pn, DsFindPin.ByDirection(vobSub, PinDirection.Input, 1));
                    ((IDirectVobSub)vobSub).get_LanguageCount(out lc);
                    subconnected = (lc != 0);
                }

                hr = m_graph.Connect(DsFindPin.ByDirection(vobSub, PinDirection.Output, 0),
                                     DsFindPin.ByDirection(_renderer, PinDirection.Input, 0));
            }
            else
            {
                if (_vobsub != null) { Marshal.ReleaseComObject(_vobsub); }
                _vobsub = null;
                hr = m_graph.Connect(DsFindPin.ByDirection(_video, PinDirection.Output, 0),
                                     DsFindPin.ByDirection(_renderer, PinDirection.Input, 0));
            }
        }

        /* Loop over each pin of the source filter */
        while (pinEnum.Next(pins.Length, pins, fetched) == 0)
        {
            IPin cTo;
            pins[0].ConnectedTo(out cTo);
            if (cTo == null)
            {
                // this should not happen if the filter graph is manually connected in a good manner
                hr = filterGraph.RenderEx(pins[0], AMRenderExFlags.RenderToExistingRenderers, IntPtr.Zero);
            }
            else
            {
                Marshal.ReleaseComObject(cTo);
            }
            Marshal.ReleaseComObject(pins[0]);
        }
        Marshal.ReleaseComObject(pinEnum);

        var selector = splitter as IAMStreamSelect;
        int numstreams;
        selector.Count(out numstreams);

        AMMediaType mt;
        AMStreamSelectInfoFlags fl;

        SubtitleStreams.Clear();
        VideoStreams.Clear();
        AudioStreams.Clear();

        for (int i = 0; i < numstreams; i++)
        {
            int lcid;
            int group;
            string name;
            object o, o2;
            selector.Info(i, out mt, out fl, out lcid, out group, out name, out o, out o2);

            switch (group)
            {
                case 0:
                    VideoStreams.Add(name);
                    break;
                case 1:
                    AudioStreams.Add(name);
                    break;
                case 2:
                    SubtitleStreams.Add(name);
                    break;
            }

            if (o != null) { Marshal.ReleaseComObject(o); }
            if (o2 != null) { Marshal.ReleaseComObject(o2); }
        }

        OnPropertyChanged("SubtitleStreams");
        OnPropertyChanged("VideoStreams");
        OnPropertyChanged("AudioStreams");

        /* Configure the graph in the base class */
        SetupFilterGraph(m_graph);

#if DEBUG
        /* Adds the GB to the ROT so we can view
         * it in graphedit */
        m_dsRotEntry = new DsROTEntry(m_graph);
#endif

        SIZE a, b;
        if (HasVideo && _displayControl != null && (_displayControl).GetNativeVideoSize(out a, out b) == 0)
        {
            var sz = MediaPlayerBase.GetVideoSize(_renderer, PinDirection.Input, 0);
            if (a.cx > 0 && a.cy > 0)
            {
                SetNativePixelSizes(a);
            }
        }

        if (!subconnected)
        {
            InvokeNoSubtitleLoaded(new EventArgs());
        }
        else
        {
            InitSubSettings();
        }
    }
    catch (Exception ex)
    {
        /* This exception will happen usually if the media does
         * not exist or could not open due to not having the
         * proper filters installed */
        FreeResources();

        /* Fire our failed event */
        InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
    }

    InvokeMediaOpened();
}