/// <summary>
/// Deinitialize: release the vertex buffers and the render target resources.
/// Call this last, after playback has been stopped.
/// </summary>
public void Deinit()
{
  GUIWindowManager.Receivers -= new SendMessageHandler(this.OnMessage);
  GUILayerManager.UnRegisterLayer(this);

  if (_renderTarget != null)
  {
    // VMR9 changes the DirectX 9 render target, so restore the original one before disposing it.
    if (!_renderTarget.Disposed)
    {
      GUIGraphicsContext.DX9Device.SetRenderTarget(0, _renderTarget);
    }
    _renderTarget.SafeDispose();
    _renderTarget = null;
  }

  for (int i = 0; i < _vertexBuffers.Length; i++)
  {
    if (_vertexBuffers[i] != null)
    {
      _vertexBuffers[i].SafeDispose();
      _vertexBuffers[i] = null;
    }
  }

  if (_blackImage != null)
  {
    _blackImage.SafeDispose();
    _blackImage = null;
  }

  grabber.Clean();
  SubtitleRenderer.GetInstance().Clear();
}
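// For context, a minimal sketch of the initialization that Deinit() above undoes. This is an
// assumption-based illustration, not code from this class: the method name Init(), the
// GUILayerManager.LayerType.Video argument and the GetRenderTarget(0) call are hypothetical;
// only the subscribe/register pattern mirrors what Deinit() releases.
public void Init()
{
  // Subscribe to GUI messages so OnMessage() is called (Deinit() removes this handler).
  GUIWindowManager.Receivers += new SendMessageHandler(this.OnMessage);

  // Register this object as a render layer so RenderLayer() gets invoked each frame
  // (Deinit() calls GUILayerManager.UnRegisterLayer). The layer type is assumed here.
  GUILayerManager.RegisterLayer(this, GUILayerManager.LayerType.Video);

  // Remember the current DirectX 9 render target so Deinit() can restore it after VMR9 replaces it.
  _renderTarget = GUIGraphicsContext.DX9Device.GetRenderTarget(0);
}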
public void RenderLayer(float timePassed)
{
  // Previously only VideoState.VideoPresent was accepted:
  //   if (VideoRendererStatistics.VideoState == VideoRendererStatistics.State.VideoPresent)
  // That caused a black screen until the GUI_MSG_NOTIFY command had finished, which we want to avoid.
  if (VideoRendererStatistics.VideoState == VideoRendererStatistics.State.VideoPresent ||
      VideoRendererStatistics.VideoState == VideoRendererStatistics.State.NoSignal)
  {
    if (GUIGraphicsContext.RenderBlackImage)
    {
      RenderBlackImage(timePassed);
    }

    // Render the video texture
    if (_shouldRenderTexture == false)
    {
      return;
    }
    if (_textureAddress != 0)
    {
      DrawTexture(_textureAddress, _diffuseColor);
    }
  }
  else
  {
    GUIGraphicsContext.RenderBlackImage = true;
    RenderBlackImage(timePassed);
    GUIGraphicsContext.RenderBlackImage = false;
  }

  SubtitleRenderer.GetInstance().Render();
  SubEngine.GetInstance().Render(_subsRect, _destinationRect);
}
/// <summary> Create the used COM components and get the interfaces. </summary>
protected bool GetInterfaces()
{
  VMR9Util.g_vmr9 = null;
  if (IsRadio == false)
  {
    Vmr9 = VMR9Util.g_vmr9 = new VMR9Util();

    // switch back to DirectX fullscreen mode
    Log.Info("RTSPPlayer: Enabling DX9 exclusive mode");
    GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
    GUIWindowManager.SendMessage(msg);
  }

  //Type comtype = null;
  //object comobj = null;

  DsRect rect = new DsRect();
  rect.top = 0;
  rect.bottom = GUIGraphicsContext.form.Height;
  rect.left = 0;
  rect.right = GUIGraphicsContext.form.Width;

  try
  {
    graphBuilder = (IGraphBuilder)new FilterGraph();

    Log.Info("RTSPPlayer: add source filter");
    if (IsRadio == false)
    {
      bool AddVMR9 = VMR9Util.g_vmr9 != null && VMR9Util.g_vmr9.AddVMR9(graphBuilder);
      if (!AddVMR9)
      {
        Log.Error("RTSPPlayer:Failed to add VMR9 to graph");
        return false;
      }
      VMR9Util.g_vmr9.Enable(false);
    }

    _mpegDemux = (IBaseFilter)new MPEG2Demultiplexer();
    graphBuilder.AddFilter(_mpegDemux, "MPEG-2 Demultiplexer");

    _rtspSource = (IBaseFilter)new RtpSourceFilter();
    int hr = graphBuilder.AddFilter((IBaseFilter)_rtspSource, "RTSP Source Filter");
    if (hr != 0)
    {
      Log.Error("RTSPPlayer:unable to add RTSP source filter:{0:X}", hr);
      return false;
    }

    // add preferred video & audio codecs
    Log.Info("RTSPPlayer: add video/audio codecs");
    string strVideoCodec = "";
    string strAudioCodec = "";
    string strAudiorenderer = "";
    int intFilters = 0;      // FlipGer: count custom filters
    string strFilters = "";  // FlipGer: collect custom filters
    string postProcessingFilterSection = "mytv";
    using (Settings xmlreader = new MPSettings())
    {
      if (_mediaType == g_Player.MediaType.Video)
      {
        strVideoCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2videocodec", "");
        strAudioCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2audiocodec", "");
        strAudiorenderer = xmlreader.GetValueAsString("movieplayer", "audiorenderer", "Default DirectSound Device");
        postProcessingFilterSection = "movieplayer";
      }
      else
      {
        strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
        strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
        strAudiorenderer = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
        postProcessingFilterSection = "mytv";
      }
      enableDvbSubtitles = xmlreader.GetValueAsBool("tvservice", "dvbsubtitles", false);

      // FlipGer: load info for custom filters
      int intCount = 0;
      while (xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") != "undefined")
      {
        if (xmlreader.GetValueAsBool(postProcessingFilterSection, "usefilter" + intCount.ToString(), false))
        {
          strFilters += xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") + ";";
          intFilters++;
        }
        intCount++;
      }
    }

    string extension = Path.GetExtension(m_strCurrentFile).ToLowerInvariant();
    if (IsRadio == false)
    {
      if (strVideoCodec.Length > 0)
      {
        DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
      }
    }
    if (strAudioCodec.Length > 0)
    {
      DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
    }

    if (enableDvbSubtitles == true)
    {
      try
      {
        _subtitleFilter = SubtitleRenderer.GetInstance().AddSubtitleFilter(graphBuilder);
        SubtitleRenderer.GetInstance().SetPlayer(this);
        dvbSubRenderer = SubtitleRenderer.GetInstance();
      }
      catch (Exception e)
      {
        Log.Error(e);
      }
    }

    Log.Debug("Is subtitle filter null? {0}", (_subtitleFilter == null));

    // FlipGer: add custom filters to graph
    string[] arrFilters = strFilters.Split(';');
    for (int i = 0; i < intFilters; i++)
    {
      DirectShowUtil.AddFilterToGraph(graphBuilder, arrFilters[i]);
    }

    if (strAudiorenderer.Length > 0)
    {
      audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(graphBuilder, strAudiorenderer, false);
    }

    Log.Info("RTSPPlayer: load:{0}", m_strCurrentFile);
    IFileSourceFilter interfaceFile = (IFileSourceFilter)_rtspSource;
    if (interfaceFile == null)
    {
      Log.Error("RTSPPlayer:Failed to get IFileSourceFilter");
      return false;
    }

    //Log.Info("RTSPPlayer: open file:{0}",filename);
    hr = interfaceFile.Load(m_strCurrentFile, null);
    if (hr != 0)
    {
      Log.Error("RTSPPlayer:Failed to open file:{0} :0x{1:x}", m_strCurrentFile, hr);
      return false;
    }

    #region connect rtspsource->demux

    Log.Info("RTSPPlayer:connect rtspsource->mpeg2 demux");
    IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_rtspSource, PinDirection.Output, 0);
    if (pinTsOut == null)
    {
      Log.Info("RTSPPlayer:failed to find output pin of rtspsource");
      return false;
    }
    IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
    if (pinDemuxIn == null)
    {
      Log.Info("RTSPPlayer:failed to find input pin of mpeg2 demux");
      return false;
    }
    hr = graphBuilder.Connect(pinTsOut, pinDemuxIn);
    if (hr != 0)
    {
      Log.Info("RTSPPlayer:failed to connect rtspsource->mpeg2 demux:{0:X}", hr);
      return false;
    }
    DirectShowUtil.ReleaseComObject(pinTsOut);
    DirectShowUtil.ReleaseComObject(pinDemuxIn);

    #endregion

    #region render demux output pins

    if (IsRadio)
    {
      Log.Info("RTSPPlayer:render audio demux outputs");
      IEnumPins enumPins;
      _mpegDemux.EnumPins(out enumPins);
      IPin[] pins = new IPin[2];
      int fetched = 0;
      while (enumPins.Next(1, pins, out fetched) == 0)
      {
        if (fetched != 1)
        {
          break;
        }
        PinDirection direction;
        pins[0].QueryDirection(out direction);
        if (direction == PinDirection.Input)
        {
          continue;
        }
        IEnumMediaTypes enumMediaTypes;
        pins[0].EnumMediaTypes(out enumMediaTypes);
        AMMediaType[] mediaTypes = new AMMediaType[20];
        int fetchedTypes;
        enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
        for (int i = 0; i < fetchedTypes; ++i)
        {
          if (mediaTypes[i].majorType == MediaType.Audio)
          {
            graphBuilder.Render(pins[0]);
            break;
          }
        }
      }
    }
    else
    {
      Log.Info("RTSPPlayer:render audio/video demux outputs");
      IEnumPins enumPins;
      _mpegDemux.EnumPins(out enumPins);
      IPin[] pins = new IPin[2];
      int fetched = 0;
      while (enumPins.Next(1, pins, out fetched) == 0)
      {
        if (fetched != 1)
        {
          break;
        }
        PinDirection direction;
        pins[0].QueryDirection(out direction);
        if (direction == PinDirection.Input)
        {
          continue;
        }
        graphBuilder.Render(pins[0]);
      }
    }

    #endregion

    // Connect DVB subtitle filter pins in the graph
    if (_mpegDemux != null && enableDvbSubtitles == true)
    {
      IMpeg2Demultiplexer demuxer = _mpegDemux as IMpeg2Demultiplexer;

      hr = demuxer.CreateOutputPin(GetTSMedia(), "Pcr", out _pinPcr);
      if (hr == 0)
      {
        Log.Info("RTSPPlayer:_pinPcr OK");
        IPin pDemuxerPcr = DsFindPin.ByName(_mpegDemux, "Pcr");
        IPin pSubtitlePcr = DsFindPin.ByName(_subtitleFilter, "Pcr");
        hr = graphBuilder.Connect(pDemuxerPcr, pSubtitlePcr);
      }
      else
      {
        Log.Info("RTSPPlayer:Failed to create _pinPcr in demuxer:{0:X}", hr);
      }

      hr = demuxer.CreateOutputPin(GetTSMedia(), "Subtitle", out _pinSubtitle);
      if (hr == 0)
      {
        Log.Info("RTSPPlayer:_pinSubtitle OK");
        IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "Subtitle");
        IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "In");
        hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
      }
      else
      {
        Log.Info("RTSPPlayer:Failed to create _pinSubtitle in demuxer:{0:X}", hr);
      }

      hr = demuxer.CreateOutputPin(GetTSMedia(), "PMT", out _pinPMT);
      if (hr == 0)
      {
        Log.Info("RTSPPlayer:_pinPMT OK");
        IPin pDemuxerPmt = DsFindPin.ByName(_mpegDemux, "PMT");
        IPin pSubtitlePmt = DsFindPin.ByName(_subtitleFilter, "PMT");
        hr = graphBuilder.Connect(pDemuxerPmt, pSubtitlePmt);
      }
      else
      {
        Log.Info("RTSPPlayer:Failed to create _pinPMT in demuxer:{0:X}", hr);
      }
    }

    if (IsRadio == false)
    {
      if (!VMR9Util.g_vmr9.IsVMR9Connected)
      {
        // VMR9 is not supported, switch to overlay
        Log.Info("RTSPPlayer: vmr9 not connected");
        _mediaCtrl = null;
        Cleanup();
        return false;
      }
      VMR9Util.g_vmr9.SetDeinterlaceMode();
    }

    _mediaCtrl = (IMediaControl)graphBuilder;
    mediaEvt = (IMediaEventEx)graphBuilder;
    _mediaSeeking = (IMediaSeeking)graphBuilder;
    mediaPos = (IMediaPosition)graphBuilder;
    basicAudio = graphBuilder as IBasicAudio;

    //DirectShowUtil.SetARMode(graphBuilder,AspectRatioMode.Stretched);
    DirectShowUtil.EnableDeInterlace(graphBuilder);

    if (VMR9Util.g_vmr9 != null)
    {
      m_iVideoWidth = VMR9Util.g_vmr9.VideoWidth;
      m_iVideoHeight = VMR9Util.g_vmr9.VideoHeight;
    }

    if (audioRendererFilter != null)
    {
      Log.Info("RTSPPlayer:set reference clock");
      IMediaFilter mp = graphBuilder as IMediaFilter;
      IReferenceClock clock = audioRendererFilter as IReferenceClock;
      hr = mp.SetSyncSource(null);
      hr = mp.SetSyncSource(clock);
      Log.Info("RTSPPlayer:set reference clock:{0:X}", hr);
    }

    Log.Info("RTSPPlayer: graph built successfully");
    return true;
  }
  catch (Exception ex)
  {
    Error.SetError("Unable to play movie", "Unable to build graph for VMR9");
    Log.Error("RTSPPlayer:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
    CloseInterfaces();
    return false;
  }
}
/// <summary> Create the used COM components and get the interfaces. </summary>
protected override bool GetInterfaces(string filename)
{
  Log.Info("TSReaderPlayer: GetInterfaces()");
  try
  {
    string strAudioRenderer = "";
    int intFilters = 0;      // FlipGer: count custom filters
    string strFilters = "";  // FlipGer: collect custom filters
    LoadMyTvFilterSettings(ref intFilters, ref strFilters, ref strVideoCodec, ref strAudioCodec,
                           ref strAACAudioCodec, ref strDDPLUSAudioCodec, ref strH264VideoCodec,
                           ref strAudioRenderer, ref enableDVBBitmapSubtitles, ref enableDVBTtxtSubtitles,
                           ref relaxTsReader);

    _graphBuilder = (IGraphBuilder)new FilterGraph();
    _rotEntry = new DsROTEntry((IFilterGraph)_graphBuilder);

    if (strAudioRenderer.Length > 0) // audio renderer must be in the graph before the audio switcher
    {
      _audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(_graphBuilder, strAudioRenderer, true);
    }

    #region add AudioSwitcher

    if (enableMPAudioSwitcher) // audio switcher must be in the graph before the TsReader audio change callback
    {
      _audioSwitcherFilter = DirectShowUtil.AddFilterToGraph(_graphBuilder, "MediaPortal AudioSwitcher");
      if (_audioSwitcherFilter == null)
      {
        Log.Error("TSReaderPlayer: Failed to add AudioSwitcher to graph");
      }
    }

    #endregion

    #region add TsReader

    TsReader reader = new TsReader();
    _fileSource = (IBaseFilter)reader;
    _ireader = (ITSReader)reader;
    _interfaceTSReader = _fileSource;
    _ireader.SetRelaxedMode(relaxTsReader); // enable/disable continuity filtering
    _ireader.SetTsReaderCallback(this);
    _ireader.SetRequestAudioChangeCallback(this);

    Log.Info("TSReaderPlayer: Add TsReader to graph");
    int hr = _graphBuilder.AddFilter((IBaseFilter)_fileSource, "TsReader");
    DsError.ThrowExceptionForHR(hr);

    #endregion

    #region load file in TsReader

    IFileSourceFilter interfaceFile = (IFileSourceFilter)_fileSource;
    if (interfaceFile == null)
    {
      Log.Error("TSReaderPlayer: Failed to get IFileSourceFilter");
      Cleanup();
      return false;
    }
    Log.Info("TSReaderPlayer: Open file: {0}", filename);
    hr = interfaceFile.Load(filename, null);
    if (hr != 0)
    {
      Log.Error("TSReaderPlayer: Failed to open file:{0} :0x{1:x}", filename, hr);
      Cleanup();
      return false;
    }

    #endregion

    #region add codecs

    Log.Info("TSReaderPlayer: Add codecs");
    // add preferred video & audio codecs
    MatchFilters("Video");
    MatchFilters("Audio");

    // Does the .ts file contain video? The default _isRadio = false would
    // otherwise prevent recorded radio files from playing.
    if (!_videoFormat.IsValid)
    {
      _isRadio = true;
    }

    if (!_isRadio)
    {
      _vmr9 = new VMR9Util();
      _vmr9.AddVMR9(_graphBuilder);
      _vmr9.Enable(false);
      DirectShowUtil.AddFilterToGraph(_graphBuilder, videoFilter);

      if (enableDVBBitmapSubtitles)
      {
        try
        {
          SubtitleRenderer.GetInstance().AddSubtitleFilter(_graphBuilder);
        }
        catch (Exception e)
        {
          Log.Error(e);
        }
      }
    }
    DirectShowUtil.AddFilterToGraph(_graphBuilder, audioFilter);

    // FlipGer: add custom filters to graph
    string[] arrFilters = strFilters.Split(';');
    for (int i = 0; i < intFilters; i++)
    {
      DirectShowUtil.AddFilterToGraph(_graphBuilder, arrFilters[i]);
    }

    #endregion

    #region PostProcessingEngine Detection

    IPostProcessingEngine postengine = PostProcessingEngine.GetInstance(true);
    if (!postengine.LoadPostProcessing(_graphBuilder))
    {
      PostProcessingEngine.engine = new PostProcessingEngine.DummyEngine();
    }

    #endregion

    #region render TsReader output pins

    Log.Info("TSReaderPlayer: Render TsReader outputs");
    if (_isRadio)
    {
      IEnumPins enumPins;
      hr = _fileSource.EnumPins(out enumPins);
      DsError.ThrowExceptionForHR(hr);
      IPin[] pins = new IPin[1];
      int fetched = 0;
      while (enumPins.Next(1, pins, out fetched) == 0)
      {
        if (fetched != 1)
        {
          break;
        }
        PinDirection direction;
        pins[0].QueryDirection(out direction);
        if (direction == PinDirection.Output)
        {
          IEnumMediaTypes enumMediaTypes;
          pins[0].EnumMediaTypes(out enumMediaTypes);
          AMMediaType[] mediaTypes = new AMMediaType[20];
          int fetchedTypes;
          enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
          for (int i = 0; i < fetchedTypes; ++i)
          {
            if (mediaTypes[i].majorType == MediaType.Audio)
            {
              hr = _graphBuilder.Render(pins[0]);
              DsError.ThrowExceptionForHR(hr);
              break;
            }
          }
        }
        DirectShowUtil.ReleaseComObject(pins[0]);
      }
      DirectShowUtil.ReleaseComObject(enumPins);
    }
    else
    {
      DirectShowUtil.RenderGraphBuilderOutputPins(_graphBuilder, _fileSource);
    }
    DirectShowUtil.RemoveUnusedFiltersFromGraph(_graphBuilder);

    #endregion

    _mediaCtrl = (IMediaControl)_graphBuilder;
    _mediaEvt = (IMediaEventEx)_graphBuilder;
    _mediaSeeking = (IMediaSeeking)_graphBuilder;
    if (_mediaSeeking == null)
    {
      Log.Error("TSReaderPlayer: Unable to get IMediaSeeking interface");
    }

    _audioStream = (IAudioStream)_fileSource;
    if (_audioStream == null)
    {
      Log.Error("TSReaderPlayer: Unable to get IAudioStream interface");
    }
    _audioSelector = new AudioSelector(_audioStream);

    if (!_isRadio)
    {
      if (enableDVBTtxtSubtitles || enableDVBBitmapSubtitles)
      {
        try
        {
          SubtitleRenderer.GetInstance().SetPlayer(this);
          _dvbSubRenderer = SubtitleRenderer.GetInstance();
        }
        catch (Exception e)
        {
          Log.Error(e);
        }
      }

      if (enableDVBBitmapSubtitles)
      {
        _subtitleStream = (ISubtitleStream)_fileSource;
        if (_subtitleStream == null)
        {
          Log.Error("TSReaderPlayer: Unable to get ISubtitleStream interface");
        }
      }

      if (enableDVBTtxtSubtitles)
      {
        //Log.Debug("TSReaderPlayer: Obtaining TeletextSource");
        _teletextSource = (ITeletextSource)_fileSource;
        if (_teletextSource == null)
        {
          Log.Error("TSReaderPlayer: Unable to get ITeletextSource interface");
        }
        Log.Debug("TSReaderPlayer: Creating Teletext Receiver");
        TeletextSubtitleDecoder ttxtDecoder = new TeletextSubtitleDecoder(_dvbSubRenderer);
        _ttxtReceiver = new TeletextReceiver(_teletextSource, ttxtDecoder);
        // Regardless of whether DVB bitmap subs are enabled, the following call is okay:
        // if _subtitleStream is null, the selector simply is not set up for bitmap subs.
        _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, ttxtDecoder);
      }
      else if (enableDVBBitmapSubtitles)
      {
        // If only DVB bitmap subs are enabled, pass null for ttxtDecoder.
        _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, null);
      }
    }

    if (_audioRendererFilter != null)
    {
      //Log.Info("TSReaderPlayer:set reference clock");
      IMediaFilter mp = (IMediaFilter)_graphBuilder;
      IReferenceClock clock = (IReferenceClock)_audioRendererFilter;
      hr = mp.SetSyncSource(null);
      hr = mp.SetSyncSource(clock);
      //Log.Info("TSReaderPlayer:set reference clock:{0:X}", hr);
      _basicAudio = (IBasicAudio)_graphBuilder;
    }

    if (!_isRadio)
    {
      IBaseFilter basefilter;
      _graphBuilder.FindFilterByName("Line 21 Decoder", out basefilter);
      if (basefilter == null)
      {
        _graphBuilder.FindFilterByName("Line21 Decoder", out basefilter);
      }
      if (basefilter == null)
      {
        _graphBuilder.FindFilterByName("Line 21 Decoder 2", out basefilter);
      }
      if (basefilter != null)
      {
        Log.Info("TSReaderPlayer: Line21 Decoder (Closed Captions) in use"); //: {0}", showClosedCaptions);
        _line21Decoder = (IAMLine21Decoder)basefilter;
        if (_line21Decoder != null)
        {
          AMLine21CCState state = AMLine21CCState.Off;
          hr = _line21Decoder.SetServiceState(state);
          if (hr == 0)
          {
            Log.Info("TSReaderPlayer: Closed Captions state change successful");
          }
          else
          {
            Log.Info("TSReaderPlayer: Failed to change Closed Captions state");
          }
        }
      }

      if (!_vmr9.IsVMR9Connected)
      {
        Log.Error("TSReaderPlayer: Failed, vmr9 not connected");
        Cleanup();
        return false;
      }
      DirectShowUtil.EnableDeInterlace(_graphBuilder);
      _vmr9.SetDeinterlaceMode();
    }

    using (MPSettings xmlreader = new MPSettings())
    {
      int lastSubIndex = xmlreader.GetValueAsInt("tvservice", "lastsubtitleindex", 0);
      Log.Debug("TSReaderPlayer: Last subtitle index: {0}", lastSubIndex);
      CurrentSubtitleStream = lastSubIndex;
    }

    return true;
  }
  catch (Exception ex)
  {
    Log.Error("TSReaderPlayer: Exception while creating DShow graph {0}", ex.Message);
    Cleanup();
    return false;
  }
}
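// Hypothetical counterpart to the settings read at the end of GetInterfaces() above: a sketch of
// persisting the selected subtitle stream so it can be restored on the next playback. The section
// and entry names come from the read; the method name SaveSubtitleIndex and the use of
// Settings.SetValue as the write counterpart of GetValueAsInt are assumptions.
private void SaveSubtitleIndex()
{
  using (MPSettings xmlwriter = new MPSettings())
  {
    // Write back the index that GetInterfaces() reads from "tvservice"/"lastsubtitleindex".
    xmlwriter.SetValue("tvservice", "lastsubtitleindex", CurrentSubtitleStream);
  }
}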