/// <summary>
/// Verifies IMediaFilter.SetSyncSource/GetSyncSource round-tripping:
/// a system clock can be assigned and read back, and the clock can be
/// cleared again by passing null.
/// </summary>
public void TestSyncSource()
{
    IReferenceClock clock = (IReferenceClock)new SystemClock();
    IReferenceClock current = null;

    // Assign the system clock and confirm the filter reports it back.
    int hr = this.filter.SetSyncSource(clock);
    Marshal.ThrowExceptionForHR(hr);
    hr = this.filter.GetSyncSource(out current);
    Marshal.ThrowExceptionForHR(hr);
    Debug.Assert(current == clock, "IMediaFilter.GetSyncSource");

    // Clear the clock and confirm the filter now reports none.
    hr = this.filter.SetSyncSource(null);
    Marshal.ThrowExceptionForHR(hr);
    hr = this.filter.GetSyncSource(out current);
    Marshal.ThrowExceptionForHR(hr);
    Debug.Assert(current == null, "IMediaFilter.GetSyncSource");

    // SetSyncSource method do an AddRef. Release all instances...
    while (Marshal.ReleaseComObject(clock) > 0)
    {
    }
}
/// <summary>
/// Exercises IFilterGraph3.SetSyncSourceEx by supplying a system clock as
/// both the graph clock and the preferred clock, then releases the COM
/// objects held by this test.
/// </summary>
public void DoTests()
{
    Configure();

    IReferenceClock clock = (IReferenceClock)new SystemClock();

    int hr = m_ifg3.SetSyncSourceEx(clock, clock, m_ibf);
    DsError.ThrowExceptionForHR(hr);

    Marshal.ReleaseComObject(m_ibf);
    Marshal.ReleaseComObject(m_ifg3);
}
/// <summary>
/// Connect to existing filtergraph
/// </summary>
/// <param name="filterGraph">An already-built filter graph this panel attaches to.</param>
public DSGraphEditPanel(IFilterGraph filterGraph)
{
    InitializeComponent();

    // Adopt the caller's filter graph rather than creating a new one.
    _graph = filterGraph;

    // give the filter graph to the DaggerUIGraph
    dsDaggerUIGraph1._Graph = _graph;

    // Initialize items common to all constructors
    Init();

    // get the state (waits up to 100 ms for a pending state transition)
    _mediaControl.GetState(100, out _mediaState);

    // match our state to the existing state
    switch (_mediaState)
    {
        case FilterState.Paused:
            Pause();
            break;
        case FilterState.Running:
            Play();
            break;
        case FilterState.Stopped:
            Stop();
            break;
        default:
            break;
    }

    dsDaggerUIGraph1.SyncGraphs(null);
    dsDaggerUIGraph1.ArrangeNodes(AutoArrangeStyle.All);

    // See if this graph has a reference clock. Guard the cast: the original
    // code dereferenced "filterGraph as IMediaFilter" unconditionally, which
    // throws NullReferenceException for a graph that does not expose
    // IMediaFilter. Such a graph simply counts as having no clock.
    IReferenceClock rc = null;
    IMediaFilter mediaFilter = filterGraph as IMediaFilter;
    if (mediaFilter != null)
    {
        mediaFilter.GetSyncSource(out rc);
    }
    if (rc == null)
    {
        _useReferenceClock = false;
    }
}
/// <summary>
/// Checks IAMClockAdjust.SetClockDelta: after adding a positive delta the
/// clock's reported time must advance by at least that amount.
/// </summary>
private void TestEm()
{
    const int offset = 3000;

    IAMClockAdjust adjuster = new SystemClock() as IAMClockAdjust;
    IReferenceClock clock = adjuster as IReferenceClock;

    long before;
    int hr = clock.GetTime(out before);
    DsError.ThrowExceptionForHR(hr);

    hr = adjuster.SetClockDelta(offset);
    DsError.ThrowExceptionForHR(hr);

    long after;
    hr = clock.GetTime(out after);
    DsError.ThrowExceptionForHR(hr);

    Debug.Assert(after - before >= offset, "SetClockDelta");
}
/// <summary>
/// Create the used COM components and get the interfaces.
/// Builds the RTSP playback graph: FilterGraph + VMR9 (video only), MPEG-2
/// demultiplexer, RTSP source, preferred codecs, optional DVB subtitle
/// filter, user-configured post-processing filters and the audio renderer;
/// then connects source->demux, renders the demux outputs, wires the DVB
/// subtitle pins, and makes the audio renderer the graph reference clock.
/// </summary>
/// <returns>true when the graph was built successfully; false otherwise.</returns>
protected bool GetInterfaces()
{
    VMR9Util.g_vmr9 = null;
    if (IsRadio == false)
    {
        Vmr9 = VMR9Util.g_vmr9 = new VMR9Util();
        // switch back to directx fullscreen mode
        Log.Info("RTSPPlayer: Enabling DX9 exclusive mode");
        GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
        GUIWindowManager.SendMessage(msg);
    }
    //Type comtype = null;
    //object comobj = null;
    // NOTE(review): rect is filled from the window size but never used below — confirm intended.
    DsRect rect = new DsRect();
    rect.top = 0;
    rect.bottom = GUIGraphicsContext.form.Height;
    rect.left = 0;
    rect.right = GUIGraphicsContext.form.Width;
    try
    {
        graphBuilder = (IGraphBuilder) new FilterGraph();
        Log.Info("RTSPPlayer: add source filter");
        if (IsRadio == false)
        {
            bool AddVMR9 = VMR9Util.g_vmr9 != null && VMR9Util.g_vmr9.AddVMR9(graphBuilder);
            if (!AddVMR9)
            {
                Log.Error("RTSPPlayer:Failed to add VMR9 to graph");
                return (false);
            }
            VMR9Util.g_vmr9.Enable(false);
        }
        _mpegDemux = (IBaseFilter) new MPEG2Demultiplexer();
        graphBuilder.AddFilter(_mpegDemux, "MPEG-2 Demultiplexer");
        _rtspSource = (IBaseFilter) new RtpSourceFilter();
        int hr = graphBuilder.AddFilter((IBaseFilter)_rtspSource, "RTSP Source Filter");
        if (hr != 0)
        {
            Log.Error("RTSPPlayer:unable to add RTSP source filter:{0:X}", hr);
            return (false);
        }
        // add preferred video & audio codecs
        Log.Info("RTSPPlayer: add video/audio codecs");
        string strVideoCodec = "";
        string strAudioCodec = "";
        string strAudiorenderer = "";
        int intFilters = 0; // FlipGer: count custom filters
        string strFilters = ""; // FlipGer: collect custom filters
        string postProcessingFilterSection = "mytv";
        // Codec/renderer names come from the movieplayer section for video
        // files and the mytv section otherwise.
        using (Settings xmlreader = new MPSettings())
        {
            if (_mediaType == g_Player.MediaType.Video)
            {
                strVideoCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2videocodec", "");
                strAudioCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2audiocodec", "");
                strAudiorenderer = xmlreader.GetValueAsString("movieplayer", "audiorenderer", "Default DirectSound Device");
                postProcessingFilterSection = "movieplayer";
            }
            else
            {
                strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
                strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
                strAudiorenderer = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
                postProcessingFilterSection = "mytv";
            }
            enableDvbSubtitles = xmlreader.GetValueAsBool("tvservice", "dvbsubtitles", false);
            // FlipGer: load infos for custom filters
            int intCount = 0;
            while (xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") != "undefined")
            {
                if (xmlreader.GetValueAsBool(postProcessingFilterSection, "usefilter" + intCount.ToString(), false))
                {
                    strFilters += xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") + ";";
                    intFilters++;
                }
                intCount++;
            }
        }
        string extension = Path.GetExtension(m_strCurrentFile).ToLowerInvariant();
        if (IsRadio == false)
        {
            if (strVideoCodec.Length > 0)
            {
                DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
            }
        }
        if (strAudioCodec.Length > 0)
        {
            DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
        }
        if (enableDvbSubtitles == true)
        {
            try
            {
                _subtitleFilter = SubtitleRenderer.GetInstance().AddSubtitleFilter(graphBuilder);
                SubtitleRenderer.GetInstance().SetPlayer(this);
                dvbSubRenderer = SubtitleRenderer.GetInstance();
            }
            catch (Exception e)
            {
                Log.Error(e);
            }
        }
        Log.Debug("Is subtitle fitler null? {0}", (_subtitleFilter == null));
        // FlipGer: add custom filters to graph
        string[] arrFilters = strFilters.Split(';');
        for (int i = 0; i < intFilters; i++)
        {
            DirectShowUtil.AddFilterToGraph(graphBuilder, arrFilters[i]);
        }
        if (strAudiorenderer.Length > 0)
        {
            audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(graphBuilder, strAudiorenderer, false);
        }
        Log.Info("RTSPPlayer: load:{0}", m_strCurrentFile);
        IFileSourceFilter interfaceFile = (IFileSourceFilter)_rtspSource;
        if (interfaceFile == null)
        {
            Log.Error("RTSPPlayer:Failed to get IFileSourceFilter");
            return (false);
        }
        //Log.Info("RTSPPlayer: open file:{0}",filename);
        hr = interfaceFile.Load(m_strCurrentFile, null);
        if (hr != 0)
        {
            Log.Error("RTSPPlayer:Failed to open file:{0} :0x{1:x}", m_strCurrentFile, hr);
            return (false);
        }

        #region connect rtspsource->demux

        Log.Info("RTSPPlayer:connect rtspsource->mpeg2 demux");
        IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_rtspSource, PinDirection.Output, 0);
        if (pinTsOut == null)
        {
            Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
            return (false);
        }
        IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
        if (pinDemuxIn == null)
        {
            Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
            return (false);
        }
        hr = graphBuilder.Connect(pinTsOut, pinDemuxIn);
        if (hr != 0)
        {
            Log.Info("RTSPPlayer:failed to connect rtspsource->mpeg2 demux:{0:X}", hr);
            return (false);
        }
        DirectShowUtil.ReleaseComObject(pinTsOut);
        DirectShowUtil.ReleaseComObject(pinDemuxIn);

        #endregion

        #region render demux output pins

        // For radio only audio pins are rendered; otherwise every demux
        // output pin is rendered.
        // NOTE(review): pins[0] and enumPins are not released in these
        // loops — confirm whether that leak is intentional here.
        if (IsRadio)
        {
            Log.Info("RTSPPlayer:render audio demux outputs");
            IEnumPins enumPins;
            _mpegDemux.EnumPins(out enumPins);
            IPin[] pins = new IPin[2];
            int fetched = 0;
            while (enumPins.Next(1, pins, out fetched) == 0)
            {
                if (fetched != 1)
                {
                    break;
                }
                PinDirection direction;
                pins[0].QueryDirection(out direction);
                if (direction == PinDirection.Input)
                {
                    continue;
                }
                IEnumMediaTypes enumMediaTypes;
                pins[0].EnumMediaTypes(out enumMediaTypes);
                AMMediaType[] mediaTypes = new AMMediaType[20];
                int fetchedTypes;
                enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
                for (int i = 0; i < fetchedTypes; ++i)
                {
                    if (mediaTypes[i].majorType == MediaType.Audio)
                    {
                        graphBuilder.Render(pins[0]);
                        break;
                    }
                }
            }
        }
        else
        {
            Log.Info("RTSPPlayer:render audio/video demux outputs");
            IEnumPins enumPins;
            _mpegDemux.EnumPins(out enumPins);
            IPin[] pins = new IPin[2];
            int fetched = 0;
            while (enumPins.Next(1, pins, out fetched) == 0)
            {
                if (fetched != 1)
                {
                    break;
                }
                PinDirection direction;
                pins[0].QueryDirection(out direction);
                if (direction == PinDirection.Input)
                {
                    continue;
                }
                graphBuilder.Render(pins[0]);
            }
        }

        #endregion

        // Connect DVB subtitle filter pins in the graph
        if (_mpegDemux != null && enableDvbSubtitles == true)
        {
            IMpeg2Demultiplexer demuxer = _mpegDemux as IMpeg2Demultiplexer;
            hr = demuxer.CreateOutputPin(GetTSMedia(), "Pcr", out _pinPcr);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinPcr OK");
                IPin pDemuxerPcr = DsFindPin.ByName(_mpegDemux, "Pcr");
                IPin pSubtitlePcr = DsFindPin.ByName(_subtitleFilter, "Pcr");
                hr = graphBuilder.Connect(pDemuxerPcr, pSubtitlePcr);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinPcr in demuxer:{0:X}", hr);
            }
            hr = demuxer.CreateOutputPin(GetTSMedia(), "Subtitle", out _pinSubtitle);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinSubtitle OK");
                IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "Subtitle");
                IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "In");
                hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinSubtitle in demuxer:{0:X}", hr);
            }
            hr = demuxer.CreateOutputPin(GetTSMedia(), "PMT", out _pinPMT);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinPMT OK");
                IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "PMT");
                IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "PMT");
                hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinPMT in demuxer:{0:X}", hr);
            }
        }
        if (IsRadio == false)
        {
            if (!VMR9Util.g_vmr9.IsVMR9Connected)
            {
                //VMR9 is not supported, switch to overlay
                Log.Info("RTSPPlayer: vmr9 not connected");
                _mediaCtrl = null;
                Cleanup();
                return (false);
            }
            VMR9Util.g_vmr9.SetDeinterlaceMode();
        }
        // Cache the control interfaces of the finished graph.
        _mediaCtrl = (IMediaControl)graphBuilder;
        mediaEvt = (IMediaEventEx)graphBuilder;
        _mediaSeeking = (IMediaSeeking)graphBuilder;
        mediaPos = (IMediaPosition)graphBuilder;
        basicAudio = graphBuilder as IBasicAudio;
        //DirectShowUtil.SetARMode(graphBuilder,AspectRatioMode.Stretched);
        DirectShowUtil.EnableDeInterlace(graphBuilder);
        if (VMR9Util.g_vmr9 != null)
        {
            m_iVideoWidth = VMR9Util.g_vmr9.VideoWidth;
            m_iVideoHeight = VMR9Util.g_vmr9.VideoHeight;
        }
        // Use the audio renderer as the graph reference clock when present
        // (clear the old clock first, then assign).
        if (audioRendererFilter != null)
        {
            Log.Info("RTSPPlayer9:set reference clock");
            IMediaFilter mp = graphBuilder as IMediaFilter;
            IReferenceClock clock = audioRendererFilter as IReferenceClock;
            hr = mp.SetSyncSource(null);
            hr = mp.SetSyncSource(clock);
            Log.Info("RTSPPlayer9:set reference clock:{0:X}", hr);
        }
        Log.Info("RTSPPlayer: graph build successfull");
        return (true);
    }
    catch (Exception ex)
    {
        Error.SetError("Unable to play movie", "Unable build graph for VMR9");
        Log.Error("RTSPPlayer:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
        CloseInterfaces();
        return (false);
    }
}
/// <summary>
/// Assigns the given reference clock to every filter in the graph.
/// </summary>
/// <param name="graphBuilder">The graph whose filters are updated. Must not be null.</param>
/// <param name="clock">The clock to assign, or null for no sync source.</param>
/// <exception cref="ArgumentNullException">graphBuilder is null.</exception>
/// <exception cref="COMException">A filter rejected the clock.</exception>
public static void SetSyncSource(IGraphBuilder graphBuilder, IReferenceClock clock)
{
    if (graphBuilder == null)
    {
        throw new ArgumentNullException("graphBuilder");
    }

    IEnumFilters enumFilters = null;
    int hr = graphBuilder.EnumFilters(out enumFilters);
    if (hr == 0)
    {
        try
        {
            IBaseFilter[] filters = new IBaseFilter[1];
            while (enumFilters.Next(filters.Length, filters, IntPtr.Zero) == 0)
            {
                try
                {
                    hr = filters[0].SetSyncSource(clock);
                    DsError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    // Release even when SetSyncSource fails; the original
                    // code leaked both the filter and the enumerator on
                    // the throwing path.
                    Marshal.ReleaseComObject(filters[0]);
                }
            }
        }
        finally
        {
            Marshal.ReleaseComObject(enumFilters);
        }
    }
}
/// <summary>
/// Dispose the resources used by this capture class.
/// Stops the graph first, then tears down the preview window, then releases
/// every COM reference before dropping the managed aliases.
/// </summary>
public void Dispose()
{
    // Stop the graph if it is running (ignore errors)
    this.Stop();

    // Free the preview window (ignore errors)
    if (videoWindow != null)
    {
        videoWindow.put_Visible(OABool.False);
        videoWindow.put_Owner(IntPtr.Zero);
        videoWindow = null;
    }

    // Remove the Resize event handler
    if (this.captureDeviceProperties.PreviewWindow != null)
    {
        this.captureDeviceProperties.PreviewWindow.Resize -= new EventHandler(onPreviewWindowResize);
    }

#if DEBUG
    // Remove graph from the ROT
    if (rotCookie != null)
    {
        rotCookie.Dispose();
        rotCookie = null;
    }
#endif

    // Cleanup: release each COM object and null the field so a second
    // Dispose call is harmless.
    if (graphBuilder != null)
    {
        Marshal.ReleaseComObject(graphBuilder);
        graphBuilder = null;
    }
    if (captureGraphBuilder != null)
    {
        Marshal.ReleaseComObject(captureGraphBuilder);
        captureGraphBuilder = null;
    }
    if (muxFilter != null)
    {
        Marshal.ReleaseComObject(muxFilter);
        muxFilter = null;
    }
    if (fileWriterFilter != null)
    {
        Marshal.ReleaseComObject(fileWriterFilter);
        fileWriterFilter = null;
    }
    if (this.videoStreamConfig != null)
    {
        Marshal.ReleaseComObject(videoStreamConfig);
        videoStreamConfig = null;
    }
    if (this.referenceClock != null)
    {
        Marshal.ReleaseComObject(referenceClock);
        referenceClock = null;
    }
    if (this.VideoDeviceFilter != null)
    {
        Marshal.ReleaseComObject(VideoDeviceFilter);
        VideoDeviceFilter = null;
    }
    if (this.AudioDeviceFilter != null)
    {
        Marshal.ReleaseComObject(AudioDeviceFilter);
        AudioDeviceFilter = null;
    }
    if (this.VideoCompressorFilter != null)
    {
        Marshal.ReleaseComObject(VideoCompressorFilter);
        VideoCompressorFilter = null;
    }
    if (this.AudioCompressorFilter != null)
    {
        Marshal.ReleaseComObject(AudioCompressorFilter);
        AudioCompressorFilter = null;
    }

    // These are copies of graphBuilder
    mediaControl = null;
    videoWindow = null;
    this.HasValidGraph = false;

    // For unmanaged objects we haven't released explicitly
    GC.Collect();
}
/// <summary>
/// Returns the clock previously stored by SetSyncSource (may be null).
/// Always succeeds.
/// </summary>
public virtual int GetSyncSource(out IReferenceClock pClock)
{
    // Hand back whatever clock is currently assigned.
    pClock = _Clock;
    return (int)HRESULT.S_OK;
}
/// <summary>
/// Stores the supplied clock (null clears it). Always succeeds.
/// </summary>
public virtual int SetSyncSource(IReferenceClock pClock)
{
    // Remember the clock for later GetSyncSource calls.
    _Clock = pClock;
    return (int)HRESULT.S_OK;
}
/// <summary>
/// Create the used COM components and get the interfaces.
/// Builds the TsReader playback graph: audio renderer, optional audio
/// switcher, TsReader source, preferred codecs, optional DVB subtitle and
/// teletext handling; then renders the TsReader outputs, caches the control
/// interfaces, assigns the audio renderer as reference clock and disables
/// Line 21 closed captions.
/// </summary>
/// <param name="filename">The .ts file (or timeshift buffer) to open.</param>
/// <returns>true when the graph was built successfully; false otherwise.</returns>
protected override bool GetInterfaces(string filename)
{
    Log.Info("TSReaderPlayer: GetInterfaces()");
    try
    {
        string strAudioRenderer = "";
        int intFilters = 0; // FlipGer: count custom filters
        string strFilters = ""; // FlipGer: collect custom filters
        LoadMyTvFilterSettings(ref intFilters, ref strFilters, ref strVideoCodec, ref strAudioCodec, ref strAACAudioCodec, ref strDDPLUSAudioCodec, ref strH264VideoCodec, ref strAudioRenderer, ref enableDVBBitmapSubtitles, ref enableDVBTtxtSubtitles, ref relaxTsReader);
        _graphBuilder = (IGraphBuilder) new FilterGraph();
        _rotEntry = new DsROTEntry((IFilterGraph)_graphBuilder);
        if (strAudioRenderer.Length > 0) //audio renderer must be in graph before audio switcher
        {
            _audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(_graphBuilder, strAudioRenderer, true);
        }

        #region add AudioSwitcher

        if (enableMPAudioSwitcher) //audio switcher must be in graph before tsreader audiochangecallback
        {
            _audioSwitcherFilter = DirectShowUtil.AddFilterToGraph(_graphBuilder, "MediaPortal AudioSwitcher");
            if (_audioSwitcherFilter == null)
            {
                Log.Error("TSReaderPlayer: Failed to add AudioSwitcher to graph");
            }
        }

        #endregion

        #region add TsReader

        TsReader reader = new TsReader();
        _fileSource = (IBaseFilter)reader;
        _ireader = (ITSReader)reader;
        _interfaceTSReader = _fileSource;
        _ireader.SetRelaxedMode(relaxTsReader); // enable/disable continousity filtering
        _ireader.SetTsReaderCallback(this);
        _ireader.SetRequestAudioChangeCallback(this);
        Log.Info("TSReaderPlayer: Add TsReader to graph");
        int hr = _graphBuilder.AddFilter((IBaseFilter)_fileSource, "TsReader");
        DsError.ThrowExceptionForHR(hr);

        #endregion

        #region load file in TsReader

        IFileSourceFilter interfaceFile = (IFileSourceFilter)_fileSource;
        if (interfaceFile == null)
        {
            Log.Error("TSReaderPlayer: Failed to get IFileSourceFilter");
            Cleanup();
            return (false);
        }
        Log.Info("TSReaderPlayer: Open file: {0}", filename);
        hr = interfaceFile.Load(filename, null);
        if (hr != 0)
        {
            Log.Error("TSReaderPlayer: Failed to open file:{0} :0x{1:x}", filename, hr);
            Cleanup();
            return (false);
        }

        #endregion

        #region add codecs

        Log.Info("TSReaderPlayer: Add codecs");
        // add preferred video & audio codecs
        MatchFilters("Video");
        MatchFilters("Audio");
        // does .ts file contain video?
        // default is _isRadio=false which prevents recorded radio file playing
        if (!_videoFormat.IsValid)
        {
            _isRadio = true;
        }
        if (!_isRadio)
        {
            _vmr9 = new VMR9Util();
            _vmr9.AddVMR9(_graphBuilder);
            _vmr9.Enable(false);
            DirectShowUtil.AddFilterToGraph(_graphBuilder, videoFilter);
            if (enableDVBBitmapSubtitles)
            {
                try
                {
                    SubtitleRenderer.GetInstance().AddSubtitleFilter(_graphBuilder);
                }
                catch (Exception e)
                {
                    Log.Error(e);
                }
            }
        }
        DirectShowUtil.AddFilterToGraph(_graphBuilder, audioFilter);
        // FlipGer: add custom filters to graph
        string[] arrFilters = strFilters.Split(';');
        for (int i = 0; i < intFilters; i++)
        {
            DirectShowUtil.AddFilterToGraph(_graphBuilder, arrFilters[i]);
        }

        #endregion

        #region PostProcessingEngine Detection

        IPostProcessingEngine postengine = PostProcessingEngine.GetInstance(true);
        if (!postengine.LoadPostProcessing(_graphBuilder))
        {
            PostProcessingEngine.engine = new PostProcessingEngine.DummyEngine();
        }

        #endregion

        #region render TsReader output pins

        Log.Info("TSReaderPlayer: Render TsReader outputs");
        // Radio: render only audio output pins; otherwise let the helper
        // render all graph-builder output pins.
        if (_isRadio)
        {
            IEnumPins enumPins;
            hr = _fileSource.EnumPins(out enumPins);
            DsError.ThrowExceptionForHR(hr);
            IPin[] pins = new IPin[1];
            int fetched = 0;
            while (enumPins.Next(1, pins, out fetched) == 0)
            {
                if (fetched != 1)
                {
                    break;
                }
                PinDirection direction;
                pins[0].QueryDirection(out direction);
                if (direction == PinDirection.Output)
                {
                    IEnumMediaTypes enumMediaTypes;
                    pins[0].EnumMediaTypes(out enumMediaTypes);
                    AMMediaType[] mediaTypes = new AMMediaType[20];
                    int fetchedTypes;
                    enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
                    for (int i = 0; i < fetchedTypes; ++i)
                    {
                        if (mediaTypes[i].majorType == MediaType.Audio)
                        {
                            hr = _graphBuilder.Render(pins[0]);
                            DsError.ThrowExceptionForHR(hr);
                            break;
                        }
                    }
                }
                DirectShowUtil.ReleaseComObject(pins[0]);
            }
            DirectShowUtil.ReleaseComObject(enumPins);
        }
        else
        {
            DirectShowUtil.RenderGraphBuilderOutputPins(_graphBuilder, _fileSource);
        }
        DirectShowUtil.RemoveUnusedFiltersFromGraph(_graphBuilder);

        #endregion

        // Cache the control interfaces of the finished graph.
        _mediaCtrl = (IMediaControl)_graphBuilder;
        _mediaEvt = (IMediaEventEx)_graphBuilder;
        _mediaSeeking = (IMediaSeeking)_graphBuilder;
        if (_mediaSeeking == null)
        {
            Log.Error("TSReaderPlayer: Unable to get IMediaSeeking interface");
        }
        _audioStream = (IAudioStream)_fileSource;
        if (_audioStream == null)
        {
            Log.Error("TSReaderPlayer: Unable to get IAudioStream interface");
        }
        _audioSelector = new AudioSelector(_audioStream);
        if (!_isRadio)
        {
            if (enableDVBTtxtSubtitles || enableDVBBitmapSubtitles)
            {
                try
                {
                    SubtitleRenderer.GetInstance().SetPlayer(this);
                    _dvbSubRenderer = SubtitleRenderer.GetInstance();
                }
                catch (Exception e)
                {
                    Log.Error(e);
                }
            }
            if (enableDVBBitmapSubtitles)
            {
                _subtitleStream = (ISubtitleStream)_fileSource;
                if (_subtitleStream == null)
                {
                    Log.Error("TSReaderPlayer: Unable to get ISubtitleStream interface");
                }
            }
            if (enableDVBTtxtSubtitles)
            {
                //Log.Debug("TSReaderPlayer: Obtaining TeletextSource");
                _teletextSource = (ITeletextSource)_fileSource;
                if (_teletextSource == null)
                {
                    Log.Error("TSReaderPlayer: Unable to get ITeletextSource interface");
                }
                Log.Debug("TSReaderPlayer: Creating Teletext Receiver");
                TeletextSubtitleDecoder ttxtDecoder = new TeletextSubtitleDecoder(_dvbSubRenderer);
                _ttxtReceiver = new TeletextReceiver(_teletextSource, ttxtDecoder);
                // regardless of whether dvb subs are enabled, the following call is okay
                // if _subtitleStream is null the subtitle will just not setup for bitmap subs
                _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, ttxtDecoder);
            }
            else if (enableDVBBitmapSubtitles)
            {
                // if only dvb subs are enabled, pass null for ttxtDecoder
                _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, null);
            }
        }
        // Make the audio renderer the graph reference clock (clear first,
        // then assign).
        if (_audioRendererFilter != null)
        {
            //Log.Info("TSReaderPlayer:set reference clock");
            IMediaFilter mp = (IMediaFilter)_graphBuilder;
            IReferenceClock clock = (IReferenceClock)_audioRendererFilter;
            hr = mp.SetSyncSource(null);
            hr = mp.SetSyncSource(clock);
            //Log.Info("TSReaderPlayer:set reference clock:{0:X}", hr);
            _basicAudio = (IBasicAudio)_graphBuilder;
        }
        if (!_isRadio)
        {
            // Look for a Line 21 decoder under its known filter names and
            // switch closed captions off.
            IBaseFilter basefilter;
            _graphBuilder.FindFilterByName("Line 21 Decoder", out basefilter);
            if (basefilter == null)
            {
                _graphBuilder.FindFilterByName("Line21 Decoder", out basefilter);
            }
            if (basefilter == null)
            {
                _graphBuilder.FindFilterByName("Line 21 Decoder 2", out basefilter);
            }
            if (basefilter != null)
            {
                Log.Info("TSreaderPlayer: Line21 Decoder (Closed Captions), in use"); //: {0}", showClosedCaptions);
                _line21Decoder = (IAMLine21Decoder)basefilter;
                if (_line21Decoder != null)
                {
                    AMLine21CCState state = AMLine21CCState.Off;
                    hr = _line21Decoder.SetServiceState(state);
                    if (hr == 0)
                    {
                        Log.Info("TSReaderPlayer: Closed Captions state change successful");
                    }
                    else
                    {
                        Log.Info("TSReaderPlayer: Failed to change Closed Captions state");
                    }
                }
            }
            if (!_vmr9.IsVMR9Connected)
            {
                Log.Error("TSReaderPlayer: Failed vmr9 not connected");
                Cleanup();
                return (false);
            }
            DirectShowUtil.EnableDeInterlace(_graphBuilder);
            _vmr9.SetDeinterlaceMode();
        }
        // Restore the subtitle stream the user last selected.
        using (MPSettings xmlreader = new MPSettings())
        {
            int lastSubIndex = xmlreader.GetValueAsInt("tvservice", "lastsubtitleindex", 0);
            Log.Debug("TSReaderPlayer: Last subtitle index: {0}", lastSubIndex);
            CurrentSubtitleStream = lastSubIndex;
        }
        return (true);
    }
    catch (Exception ex)
    {
        Log.Error("TSReaderPlayer: Exception while creating DShow graph {0}", ex.Message);
        Cleanup();
        return (false);
    }
}
/// <summary>
/// Event that is raised after a DaggerNode has been created and associated to the UI element.
/// Wires up the UI for the underlying DirectShow filter: creates an internal
/// video window for IVideoWindow or EVR filters, marks DMO wrappers, and
/// shows/hides the clock and video-window caption buttons.
/// </summary>
/// <param name="node">The DaggerNode that was attached to this UI element.</param>
void DSFilterNodeUI_DaggerNodeAttached(DaggerLib.Core.DaggerNode node)
{
    _dsfilternode = (DSFilterNode)node;
    CaptionText = node.ToString();

    // hook the AfterNodeRemoved event to dispose of any directshow interfaces
    node.AfterNodeRemoved += new DaggerLib.Core.AfterNodeRemoveHandler(node_AfterNodeRemoved);

    // get the IBaseFilter from the DSFilterNode
    IBaseFilter filter = _dsfilternode._filter;

    // only grab the video window or EVR if it was manually added to the graph via the UI
    if (_dsfilternode._manualAdded || (_dsfilternode.ParentGraph.ParentUIGraph as DSDaggerUIGraph)._filterGraphCreated)
    {
        // if it supports IVideoWindow create a VideoInternalWindow for it
        IVideoWindow vw = filter as IVideoWindow;
        if (vw != null)
        {
            try
            {
                _videoWindow = new VideoInternalWindow(CaptionText, filter);
                _videoWindow.Dock = DockStyle.Fill;
                _videoWindow.Visible = true;
                InternalControl.Controls.Add(_videoWindow);
                // only nodes with video windows are resizeable
                Resizable = true;
                // hook the connection events to init/deinit the video window
                node.ParentGraph.AfterPinsConnected += new DaggerLib.Core.PinAfterConnectedHandler(ParentGraph_AfterPinsConnected);
            }
            catch (Exception ex)
            {
#if DEBUG
                MessageBox.Show(ex.Message);
#endif
                _videoWindow = null;
            }
        }

        // if it's an Enhaced Video Renderer create a VideoInternalWindow for it
        // (see docs for Windows Media Foundation)
        IMFGetService mfgs = filter as IMFGetService;
        if (mfgs != null)
        {
            // this is a video horse of a different color
            // create a video clipping window for the Media Foundation Enhanced Video Renderer
            try
            {
                // get the IMFVideoDisplayControl for the EVR filter
                object o = null;
                mfgs.GetService(MediaFoundation.MFServices.MR_VIDEO_RENDER_SERVICE, typeof(IMFVideoDisplayControl).GUID, out o);
                m_pVideoDisplay = o as IMFVideoDisplayControl;
                // if the Video Size is 0,0 the EVR hasn't been initialized/connected yet
                MediaFoundation.Misc.SIZE videoSize = new MediaFoundation.Misc.SIZE();
                MediaFoundation.Misc.SIZE ar = new MediaFoundation.Misc.SIZE();
                m_pVideoDisplay.GetNativeVideoSize(videoSize, ar);
                if (videoSize.cx == 0 && videoSize.cy == 0)
                {
                    // You only get one chance to set the number of pins in an EVR filter.
                    PinsComboBoxForm pcf = new PinsComboBoxForm();
                    if (pcf.ShowDialog() == DialogResult.OK)
                    {
                        (filter as IEVRFilterConfig).SetNumberOfStreams(pcf.Value);
                    }
                    pcf.Dispose();
                }
                _videoWindow = new VideoInternalWindow(CaptionText, m_pVideoDisplay);
                _videoWindow.Dock = DockStyle.Fill;
                _videoWindow.Visible = true;
                InternalControl.Controls.Add(_videoWindow);
                // only nodes with video windows are resizeable
                Resizable = true;
                // hook the connection events to init/deinit the video window
                node.ParentGraph.AfterPinsConnected += new DaggerLib.Core.PinAfterConnectedHandler(ParentGraph_AfterPinsConnected);
            }
            catch (InvalidCastException)
            {
                m_pVideoDisplay = null;
            }
        }
    }

    // if it's a DMO, create the DMO properties page for it
    if ((filter as IDMOWrapperFilter) != null)
    {
        // set the caption to show it's a DMO
        CaptionText = "DMO - " + CaptionText;
        CaptionColor = Color.Green;
        CaptionColorUnfocused = Color.LightGreen;
    }

    // remove clock button if it doesn't support IReferenceClock
    _referenceClock = filter as IReferenceClock;
    if (_referenceClock == null)
    {
        CaptionButtons.RemoveAt(CaptionButtons.AllButtons.IndexOf(_clockButton));
    }
    else
    {
        // see if this filter is the reference clock for the graph
        IReferenceClock graphClock = null;
        filter.GetSyncSource(out graphClock);
        _clockButton.Tag = false;
        _clockButton.MouseOutsideTint = Color.DarkGray;
        if (graphClock != null)
        {
            // NOTE(review): reference-equality between RCWs may miss identity
            // when the interfaces were marshaled separately — confirm this
            // reliably detects "this filter is the graph clock".
            if (graphClock == _referenceClock)
            {
                _clockButton.MouseOutsideTint = Color.Yellow;
                _clockButton.Tag = true;
            }
            Marshal.ReleaseComObject(graphClock);
        }
    }

    // remove video window button if it's not a video window
    if (_videoWindow == null)
    {
        CaptionButtons.RemoveAt(CaptionButtons.AllButtons.IndexOf(_detachVideoWindowButton));
    }

    // Sync the pins to the Pin Property Pages
    SyncPinPropertyPages(null);

    // set it to the smallest possible size. DaggerLib uses InternalControlMinimumSize
    // to prevent the UI node from being smaller than designated
    this.Size = new Size(1, 1);
}
/// <summary>
/// Stores the reference clock supplied for this filter (null clears it).
/// Always succeeds.
/// </summary>
public int SetSyncSource(IReferenceClock pClock)
{
    // Keep the clock so GetSyncSource can report it later.
    syncSource = pClock;
    return S_OK;
}
/// <summary>
/// Reports the clock previously stored by SetSyncSource (may be null).
/// Always succeeds.
/// </summary>
public int GetSyncSource(out IReferenceClock pClock)
{
    // Return the stored clock unchanged.
    pClock = syncSource;
    return S_OK;
}
/// <summary>
/// Builds and starts the video-capture graph: reads size/fps/tuner settings
/// from the INI, locates capture/audio/render devices, wires preview or
/// capture pins, configures the TV tuner and reference clock, then runs the
/// graph. Any exception aborts the dialog and exits the application.
/// </summary>
public void CaptureVideo()
{
    int hr = 0;
    try
    {
        // NOTE(review): width/Height are read from the INI but the format
        // override that used them is commented out below — confirm intended.
        int width = INI.Default["DirectShow Player"]["Video Player/Device Source Width", "352"].Integer;
        int Height = INI.Default["DirectShow Player"]["Video Player/Device Source Height", "240"].Integer;
        int fps = INI.Default["DirectShow Player"]["Video Player/Frames Per Second (0 unlimited)", "30"].Integer;
        bool antenna_input = INI.Default[Options.ProgramName]["Video Player/Capture Tuner from antenna", "true"].Bool;
        bool capture_TV = INI.Default[Options.ProgramName]["Video Player/Capture Tuner", "true"].Bool;
        bool capture_is_audio = INI.Default[Options.ProgramName]["Video Player/Capture Video is Audio also", "true"].Bool;
        IPin cap_pin = null;
        IPin cap_audio_pin;
        crossbar_to_tuner = capture_TV;

        // Get DirectShow interfaces
        GetInterfaces();

        // Attach the filter graph to the capture graph
        hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
        DsError.ThrowExceptionForHR(hr);

        // Use the system device enumerator and class enumerator to find
        // a video capture/preview device, such as a desktop USB video camera.
        sourceFilter = this.DeviceFinder.FindVideoCaptureDevice(false);
        if (sourceFilter == null)
        {
            Network.SendStatus(channel, volume, false);
            return;
        }
#if use_bda
        bda_filter = FindVideoCaptureDevice(true);
#endif
        // The capture device itself doubles as the audio source unless a
        // separate audio capture device is configured.
        if (!capture_is_audio)
        {
            sourceAudioFilter = this.DeviceFinder.FindAudioCaptureDevice();
        }
        else
        {
            sourceAudioFilter = sourceFilter;
        }
        //reclock_video_filter = FindVideoRenderDevice();
        //scale_filter = FindVideoScaleDevice();
        reclock_filter = this.DeviceFinder.FindAudioRenderDevice();
        // Force the analog decoder to NTSC-M when it reports another standard.
        IAMAnalogVideoDecoder decoder = sourceFilter as IAMAnalogVideoDecoder;
        if (decoder != null)
        {
            AnalogVideoStandard oldStandard;
            decoder.get_TVFormat(out oldStandard);
            if (oldStandard != AnalogVideoStandard.NTSC_M)
            {
                decoder.put_TVFormat(AnalogVideoStandard.NTSC_M);
            }
            decoder = null;
        }
        // this is really for which input - the tuner we shouldn't adjust
        //if( !capture_TV )
        // Add Capture filter to our graph.
        hr = this.graphBuilder.AddFilter(sourceFilter, "Video Capture");
        DsError.ThrowExceptionForHR(hr);
        if (scale_filter != null)
        {
            hr = this.graphBuilder.AddFilter(scale_filter, "Video Scaler");
            DsError.ThrowExceptionForHR(hr);
        }
        this.graphBuilder.Connect(null, null);
#if use_bda
        if (bda_filter != null)
        {
            hr = this.graphBuilder.AddFilter(bda_filter, "Video Tuner");
            DsError.ThrowExceptionForHR(hr);
        }
#endif
        if (capture_TV && !capture_is_audio)
        {
            if (sourceAudioFilter != null)
            {
                hr = this.graphBuilder.AddFilter(sourceAudioFilter, "Audio Capture");
                DsError.ThrowExceptionForHR(hr);
            }
        }
        if (reclock_filter != null)
        {
            Log.log("Adding 'reclock' which is the audio output device?");
            hr = this.graphBuilder.AddFilter(reclock_filter, "Audio Renderer");
            DsError.ThrowExceptionForHR(hr);
        }
        //this.graphBuilder.AddFilter(
        AdjustCrossbarPin();
        // Prefer the preview pins; fall back to the capture pins when the
        // device has no preview pin.
        bool cap_is_preview;
        {
            // set the video input size on the preview pin.
            cap_audio_pin = DsFindPin.ByCategory((IBaseFilter)sourceAudioFilter, PinCategory.Preview, 0);
            cap_pin = DsFindPin.ByCategory((IBaseFilter)sourceFilter, PinCategory.Preview, 0);
            if (cap_pin == null)
            {
                cap_is_preview = false;
                cap_audio_pin = DsFindPin.ByCategory((IBaseFilter)sourceAudioFilter, PinCategory.Capture, 0);
                cap_pin = DsFindPin.ByCategory((IBaseFilter)sourceFilter, PinCategory.Capture, 0);
            }
            else
            {
                cap_is_preview = true;
            }
            //Log.log( "Cap pin + " + cap_pin );
        }
        // Render the preview pin on the video capture filter
        // Use this instead of this.graphBuilder.RenderFile
        if (cap_is_preview)
        {
            //hr = this.captureGraphBuilder.RenderStream( PinCategory.Preview, MediaType.Video, scale_filter, null, null );
            //DsError.ThrowExceptionForHR( hr );
            hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, sourceFilter, null, null);
            DsError.ThrowExceptionForHR(hr);
            if (sourceAudioFilter != null)
            {
                hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Audio, sourceAudioFilter, null, reclock_filter);
                DsError.ThrowExceptionForHR(hr);
            }
        }
        else
        {
            //hr = this.captureGraphBuilder.RenderStream( PinCategory.Capture, MediaType.Video, scale_filter, null, null );
            //DsError.ThrowExceptionForHR( hr );
            hr = this.captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, sourceFilter, null, null);
            DsError.ThrowExceptionForHR(hr);
            if (sourceAudioFilter != null)
            {
                //IBaseFilter renderer = null;
                //Log.log( "reclock is " + reclock_filter );
                hr = this.captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Audio, sourceAudioFilter, null, reclock_filter);
                if (hr != 0)
                {
                    Log.log("Bad audio stream");
                }
                //DsError.ThrowExceptionForHR( hr );
            }
        }
        // Apply the configured frame rate to the selected video pin.
        IAMStreamConfig stream = cap_pin as IAMStreamConfig;
        if (stream != null)
        {
            // 352x240
            AMMediaType media;
            VideoInfoHeader vih = new VideoInfoHeader();
            stream.GetFormat(out media);
            Marshal.PtrToStructure(media.formatPtr, vih);
            //vih.BmiHeader.Width = width;
            //vih.BmiHeader.Height = Height;
            if (fps > 0)
            {
                // 10,000,000 = 100 ns units per second (DirectShow reference time).
                vih.AvgTimePerFrame = (10000000L / fps);
            }
            //Log.log( "set the bitmap override..." );
            Marshal.StructureToPtr(vih, media.formatPtr, false);
            hr = stream.SetFormat(media);
            if (hr != 0)
            {
                Log.log("Failed to set format (preview)." + hr);
            }
        }
        else
        {
            Log.log("Failed to get stream config from source filter");
        }
        //graph_filter.SetSyncSource( ref_clock );
        // Look for a reference clock on the video source first, then the
        // audio source.
        object o;
        hr = captureGraphBuilder.FindInterface(null, null, sourceFilter, typeof(IReferenceClock).GUID, out o);
        if (hr == 0)
        {
            ref_clock = (IReferenceClock)o;
        }
        if (ref_clock == null)
        {
            hr = captureGraphBuilder.FindInterface(null, null, sourceAudioFilter, typeof(IReferenceClock).GUID, out o);
            ref_clock = (IReferenceClock)o;
        }
        hr = captureGraphBuilder.FindInterface(null, null, sourceFilter, typeof(IAMTVTuner).GUID, out o);
        //graphBuilder.sa.
        if (hr >= 0)
        {
            tuner = (IAMTVTuner)o;
            o = null;
        }
        // Configure the tuner input type (antenna/cable) and channel limits.
        if (tuner != null)
        {
            if (antenna_input)
            {
                TunerInputType type;
                hr = tuner.get_InputType(0, out type);
                if (type != TunerInputType.Antenna)
                {
                    tuner.put_InputType(0, TunerInputType.Antenna);
                    hr = tuner.get_InputType(0, out type);
                }
            }
            else
            {
                if (tuner != null)
                {
                    TunerInputType type;
                    hr = tuner.get_InputType(0, out type);
                    if (type != TunerInputType.Cable)
                    {
                        tuner.put_InputType(0, TunerInputType.Cable);
                        hr = tuner.get_InputType(0, out type);
                    }
                }
            }
            tuner.ChannelMinMax(out min_channel, out max_channel);
            min_channel = INI.Default["DirectShow Player"]["Video Player/Minimum Channel", min_channel.ToString()].Integer;
            max_channel = INI.Default["DirectShow Player"]["Video Player/Maximum Channel", max_channel.ToString()].Integer;
        }
        // Now that the filter has been added to the graph and we have
        // rendered its stream, we can release this reference to the filter.
        if (sourceAudioFilter != null)
        {
            //hr = captureGraphBuilder.FindInterface( null, null, sourceFilter, typeof( IAMTVAudio ).GUID, out o );
            hr = captureGraphBuilder.FindInterface(null, null, sourceAudioFilter, typeof(IBasicAudio).GUID, out o);
            if (hr >= 0)
            {
                audio_mixer = (IBasicAudio)o;
                o = null;
            }
        }
        Marshal.ReleaseComObject(sourceFilter);
        if (audio_mixer != null)
        {
            audio_mixer.get_Volume(out volume);
        }
        if (tuner != null)
        {
            tuner.get_Channel(out channel, out sub_channel, out sub_channel2);
        }
        //this.graphBuilder.SetDefaultSyncSource();
        if (ref_clock != null)
        {
            this.graph_filter.SetSyncSource(ref_clock);
        }
        graph_streams.SyncUsingStreamOffset(true);

        // Set video window style and position
        SetupVideoWindow();

        // Add our graph to the running object table, which will allow
        // the GraphEdit application to "spy" on our graph
        rot = new DsROTEntry(this.graphBuilder);
        //this.mediaControl.set
        // Start previewing video data
        hr = this.mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);

        // Remember current state
        this.currentState = PlayState.Running;
        Network.SendStatus(channel, volume, (this.currentState == PlayState.Running));
    }
    catch (Exception e)
    {
        MessageBox.Show("An unrecoverable error has occurred : " + e.Message);
        this.DialogResult = DialogResult.Abort;
        this.Close();
        Application.Exit();
    }
}
/// <summary>
/// IMediaFilter.GetSyncSource implementation: reports the reference clock
/// currently assigned to this filter.
/// </summary>
/// <param name="pClock">Receives the current sync source; null when no clock is set.</param>
/// <returns>Always S_OK.</returns>
public int GetSyncSource(out IReferenceClock pClock)
{
    // Hand back whatever clock the last SetSyncSource call stored (possibly null).
    pClock = syncSource;
    return S_OK;
}
/// <summary>
/// IMediaFilter.SetSyncSource implementation: remembers the reference clock
/// this filter should use. Passing null clears the sync source.
/// </summary>
/// <param name="pClock">The clock to use, or null for none.</param>
/// <returns>Always S_OK.</returns>
public int SetSyncSource(IReferenceClock pClock)
{
    // Only caches the interface pointer; no explicit COM reference
    // counting is performed by this managed implementation.
    syncSource = pClock;
    return S_OK;
}
/// <summary>
/// Create the COM components used for playback and obtain the interfaces:
/// builds a DirectShow graph around a StreamBufferSource (SBE) with a VMR9
/// renderer, configures the stream-buffer engine from the registry, adds the
/// user-preferred codec/renderer/custom filters, renders the source pins and
/// optionally makes the audio renderer the graph's reference clock.
/// </summary>
/// <param name="filename">Path of the stream-buffer (SBE) recording to open.</param>
/// <returns>true when the graph was built successfully; false on any failure.</returns>
protected override bool GetInterfaces(string filename)
{
  Speed = 1;
  Log.Info("StreamBufferPlayer9: GetInterfaces()");
  //switch back to directx fullscreen mode
  // Log.Info("StreamBufferPlayer9: switch to fullscreen mode");
  Log.Info("StreamBufferPlayer9: Enabling DX9 exclusive mode");
  // Ask the window manager to switch into DX9 exclusive (full windowed) mode
  // before the graph is built.
  GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
  GUIWindowManager.SendMessage(msg);
  //Log.Info("StreamBufferPlayer9: build graph");
  try
  {
    // Build an empty filter graph and hook the VMR9 renderer into it first
    // (disabled until playback actually starts).
    _graphBuilder = (IGraphBuilder) new FilterGraph();
    //Log.Info("StreamBufferPlayer9: add _vmr9");
    _vmr9 = new VMR9Util();
    _vmr9.AddVMR9(_graphBuilder);
    _vmr9.Enable(false);
    int hr;
    m_StreamBufferConfig = new StreamBufferConfig();
    streamConfig2 = m_StreamBufferConfig as IStreamBufferConfigure2;
    if (streamConfig2 != null)
    {
      // setting the StreamBufferEngine registry key
      // 0x80000002 is HKEY_LOCAL_MACHINE; point the SBE at the MediaPortal
      // registry branch so it picks up its configuration from there.
      IntPtr HKEY = (IntPtr) unchecked ((int)0x80000002L);
      IStreamBufferInitialize pTemp = (IStreamBufferInitialize)streamConfig2;
      IntPtr subKey = IntPtr.Zero;
      RegOpenKeyEx(HKEY, "SOFTWARE\\MediaPortal", 0, 0x3f, out subKey);
      hr = pTemp.SetHKEY(subKey);
      // NOTE(review): the hr results below are assigned but never checked.
      hr = streamConfig2.SetFFTransitionRates(8, 32);
      //Log.Info("set FFTransitionRates:{0:X}",hr);
      int max, maxnon;
      hr = streamConfig2.GetFFTransitionRates(out max, out maxnon);
      // Cache backing-file limits/duration for use elsewhere in the player.
      streamConfig2.GetBackingFileCount(out _minBackingFiles, out _maxBackingFiles);
      streamConfig2.GetBackingFileDuration(out _backingFileDuration);
    }
    //Log.Info("StreamBufferPlayer9: add sbe");
    // create SBE source
    _bufferSource = (IStreamBufferSource) new StreamBufferSource();
    // NOTE(review): a direct cast either succeeds or throws InvalidCastException,
    // so this null check can never trigger; kept as-is for compatibility.
    if (_bufferSource == null)
    {
      Log.Error("StreamBufferPlayer9:Failed to create instance of SBE (do you have WinXp SP1?)");
      return(false);
    }
    IBaseFilter filter = (IBaseFilter)_bufferSource;
    hr = _graphBuilder.AddFilter(filter, "SBE SOURCE");
    if (hr != 0)
    {
      Log.Error("StreamBufferPlayer9:Failed to add SBE to graph");
      return(false);
    }
    IFileSourceFilter fileSource = (IFileSourceFilter)_bufferSource;
    // NOTE(review): same dead null check as above — a direct cast cannot yield null here.
    if (fileSource == null)
    {
      Log.Error("StreamBufferPlayer9:Failed to get IFileSourceFilter");
      return(false);
    }
    //Log.Info("StreamBufferPlayer9: open file:{0}",filename);
    hr = fileSource.Load(filename, null);
    if (hr != 0)
    {
      Log.Error("StreamBufferPlayer9:Failed to open file:{0} :0x{1:x}", filename, hr);
      return(false);
    }
    //Log.Info("StreamBufferPlayer9: add codecs");
    // add preferred video & audio codecs
    string strVideoCodec = "";
    string strAudioCodec = "";
    string strAudioRenderer = "";
    int intFilters = 0; // FlipGer: count custom filters
    string strFilters = ""; // FlipGer: collect custom filters
    using (Settings xmlreader = new MPSettings())
    {
      // FlipGer: load infos for custom filters
      // Walk "filter0", "filter1", ... until a key is missing; collect the
      // enabled ones into a ';'-separated list.
      int intCount = 0;
      while (xmlreader.GetValueAsString("mytv", "filter" + intCount.ToString(), "undefined") != "undefined")
      {
        if (xmlreader.GetValueAsBool("mytv", "usefilter" + intCount.ToString(), false))
        {
          strFilters += xmlreader.GetValueAsString("mytv", "filter" + intCount.ToString(), "undefined") + ";";
          intFilters++;
        }
        intCount++;
      }
      strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
      strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
      strAudioRenderer = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
      string strValue = xmlreader.GetValueAsString("mytv", "defaultar", "Normal");
      GUIGraphicsContext.ARType = Util.Utils.GetAspectRatio(strValue);
    }
    // Add the configured codecs / renderer only when a name was configured.
    if (strVideoCodec.Length > 0)
    {
      _videoCodecFilter = DirectShowUtil.AddFilterToGraph(_graphBuilder, strVideoCodec);
    }
    if (strAudioCodec.Length > 0)
    {
      _audioCodecFilter = DirectShowUtil.AddFilterToGraph(_graphBuilder, strAudioCodec);
    }
    if (strAudioRenderer.Length > 0)
    {
      _audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(_graphBuilder, strAudioRenderer, true);
    }
    // FlipGer: add custom filters to graph
    customFilters = new IBaseFilter[intFilters];
    string[] arrFilters = strFilters.Split(';');
    for (int i = 0; i < intFilters; i++)
    {
      customFilters[i] = DirectShowUtil.AddFilterToGraph(_graphBuilder, arrFilters[i]);
    }
    // render output pins of SBE
    DirectShowUtil.RenderOutputPins(_graphBuilder, (IBaseFilter)fileSource);
    // Grab the control/event/seeking interfaces off the graph and the SBE source.
    _mediaCtrl = (IMediaControl)_graphBuilder;
    _mediaEvt = (IMediaEventEx)_graphBuilder;
    _mediaSeeking = _bufferSource as IStreamBufferMediaSeeking;
    _mediaSeeking2 = _bufferSource as IStreamBufferMediaSeeking2;
    if (_mediaSeeking == null)
    {
      Log.Error("Unable to get IMediaSeeking interface#1");
    }
    if (_mediaSeeking2 == null)
    {
      Log.Error("Unable to get IMediaSeeking interface#2");
    }
    if (_audioRendererFilter != null)
    {
      // Make the audio renderer's clock the graph's reference clock.
      // NOTE(review): hr is not checked; also 'clock' may be null if the
      // renderer does not expose IReferenceClock — presumably intentional,
      // since SetSyncSource(null) just clears the clock.
      IMediaFilter mp = _graphBuilder as IMediaFilter;
      IReferenceClock clock = _audioRendererFilter as IReferenceClock;
      hr = mp.SetSyncSource(clock);
    }
    // Set the IBasicAudioInterface
    _basicAudio = (IBasicAudio)_graphBuilder;
    // Log.Info("StreamBufferPlayer9:SetARMode");
    // DirectShowUtil.SetARMode(_graphBuilder,AspectRatioMode.Stretched);
    //Log.Info("StreamBufferPlayer9: set Deinterlace");
    if (!_vmr9.IsVMR9Connected)
    {
      //_vmr9 is not supported, switch to overlay
      // Tear this graph down and fall back to the base-class (overlay) path.
      Log.Info("StreamBufferPlayer9: switch to overlay");
      _mediaCtrl = null;
      Cleanup();
      return(base.GetInterfaces(filename));
    }
    _pinVmr9ConnectedTo = _vmr9.PinConnectedTo;
    _vmr9.SetDeinterlaceMode();
    return(true);
  }
  catch (Exception ex)
  {
    Log.Error("StreamBufferPlayer9:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
    return(false);
  }
}