/// <summary>
/// Builds the video rendering filter graph when the first frame arrives on the RTP stream.
/// Creation of the fgm and the adding / removing of filters needs to happen on the
/// same thread. So make sure it all happens on the UI thread.
/// </summary>
private void _RtpStream_FirstFrameReceived()
{
    lock (fgmLock)
    {
        // Tear down any previous graph before building a new one.
        DisposeFgm();

        Debug.Assert(fgm == null);

        // Create the DirectShow filter graph manager
        fgm = new FilgraphManagerClass();
        IGraphBuilder iGB = (IGraphBuilder)fgm;

        // Register the graph in the Running Object Table so GraphEdit can attach for debugging.
        rotID = FilterGraph.AddToRot((IGraphBuilder)fgm);

        // Wrap the RTP stream in a source filter and let DirectShow auto-build the
        // decode/render chain from its output pin.
        IBaseFilter bfSource = RtpSourceClass.CreateInstance();
        ((MSR.LST.MDShow.Filters.IRtpSource)bfSource).Initialize(rtpStream);
        iGB.AddFilter(bfSource, "RtpSource");
        iGB.Render(Filter.GetPin(bfSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));

        // NOTE(review): presumably disables hardware (DXVA) acceleration on the decoder —
        // confirm against the DisableDXVA implementation.
        DisableDXVA(fgm);

        // Render the video inside of the form
        iVW = (IVideoWindow)fgm;

        // Get the correct ratio to use for the video stretching
        // I would expect the fgm to always be castable to this, but I simply don't trust DShow
        IBasicVideo iBV = fgm as IBasicVideo;
        if (iBV != null)
        {
            int vidWidth, vidHeight;
            iBV.GetVideoSize(out vidWidth, out vidHeight);
            vidSrcRatio = (double)vidHeight / (double)vidWidth;
        }

        // Remove the border from the default DShow renderer UI
        int ws = WindowStyle;
        ws = ws & ~(0x00800000); // Remove WS_BORDER
        ws = ws & ~(0x00400000); // Remove WS_DLGFRAME
        WindowStyle = ws;

        iVW = null;

        // Clear the "remote video stopped" flag and let the hosting form refresh its UI.
        uiState &= ~(int)FAudioVideo.UIState.RemoteVideoStopped;
        if (form != null)
        {
            ((FAudioVideo)form).UpdateVideoUI(uiState);
        }

        // FirstFrameReceived interprets fgmState as the *desired* state for the fgm
        // Because ResumePlayingVideo won't actually start if the state is already
        // Running, we change it to Stopped so that it will start
        if (IsPlaying && fgmState == FilterGraph.State.Running)
        {
            fgmState = FilterGraph.State.Stopped;
            ResumePlayingVideo();
        }
    }
}
/// <summary>
/// User has clicked on a render pin menu item
/// </summary>
/// <param name="sender">The ToolStripMenuItem that was clicked; its Tag carries the DSOutputPin to render.</param>
/// <param name="e">Standard event arguments (unused).</param>
void renderPinMenuItem_Click(object sender, EventArgs e)
{
    int hr = 0;

    // get the DaggerPin
    // NOTE(review): these `as` casts are unchecked — a foreign sender, a detached
    // parent, or a missing Tag would throw NullReferenceException. Confirm this
    // handler is only wired to pin menu items hosted inside a DSDaggerUIGraph.
    DSOutputPin pin = (sender as ToolStripMenuItem).Tag as DSOutputPin;

    // get the Parent UIGraph
    DSDaggerUIGraph parentui = this.Parent as DSDaggerUIGraph;

    // get the FilterGraph
    IGraphBuilder graph = parentui._Graph as IGraphBuilder;

    // atempt to render the pin; exceptions are reported to the user and hr keeps
    // whatever value Render returned (0 if it threw before returning).
    try
    {
        hr = graph.Render(pin._pin);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Error rendering pin");
    }

    // Remember where filters created by Render should be dropped on screen:
    // just to the right of the pin's location, translated into parent coordinates.
    Point dropstart = parentui.PointToClient(this.PointToScreen((pin.PinUIElements as PinUI).PinLocation));
    parentui._dropLocation = new Point(dropstart.X + 25, dropstart.Y);

    // Sync the graphs
    parentui.SyncGraphs(null);

    // A non-zero HRESULT that didn't throw is still a failure — show its text.
    if (hr != 0)
    {
        MessageBox.Show(DsError.GetErrorText(hr));
    }
}
/// <summary>
/// Finds the pin named <paramref name="pinName"/> on <paramref name="source"/> and asks the
/// graph builder to render it (auto-connecting whatever downstream filters are required).
/// </summary>
/// <param name="graphBuilder">The filter graph that performs the rendering. Must not be null.</param>
/// <param name="source">The filter whose output pin should be rendered. Must not be null.</param>
/// <param name="pinName">The pin name to look up via <c>DsFindPin.ByName</c>.</param>
/// <returns>true if the pin was found and Render returned a success code (hr &gt;= 0); false otherwise.</returns>
/// <exception cref="ArgumentNullException"><paramref name="graphBuilder"/> or <paramref name="source"/> is null.</exception>
public static bool RenderPin(IGraphBuilder graphBuilder, IBaseFilter source, string pinName)
{
    if (graphBuilder == null)
    {
        throw new ArgumentNullException("graphBuilder");
    }

    if (source == null)
    {
        throw new ArgumentNullException("source");
    }

    IPin pin = DsFindPin.ByName(source, pinName);
    if (pin == null)
    {
        return (false);
    }

    try
    {
        // hr >= 0 accepts S_OK as well as partial-success codes (e.g. VFW_S_PARTIAL_RENDER).
        int hr = graphBuilder.Render(pin);
        return (hr >= 0);
    }
    finally
    {
        // Fix: release the COM pin even when Render throws, so the reference is never leaked.
        Marshal.ReleaseComObject(pin);
    }
}
/// <summary>
/// Walks every filter currently in the graph and renders the first unconnected output
/// pin found on each, calling <c>SetupSampleGrabber</c> after each successful render.
/// </summary>
/// <remarks>
/// Fix: the original leaked the COM references handed out by <c>EnumFilters</c>,
/// <c>Next</c>, and <c>DsFindPin.ByConnectionStatus</c>; they are now released in
/// <c>finally</c> blocks. Behavior is otherwise unchanged.
/// </remarks>
/// <exception cref="System.Runtime.InteropServices.COMException">Wrapped via DsError when a DirectShow call fails.</exception>
protected virtual void SetupAudio()
{
    IEnumFilters enumFilters;
    int hr = _graph.EnumFilters(out enumFilters);
    DsError.ThrowExceptionForHR(hr);

    IBaseFilter[] filters = new IBaseFilter[1];
    IntPtr fetched = IntPtr.Zero; // IntPtr.Zero: we don't need the fetched count
    try
    {
        while (enumFilters.Next(1, filters, fetched) == 0)
        {
            IBaseFilter filter = filters[0];
            try
            {
                // Only the first unconnected pin (index 0) is examined, as before.
                IPin unconnectedPin = DsFindPin.ByConnectionStatus(filter, PinConnectedStatus.Unconnected, 0);
                if (unconnectedPin != null)
                {
                    try
                    {
                        PinDirection direction;
                        hr = unconnectedPin.QueryDirection(out direction);
                        DsError.ThrowExceptionForHR(hr);

                        if (direction == PinDirection.Output)
                        {
                            hr = _graph.Render(unconnectedPin);
                            DsError.ThrowExceptionForHR(hr);

                            SetupSampleGrabber();
                        }
                    }
                    finally
                    {
                        Marshal.ReleaseComObject(unconnectedPin);
                    }
                }
            }
            finally
            {
                // Releases only the enumerator's reference; the graph keeps its own.
                Marshal.ReleaseComObject(filter);
            }
        }
    }
    finally
    {
        Marshal.ReleaseComObject(enumFilters);
    }
}
/// <summary>
/// Renders every output pin of <paramref name="filter"/> through the given graph builder,
/// releasing each enumerated pin and the pin enumerator when done.
/// </summary>
/// <param name="graphBuilder">The filter graph used to render. Must not be null.</param>
/// <param name="filter">The filter whose output pins are rendered. Must not be null.</param>
/// <exception cref="ArgumentNullException"><paramref name="graphBuilder"/> or <paramref name="filter"/> is null.</exception>
/// <remarks>Fix: removed the duplicated (dead) null check on <paramref name="filter"/>.</remarks>
public static void RenderOutputPins(IGraphBuilder graphBuilder, IBaseFilter filter)
{
    if (graphBuilder == null)
    {
        throw new ArgumentNullException("graphBuilder");
    }

    if (filter == null)
    {
        throw new ArgumentNullException("filter");
    }

    int hr = 0;
    IEnumPins enumPins;
    var pins = new IPin[1];
    IntPtr fetched = IntPtr.Zero; // IntPtr.Zero: fetched count is not needed

    hr = filter.EnumPins(out enumPins);
    DsError.ThrowExceptionForHR(hr);

    try
    {
        while (enumPins.Next(pins.Length, pins, fetched) == 0)
        {
            try
            {
                PinDirection direction;
                pins[0].QueryDirection(out direction);

                if (direction == PinDirection.Output)
                {
                    hr = graphBuilder.Render(pins[0]);
                    DsError.ThrowExceptionForHR(hr);
                }
            }
            finally
            {
                // Release each pin even if QueryDirection/Render throws.
                Marshal.ReleaseComObject(pins[0]);
            }
        }
    }
    finally
    {
        Marshal.ReleaseComObject(enumPins);
    }
}
/// <summary>
/// Renders the local preview chain: hands the upstream output pin (compressor if
/// present, otherwise the capture source) to the graph builder.
/// </summary>
/// <exception cref="InvalidOperationException">A renderer already exists in the graph.</exception>
public void RenderLocal()
{
    // Rendering twice is a programming error: fail fast in debug builds, throw for callers.
    if (renderer != null)
    {
        const string msg = "Can't RenderLocal when graph is already rendered";
        System.Diagnostics.Debug.Fail(msg);
        throw new InvalidOperationException(msg);
    }

    // Prefer the compressor's output when a compressor is in the chain; otherwise
    // render straight from the capture source.
    IPin upstreamPin;
    if (compressor != null)
    {
        upstreamPin = (IPin)compressor.OutputPin;
    }
    else
    {
        upstreamPin = source.OutputPin;
    }

    iGB.Render(upstreamPin);
}
/// <summary>
/// Renders the local preview chain: hands the upstream output pin (compressor if
/// present, otherwise the capture source) to the graph builder.
/// </summary>
/// <exception cref="InvalidOperationException">A renderer already exists in the graph.</exception>
public virtual void RenderLocal()
{
    if (renderer != null)
    {
        // Already rendered: surface to developers via Debug.Fail, then throw for callers.
        string message = Strings.RenderLocalError;
        System.Diagnostics.Debug.Fail(message);
        throw new InvalidOperationException(message);
    }

    // Render from the compressor when one is in the chain, else directly from the source.
    IPin upstream = compressor == null ? source.OutputPin : (IPin)compressor.OutputPin;
    iGB.Render(upstream);
}
/// <summary>
/// Builds the audio rendering filter graph when the first frame arrives on the RTP stream.
/// Creation of the fgm and the adding / removing of filters needs to happen on the
/// same thread. So make sure it all happens on the UI thread.
/// </summary>
private void _RtpStream_FirstFrameReceived()
{
    lock (fgmLock)
    {
        // Tear down any previous graph before building a new one.
        DisposeFgm();

        Debug.Assert(fgm == null);

        // Create the filter graph manager and register it in the Running Object
        // Table so GraphEdit can attach for debugging.
        fgm = new FilgraphManagerClass();
        IGraphBuilder iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot(iGB);

        // Wrap the RTP stream in a source filter.
        IBaseFilter rtpSource = RtpSourceClass.CreateInstance();
        ((MSR.LST.MDShow.Filters.IRtpSource)rtpSource).Initialize(rtpStream);
        iGB.AddFilter(rtpSource, "RtpSource");

        // Add the chosen audio renderer
        FilterInfo fi = SelectedSpeaker();
        iGB.AddFilter(Filter.CreateBaseFilter(fi), fi.Name);

        // Let DirectShow auto-connect decoder(s) between source and renderer.
        iGB.Render(Filter.GetPin(rtpSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));

        // Convert DShow's volume (-10000..0, hundredths of dB) back into this class's
        // 0..100 scale: inverse of the log mapping used when setting the volume.
        iBA = (IBasicAudio)fgm;
        currentVolume = (int)Math.Round(Math.Pow(10.0, (2.0 * (double)(iBA.Volume + 10000)) / 10000.0));
        iBA = null;

        // Clear the "remote audio stopped" flag and let the hosting form refresh its UI.
        uiState &= ~(int)FAudioVideo.UIState.RemoteAudioStopped;
        if (form != null)
        {
            ((FAudioVideo)form).UpdateAudioUI(uiState);
        }

        // FirstFrameReceived interprets fgmState as the *desired* state for the fgm
        // Because ResumePlayingAudio won't actually start if the state is already
        // Running, we change it to Stopped so that it will start
        if (IsPlaying && fgmState == FilterGraph.State.Running)
        {
            fgmState = FilterGraph.State.Stopped;
            ResumePlayingAudio();
        }
    }
}
/// <summary>
/// Asks the graph builder to render the given pin, then refreshes the UI's view of
/// the graph. Failures are shown to the user and the refresh is skipped.
/// </summary>
/// <param name="pin">The wrapper whose underlying IPin should be rendered.</param>
public void RenderPin(Pin pin)
{
    try
    {
        DsError.ThrowExceptionForHR(graphBuilder.Render(pin.IPin));
    }
    catch (COMException comEx)
    {
        // DirectShow failures get the specialized COM-error dialog.
        ShowCOMException(comEx, "Can't render pin");
        return;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Error while rendering pin");
        return;
    }

    // Success: rendering may have added many filters, so rebuild the displayed graph.
    ReloadGraph();
}
/// <summary>
/// Builds and starts a filter graph that pulls frames from the RTP stream in
/// polling mode and renders them into the video window.
/// </summary>
private void CreateReceivingGraph()
{
    // Tell the stream we will poll it for data with our own (DShow) thread
    // Instead of receiving data through the FrameReceived event
    rtpStream.IsUsingNextFrame = true;

    // Create receiving filtergraph
    fgm = new FilgraphManagerClass();
    IGraphBuilder iGB = (IGraphBuilder)fgm;

    // Wrap the RTP stream in a source filter and let DirectShow build the
    // decode/render chain from its output pin.
    IBaseFilter rtpSource = RtpSourceClass.CreateInstance();
    ((MSR.LST.MDShow.Filters.IRtpSource)rtpSource).Initialize(rtpStream);
    iGB.AddFilter(rtpSource, "RtpSource");
    iGB.Render(Filter.GetPin(rtpSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));

    // Configure the on-screen window, then start the graph.
    VideoWindow();
    fgm.Run();
}
/// <summary>
/// Finds the pin named <paramref name="pinName"/> on <paramref name="source"/> and asks the
/// graph builder to render it (auto-connecting whatever downstream filters are required).
/// </summary>
/// <param name="graphBuilder">The filter graph that performs the rendering. Must not be null.</param>
/// <param name="source">The filter whose output pin should be rendered. Must not be null.</param>
/// <param name="pinName">The pin name to look up via <c>DsFindPin.ByName</c>.</param>
/// <returns>true if the pin was found and Render returned a success code (hr &gt;= 0); false otherwise.</returns>
/// <exception cref="ArgumentNullException"><paramref name="graphBuilder"/> or <paramref name="source"/> is null.</exception>
public static bool RenderPin(IGraphBuilder graphBuilder, IBaseFilter source, string pinName)
{
    if (graphBuilder == null)
        throw new ArgumentNullException("graphBuilder");
    if (source == null)
        throw new ArgumentNullException("source");

    IPin pin = DsFindPin.ByName(source, pinName);
    if (pin == null)
        return false;

    try
    {
        // hr >= 0 accepts S_OK as well as partial-success codes (e.g. VFW_S_PARTIAL_RENDER).
        int hr = graphBuilder.Render(pin);
        return (hr >= 0);
    }
    finally
    {
        // Fix: release the COM pin even when Render throws, so the reference is never leaked.
        Marshal.ReleaseComObject(pin);
    }
}
/// <summary>
/// Background playback thread: builds a graph (source file -> sample grabber ->
/// optional renderer), runs it, pumps DirectShow events until end-of-stream or a
/// stop request, then releases all COM objects and raises PlayingFinished.
/// </summary>
private void WorkerThread()
{
    ReasonToFinishPlaying reason = ReasonToFinishPlaying.StoppedByUser;
    Grabber grabber = new Grabber(this);

    // Raw COM objects (released in finally) and their typed interface views.
    object obj = null;       // filter graph
    object obj2 = null;      // sample grabber
    IGraphBuilder graphBuilder = null;
    IBaseFilter filter = null;
    IBaseFilter baseFilter = null;
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;
    IMediaEventEx mediaEventEx = null;
    try
    {
        // Create the filter graph from its CLSID.
        Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (typeFromCLSID == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }
        obj = Activator.CreateInstance(typeFromCLSID);
        graphBuilder = (IGraphBuilder)obj;

        // Add a source filter for the media file.
        graphBuilder.AddSourceFilter(fileName, "source", out filter);
        if (filter == null)
        {
            throw new ApplicationException("Failed creating source filter");
        }

        // Create the sample grabber and add it to the graph.
        typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (typeFromCLSID == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }
        obj2 = Activator.CreateInstance(typeFromCLSID);
        sampleGrabber = (ISampleGrabber)obj2;
        baseFilter = (IBaseFilter)obj2;
        graphBuilder.AddFilter(baseFilter, "grabber");

        // Request RGB24 video so the grabber callback sees uncompressed frames.
        AMMediaType aMMediaType = new AMMediaType();
        aMMediaType.MajorType = MediaType.Video;
        aMMediaType.SubType = MediaSubType.RGB24;
        sampleGrabber.SetMediaType(aMMediaType);

        // Try each source output pin in turn until one connects to the grabber input.
        int num = 0;
        IPin inPin = Tools.GetInPin(baseFilter, 0);
        IPin pin = null;
        while (true)
        {
            pin = Tools.GetOutPin(filter, num);
            if (pin == null)
            {
                Marshal.ReleaseComObject(inPin);
                throw new ApplicationException("Did not find acceptable output video pin in the given source");
            }
            if (graphBuilder.Connect(pin, inPin) >= 0)
            {
                break;
            }
            Marshal.ReleaseComObject(pin);
            pin = null;
            num++;
        }
        Marshal.ReleaseComObject(pin);
        Marshal.ReleaseComObject(inPin);

        // Read back the negotiated frame size for the grabber callback.
        if (sampleGrabber.GetConnectedMediaType(aMMediaType) == 0)
        {
            VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader));
            grabber.Width = videoInfoHeader.BmiHeader.Width;
            grabber.Height = videoInfoHeader.BmiHeader.Height;
            aMMediaType.Dispose();
        }

        // Unless freezing-prevention is on, render the grabber's output (adds a video
        // renderer) but keep its window hidden.
        if (!preventFreezing)
        {
            graphBuilder.Render(Tools.GetOutPin(baseFilter, 0));
            IVideoWindow videoWindow = (IVideoWindow)obj;
            videoWindow.put_AutoShow(autoShow: false);
            videoWindow = null;
        }

        // Callback-only grabbing: no buffering, no one-shot, callback mode 1.
        sampleGrabber.SetBufferSamples(bufferThem: false);
        sampleGrabber.SetOneShot(oneShot: false);
        sampleGrabber.SetCallback(grabber, 1);

        // With no reference clock the graph runs as fast as it can.
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)obj;
            mediaFilter.SetSyncSource(null);
        }

        mediaControl = (IMediaControl)obj;
        mediaEventEx = (IMediaEventEx)obj;
        mediaControl.Run();

        // Event pump: poll for DirectShow events (0ms timeout), freeing each event's
        // parameters, until end-of-stream or the stop event is signalled (100ms poll).
        do
        {
            if (mediaEventEx != null && mediaEventEx.GetEvent(out DsEvCode lEventCode, out IntPtr lParam, out IntPtr lParam2, 0) >= 0)
            {
                mediaEventEx.FreeEventParams(lEventCode, lParam, lParam2);
                if (lEventCode == DsEvCode.Complete)
                {
                    reason = ReasonToFinishPlaying.EndOfStreamReached;
                    break;
                }
            }
        }
        while (!stopEvent.WaitOne(100, exitContext: false));
        mediaControl.Stop();
    }
    catch (Exception ex)
    {
        // Report the failure to subscribers rather than crashing the thread.
        if (this.VideoSourceError != null)
        {
            this.VideoSourceError(this, new VideoSourceErrorEventArgs(ex.Message));
        }
    }
    finally
    {
        // Drop typed views first, then release the two underlying COM objects.
        graphBuilder = null;
        baseFilter = null;
        sampleGrabber = null;
        mediaControl = null;
        mediaEventEx = null;
        if (obj != null)
        {
            Marshal.ReleaseComObject(obj);
            obj = null;
        }
        if (filter != null)
        {
            Marshal.ReleaseComObject(filter);
            filter = null;
        }
        if (obj2 != null)
        {
            Marshal.ReleaseComObject(obj2);
            obj2 = null;
        }
    }

    // Always announce why playback ended.
    if (this.PlayingFinished != null)
    {
        this.PlayingFinished(this, reason);
    }
}
/// <summary>
/// Worker thread that captures the images: builds the capture graph
/// (webcam source -> sample grabber -> hidden renderer) and starts it running.
/// On any failure the graph is torn down via Release().
/// </summary>
private void Init()
{
    try
    {
        log.Trace("Start worker thread");
        // Create the main graph
        _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;
        // Create the webcam source
        _sourceObject = FilterInfo.CreateFilter(_monikerString);
        // Create the grabber
        _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        _grabberObject = _grabber as IBaseFilter;
        // Add the source and grabber to the main graph
        _graph.AddFilter(_sourceObject, "source");
        _graph.AddFilter(_grabberObject, "grabber");
        using (AMMediaType mediaType = new AMMediaType())
        {
            // Request uncompressed RGB32 so the grabber callback sees raw pixels.
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            _grabber.SetMediaType(mediaType);
            if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
            {
                if (_grabber.GetConnectedMediaType(mediaType) == 0)
                {
                    // During startup, this code can be too fast, so try at least 3 times
                    int retryCount = 0;
                    bool succeeded = false;
                    while ((retryCount < 3) && !succeeded)
                    {
                        // Tried again
                        retryCount++;
                        try
                        {
                            // Retrieve the grabber information (negotiated frame size)
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            _capGrabber.Width = header.BmiHeader.Width;
                            _capGrabber.Height = header.BmiHeader.Height;
                            // Succeeded
                            succeeded = true;
                        }
                        catch
                        {
                            // PtrToStructure can fail while the format block settles; log and retry.
                            log.InfoFormat("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);
                            // Back off briefly before the next attempt
                            Thread.Sleep(50);
                        }
                    }
                }
            }
            // Render the grabber output (adds a renderer) and set callback-only grabbing.
            _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
            _grabber.SetBufferSamples(false);
            _grabber.SetOneShot(false);
            _grabber.SetCallback(_capGrabber, 1);
            log.Trace("_grabber set up");
            // Get the video window and keep it hidden
            IVideoWindow wnd = (IVideoWindow)_graph;
            wnd.put_AutoShow(false);
            wnd = null;
            // Create the control and run
            _control = (IMediaControl)_graph;
            _control.Run();
            log.Trace("control runs");
            // Wait for the stop signal
            //while (!_stopSignal.WaitOne(0, true))
            //{
            //    Thread.Sleep(10);
            //}
        }
    }
    catch (Exception ex)
    {
        // Any failure during setup: log it and tear the graph down.
        log.Debug(ex);
        Release();
    }
}
/// <summary>
/// create the used COM components and get the interfaces: builds the full RTSP
/// playback graph (VMR9 unless radio, MPEG-2 demux, RTSP source, configured codecs,
/// audio renderer, optional DVB subtitle filter), loads the current file, renders the
/// demux outputs, and caches the control/seek/audio interfaces.
/// </summary>
/// <returns>true when the graph was built successfully; false on any failure.</returns>
protected bool GetInterfaces()
{
    VMR9Util.g_vmr9 = null;
    if (IsRadio == false)
    {
        Vmr9 = VMR9Util.g_vmr9 = new VMR9Util();

        // switch back to directx fullscreen mode
        Log.Info("RTSPPlayer: Enabling DX9 exclusive mode");
        GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
        GUIWindowManager.SendMessage(msg);
    }
    //Type comtype = null;
    //object comobj = null;

    // NOTE(review): rect is filled in from the form bounds but never used below — confirm
    // whether it is leftover from an earlier video-window setup.
    DsRect rect = new DsRect();
    rect.top = 0;
    rect.bottom = GUIGraphicsContext.form.Height;
    rect.left = 0;
    rect.right = GUIGraphicsContext.form.Width;
    try
    {
        graphBuilder = (IGraphBuilder)new FilterGraph();
        Log.Info("RTSPPlayer: add source filter");
        if (IsRadio == false)
        {
            // Video playback needs the VMR9 renderer in the graph before anything else.
            bool AddVMR9 = VMR9Util.g_vmr9 != null && VMR9Util.g_vmr9.AddVMR9(graphBuilder);
            if (!AddVMR9)
            {
                Log.Error("RTSPPlayer:Failed to add VMR9 to graph");
                return false;
            }
            VMR9Util.g_vmr9.Enable(false);
        }

        _mpegDemux = (IBaseFilter)new MPEG2Demultiplexer();
        graphBuilder.AddFilter(_mpegDemux, "MPEG-2 Demultiplexer");

        _rtspSource = (IBaseFilter)new RtpSourceFilter();
        int hr = graphBuilder.AddFilter((IBaseFilter)_rtspSource, "RTSP Source Filter");
        if (hr != 0)
        {
            Log.Error("RTSPPlayer:unable to add RTSP source filter:{0:X}", hr);
            return false;
        }

        // add preferred video & audio codecs
        Log.Info("RTSPPlayer: add video/audio codecs");
        string strVideoCodec = "";
        string strAudioCodec = "";
        string strAudiorenderer = "";
        int intFilters = 0; // FlipGer: count custom filters
        string strFilters = ""; // FlipGer: collect custom filters
        string postProcessingFilterSection = "mytv";
        using (Settings xmlreader = new MPSettings())
        {
            // Codec/renderer names come from either the movieplayer or mytv settings
            // section depending on the media type being played.
            if (_mediaType == g_Player.MediaType.Video)
            {
                strVideoCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2videocodec", "");
                strAudioCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2audiocodec", "");
                strAudiorenderer = xmlreader.GetValueAsString("movieplayer", "audiorenderer", "Default DirectSound Device");
                postProcessingFilterSection = "movieplayer";
            }
            else
            {
                strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
                strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
                strAudiorenderer = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
                postProcessingFilterSection = "mytv";
            }
            enableDvbSubtitles = xmlreader.GetValueAsBool("tvservice", "dvbsubtitles", false);
            // FlipGer: load infos for custom filters (filter0, filter1, ... until "undefined")
            int intCount = 0;
            while (xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") != "undefined")
            {
                if (xmlreader.GetValueAsBool(postProcessingFilterSection, "usefilter" + intCount.ToString(), false))
                {
                    strFilters += xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") + ";";
                    intFilters++;
                }
                intCount++;
            }
        }
        // NOTE(review): extension is computed but not used in this method — confirm leftover.
        string extension = Path.GetExtension(m_strCurrentFile).ToLowerInvariant();
        if (IsRadio == false)
        {
            if (strVideoCodec.Length > 0)
            {
                DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
            }
        }
        if (strAudioCodec.Length > 0)
        {
            DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
        }
        if (enableDvbSubtitles == true)
        {
            try
            {
                _subtitleFilter = SubtitleRenderer.GetInstance().AddSubtitleFilter(graphBuilder);
                SubtitleRenderer.GetInstance().SetPlayer(this);
                dvbSubRenderer = SubtitleRenderer.GetInstance();
            }
            catch (Exception e)
            {
                Log.Error(e);
            }
        }
        Log.Debug("Is subtitle fitler null? {0}", (_subtitleFilter == null));
        // FlipGer: add custom filters to graph
        string[] arrFilters = strFilters.Split(';');
        for (int i = 0; i < intFilters; i++)
        {
            DirectShowUtil.AddFilterToGraph(graphBuilder, arrFilters[i]);
        }
        if (strAudiorenderer.Length > 0)
        {
            audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(graphBuilder, strAudiorenderer, false);
        }
        Log.Info("RTSPPlayer: load:{0}", m_strCurrentFile);
        IFileSourceFilter interfaceFile = (IFileSourceFilter)_rtspSource;
        if (interfaceFile == null)
        {
            Log.Error("RTSPPlayer:Failed to get IFileSourceFilter");
            return false;
        }
        //Log.Info("RTSPPlayer: open file:{0}",filename);
        hr = interfaceFile.Load(m_strCurrentFile, null);
        if (hr != 0)
        {
            Log.Error("RTSPPlayer:Failed to open file:{0} :0x{1:x}", m_strCurrentFile, hr);
            return false;
        }

        #region connect rtspsource->demux
        Log.Info("RTSPPlayer:connect rtspsource->mpeg2 demux");
        IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_rtspSource, PinDirection.Output, 0);
        if (pinTsOut == null)
        {
            Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
            return false;
        }
        IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
        if (pinDemuxIn == null)
        {
            Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
            return false;
        }
        hr = graphBuilder.Connect(pinTsOut, pinDemuxIn);
        if (hr != 0)
        {
            Log.Info("RTSPPlayer:failed to connect rtspsource->mpeg2 demux:{0:X}", hr);
            return false;
        }
        DirectShowUtil.ReleaseComObject(pinTsOut);
        DirectShowUtil.ReleaseComObject(pinDemuxIn);
        #endregion

        #region render demux output pins
        if (IsRadio)
        {
            // Radio: only render demux output pins that offer an audio media type.
            Log.Info("RTSPPlayer:render audio demux outputs");
            IEnumPins enumPins;
            _mpegDemux.EnumPins(out enumPins);
            IPin[] pins = new IPin[2];
            int fetched = 0;
            while (enumPins.Next(1, pins, out fetched) == 0)
            {
                if (fetched != 1)
                {
                    break;
                }
                PinDirection direction;
                pins[0].QueryDirection(out direction);
                if (direction == PinDirection.Input)
                {
                    continue;
                }
                IEnumMediaTypes enumMediaTypes;
                pins[0].EnumMediaTypes(out enumMediaTypes);
                AMMediaType[] mediaTypes = new AMMediaType[20];
                int fetchedTypes;
                enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
                for (int i = 0; i < fetchedTypes; ++i)
                {
                    if (mediaTypes[i].majorType == MediaType.Audio)
                    {
                        graphBuilder.Render(pins[0]);
                        break;
                    }
                }
            }
        }
        else
        {
            // TV/video: render every demux output pin (audio and video).
            Log.Info("RTSPPlayer:render audio/video demux outputs");
            IEnumPins enumPins;
            _mpegDemux.EnumPins(out enumPins);
            IPin[] pins = new IPin[2];
            int fetched = 0;
            while (enumPins.Next(1, pins, out fetched) == 0)
            {
                if (fetched != 1)
                {
                    break;
                }
                PinDirection direction;
                pins[0].QueryDirection(out direction);
                if (direction == PinDirection.Input)
                {
                    continue;
                }
                graphBuilder.Render(pins[0]);
            }
        }
        #endregion

        // Connect DVB subtitle filter pins in the graph: create Pcr/Subtitle/PMT output
        // pins on the demux and wire each to the matching subtitle-filter input.
        if (_mpegDemux != null && enableDvbSubtitles == true)
        {
            IMpeg2Demultiplexer demuxer = _mpegDemux as IMpeg2Demultiplexer;
            hr = demuxer.CreateOutputPin(GetTSMedia(), "Pcr", out _pinPcr);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinPcr OK");
                IPin pDemuxerPcr = DsFindPin.ByName(_mpegDemux, "Pcr");
                IPin pSubtitlePcr = DsFindPin.ByName(_subtitleFilter, "Pcr");
                hr = graphBuilder.Connect(pDemuxerPcr, pSubtitlePcr);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinPcr in demuxer:{0:X}", hr);
            }
            hr = demuxer.CreateOutputPin(GetTSMedia(), "Subtitle", out _pinSubtitle);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinSubtitle OK");
                IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "Subtitle");
                IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "In");
                hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinSubtitle in demuxer:{0:X}", hr);
            }
            hr = demuxer.CreateOutputPin(GetTSMedia(), "PMT", out _pinPMT);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinPMT OK");
                IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "PMT");
                IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "PMT");
                hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinPMT in demuxer:{0:X}", hr);
            }
        }
        if (IsRadio == false)
        {
            if (!VMR9Util.g_vmr9.IsVMR9Connected)
            {
                //VMR9 is not supported, switch to overlay
                Log.Info("RTSPPlayer: vmr9 not connected");
                _mediaCtrl = null;
                Cleanup();
                return false;
            }
            VMR9Util.g_vmr9.SetDeinterlaceMode();
        }

        // Cache the graph's control/event/seek/position/audio interfaces for playback.
        _mediaCtrl = (IMediaControl)graphBuilder;
        mediaEvt = (IMediaEventEx)graphBuilder;
        _mediaSeeking = (IMediaSeeking)graphBuilder;
        mediaPos = (IMediaPosition)graphBuilder;
        basicAudio = graphBuilder as IBasicAudio;
        //DirectShowUtil.SetARMode(graphBuilder,AspectRatioMode.Stretched);
        DirectShowUtil.EnableDeInterlace(graphBuilder);
        if (VMR9Util.g_vmr9 != null)
        {
            m_iVideoWidth = VMR9Util.g_vmr9.VideoWidth;
            m_iVideoHeight = VMR9Util.g_vmr9.VideoHeight;
        }
        if (audioRendererFilter != null)
        {
            // Make the audio renderer the graph's reference clock (reset first).
            Log.Info("RTSPPlayer9:set reference clock");
            IMediaFilter mp = graphBuilder as IMediaFilter;
            IReferenceClock clock = audioRendererFilter as IReferenceClock;
            hr = mp.SetSyncSource(null);
            hr = mp.SetSyncSource(clock);
            Log.Info("RTSPPlayer9:set reference clock:{0:X}", hr);
        }
        Log.Info("RTSPPlayer: graph build successfull");
        return true;
    }
    catch (Exception ex)
    {
        Error.SetError("Unable to play movie", "Unable build graph for VMR9");
        Log.Error("RTSPPlayer:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
        CloseInterfaces();
        return false;
    }
}
/// <summary>
/// Capture worker: builds the graph (device source -> sample grabber -> hidden
/// renderer), runs it until the stop signal is set, then stops and clears all
/// graph references.
/// </summary>
void RunWorker()
{
    try
    {
        // Create the graph, the device source filter, and the sample grabber.
        graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;
        sourceObject = FilterInfo.CreateFilter(deviceMoniker);
        grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        grabberObject = grabber as IBaseFilter;
        graph.AddFilter(sourceObject, "source");
        graph.AddFilter(grabberObject, "grabber");
        using (AMMediaType mediaType = new AMMediaType())
        {
            // Request uncompressed RGB32 so the grabber callback sees raw pixels.
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            grabber.SetMediaType(mediaType);
            if (graph.Connect(sourceObject.GetPin(PinDirection.Output, 0), grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
            {
                if (grabber.GetConnectedMediaType(mediaType) == 0)
                {
                    // Pass the negotiated frame size to the grabber callback.
                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                    capGrabber.Width = header.BmiHeader.Width;
                    capGrabber.Height = header.BmiHeader.Height;
                }
            }
            // Render the grabber output (adds a renderer) and set callback-only grabbing.
            graph.Render(grabberObject.GetPin(PinDirection.Output, 0));
            grabber.SetBufferSamples(false);
            grabber.SetOneShot(false);
            grabber.SetCallback(capGrabber, 1);
            // Keep the renderer's window hidden.
            IVideoWindow wnd = (IVideoWindow)graph;
            wnd.put_AutoShow(false);
            wnd = null;
            // Run the graph and poll for the stop signal every 10ms.
            control = (IMediaControl)graph;
            control.Run();
            while (!stopSignal.WaitOne(0, true))
            {
                Thread.Sleep(10);
            }
            control.StopWhenReady();
        }
    }
    catch (Exception ex)
    {
        // Setup/teardown failures are only traced; the thread exits cleanly.
        System.Diagnostics.Debug.WriteLine(ex);
    }
    finally
    {
        // Drop all references so the graph can be collected/released.
        graph = null;
        sourceObject = null;
        grabberObject = null;
        grabber = null;
        capGrabber = null;
        control = null;
    }
}
/// <summary>
/// Builds an EVR-based playback graph for <paramref name="filename"/>, preferring any
/// configured decoders, renders all source output pins, wires up commercial-skip (EDL)
/// file watching, and starts playback. Shows a dialog when the EVR cannot be loaded.
/// </summary>
/// <param name="filename">Full path of the media file to play; empty string is a no-op.</param>
internal void PlayMovieInWindow(string filename)
{
    FileLogger.Log("PlayMovieInWindow: {0}", filename);
    lastJump = 0;
    int hr = 0;

    if (filename == string.Empty) return;

    this.graphBuilder = (IGraphBuilder)new FilterGraph();
    FileLogger.Log("PlayMovieInWindow: Create Graph");

    // Add the Enhanced Video Renderer by CLSID.
    this.evrRenderer = FilterGraphTools.AddFilterFromClsid(this.graphBuilder, new Guid("{FA10746C-9B63-4B6C-BC49-FC300EA5F256}"), "EVR");
    if (evrRenderer != null)
    {
        FileLogger.Log("PlayMovieInWindow: Add EVR");
        SetupEvrDisplay();
        //#if DEBUG
        // Optionally publish the graph in the ROT so GraphEdit can attach.
        if (ps.PublishGraph)
            rot = new DsROTEntry(this.graphBuilder);
        //#endif

        // Install a site that can veto filters during intelligent connect.
        IObjectWithSite grfSite = graphBuilder as IObjectWithSite;
        if (grfSite != null)
            grfSite.SetSite(new FilterBlocker(filename));

        // Pre-add preferred decoders configured for this file extension; each entry is
        // "ext;filter;filter;...", where a filter is either a CLSID or a filter name.
        string fileExt = Path.GetExtension(filename).ToLower();
        if (ps.PreferredDecoders != null)
        {
            foreach (string pa in ps.PreferredDecoders)
            {
                string[] pvA = pa.Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
                if (pvA[0].ToLower() == fileExt)
                {
                    for (int i = 1; i < pvA.Length; i++)
                    {
                        string strFilter = pvA[i].Trim();
                        IBaseFilter filter = null;
                        try
                        {
                            if (Regex.IsMatch(strFilter, @"{?\w{8}-\w{4}-\w{4}-\w{4}-\w{12}}?"))
                                filter = FilterGraphTools.AddFilterFromClsid(graphBuilder, new Guid(strFilter), strFilter);
                            else
                                filter = FilterGraphTools.AddFilterByName(graphBuilder, FilterCategory.LegacyAmFilterCategory, strFilter);
                            if (filter != null)
                            {
                                FileLogger.Log("Added {0} to the graph", strFilter);
                            }
                            else FileLogger.Log("{0} not added to the graph", strFilter);
                        }
                        finally
                        {
                            // The graph keeps its own reference; release ours.
                            if (filter != null) Marshal.ReleaseComObject(filter);
                            filter = null;
                        }
                    }
                }
            }
        }

        // Have the graph builder construct its the appropriate graph automatically
        //hr = this.graphBuilder.RenderFile(filename, null);

        // Manual render: add the source filter, then repeatedly render whichever
        // output pin is still unconnected until none remain.
        IBaseFilter sourceFilter = null;
        try
        {
            hr = graphBuilder.AddSourceFilter(filename, "Source", out sourceFilter);
            DsError.ThrowExceptionForHR(hr);
            IPin outPin = DsFindPin.ByConnectionStatus(sourceFilter, PinConnectedStatus.Unconnected, 0);
            while (outPin != null)
            {
                try
                {
                    hr = graphBuilder.Render(outPin);
                    DsError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    if (outPin != null) Marshal.ReleaseComObject(outPin);
                    outPin = null;
                }
                outPin = DsFindPin.ByConnectionStatus(sourceFilter, PinConnectedStatus.Unconnected, 0);
            }
        }
        finally
        {
            if (sourceFilter != null) Marshal.ReleaseComObject(sourceFilter);
        }

        SetEvrVideoMode();

        // QueryInterface for DirectShow interfaces
        this.mediaControl = (IMediaControl)this.graphBuilder;
        this.mediaEventEx = (IMediaEventEx)this.graphBuilder;
        this.mediaSeeking = (IMediaSeeking)this.graphBuilder;
        this.mediaPosition = (IMediaPosition)this.graphBuilder;

        // Query for video interfaces, which may not be relevant for audio files
        //this.videoWindow = this.graphBuilder as IVideoWindow;
        //this.basicVideo = this.graphBuilder as IBasicVideo;

        // Query for audio interfaces, which may not be relevant for video-only files
        this.basicAudio = this.graphBuilder as IBasicAudio;

        // Is this an audio-only file (no video component)?
        CheckVisibility();

        // Have the graph signal event via window callbacks for performance
        hr = this.mediaEventEx.SetNotifyWindow(this.Handle, WM.GRAPH_NOTIFY, IntPtr.Zero);
        DsError.ThrowExceptionForHR(hr);

        // Setup the video window
        //hr = this.videoWindow.put_Owner(this.Handle);
        //DsError.ThrowExceptionForHR(hr);
        this.evrDisplay.SetVideoWindow(container.Handle);
        //hr = this.videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings | WindowStyle.ClipChildren);
        //DsError.ThrowExceptionForHR(hr);

        hr = InitVideoWindow(1, 1);
        DsError.ThrowExceptionForHR(hr);
        GetFrameStepInterface();

        // Complete window initialization
        //CheckSizeMenu(menuFileSizeNormal);
        //this.isFullScreen = false;
        this.currentPlaybackRate = 1.0;
        //UpdateMainTitle();
        container.Focus();

        //pre-roll the graph
        hr = this.mediaControl.Pause();
        DsError.ThrowExceptionForHR(hr);

        // Run the graph to play the media file
        hr = this.mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);

        // Watch the sibling EDL (commercial list) file so edits are picked up live.
        if (commWatcher != null)
            commWatcher.Dispose();
        string commPath = Path.Combine(Path.GetDirectoryName(filename), Commercials.GetEdlFilename(filename));
        ReadComm(commPath);
        commWatcher = new FileSystemWatcher(Path.GetDirectoryName(filename), Commercials.GetEdlFilename(filename));
        commWatcher.Changed += new FileSystemEventHandler(commWatcher_Changed);
        commWatcher.Created += new FileSystemEventHandler(commWatcher_Changed);
        //commWatcher.NotifyFilter = NotifyFilters.LastWrite | NotifyFilters.Size;
        commWatcher.EnableRaisingEvents = true;

        MoveToBookmark();

        this.currentState = PlayState.Running;
        //if (isFullScreen)
        //    tmMouseMove.Enabled = true;
    }
    else
    {
        //MessageBox.Show("EVR cannot be loaded on this PC");
        using (EPDialog ed = new EPDialog())
            ed.ShowDialog("EVR Error", "The Enhanced Video Renderer cannot be loaded on this PC", 30);
    }
}
// Thread entry point.
// Builds a capture graph (video device -> sample grabber, optional hidden renderer),
// optionally reconfigures the stream size, runs the graph until stopEvent is set,
// then releases all COM objects. Errors are reported via VideoSourceError.
public void WorkerThread()
{
    int hr;
    Guid cat;
    Guid med;

    // grabber
    Grabber grabber = new Grabber(this);

    // objects
    object graphObj = null;
    object grabberObj = null;

    // interfaces
    IGraphBuilder graphBuilder = null;
    DShowNET.ICaptureGraphBuilder2 captureGraphBuilder = null;
    IBaseFilter videoDeviceFilter = null;
    IBaseFilter grabberFilter = null;
    ISampleGrabber sg = null;
    IMediaControl mc = null;

    try
    {
        // Make a new filter graph
        graphObj = Activator.CreateInstance(Type.GetTypeFromCLSID(DShowNET.Clsid.FilterGraph, true));
        graphBuilder = (IGraphBuilder)graphObj;

        // Get the Capture Graph Builder
        Guid clsid = DShowNET.Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(DShowNET.ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (DShowNET.ICaptureGraphBuilder2)DShowNET.DsBugWO.CreateDsInstance(ref clsid, ref riid);

        // Link the CaptureGraphBuilder to the filter graph
        hr = captureGraphBuilder.SetFiltergraph((DShowNET.IGraphBuilder)graphBuilder);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Publish the graph in the ROT for debugging with GraphEdit.
        int rotCookie = 0;
        DShowNET.DsROT.AddGraphToRot(graphBuilder, out rotCookie);

        // Get the video device and add it to the filter graph
        if (deviceMoniker != null)
        {
            videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(deviceMoniker);
            hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }

        // create sample grabber, object and filter
        grabberObj = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
        grabberFilter = (IBaseFilter)grabberObj;
        sg = (ISampleGrabber)grabberObj;

        // add sample grabber filter to filter graph
        hr = graphBuilder.AddFilter(grabberFilter, "grabber");
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Try looking for an video device interleaved media type
        IBaseFilter testFilter = videoDeviceFilter; // grabberFilter (not supported)
        object o;
        cat = DShowNET.PinCategory.Capture;
        med = DShowNET.MediaType.Interleaved;
        Guid iid = typeof(DShowNET.IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface(ref cat, ref med, (DShowNET.IBaseFilter)testFilter, ref iid, out o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
            hr = captureGraphBuilder.FindInterface(ref cat, ref med, (DShowNET.IBaseFilter)testFilter, ref iid, out o);
            if (hr != 0)
            {
                o = null;
            }
        }

        // Set the video stream configuration to data member
        videoStreamConfig = o as DShowNET.IAMStreamConfig;
        o = null;

        //modifies the stream size and frame rate
        if (modifyStream)
        {
            //set size of frame
            BitmapInfoHeader bmiHeader;
            bmiHeader = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
            bmiHeader.Width = streamSize.Width;
            bmiHeader.Height = streamSize.Height;
            setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);

            //set frame rate (not supported on the cameras we have)
            /*
             * long avgTimePerFrame = (long)(10000000 / framerate);
             * setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", avgTimePerFrame);
             */
        }

        // connect pins (Turns on the video device)
        if (graphBuilder.Connect((IPin)AForge.Video.DirectShow.Internals.Tools.GetOutPin((AForge.Video.DirectShow.Internals.IBaseFilter)videoDeviceFilter, 0), (IPin)AForge.Video.DirectShow.Internals.Tools.GetInPin((AForge.Video.DirectShow.Internals.IBaseFilter)grabberFilter, 0)) < 0)
        {
            throw new ApplicationException("Failed connecting filters");
        }

        // Set the sample grabber media type settings (request RGB24 video)
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Video;
        mt.subType = MediaSubType.RGB24;
        sg.SetMediaType(mt);

        // get media type and set sample grabber parameters
        if (sg.GetConnectedMediaType(mt) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
            // Non-zero Compression here is treated as a YUYV stream by the grabber.
            if (vih.BmiHeader.Compression != 0)
            {
                YUYV = true;
                grabber.setYUYV(YUYV);
            }
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            //mt.Dispose();
        }

        // Set various sample grabber properties (callback-only grabbing)
        sg.SetBufferSamples(false);
        sg.SetOneShot(false);
        sg.SetCallback(grabber, 1);

        if (!preventFreezing)
        {
            // render the grabber output (adds a renderer filter)
            graphBuilder.Render((IPin)AForge.Video.DirectShow.Internals.Tools.GetOutPin((AForge.Video.DirectShow.Internals.IBaseFilter)grabberFilter, 0));

            // Do not show active (source) window
            IVideoWindow win = (IVideoWindow)graphObj;
            win.put_AutoShow(0);
            win = null;
        }

        // get media control
        mc = (IMediaControl)graphBuilder;

        // run the graph and poll the stop event every 100ms
        mc.Run();

        while (!stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        mc.StopWhenReady();
    }
    // catch any exceptions
    catch (Exception e)
    {
        // provide information to clients
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(e.Message));
        }
    }
    // finalization block
    finally
    {
        // release all objects: drop typed views, then release the two COM objects
        mc = null;
        graphBuilder = null;
        captureGraphBuilder = null;
        videoDeviceFilter = null;
        grabberFilter = null;
        sg = null;

        if (graphObj != null)
        {
            Marshal.ReleaseComObject(graphObj);
            graphObj = null;
        }
        if (grabberObj != null)
        {
            Marshal.ReleaseComObject(grabberObj);
            grabberObj = null;
        }
    }
}
/// <summary>
/// Builds the DVD DirectShow graph: a DVD Navigator source, a renderless VMR9
/// for WPF presentation, and a second (hidden, windowed) VMR9 that receives the
/// Line21 stream — the windowed instance is what makes interactive DVD menus work.
/// On any failure, resources are freed and MediaFailed is raised; on success,
/// MediaOpened is raised.
/// </summary>
private void BuildGraph()
{
    try
    {
        FreeResources();
        int hr;
        /* Create our new graph */
        m_graph = (IGraphBuilder) new FilterGraphNoThread();
#if DEBUG
        // Register the graph in the Running Object Table so GraphEdit can attach (debug builds only).
        m_rot = new DsROTEntry(m_graph);
#endif
        /* We are going to use the VMR9 for now. The EVR does not
         * seem to work with the interactive menus yet. It should
         * play Dvds fine otherwise */
        var rendererType = VideoRendererType.VideoMixingRenderer9;
        /* Creates and initializes a new renderer ready to render to WPF.
         * NOTE(review): the '2' presumably requests two input streams (video + subpicture) — confirm against CreateVideoRenderer. */
        m_renderer = CreateVideoRenderer(rendererType, m_graph, 2);
        /* Do some VMR9 specific stuff */
        if (rendererType == VideoRendererType.VideoMixingRenderer9)
        {
            var mixer = m_renderer as IVMRMixerControl9;
            if (mixer != null)
            {
                VMR9MixerPrefs dwPrefs;
                mixer.GetMixingPrefs(out dwPrefs);
                dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;
                /* Enable this line to prefer YUV */
                //hr = mixer.SetMixingPrefs(dwPrefs);
            }
        }
        /* Create a new DVD Navigator. */
        var dvdNav = (IBaseFilter) new DVDNavigator();
        /* The DVDControl2 interface lets us control DVD features */
        m_dvdControl = dvdNav as IDvdControl2;
        if (m_dvdControl == null)
        {
            throw new Exception("Could not QueryInterface the IDvdControl2 interface");
        }
        /* QueryInterface the DVDInfo2 */
        m_dvdInfo = dvdNav as IDvdInfo2;
        /* If a Dvd directory has been set then use it, if not, let DShow find the Dvd */
        if (!string.IsNullOrEmpty(DvdDirectory))
        {
            hr = m_dvdControl.SetDVDDirectory(DvdDirectory);
            DsError.ThrowExceptionForHR(hr);
        }
        /* This gives us the DVD time in Hours-Minutes-Seconds-Frame time format, and other options */
        hr = m_dvdControl.SetOption(DvdOptionFlag.HMSFTimeCodeEvents, true);
        DsError.ThrowExceptionForHR(hr);
        /* If the graph stops, resume at the same point.
         * NOTE(review): this hr is deliberately(?) not checked, unlike the surrounding calls — confirm. */
        m_dvdControl.SetOption(DvdOptionFlag.ResetOnStop, false);
        hr = m_graph.AddFilter(dvdNav, "DVD Navigator");
        DsError.ThrowExceptionForHR(hr);
        IPin dvdVideoPin = null;
        IPin dvdAudioPin = null;
        IPin dvdSubPicturePin = null;
        IPin dvdNavPin;
        int i = 0;
        /* Loop all the output pins on the DVD Navigator, trying to find which pins are which.
         * We could more easily find the pins by name, but this is more fun...and more flexible
         * if we ever want to use a 3rd party DVD navigator that used different pin names */
        while ((dvdNavPin = DsFindPin.ByDirection(dvdNav, PinDirection.Output, i)) != null)
        {
            var mediaTypes = new AMMediaType[1];
            IntPtr pFetched = IntPtr.Zero;
            IEnumMediaTypes mediaTypeEnum;
            dvdNavPin.EnumMediaTypes(out mediaTypeEnum);
            /* Loop over each of the mediaTypes of each pin */
            while (mediaTypeEnum.Next(1, mediaTypes, pFetched) == 0)
            {
                AMMediaType mediaType = mediaTypes[0];
                /* This will be the video stream pin */
                if (mediaType.subType == MediaSubType.Mpeg2Video)
                {
                    /* Keep the ref and we'll work with it later */
                    dvdVideoPin = dvdNavPin;
                    break;
                }
                /* This will be the audio stream pin */
                if (mediaType.subType == MediaSubType.DolbyAC3 || mediaType.subType == MediaSubType.Mpeg2Audio)
                {
                    /* Keep the ref and we'll work with it later */
                    dvdAudioPin = dvdNavPin;
                    break;
                }
                /* This is the Dvd sub picture pin. This generally
                 * shows overlays for Dvd menus and sometimes closed captions */
                if (mediaType.subType == DVD_SUBPICTURE_TYPE)
                {
                    /* Keep the ref and we'll work with it later */
                    dvdSubPicturePin = dvdNavPin;
                    break;
                }
            }
            mediaTypeEnum.Reset();
            Marshal.ReleaseComObject(mediaTypeEnum);
            i++;
        }
        /* This is the windowed renderer. This is *NEEDED* in order
         * for interactive menus to work with the other VMR9 in renderless mode */
        var dummyRenderer = (IBaseFilter) new VideoMixingRenderer9();
        var dummyRendererConfig = (IVMRFilterConfig9)dummyRenderer;
        /* In order for this interactive menu trick to work, the VMR9
         * must be set to Windowed. We will make sure the window is hidden later on */
        hr = dummyRendererConfig.SetRenderingMode(VMR9Mode.Windowed);
        DsError.ThrowExceptionForHR(hr);
        hr = dummyRendererConfig.SetNumberOfStreams(1);
        DsError.ThrowExceptionForHR(hr);
        hr = m_graph.AddFilter(dummyRenderer, "Dummy Windowed");
        DsError.ThrowExceptionForHR(hr);
        if (dvdAudioPin != null)
        {
            /* This should render out to the default audio device. We
             * could modify this code here to go out any audio
             * device, such as SPDIF or another sound card */
            hr = m_graph.Render(dvdAudioPin);
            DsError.ThrowExceptionForHR(hr);
        }
        /* Get the first input pin on our dummy renderer */
        m_dummyRendererPin = DsFindPin.ByConnectionStatus(dummyRenderer, /* Filter to search */
                                                          PinConnectedStatus.Unconnected,
                                                          0);
        /* Get an available pin on our real renderer */
        IPin rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                        PinConnectedStatus.Unconnected,
                                                        0); /* Pin index */
        /* Connect the pin to the renderer.
         * NOTE(review): dvdVideoPin may still be null here if no MPEG2 video pin was found — Connect would then fail; confirm intended. */
        hr = m_graph.Connect(dvdVideoPin, rendererPin);
        DsError.ThrowExceptionForHR(hr);
        /* Get the next available pin on our real renderer */
        rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                   PinConnectedStatus.Unconnected,
                                                   0); /* Pin index */
        /* Render the sub picture, which will connect
         * the DVD navigator to the codec, not the renderer */
        hr = m_graph.Render(dvdSubPicturePin);
        DsError.ThrowExceptionForHR(hr);
        /* These are the subtypes most likely to be our dvd subpicture */
        var preferedSubpictureTypes = new[] { MediaSubType.ARGB4444, MediaSubType.AI44, MediaSubType.AYUV, MediaSubType.ARGB32 };
        IPin dvdSubPicturePinOut = null;
        /* Find what should be the subpicture pin out */
        foreach (var guidType in preferedSubpictureTypes)
        {
            dvdSubPicturePinOut = FindPinInGraphByMediaType(guidType, /* GUID of the media type being searched for */
                                                            PinDirection.Output,
                                                            m_graph); /* Our current graph */
            if (dvdSubPicturePinOut != null)
            {
                break;
            }
        }
        if (dvdSubPicturePinOut == null)
        {
            throw new Exception("Could not find the sub picture pin out");
        }
        /* Here we connec thte Dvd sub picture pin to the video renderer.
         * This enables the overlays on Dvd menus and some closed
         * captions to be rendered. */
        hr = m_graph.Connect(dvdSubPicturePinOut, rendererPin);
        DsError.ThrowExceptionForHR(hr);
        /* Search for the Line21 out in the graph */
        IPin line21Out = FindPinInGraphByMediaType(MediaType.AuxLine21Data, PinDirection.Output, m_graph);
        if (line21Out == null)
        {
            throw new Exception("Could not find the Line21 pin out");
        }
        /* We connect our line21Out out in to the dummy renderer
         * this is what ultimatly makes interactive DVDs work with
         * VMR9 in renderless (for WPF) */
        hr = m_graph.Connect(line21Out, m_dummyRendererPin);
        DsError.ThrowExceptionForHR(hr);
        /* This is the dummy renderers Win32 window. */
        m_dummyRenderWindow = dummyRenderer as IVideoWindow;
        if (m_dummyRenderWindow == null)
        {
            throw new Exception("Could not QueryInterface for IVideoWindow");
        }
        ConfigureDummyWindow();
        /* Setup our base classes with this filter graph */
        SetupFilterGraph(m_graph);
        /* Sets the NaturalVideoWidth/Height */
        //SetNativePixelSizes(m_renderer);
    }
    catch (Exception ex)
    {
        // On any failure: tear everything down, report failure, and do NOT report MediaOpened.
        FreeResources();
        InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
        return;
    }
    InvokeMediaOpened();
}
/// <summary>
/// Builds the playback filter graph for the current <c>fileName</c>:
/// source filter (WM ASF Reader for .wmv, otherwise intelligent AddSourceFilter
/// with an Async Reader fallback), a video sample grabber, and — when
/// <c>useAudioGrabber</c> is set — an audio sample grabber spliced in front of the
/// audio renderer. On any exception, DestroyFilters() is called and
/// VideoSourceError is raised; no exception escapes this method.
/// </summary>
private void CreateFilters()
{
    isValid = true;
    // grabber callback objects (receive decoded video/audio samples)
    grabberVideo = new GrabberVideo(this);
    grabberAudio = new GrabberAudio(this);
    // objects
    graphObject = null;
    grabberObjectVideo = null;
    grabberObjectAudio = null;
    int sourceBaseVideoPinIndex = 0;
    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }
        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;
        // create source device's object
        if (fileName.ToLower().EndsWith(".wmv"))
        {
            // Windows Media files need the WM ASF Reader; its pin 0 is audio,
            // so the video pin index is 1 in this branch.
            type = Type.GetTypeFromCLSID(Clsid.WMASFReader);
            if (type == null)
            {
                throw new ApplicationException("Failed creating ASF Reader filter");
            }
            sourceBase = (IBaseFilter)Activator.CreateInstance(type);
            IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase;
            sourceFile.Load(fileName, null);
            graph.AddFilter(sourceBase, "source");
            sourceBaseVideoPinIndex = 1;
        }
        else
        {
            // Let DirectShow pick a source filter; fall back to the Async Reader if that fails.
            graph.AddSourceFilter(fileName, "source", out sourceBase);
            if (sourceBase == null)
            {
                try
                {
                    type = Type.GetTypeFromCLSID(Clsid.AsyncReader);
                    if (type == null)
                    {
                        throw new ApplicationException("Failed creating Async Reader filter");
                    }
                    sourceBase = (IBaseFilter)Activator.CreateInstance(type);
                    IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase;
                    sourceFile.Load(fileName, null);
                    graph.AddFilter(sourceBase, "source");
                }
                catch
                {
                    throw new ApplicationException("Failed creating source filter");
                }
            }
            sourceBaseVideoPinIndex = 0;
        }
        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }
        // create sample grabber
        grabberObjectVideo = Activator.CreateInstance(type);
        sampleGrabberVideo = (ISampleGrabber)grabberObjectVideo;
        grabberBaseVideo = (IBaseFilter)grabberObjectVideo;
        // add grabber filters to graph
        graph.AddFilter(grabberBaseVideo, "grabberVideo");
        // set media type
        AMMediaType mediaType = new AMMediaType
        {
            MajorType = MediaType.Video,
            SubType = MediaSubType.ARGB32 /* MediaSubType.RGB24 */
        };
        ;
        sampleGrabberVideo.SetMediaType(mediaType);
        // connect pins
        IPin outPin = Tools.GetOutPin(sourceBase, sourceBaseVideoPinIndex);
        IPin inPin = Tools.GetInPin(grabberBaseVideo, 0);
        if (graph.Connect(outPin, inPin) < 0)
        {
            throw new ApplicationException("Failed connecting sourceBase to grabberBaseVideo");
        }
        Marshal.ReleaseComObject(outPin);
        Marshal.ReleaseComObject(inPin);
        // get media type (frame dimensions come from the negotiated connection)
        if (sampleGrabberVideo.GetConnectedMediaType(mediaType) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
            grabberVideo.Width = vih.BmiHeader.Width;
            grabberVideo.Height = vih.BmiHeader.Height;
            mediaType.Dispose();
        }
        if (useAudioGrabber)
        {
            // *****************************************************************
            // ******** Add the audio grabber to monitor audio peaks ***********
            bool audioGrabberIsConnected = false;
            Tools.FilterInfo2 filterInfo2 = Tools.GetNextFilter(sourceBase, PinDirection.Output, 0);
            foreach (Tools.PinInfo2 pinInfo2 in filterInfo2.Pins)
            {
                if (pinInfo2.PinInfo.Direction == PinDirection.Output)
                {
                    if (!Tools.IsPinConnected(pinInfo2.Pin))
                    {
                        // Best-effort: render each unconnected output pin, then check whether
                        // the resulting connection is audio. Failures are deliberately swallowed
                        // so we can try the next pin.
                        try
                        {
                            graph.Render(pinInfo2.Pin);
                            AMMediaType mt = new AMMediaType();
                            pinInfo2.Pin.ConnectionMediaType(mt);
                            if (mt.MajorType == MediaType.Audio)
                            {
                                // Obtain a reference to the filter connected to the audio output of the video splitter (usually, this is the audio decoder)
                                Tools.FilterInfo2 decoderFilterInfo2 = Tools.GetNextFilter(pinInfo2.PinInfo.Filter, PinDirection.Output, 0);
                                // Remove all the filters connected to the audio decoder filter
                                System.Collections.Generic.List<Tools.FilterInfo2> filtersInfo2 =
                                    new System.Collections.Generic.List<Tools.FilterInfo2>();
                                Tools.FilterInfo2 testFilterInfo2 = Tools.GetNextFilter(decoderFilterInfo2.Filter, PinDirection.Output, 0);
                                while (true)
                                {
                                    filtersInfo2.Add(testFilterInfo2);
                                    testFilterInfo2 = Tools.GetNextFilter(testFilterInfo2.Filter, PinDirection.Output, 0);
                                    if (testFilterInfo2.Filter == null)
                                    {
                                        break;
                                    }
                                }
                                foreach (Tools.FilterInfo2 fi2 in filtersInfo2)
                                {
                                    graph.RemoveFilter(fi2.Filter);
                                    fi2.Release();
                                }
                                // get type for sample grabber
                                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                                if (type == null)
                                {
                                    throw new ApplicationException("Failed creating audio sample grabber");
                                }
                                // create sample grabber
                                grabberObjectAudio = Activator.CreateInstance(type);
                                sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                                grabberBaseAudio = (IBaseFilter)grabberObjectAudio;
                                // add grabber filters to graph
                                graph.AddFilter(grabberBaseAudio, "grabberAudio");
                                // set media type
                                AMMediaType mediaTypeAudio = new AMMediaType
                                {
                                    MajorType = MediaType.Audio,
                                    SubType = MediaSubType.PCM,
                                    FormatType = FormatType.WaveEx
                                };
                                sampleGrabberAudio.SetMediaType(mediaTypeAudio);
                                // Splice the grabber between the decoder and the (re-rendered) audio renderer.
                                outPin = Tools.GetOutPin(decoderFilterInfo2.Filter, 0);
                                inPin = Tools.GetInPin(grabberBaseAudio, 0);
                                if (graph.Connect(outPin, inPin) < 0)
                                {
                                    throw new ApplicationException("Failed connecting filter to grabberBaseAudio");
                                }
                                Marshal.ReleaseComObject(outPin);
                                Marshal.ReleaseComObject(inPin);
                                // Finally, connect the grabber to the audio renderer
                                outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                                graph.Render(outPin);
                                mt = new AMMediaType();
                                outPin.ConnectionMediaType(mt);
                                if (!Tools.IsPinConnected(outPin))
                                {
                                    throw new ApplicationException("Failed obtaining media audio information");
                                }
                                wavFormat = new WaveFormatEx();
                                Marshal.PtrToStructure(mt.FormatPtr, wavFormat);
                                Marshal.ReleaseComObject(outPin);
                                // configure sample grabber
                                sampleGrabberAudio.SetBufferSamples(false);
                                sampleGrabberAudio.SetOneShot(false);
                                sampleGrabberAudio.SetCallback(grabberAudio, 1);
                                audioGrabberIsConnected = true;
                                break;
                            }
                        }
                        catch
                        {
                            // intentional: try the next output pin
                        }
                    }
                }
            }
            filterInfo2.Release();
            if (!audioGrabberIsConnected)
            {
                // Fallback: look for a raw (still unconnected) audio pin directly on the source
                // and connect the grabber to it.
                foreach (Tools.PinInfo2 pinInfo2 in Tools.GetPins(sourceBase))
                {
                    if (!Tools.IsPinConnected(pinInfo2.Pin))
                    {
                        foreach (AMMediaType mt in Tools.GetMediaTypes(pinInfo2.Pin))
                        {
                            if (mt.MajorType == MediaType.Audio)
                            {
                                // create sample grabber
                                // NOTE(review): relies on 'type' still holding the SampleGrabber CLSID
                                // from the video-grabber setup above — confirm if this branch is reordered.
                                grabberObjectAudio = Activator.CreateInstance(type);
                                sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                                grabberBaseAudio = (IBaseFilter)grabberObjectAudio;
                                // add grabber filters to graph
                                graph.AddFilter(grabberBaseAudio, "grabberAudio");
                                // set media type
                                AMMediaType mediaTypeAudio = new AMMediaType
                                {
                                    MajorType = MediaType.Audio,
                                    SubType = MediaSubType.PCM,
                                    FormatType = FormatType.WaveEx
                                };
                                sampleGrabberAudio.SetMediaType(mediaTypeAudio);
                                inPin = Tools.GetInPin(grabberBaseAudio, 0);
                                if (graph.Connect(pinInfo2.Pin, inPin) < 0)
                                {
                                    throw new ApplicationException("Failed connecting sourceBase to grabberBaseVideo");
                                }
                                Marshal.ReleaseComObject(inPin);
                                // Finally, connect the grabber to the audio renderer
                                outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                                graph.Render(outPin);
                                AMMediaType amt = new AMMediaType();
                                outPin.ConnectionMediaType(amt);
                                if (!Tools.IsPinConnected(outPin))
                                {
                                    throw new ApplicationException("Failed obtaining media audio information");
                                }
                                wavFormat = new WaveFormatEx();
                                Marshal.PtrToStructure(amt.FormatPtr, wavFormat);
                                Marshal.ReleaseComObject(outPin);
                                // configure sample grabber
                                sampleGrabberAudio.SetBufferSamples(false);
                                sampleGrabberAudio.SetOneShot(false);
                                sampleGrabberAudio.SetCallback(grabberAudio, 1);
                                audioGrabberIsConnected = true;
                                break;
                            }
                        }
                    }
                }
            }
            // *****************************************************************
        }
        // let's do the rendering, if we don't need to prevent freezing
        if (!preventFreezing)
        {
            // render pin
            graph.Render(Tools.GetOutPin(grabberBaseVideo, 0));
            // configure video window
            IVideoWindow window = (IVideoWindow)graphObject;
            window.put_AutoShow(false);
            window = null;
        }
        // configure sample grabber
        sampleGrabberVideo.SetBufferSamples(false);
        sampleGrabberVideo.SetOneShot(false);
        sampleGrabberVideo.SetCallback(grabberVideo, 1);
        // disable clock, if someone requested it
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)graphObject;
            mediaFilter.SetSyncSource(null);
        }
        // get media control
        mediaControl = (IMediaControl)graphObject;
        // get media seek control
        mediaSeekControl = (IMediaSeeking)graphObject;
        // get media events' interface
        mediaEvent = (IMediaEventEx)graphObject;
        // get media audio control
        basicAudio = (IBasicAudio)graphObject;
    }
    catch (Exception exception)
    {
        DestroyFilters();
        // provide information to clients
        VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(exception.Message));
    }
}
/// <summary>
/// Configures the DirectShow graph to play the selected video capture
/// device with the selected parameters.
/// Raises MediaFailed (after freeing resources) on error, or MediaOpened
/// on success — never both.
/// </summary>
private void SetupGraph()
{
    /* Clean up any messes left behind */
    FreeResources();
    try
    {
        /* Create a new graph */
        m_graph = (IGraphBuilder)new FilterGraphNoThread();
#if DEBUG
        // Expose the graph in the Running Object Table for GraphEdit (debug builds only).
        m_rotEntry = new DsROTEntry(m_graph);
#endif
        /* Create a capture graph builder to help
         * with rendering a capture graph */
        var captureGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
        /* Set our filter graph to the capture graph */
        int hr = captureGraph.SetFiltergraph(m_graph);
        DsError.ThrowExceptionForHR(hr);
        /* Add our capture device source to the graph, by friendly name
         * or by device path, depending on which selection changed */
        if (m_videoCaptureSourceChanged)
        {
            m_captureDevice = AddFilterByName(m_graph, FilterCategory.VideoInputDevice, VideoCaptureSource);
            m_videoCaptureSourceChanged = false;
        }
        else if (m_videoCaptureDeviceChanged)
        {
            m_captureDevice = AddFilterByDevicePath(m_graph, FilterCategory.VideoInputDevice, VideoCaptureDevice.DevicePath);
            m_videoCaptureDeviceChanged = false;
        }
        /* If we have a null capture device, we have an issue */
        if (m_captureDevice == null)
            throw new Exception(string.Format("Capture device {0} not found or could not be created", VideoCaptureSource));
        if (UseYuv && !EnableSampleGrabbing)
        {
            /* Configure the video output pin with our parameters and if it fails
             * then just use the default media subtype*/
            if (!SetVideoCaptureParameters(captureGraph, m_captureDevice, MediaSubType.YUY2))
                SetVideoCaptureParameters(captureGraph, m_captureDevice, Guid.Empty);
        }
        else
            /* Configure the video output pin with our parameters */
            SetVideoCaptureParameters(captureGraph, m_captureDevice, Guid.Empty);
        var rendererType = VideoRendererType.VideoMixingRenderer9;
        /* Creates a video renderer and register the allocator with the base class */
        m_renderer = CreateVideoRenderer(rendererType, m_graph, 1);
        if (rendererType == VideoRendererType.VideoMixingRenderer9)
        {
            var mixer = m_renderer as IVMRMixerControl9;
            if (mixer != null && !EnableSampleGrabbing && UseYuv)
            {
                VMR9MixerPrefs dwPrefs;
                mixer.GetMixingPrefs(out dwPrefs);
                dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;
                /* Prefer YUV */
                mixer.SetMixingPrefs(dwPrefs);
            }
        }
        if (EnableSampleGrabbing)
        {
            m_sampleGrabber = (ISampleGrabber)new SampleGrabber();
            SetupSampleGrabber(m_sampleGrabber);
            hr = m_graph.AddFilter(m_sampleGrabber as IBaseFilter, "SampleGrabber");
            DsError.ThrowExceptionForHR(hr);
        }
        var videoOutPin = DsFindPin.ByDirection(m_captureDevice, PinDirection.Output, 0);
        if (videoOutPin == null)
            throw new Exception("Could not query the video output pin on source filter");
        /* Intelligently connect the pins in the graph to the renderer */
        hr = m_graph.Render(videoOutPin);
        Marshal.ReleaseComObject(videoOutPin);
        //hr = captureGraph.RenderStream(PinCategory.Capture,
        //                               MediaType.Video,
        //                               m_captureDevice,
        //                               null,
        //                               m_renderer);
        DsError.ThrowExceptionForHR(hr);
        /* Register the filter graph
         * with the base classes */
        SetupFilterGraph(m_graph);
        /* Sets the NaturalVideoWidth/Height */
        SetNativePixelSizes(m_renderer);
        HasVideo = true;
        /* Make sure we Release() this COM reference */
        Marshal.ReleaseComObject(captureGraph);
    }
    catch (Exception ex)
    {
        /* Something got fuct up */
        FreeResources();
        InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
        /* BUGFIX: bail out here — previously execution fell through and
         * InvokeMediaOpened() was raised even after a failure, so clients
         * received both MediaFailed and MediaOpened. The sibling DVD
         * BuildGraph() already returns from its catch block. */
        return;
    }
    /* Success */
    InvokeMediaOpened();
}
public static int RenderFilter(IGraphBuilder graph, IBaseFilter up) { IPin pinSrc = null; int i = 0; while ((pinSrc = DsFindPin.ByDirection(up, PinDirection.Output, i++)) != null) { graph.Render(pinSrc); Marshal.FinalReleaseComObject(pinSrc); } return 0; }
public static bool RenderOutputPins(IGraphBuilder graphBuilder, IBaseFilter filter, int maxPinsToRender, bool tryAllFilters) { int pinsRendered = 0; bool bAllConnected = true; IEnumPins pinEnum; FilterInfo info; filter.QueryFilterInfo(out info); ReleaseComObject(info.pGraph); int hr = filter.EnumPins(out pinEnum); if ((hr == 0) && (pinEnum != null)) { Log.Info("got pins"); pinEnum.Reset(); IPin[] pins = new IPin[1]; int iFetched; int iPinNo = 0; do { // Get the next pin //Log.Info(" get pin:{0}",iPinNo); iPinNo++; hr = pinEnum.Next(1, pins, out iFetched); if (hr == 0) { if (iFetched == 1 && pins[0] != null) { PinInfo pinInfo = new PinInfo(); hr = pins[0].QueryPinInfo(out pinInfo); DsUtils.FreePinInfo(pinInfo); if (hr == 0) { Log.Info(" got pin#{0}:{1}", iPinNo - 1, pinInfo.name); } else { Log.Info(" got pin:?"); } PinDirection pinDir; pins[0].QueryDirection(out pinDir); if (pinDir == PinDirection.Output) { IPin pConnectPin = null; hr = pins[0].ConnectedTo(out pConnectPin); if (hr != 0 || pConnectPin == null) { hr = 0; if (TryConnect(graphBuilder, info.achName, pins[0], tryAllFilters)) //if ((hr=graphBuilder.Render(pins[0])) == 0) { Log.Info(" render ok"); } else { Log.Error(" render {0} failed:{1:x}, trying alternative graph builder", pinInfo.name, hr); if ((hr = graphBuilder.Render(pins[0])) == 0) { Log.Info(" render ok"); } else { Log.Error(" render failed:{0:x}", hr); bAllConnected = false; } } pinsRendered++; } if (pConnectPin != null) { ReleaseComObject(pConnectPin); } pConnectPin = null; //else Log.Info("pin is already connected"); } ReleaseComObject(pins[0]); } else { iFetched = 0; Log.Info("no pins?"); break; } } else { iFetched = 0; } } while (iFetched == 1 && pinsRendered < maxPinsToRender && bAllConnected); ReleaseComObject(pinEnum); } return bAllConnected; }
/// <summary>
/// Prompts the user for a media file, builds a playback graph with a default
/// DirectSound audio renderer plus a CaptureManager EVR multi-sink video node,
/// connects/renders the source filter's pins, and runs the graph.
/// async void is acceptable here because this is a top-level event handler.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Routed event data (unused).</param>
private async void Button_Click(object sender, RoutedEventArgs e)
{
    OpenFileDialog lopenFileDialog = new OpenFileDialog();
    lopenFileDialog.AddExtension = true;
    var lresult = lopenFileDialog.ShowDialog();
    if (lresult != true)
    {
        return;
    }
    // Audio path: default DirectSound renderer.
    IBaseFilter lDSoundRender = new DSoundRender() as IBaseFilter;
    m_pGraph.AddFilter(lDSoundRender, "Audio Renderer");
    int k = 0;
    IPin[] lAudioRendererPins = new IPin[1];
    IEnumPins ppEnum;
    k = lDSoundRender.EnumPins(out ppEnum);
    k = ppEnum.Next(1, lAudioRendererPins, IntPtr.Zero);
    // Video path: obtain an EVR-style output node from the CaptureManager factory.
    var lCaptureManagerEVRMultiSinkFactory = await CaptureManagerVideoRendererMultiSinkFactory.getInstance().getICaptureManagerEVRMultiSinkFactoryAsync();
    uint lMaxVideoRenderStreamCount = await lCaptureManagerEVRMultiSinkFactory.getMaxVideoRenderStreamCountAsync();
    if (lMaxVideoRenderStreamCount == 0)
    {
        return;
    }
    List<object> lOutputNodesList = await lCaptureManagerEVRMultiSinkFactory.createOutputNodesAsync(
        IntPtr.Zero,
        mEVRDisplay.Surface.texture,
        1);
    if (lOutputNodesList.Count == 0)
    {
        return;
    }
    IBaseFilter lVideoMixingRenderer9 = (IBaseFilter)lOutputNodesList[0];
    var h = m_pGraph.AddFilter(lVideoMixingRenderer9, "lVideoMixingRenderer9");
    IPin[] lVideoRendererPin = new IPin[1];
    k = lVideoMixingRenderer9.EnumPins(out ppEnum);
    k = ppEnum.Next(1, lVideoRendererPin, IntPtr.Zero);
    IBaseFilter m_SourceFilter = null;
    m_pGraph.AddSourceFilter(lopenFileDialog.FileName, null, out m_SourceFilter);
    IEnumPins lEnumPins = null;
    m_SourceFilter.EnumPins(out lEnumPins);
    IPin[] lPins = new IPin[1];
    while (lEnumPins.Next(1, lPins, IntPtr.Zero) == 0)
    {
        IEnumMediaTypes lIEnumMediaTypes;
        lPins[0].EnumMediaTypes(out lIEnumMediaTypes);
        AMMediaType[] ppMediaTypes = new AMMediaType[1];
        while (lIEnumMediaTypes.Next(1, ppMediaTypes, IntPtr.Zero) == 0)
        {
            var gh = ppMediaTypes[0].subType;
            // Video pins are wired directly to the renderer's input pin.
            if (ppMediaTypes[0].majorType == DirectShowLib.MediaType.Video)
            {
                k = m_pGraph.Connect(lPins[0], lVideoRendererPin[0]);
            }
        }
        // NOTE(review): this renders the current pin (lPins has length 1) even when it
        // was just connected above — presumably harmless for already-connected pins,
        // but confirm intent. Also lIEnumMediaTypes/ppMediaTypes are never released here.
        foreach (var item in lPins)
        {
            k = m_pGraph.Render(item);
        }
    }
    IMediaControl lIMediaControl = m_pGraph as IMediaControl;
    k = lIMediaControl.Run();
}
/// <summary>
/// Build a graph with sampleGrabber. Render it, and get the media type.
/// The graph is: RtpSource -> SampleGrabber -> (rendered downstream). The
/// grabber's buffer callback (index 1) delivers decoded frames/samples to
/// <c>callBack</c>, and the negotiated media type is stored in <c>connectedMT</c>.
/// </summary>
/// <param name="payload">Must be dynamicVideo or dynamicAudio; selects RGB24 vs PCM grabbing.</param>
/// <param name="newStream">RTP stream to use as the graph source; its SSRC must be non-zero.</param>
/// <returns>true on success; false on invalid input or any graph-building exception
/// (in which case <c>errorMsg</c> holds the reason).</returns>
public bool Build(PayloadType payload, RtpStream newStream)
{
    this.stream = newStream;
    this.ssrc = newStream.SSRC;
    //Required as of RC3:
    this.stream.IsUsingNextFrame = true;
    if ((ssrc == 0) || !((payload == PayloadType.dynamicVideo) || (payload == PayloadType.dynamicAudio)))
    {
        errorMsg = "Invalid inputs to build method.";
        return (false);
    }
    fgm = new FilgraphManagerClass();
    MSR.LST.MDShow.IBaseFilter bfSource = null;
    IGraphBuilder iGB = (IGraphBuilder)fgm;
    //if (false)
    //    rotnum = FilterGraph.AddToRot(iGB); //AddToRot(iGB);
    try
    {
        bfSource = RtpSourceClass.CreateInstance();
        ((MSR.LST.MDShow.Filters.IRtpSource)bfSource).Initialize(this.stream);
        iGB.AddFilter(bfSource, "RtpSource");
        MSR.LST.MDShow.IPin sourceOutput = Filter.GetPin(bfSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0);
        //Add SampleGrabber filter
        MSR.LST.MDShow.IBaseFilter bfGrabber = SampleGrabberClass.CreateInstance();
        iGB.AddFilter(bfGrabber, "Grabber");
        UW.CSE.MDShow.ISampleGrabber sgGrabber = (UW.CSE.MDShow.ISampleGrabber)bfGrabber;
        //Set mediatype
        UW.CSE.MDShow._AMMediaType mt = new UW.CSE.MDShow._AMMediaType();
        if (payload == PayloadType.dynamicVideo)
        {
            mt.majortype = MediaType.MajorType.MEDIATYPE_Video;
            //PRI2: RGB24 seems to work for all video? We have used YUY2 in the past, but that won't work
            // for screen streaming. Probably could use more testing
            //mt.subtype = MediaType.SubType.MEDIASUBTYPE_YUY2;
            mt.subtype = MediaType.SubType.MEDIASUBTYPE_RGB24;
        }
        else
        {
            mt.majortype = MediaType.MajorType.MEDIATYPE_Audio;
            mt.subtype = MediaType.SubType.MEDIASUBTYPE_PCM; //MEDIASUBTYPE_PCM;
        }
        sgGrabber.SetMediaType(ref mt);
        //Add samplegrabber callback
        //0 is sampleCB, 1 is bufferCB. Only bufferCB is actually returning data so far.
        sgGrabber.SetCallback(callBack, 1);
        // Continuous grabbing (not one-shot), and don't buffer samples internally.
        sgGrabber.SetOneShot(0);
        sgGrabber.SetBufferSamples(0);
        // Let DirectShow complete the graph downstream of the source pin.
        iGB.Render(sourceOutput);
        UW.CSE.MDShow._AMMediaType uwmt = new UW.CSE.MDShow._AMMediaType();
        sgGrabber.GetConnectedMediaType(ref uwmt);
        connectedMT = copy_AMMediaType(uwmt);
    }
    catch (Exception e)
    {
        errorMsg = e.Message;
        Debug.WriteLine("Exception while building graph: " + e.ToString());
        eventLog.WriteEntry("Exception while building graph: " + e.ToString(), EventLogEntryType.Error, 1001);
        return (false);
    }
    return (true);
}
/// <summary>
/// Worker thread. Builds a FilterGraph with an Async Reader source and an
/// RGB24 sample grabber for <c>fileName</c>, optionally renders the grabber's
/// output (suppressed when <c>preventFreezing</c> is set), runs the graph until
/// <c>stopEvent</c> is signaled, then stops it. All COM objects are released in
/// the finally block; errors are reported through VideoSourceError.
/// </summary>
private void WorkerThread( )
{
    // grabber callback object — receives decoded frames
    Grabber grabber = new Grabber(this);
    // objects (raw COM instances, released in finally)
    object graphObject = null;
    object sourceObject = null;
    object grabberObject = null;
    // interfaces
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;
    IFileSourceFilter fileSource = null;
    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }
        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;
        // create source device's object
        type = Type.GetTypeFromCLSID(Clsid.AsyncReader);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter async reader");
        }
        sourceObject = Activator.CreateInstance(type);
        sourceBase = (IBaseFilter)sourceObject;
        fileSource = (IFileSourceFilter)sourceObject;
        fileSource.Load(fileName, null);
        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }
        // create sample grabber
        grabberObject = Activator.CreateInstance(type);
        sampleGrabber = (ISampleGrabber)grabberObject;
        grabberBase = (IBaseFilter)grabberObject;
        // add source and grabber filters to graph
        graph.AddFilter(sourceBase, "source");
        graph.AddFilter(grabberBase, "grabber");
        // set media type — request RGB24 frames from the grabber
        AMMediaType mediaType = new AMMediaType( );
        mediaType.MajorType = MediaType.Video;
        mediaType.SubType = MediaSubType.RGB24;
        sampleGrabber.SetMediaType(mediaType);
        // connect pins (intelligent connect inserts any needed decoders)
        if (graph.Connect(Tools.GetOutPin(sourceBase, 0), Tools.GetInPin(grabberBase, 0)) < 0)
        {
            throw new ApplicationException("Failed connecting filters");
        }
        // get media type — frame dimensions come from the negotiated connection
        if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            mediaType.Dispose( );
        }
        // let's do rendering, if we don't need to prevent freezing
        if (!preventFreezing)
        {
            // render pin
            graph.Render(Tools.GetOutPin(grabberBase, 0));
            // configure video window — don't pop up the default renderer window
            IVideoWindow window = (IVideoWindow)graphObject;
            window.put_AutoShow(false);
            window = null;
        }
        // configure sample grabber: continuous buffer-callback mode (index 1)
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback(grabber, 1);
        // get media control
        mediaControl = (IMediaControl)graphObject;
        // run
        mediaControl.Run( );
        // poll the stop event; frames arrive on the grabber callback meanwhile
        while (!stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        mediaControl.StopWhenReady( );
    }
    catch (Exception exception)
    {
        // provide information to clients
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
        }
    }
    finally
    {
        // release all objects
        graph = null;
        sourceBase = null;
        grabberBase = null;
        sampleGrabber = null;
        mediaControl = null;
        fileSource = null;
        if (graphObject != null)
        {
            Marshal.ReleaseComObject(graphObject);
            graphObject = null;
        }
        if (sourceObject != null)
        {
            Marshal.ReleaseComObject(sourceObject);
            sourceObject = null;
        }
        if (grabberObject != null)
        {
            Marshal.ReleaseComObject(grabberObject);
            grabberObject = null;
        }
    }
}
/// <summary>
/// Builds the playback graph for <paramref name="fname"/>: renders the file,
/// then rewires the graph so a Smart Tee and the sample grabber sit in front
/// of (or replace) the stock Video Renderer. When <c>preview</c> is set, the
/// tee's preview output is also rendered. Finally reads the grabber's
/// connected VideoInfoHeader and switches the grabber to continuous
/// buffer-callback mode.
/// </summary>
/// <param name="fname">Path of the media file to play.</param>
/// <exception cref="Exception">Wraps any failure; the original exception is
/// preserved as InnerException.</exception>
void SetupPlaybackGraph(string fname)
{
    int hr;
    try
    {
        hr = graphBuilder.RenderFile(fname, null);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        // Ask the grabber for RGB24 video frames.
        AMMediaType media = new AMMediaType();
        media.majorType = MediaType.Video;
        media.subType = MediaSubType.RGB24;
        media.formatType = FormatType.VideoInfo; // ???
        hr = sampGrabber.SetMediaType(media);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        hr = graphBuilder.AddFilter(smartTee, "smartTee");
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        // Locate the Video Renderer that RenderFile inserted, remember what
        // feeds it, then remove it so the Smart Tee can take its place.
        IBaseFilter renderer;
        hr = graphBuilder.FindFilterByName("Video Renderer", out renderer);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        IPin inPin;
        IPin srcPin;
        hr = DsUtils.GetPin(renderer, PinDirection.Input, out inPin, 0);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        hr = inPin.ConnectedTo(out srcPin);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        hr = srcPin.Disconnect();
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        hr = graphBuilder.RemoveFilter(renderer);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        Marshal.ReleaseComObject(renderer);
        Marshal.ReleaseComObject(inPin);
        // upstream decoder -> smartTee input
        hr = DsUtils.GetPin(smartTee, PinDirection.Input, out inPin, 0);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        hr = graphBuilder.Connect(srcPin, inPin);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        Marshal.ReleaseComObject(srcPin);
        Marshal.ReleaseComObject(inPin);
        srcPin = inPin = null;
        // smartTee capture output (pin 1) -> grabber input
        hr = DsUtils.GetPin(smartTee, PinDirection.Output, out srcPin, 1);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        // grabber Input
        hr = DsUtils.GetPin(baseGrabFlt, PinDirection.Input, out inPin, 0);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        // smartTee -> grabber
        hr = graphBuilder.Connect(srcPin, inPin);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        Marshal.ReleaseComObject(srcPin);
        Marshal.ReleaseComObject(inPin);
        srcPin = inPin = null;
        if (preview)
        {
            // smartTee preview output (pin 0) -> rendered to screen
            hr = DsUtils.GetPin(smartTee, PinDirection.Output, out srcPin, 0);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            hr = graphBuilder.Render(srcPin);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            Marshal.ReleaseComObject(srcPin);
            srcPin = null;
        }
        // Read back the format actually negotiated on the grabber.
        media = new AMMediaType();
        hr = sampGrabber.GetConnectedMediaType(media);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
        {
            throw new NotSupportedException("Unknown Grabber Media Format");
        }
        videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        Marshal.FreeCoTaskMem(media.formatPtr);
        media.formatPtr = IntPtr.Zero;
        //Modified according to the platform SDK, to capture the buffer
        hr = sampGrabber.SetBufferSamples(false);
        if (hr == 0)
        {
            hr = sampGrabber.SetOneShot(false);
        }
        if (hr == 0)
        {
            hr = sampGrabber.SetCallback(sampleGrabber, 1);
        }
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
    }
    catch (Exception ee)
    {
        // BUGFIX: pass the original exception as InnerException so the
        // stack trace and exception type are preserved for diagnosis
        // (previously only the message text survived).
        throw new Exception("Could not setup graph\r\n" + ee.Message, ee);
    }
}
/// <summary>
/// Assembles the bouncing-ball test graph: source -> shapes filter, and
/// when m_saveCrossToFile is set, tees the shapes output into an Elecard
/// AVC encoder feeding a dump filter while the other tee leg is rendered
/// normally.
/// </summary>
void BuildGraph()
{
    int hr;

    m_graphBuilder = (IGraphBuilder)new FilterGraph();
    // Media control interface used later to start/stop the graph.
    mediaControl = (IMediaControl)m_graphBuilder;

    // CLSIDs of the bouncing-ball source and the shapes transform filter.
    Guid ballClsid = new Guid("fd5010418ebe11ce818300aa00577da1");
    Guid shapesClsid = new Guid("E52BEAB445FB4D5ABC9E2381E61DCC47");

    Type ballComType = Type.GetTypeFromCLSID(ballClsid);
    Type shapesComType = Type.GetTypeFromCLSID(shapesClsid);

    BouncingBallFilter = (IBaseFilter)Activator.CreateInstance(ballComType);
    hr = m_graphBuilder.AddFilter(BouncingBallFilter, "Bouncing ball");
    DsError.ThrowExceptionForHR(hr);

    ShapesFilter = (IBaseFilter)Activator.CreateInstance(shapesComType);
    hr = m_graphBuilder.AddFilter(ShapesFilter, "Shapes Filter");
    DsError.ThrowExceptionForHR(hr);

    if (m_saveCrossToFile)
    {
        // Extra filters needed to encode and dump the video to disk.
        AddPinTeeFilter();
        AddDumpFilter();
        AddElecardAVCVideoEncoder();

        DumpCom dump = new DumpCom(DumpFilter);
        if (dump.SetFileName("c:\\dump1.asf") != 0)
        {
            MessageBox.Show("Error set file");
        }
    }

    // Each cross occupies two consecutive shape slots (2*k and 2*k + 1).
    for (int idx = 0; idx < m_cross.Length; idx++)
    {
        m_cross[idx] = new CrossMx(idx * 2, idx * 2 + 1, ShapesFilter);
        m_cross[idx].Init(2000);
    }

    if (m_saveCrossToFile)
    {
        IPin teeIn = DsFindPin.ByName(InfiniteTeeFilter, "Input");
        IPin teeOut1 = DsFindPin.ByName(InfiniteTeeFilter, "Output1");
        IPin ballOut = DsFindPin.ByName(BouncingBallFilter, "Out");
        IPin avcIn = DsFindPin.ByName(ElecardAVCFilter, "Input");
        IPin avcOut = DsFindPin.ByName(ElecardAVCFilter, "Output");
        IPin shapesIn = DsFindPin.ByName(ShapesFilter, "Input");
        IPin shapesOut = DsFindPin.ByName(ShapesFilter, "Output");
        IPin dumpIn = DsFindPin.ByName(DumpFilter, "Input");

        // ball -> shapes -> tee; tee leg 1 -> AVC encoder -> dump.
        hr = m_graphBuilder.Connect(ballOut, shapesIn);
        DsError.ThrowExceptionForHR(hr);
        hr = m_graphBuilder.Connect(shapesOut, teeIn);
        DsError.ThrowExceptionForHR(hr);
        hr = m_graphBuilder.Connect(teeOut1, avcIn);
        DsError.ThrowExceptionForHR(hr);
        hr = m_graphBuilder.Connect(avcOut, dumpIn);
        DsError.ThrowExceptionForHR(hr);

        IPin teeOut2 = DsFindPin.ByName(InfiniteTeeFilter, "Output2");
        // Let DirectShow complete the preview leg of the tee.
        hr = m_graphBuilder.Render((DirectShowLib.IPin)teeOut2);
        DsError.ThrowExceptionForHR(hr);
    }
    else
    {
        IPin ballOut = DsFindPin.ByName(BouncingBallFilter, "Out");
        IPin shapesIn = DsFindPin.ByName(ShapesFilter, "Input");
        hr = m_graphBuilder.Connect(ballOut, shapesIn);
        DsError.ThrowExceptionForHR(hr);

        IPin shapesOut = DsFindPin.ByName(ShapesFilter, "Output");
        // Let DirectShow complete the rendering chain.
        hr = m_graphBuilder.Render((DirectShowLib.IPin)shapesOut);
        DsError.ThrowExceptionForHR(hr);
    }
}
/// <summary>
/// Thread entry point. Builds a capture graph (video device -> sample
/// grabber), optionally attempts to set the frame size/rate via
/// IAMStreamConfig, runs the graph until stopEvent is signalled, then
/// releases every COM object in the finally block.
/// </summary>
public void WorkerThread()
{
    int hr;
    Guid cat;
    Guid med;

    // Callback object that will receive the grabbed frames.
    Grabber grabber = new Grabber(this);

    // Raw COM objects (released in the finally block).
    object graphObj = null;
    object grabberObj = null;

    // DirectShow interfaces.
    IGraphBuilder graphBuilder = null;
    ICaptureGraphBuilder2 captureGraphBuilder = null;
    IBaseFilter videoDeviceFilter = null;
    IBaseFilter grabberFilter = null;
    ISampleGrabber sg = null;
    IMediaControl mc = null;

    try
    {
        // Make a new filter graph.
        graphObj = Activator.CreateInstance(
            Type.GetTypeFromCLSID(Clsid.FilterGraph, true));
        graphBuilder = (IGraphBuilder)graphObj;

        // Get the Capture Graph Builder.
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (ICaptureGraphBuilder2)
            TempFix.CreateDsInstance(ref clsid, ref riid);

        // Link the CaptureGraphBuilder to the filter graph.
        hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
        if (hr < 0) Marshal.ThrowExceptionForHR(hr);

        // Get the video device (bound from its moniker string) and add it
        // to the filter graph.
        if (source != null)
        {
            videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(source);
            hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device");
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }

        // Create sample grabber, object and filter.
        grabberObj = Activator.CreateInstance(
            Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
        grabberFilter = (IBaseFilter)grabberObj;
        sg = (ISampleGrabber)grabberObj;

        // Add sample grabber filter to filter graph.
        hr = graphBuilder.AddFilter(grabberFilter, "grabber");
        if (hr < 0) Marshal.ThrowExceptionForHR(hr);

        // Try looking for an video device interleaved media type first
        // (DV cameras expose interleaved audio+video).
        IBaseFilter testFilter = videoDeviceFilter; // grabberFilter (not supported)
        object o;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        Guid iid = typeof(IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface(
            ref cat, ref med, testFilter, ref iid, out o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type.
            med = MediaType.Video;
            hr = captureGraphBuilder.FindInterface(
                ref cat, ref med, testFilter, ref iid, out o);
            if (hr != 0) o = null;
        }

        // Set the video stream configuration to data member.
        videoStreamConfig = o as IAMStreamConfig;
        o = null;

        // Experimental testing: Try to set the Frame Size & Rate
        // Results: When enabled, the grabber video breaks up into
        // several duplicate frames (6 frames)
        bool bdebug = true;
        if (bdebug)
        {
            BitmapInfoHeader bmiHeader;
            bmiHeader = (BitmapInfoHeader)
                getStreamConfigSetting(videoStreamConfig, "BmiHeader");
            bmiHeader.Width = framesize.Width;
            bmiHeader.Height = framesize.Height;
            setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);

            // 10,000,000 = 100ns units per second (DirectShow reference time).
            // NOTE(review): if framerate is an integer type, this divides
            // before the cast and loses fractional rates — confirm its type.
            long avgTimePerFrame = (long)(10000000 / framerate);
            setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", avgTimePerFrame);
        }

        // Connect pins (turns on the video device).
        if (graphBuilder.Connect(DSTools.GetOutPin(videoDeviceFilter, 0),
            DSTools.GetInPin(grabberFilter, 0)) < 0)
            throw new ApplicationException(
                "Failed connecting filters");

        // Set the sample grabber media type settings: RGB24 video only.
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Video;
        mt.subType = MediaSubType.RGB24;
        sg.SetMediaType(mt);

        // Get media type: read back the negotiated frame dimensions.
        if (sg.GetConnectedMediaType(mt) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
            System.Diagnostics.Debug.WriteLine("width = " + vih.BmiHeader.Width + ", height = " + vih.BmiHeader.Height);
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            mt.Dispose();
        }

        // Render the grabber output pin (connects a downstream renderer).
        graphBuilder.Render(DSTools.GetOutPin(grabberFilter, 0));

        // Set various sample grabber properties: continuous callback
        // grabbing, no internal buffering.
        sg.SetBufferSamples(false);
        sg.SetOneShot(false);
        sg.SetCallback(grabber, 1);

        // Do not show active (source) window.
        IVideoWindow win = (IVideoWindow)graphObj;
        win.put_AutoShow(false);
        win = null;

        // Get media control.
        mc = (IMediaControl)graphObj;

        // Run the graph until the stop event is signalled.
        mc.Run();

        while (!stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        mc.StopWhenReady();
    }
    // Catch any exceptions — a worker thread must not let them propagate.
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("----: " + e.Message);
    }
    // finalization block
    finally
    {
        // Release all objects.
        mc = null;
        graphBuilder = null;
        captureGraphBuilder = null;
        videoDeviceFilter = null;
        grabberFilter = null;
        sg = null;
        if (graphObj != null)
        {
            Marshal.ReleaseComObject(graphObj);
            graphObj = null;
        }
        if (grabberObj != null)
        {
            Marshal.ReleaseComObject(grabberObj);
            grabberObj = null;
        }
    }
}
/// <summary>
/// Thread entry point. Builds a device -> sample-grabber graph, binding
/// the device filter manually from its moniker display name, runs the
/// graph until stopEvent is signalled, and releases every COM object in
/// the finally block.
/// </summary>
public void WorkerThread()
{
    // Callback object that will receive the grabbed frames.
    Grabber grabber = new Grabber(this);

    // Raw COM objects (released in the finally block).
    object graphObj = null;
    object sourceObj = null;
    object grabberObj = null;

    // DirectShow interfaces.
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sg = null;
    IMediaControl mc = null;

    try
    {
        // Get type for filter graph.
        Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (srvType == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // Create filter graph.
        graphObj = Activator.CreateInstance(srvType);
        graph = (IGraphBuilder)graphObj;

        // ---- Bind the device filter from its moniker display name.
        UCOMIBindCtx bindCtx = null;
        UCOMIMoniker moniker = null;
        int n = 0;

        // Create bind context.
        if (Win32.CreateBindCtx(0, out bindCtx) == 0)
        {
            // Convert moniker`s string to a moniker.
            if (Win32.MkParseDisplayName(bindCtx, source, ref n, out moniker) == 0)
            {
                // Get device base filter.
                Guid filterId = typeof(IBaseFilter).GUID;
                moniker.BindToObject(null, null, ref filterId, out sourceObj);

                Marshal.ReleaseComObject(moniker);
                moniker = null;
            }
            Marshal.ReleaseComObject(bindCtx);
            bindCtx = null;
        }
        // ----

        if (sourceObj == null)
        {
            throw new ApplicationException("Failed creating device object for moniker");
        }
        sourceBase = (IBaseFilter)sourceObj;

        // Get type for sample grabber.
        srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (srvType == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // Create sample grabber.
        grabberObj = Activator.CreateInstance(srvType);
        sg = (ISampleGrabber)grabberObj;
        grabberBase = (IBaseFilter)grabberObj;

        // Add source and grabber filters to the graph.
        graph.AddFilter(sourceBase, "source");
        graph.AddFilter(grabberBase, "grabber");

        // Set media type: restrict the grabber to RGB24 video.
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Video;
        mt.subType = MediaSubType.RGB24;
        sg.SetMediaType(mt);

        // Connect pins.
        if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
        {
            throw new ApplicationException("Failed connecting filters");
        }

        // Get media type: read back the negotiated frame dimensions.
        if (sg.GetConnectedMediaType(mt) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            mt.Dispose();
        }

        // Render the grabber output pin.
        graph.Render(DSTools.GetOutPin(grabberBase, 0));

        // Continuous grabbing through the callback, no internal buffering.
        sg.SetBufferSamples(false);
        sg.SetOneShot(false);
        sg.SetCallback(grabber, 1);

        // Hide the source's own video window.
        IVideoWindow win = (IVideoWindow)graphObj;
        win.put_AutoShow(false);
        win = null;

        // Get media control.
        mc = (IMediaControl)graphObj;

        // Run the graph until the stop event is signalled.
        mc.Run();

        while (!stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        mc.StopWhenReady();
    }
    // Catch any exceptions — a worker thread must not let them propagate.
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("----: " + e.Message);
    }
    // finalization block
    finally
    {
        // Release all objects.
        mc = null;
        graph = null;
        sourceBase = null;
        grabberBase = null;
        sg = null;
        if (graphObj != null)
        {
            Marshal.ReleaseComObject(graphObj);
            graphObj = null;
        }
        if (sourceObj != null)
        {
            Marshal.ReleaseComObject(sourceObj);
            sourceObj = null;
        }
        if (grabberObj != null)
        {
            Marshal.ReleaseComObject(grabberObj);
            grabberObj = null;
        }
    }
}
/// <summary>
/// Builds the DVD DirectShow graph: DVD Navigator -> VMR9 (renderless, for
/// WPF) for video/subpicture, plus a hidden windowed VMR9 fed by the
/// Line21 pin, which is required for interactive DVD menus to work.
/// On failure, frees resources and raises MediaFailed instead of throwing.
/// </summary>
private void BuildGraph()
{
    try
    {
        FreeResources();

        int hr;

        /* Create our new graph */
        m_graph = (IGraphBuilder)new FilterGraphNoThread();

#if DEBUG
        m_rot = new DsROTEntry(m_graph);
#endif

        /* We are going to use the VMR9 for now. The EVR does not
         * seem to work with the interactive menus yet. It should
         * play Dvds fine otherwise */
        var rendererType = VideoRendererType.VideoMixingRenderer9;

        /* Creates and initializes a new renderer ready to render to WPF */
        m_renderer = CreateVideoRenderer(rendererType, m_graph, 2);

        /* Do some VMR9 specific stuff */
        if (rendererType == VideoRendererType.VideoMixingRenderer9)
        {
            var mixer = m_renderer as IVMRMixerControl9;

            if (mixer != null)
            {
                VMR9MixerPrefs dwPrefs;
                mixer.GetMixingPrefs(out dwPrefs);
                dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;
                /* Enable this line to prefer YUV */
                //hr = mixer.SetMixingPrefs(dwPrefs);
            }
        }

        /* Create a new DVD Navigator. */
        var dvdNav = (IBaseFilter)new DVDNavigator();

        /* The DVDControl2 interface lets us control DVD features */
        m_dvdControl = dvdNav as IDvdControl2;

        if (m_dvdControl == null)
            throw new Exception("Could not QueryInterface the IDvdControl2 interface");

        /* QueryInterface the DVDInfo2 */
        m_dvdInfo = dvdNav as IDvdInfo2;

        /* If a Dvd directory has been set then use it, if not, let DShow find the Dvd */
        if (!string.IsNullOrEmpty(DvdDirectory))
        {
            hr = m_dvdControl.SetDVDDirectory(DvdDirectory);
            DsError.ThrowExceptionForHR(hr);
        }

        /* This gives us the DVD time in Hours-Minutes-Seconds-Frame time format, and other options */
        hr = m_dvdControl.SetOption(DvdOptionFlag.HMSFTimeCodeEvents, true);
        DsError.ThrowExceptionForHR(hr);

        /* If the graph stops, resume at the same point */
        m_dvdControl.SetOption(DvdOptionFlag.ResetOnStop, false);

        hr = m_graph.AddFilter(dvdNav, "DVD Navigator");
        DsError.ThrowExceptionForHR(hr);

        IPin dvdVideoPin = null;
        IPin dvdAudioPin = null;
        IPin dvdSubPicturePin = null;

        IPin dvdNavPin;
        int i = 0;

        /* Loop all the output pins on the DVD Navigator, trying to find which pins are which.
         * We could more easily find the pins by name, but this is more fun...and more flexible
         * if we ever want to use a 3rd party DVD navigator that used different pin names */
        while ((dvdNavPin = DsFindPin.ByDirection(dvdNav, PinDirection.Output, i)) != null)
        {
            var mediaTypes = new AMMediaType[1];
            IntPtr pFetched = IntPtr.Zero;

            IEnumMediaTypes mediaTypeEnum;
            dvdNavPin.EnumMediaTypes(out mediaTypeEnum);

            /* Loop over each of the mediaTypes of each pin.
             * NOTE(review): the enumerated AMMediaType instances are never
             * freed (e.g. DsUtils.FreeAMMediaType) — possible leak; confirm. */
            while (mediaTypeEnum.Next(1, mediaTypes, pFetched) == 0)
            {
                AMMediaType mediaType = mediaTypes[0];

                /* This will be the video stream pin */
                if (mediaType.subType == MediaSubType.Mpeg2Video)
                {
                    /* Keep the ref and we'll work with it later */
                    dvdVideoPin = dvdNavPin;
                    break;
                }

                /* This will be the audio stream pin */
                if (mediaType.subType == MediaSubType.DolbyAC3 ||
                    mediaType.subType == MediaSubType.Mpeg2Audio)
                {
                    /* Keep the ref and we'll work with it later */
                    dvdAudioPin = dvdNavPin;
                    break;
                }

                /* This is the Dvd sub picture pin. This generally
                 * shows overlays for Dvd menus and sometimes closed captions */
                if (mediaType.subType == DVD_SUBPICTURE_TYPE)
                {
                    /* Keep the ref and we'll work with it later */
                    dvdSubPicturePin = dvdNavPin;
                    break;
                }
            }

            mediaTypeEnum.Reset();
            Marshal.ReleaseComObject(mediaTypeEnum);
            i++;
        }

        /* This is the windowed renderer. This is *NEEDED* in order
         * for interactive menus to work with the other VMR9 in renderless mode */
        var dummyRenderer = (IBaseFilter)new VideoMixingRenderer9();
        var dummyRendererConfig = (IVMRFilterConfig9)dummyRenderer;

        /* In order for this interactive menu trick to work, the VMR9
         * must be set to Windowed. We will make sure the window is hidden later on */
        hr = dummyRendererConfig.SetRenderingMode(VMR9Mode.Windowed);
        DsError.ThrowExceptionForHR(hr);

        hr = dummyRendererConfig.SetNumberOfStreams(1);
        DsError.ThrowExceptionForHR(hr);

        hr = m_graph.AddFilter(dummyRenderer, "Dummy Windowed");
        DsError.ThrowExceptionForHR(hr);

        if (dvdAudioPin != null)
        {
            /* This should render out to the default audio device. We
             * could modify this code here to go out any audio
             * device, such as SPDIF or another sound card */
            hr = m_graph.Render(dvdAudioPin);
            DsError.ThrowExceptionForHR(hr);
        }

        /* Get the first input pin on our dummy renderer */
        m_dummyRendererPin = DsFindPin.ByConnectionStatus(dummyRenderer, /* Filter to search */
                                                          PinConnectedStatus.Unconnected,
                                                          0);

        /* Get an available pin on our real renderer */
        IPin rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                        PinConnectedStatus.Unconnected,
                                                        0); /* Pin index */

        /* Connect the pin to the renderer */
        hr = m_graph.Connect(dvdVideoPin, rendererPin);
        DsError.ThrowExceptionForHR(hr);

        /* Get the next available pin on our real renderer */
        rendererPin = DsFindPin.ByConnectionStatus(m_renderer, /* Filter to search */
                                                   PinConnectedStatus.Unconnected,
                                                   0); /* Pin index */

        /* Render the sub picture, which will connect
         * the DVD navigator to the codec, not the renderer */
        hr = m_graph.Render(dvdSubPicturePin);
        DsError.ThrowExceptionForHR(hr);

        /* These are the subtypes most likely to be our dvd subpicture */
        var preferedSubpictureTypes = new[]{MediaSubType.ARGB4444,
                                            MediaSubType.AI44,
                                            MediaSubType.AYUV,
                                            MediaSubType.ARGB32};
        IPin dvdSubPicturePinOut = null;

        /* Find what should be the subpicture pin out */
        foreach (var guidType in preferedSubpictureTypes)
        {
            dvdSubPicturePinOut = FindPinInGraphByMediaType(guidType, /* GUID of the media type being searched for */
                                                            PinDirection.Output,
                                                            m_graph); /* Our current graph */
            if (dvdSubPicturePinOut != null)
                break;
        }

        if (dvdSubPicturePinOut == null)
            throw new Exception("Could not find the sub picture pin out");

        /* Here we connect the Dvd sub picture pin to the video renderer.
         * This enables the overlays on Dvd menus and some closed
         * captions to be rendered. */
        hr = m_graph.Connect(dvdSubPicturePinOut, rendererPin);
        DsError.ThrowExceptionForHR(hr);

        /* Search for the Line21 out in the graph */
        IPin line21Out = FindPinInGraphByMediaType(MediaType.AuxLine21Data,
                                                   PinDirection.Output,
                                                   m_graph);

        if (line21Out == null)
            throw new Exception("Could not find the Line21 pin out");

        /* We connect our line21Out out in to the dummy renderer
         * this is what ultimately makes interactive DVDs work with
         * VMR9 in renderless (for WPF) */
        hr = m_graph.Connect(line21Out, m_dummyRendererPin);
        DsError.ThrowExceptionForHR(hr);

        /* This is the dummy renderers Win32 window. */
        m_dummyRenderWindow = dummyRenderer as IVideoWindow;

        if (m_dummyRenderWindow == null)
            throw new Exception("Could not QueryInterface for IVideoWindow");

        ConfigureDummyWindow();

        /* Setup our base classes with this filter graph */
        SetupFilterGraph(m_graph);

        /* Sets the NaturalVideoWidth/Height */
        SetNativePixelSizes(m_renderer);
    }
    catch (Exception ex)
    {
        /* Report the failure through the event instead of throwing. */
        FreeResources();
        InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
        return;
    }

    InvokeMediaOpened();
}
/// <summary>
/// Create the used COM components and get the interfaces.
/// Builds the RTSP playback graph: RTSP source -> MPEG-2 demux, preferred
/// codecs from configuration, VMR9 (video mode) or plain audio rendering
/// (radio mode), optional DVB subtitle filter wiring, and the media
/// control/seek/position interfaces.
/// </summary>
/// <returns>true when the graph was built successfully; false otherwise.</returns>
protected bool GetInterfaces()
{
    VMR9Util.g_vmr9 = null;
    if (IsRadio == false)
    {
        Vmr9 = VMR9Util.g_vmr9 = new VMR9Util();

        // switch back to directx fullscreen mode
        Log.Info("RTSPPlayer: Enabling DX9 exclusive mode");
        GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
        GUIWindowManager.SendMessage(msg);
    }
    //Type comtype = null;
    //object comobj = null;

    // NOTE(review): rect is populated from the form size but never used in
    // this method — possibly leftover; confirm before removing.
    DsRect rect = new DsRect();
    rect.top = 0;
    rect.bottom = GUIGraphicsContext.form.Height;
    rect.left = 0;
    rect.right = GUIGraphicsContext.form.Width;
    try
    {
        graphBuilder = (IGraphBuilder)new FilterGraph();

        Log.Info("RTSPPlayer: add source filter");
        if (IsRadio == false)
        {
            // Video mode requires the VMR9 renderer in the graph.
            bool AddVMR9 = VMR9Util.g_vmr9 != null && VMR9Util.g_vmr9.AddVMR9(graphBuilder);
            if (!AddVMR9)
            {
                Log.Error("RTSPPlayer:Failed to add VMR9 to graph");
                return(false);
            }
            VMR9Util.g_vmr9.Enable(false);
        }

        _mpegDemux = (IBaseFilter)new MPEG2Demultiplexer();
        graphBuilder.AddFilter(_mpegDemux, "MPEG-2 Demultiplexer");

        _rtspSource = (IBaseFilter)new RtpSourceFilter();
        int hr = graphBuilder.AddFilter((IBaseFilter)_rtspSource, "RTSP Source Filter");
        if (hr != 0)
        {
            Log.Error("RTSPPlayer:unable to add RTSP source filter:{0:X}", hr);
            return(false);
        }

        // add preferred video & audio codecs
        Log.Info("RTSPPlayer: add video/audio codecs");
        string strVideoCodec = "";
        string strAudioCodec = "";
        string strAudiorenderer = "";
        int intFilters = 0; // FlipGer: count custom filters
        string strFilters = ""; // FlipGer: collect custom filters
        string postProcessingFilterSection = "mytv";
        using (Settings xmlreader = new MPSettings())
        {
            // Codec choices come from a different settings section for
            // movie playback vs. TV.
            if (_mediaType == g_Player.MediaType.Video)
            {
                strVideoCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2videocodec", "");
                strAudioCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2audiocodec", "");
                strAudiorenderer = xmlreader.GetValueAsString("movieplayer", "audiorenderer", "Default DirectSound Device");
                postProcessingFilterSection = "movieplayer";
            }
            else
            {
                strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
                strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
                strAudiorenderer = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
                postProcessingFilterSection = "mytv";
            }
            enableDvbSubtitles = xmlreader.GetValueAsBool("tvservice", "dvbsubtitles", false);
            // FlipGer: load infos for custom filters
            int intCount = 0;
            while (xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") != "undefined")
            {
                if (xmlreader.GetValueAsBool(postProcessingFilterSection, "usefilter" + intCount.ToString(), false))
                {
                    strFilters += xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") + ";";
                    intFilters++;
                }
                intCount++;
            }
        }

        string extension = Path.GetExtension(m_strCurrentFile).ToLowerInvariant();
        if (IsRadio == false)
        {
            if (strVideoCodec.Length > 0)
            {
                DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
            }
        }
        if (strAudioCodec.Length > 0)
        {
            DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
        }

        if (enableDvbSubtitles == true)
        {
            try
            {
                _subtitleFilter = SubtitleRenderer.GetInstance().AddSubtitleFilter(graphBuilder);
                SubtitleRenderer.GetInstance().SetPlayer(this);
                dvbSubRenderer = SubtitleRenderer.GetInstance();
            }
            catch (Exception e)
            {
                // Best-effort: subtitles are optional, playback continues.
                Log.Error(e);
            }
        }

        Log.Debug("Is subtitle fitler null? {0}", (_subtitleFilter == null));

        // FlipGer: add custom filters to graph
        string[] arrFilters = strFilters.Split(';');
        for (int i = 0; i < intFilters; i++)
        {
            DirectShowUtil.AddFilterToGraph(graphBuilder, arrFilters[i]);
        }
        if (strAudiorenderer.Length > 0)
        {
            audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(graphBuilder, strAudiorenderer, false);
        }

        Log.Info("RTSPPlayer: load:{0}", m_strCurrentFile);
        IFileSourceFilter interfaceFile = (IFileSourceFilter)_rtspSource;
        if (interfaceFile == null)
        {
            Log.Error("RTSPPlayer:Failed to get IFileSourceFilter");
            return(false);
        }

        //Log.Info("RTSPPlayer: open file:{0}",filename);
        hr = interfaceFile.Load(m_strCurrentFile, null);
        if (hr != 0)
        {
            Log.Error("RTSPPlayer:Failed to open file:{0} :0x{1:x}", m_strCurrentFile, hr);
            return(false);
        }

        #region connect rtspsource->demux

        Log.Info("RTSPPlayer:connect rtspsource->mpeg2 demux");
        IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_rtspSource, PinDirection.Output, 0);
        if (pinTsOut == null)
        {
            Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
            return(false);
        }
        IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
        if (pinDemuxIn == null)
        {
            Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
            return(false);
        }
        hr = graphBuilder.Connect(pinTsOut, pinDemuxIn);
        if (hr != 0)
        {
            Log.Info("RTSPPlayer:failed to connect rtspsource->mpeg2 demux:{0:X}", hr);
            return(false);
        }
        DirectShowUtil.ReleaseComObject(pinTsOut);
        DirectShowUtil.ReleaseComObject(pinDemuxIn);

        #endregion

        #region render demux output pins

        if (IsRadio)
        {
            // Radio: render only the demux output pins that offer audio.
            Log.Info("RTSPPlayer:render audio demux outputs");
            IEnumPins enumPins;
            _mpegDemux.EnumPins(out enumPins);
            IPin[] pins = new IPin[2];
            int fetched = 0;
            while (enumPins.Next(1, pins, out fetched) == 0)
            {
                if (fetched != 1)
                {
                    break;
                }
                PinDirection direction;
                pins[0].QueryDirection(out direction);
                if (direction == PinDirection.Input)
                {
                    continue;
                }
                IEnumMediaTypes enumMediaTypes;
                pins[0].EnumMediaTypes(out enumMediaTypes);
                AMMediaType[] mediaTypes = new AMMediaType[20];
                int fetchedTypes;
                enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
                for (int i = 0; i < fetchedTypes; ++i)
                {
                    if (mediaTypes[i].majorType == MediaType.Audio)
                    {
                        graphBuilder.Render(pins[0]);
                        break;
                    }
                }
            }
        }
        else
        {
            // TV/video: render every demux output pin.
            Log.Info("RTSPPlayer:render audio/video demux outputs");
            IEnumPins enumPins;
            _mpegDemux.EnumPins(out enumPins);
            IPin[] pins = new IPin[2];
            int fetched = 0;
            while (enumPins.Next(1, pins, out fetched) == 0)
            {
                if (fetched != 1)
                {
                    break;
                }
                PinDirection direction;
                pins[0].QueryDirection(out direction);
                if (direction == PinDirection.Input)
                {
                    continue;
                }
                graphBuilder.Render(pins[0]);
            }
        }

        #endregion

        // Connect DVB subtitle filter pins in the graph (Pcr, Subtitle
        // and PMT output pins created on the demux, each wired to the
        // corresponding input pin of the subtitle filter).
        if (_mpegDemux != null && enableDvbSubtitles == true)
        {
            IMpeg2Demultiplexer demuxer = _mpegDemux as IMpeg2Demultiplexer;
            hr = demuxer.CreateOutputPin(GetTSMedia(), "Pcr", out _pinPcr);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinPcr OK");
                IPin pDemuxerPcr = DsFindPin.ByName(_mpegDemux, "Pcr");
                IPin pSubtitlePcr = DsFindPin.ByName(_subtitleFilter, "Pcr");
                hr = graphBuilder.Connect(pDemuxerPcr, pSubtitlePcr);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinPcr in demuxer:{0:X}", hr);
            }
            hr = demuxer.CreateOutputPin(GetTSMedia(), "Subtitle", out _pinSubtitle);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinSubtitle OK");
                IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "Subtitle");
                IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "In");
                hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinSubtitle in demuxer:{0:X}", hr);
            }
            hr = demuxer.CreateOutputPin(GetTSMedia(), "PMT", out _pinPMT);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinPMT OK");
                IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "PMT");
                IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "PMT");
                hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinPMT in demuxer:{0:X}", hr);
            }
        }

        if (IsRadio == false)
        {
            if (!VMR9Util.g_vmr9.IsVMR9Connected)
            {
                //VMR9 is not supported, switch to overlay
                Log.Info("RTSPPlayer: vmr9 not connected");
                _mediaCtrl = null;
                Cleanup();
                return(false);
            }
            VMR9Util.g_vmr9.SetDeinterlaceMode();
        }

        // Graph control interfaces used by the rest of the player.
        _mediaCtrl = (IMediaControl)graphBuilder;
        mediaEvt = (IMediaEventEx)graphBuilder;
        _mediaSeeking = (IMediaSeeking)graphBuilder;
        mediaPos = (IMediaPosition)graphBuilder;
        basicAudio = graphBuilder as IBasicAudio;
        //DirectShowUtil.SetARMode(graphBuilder,AspectRatioMode.Stretched);
        DirectShowUtil.EnableDeInterlace(graphBuilder);
        if (VMR9Util.g_vmr9 != null)
        {
            m_iVideoWidth = VMR9Util.g_vmr9.VideoWidth;
            m_iVideoHeight = VMR9Util.g_vmr9.VideoHeight;
        }
        if (audioRendererFilter != null)
        {
            // Use the audio renderer as the graph reference clock so audio
            // drives A/V sync.
            Log.Info("RTSPPlayer9:set reference clock");
            IMediaFilter mp = graphBuilder as IMediaFilter;
            IReferenceClock clock = audioRendererFilter as IReferenceClock;
            hr = mp.SetSyncSource(null);
            hr = mp.SetSyncSource(clock);
            Log.Info("RTSPPlayer9:set reference clock:{0:X}", hr);
        }
        Log.Info("RTSPPlayer: graph build successfull");
        return(true);
    }
    catch (Exception ex)
    {
        Error.SetError("Unable to play movie", "Unable build graph for VMR9");
        Log.Error("RTSPPlayer:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
        CloseInterfaces();
        return(false);
    }
}
/// <summary>
/// Worker thread that captures the images: builds the webcam ->
/// sample-grabber graph, runs it, and pumps frames into the grabber
/// callback until the stop signal is set.
/// </summary>
private void RunWorker()
{
    try
    {
        // Build the filter graph, the webcam source and the grabber.
        m_igrphbldGraph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;
        m_sourceObject = FilterInfo.CreateFilter(m_sMonikerString);
        m_isplGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        m_grabberObject = m_isplGrabber as IBaseFilter;

        m_igrphbldGraph.AddFilter(m_sourceObject, "source");
        m_igrphbldGraph.AddFilter(m_grabberObject, "grabber");

        using (AMMediaType mediaType = new AMMediaType())
        {
            // Force RGB32 video so the frame buffers have a known layout.
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            m_isplGrabber.SetMediaType(mediaType);

            // Connect source -> grabber, then read the negotiated format.
            bool connected = m_igrphbldGraph.Connect(
                m_sourceObject.GetPin(PinDirection.Output, 0),
                m_grabberObject.GetPin(PinDirection.Input, 0)) >= 0;
            if (connected && m_isplGrabber.GetConnectedMediaType(mediaType) == 0)
            {
                // During startup this code can run too early, so try the
                // header read up to 3 times before giving up.
                bool gotHeader = false;
                for (int attempt = 1; attempt <= 3 && !gotHeader; attempt++)
                {
                    try
                    {
                        // Retrieve the grabber information.
                        VideoInfoHeader videoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                        m_grbrCapGrabber.Width = videoHeader.BmiHeader.Width;
                        m_grbrCapGrabber.Height = videoHeader.BmiHeader.Height;
                        gotHeader = true;
                    }
                    catch (Exception)
                    {
                        Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", attempt);
                        Thread.Sleep(50);
                    }
                }
            }

            m_igrphbldGraph.Render(m_grabberObject.GetPin(PinDirection.Output, 0));

            // Continuous capture through the callback, no buffering.
            m_isplGrabber.SetBufferSamples(false);
            m_isplGrabber.SetOneShot(false);
            m_isplGrabber.SetCallback(m_grbrCapGrabber, 1);

            // Keep the source's own preview window hidden.
            IVideoWindow videoWindow = (IVideoWindow)m_igrphbldGraph;
            videoWindow.put_AutoShow(false);
            videoWindow = null;

            // Run the graph until the stop signal arrives.
            m_imedctrlControl = (IMediaControl)m_igrphbldGraph;
            m_imedctrlControl.Run();
            while (!m_rstevStopSignal.WaitOne(0, true))
            {
                Thread.Sleep(10);
            }

            // Stop when ready
            // _control.StopWhenReady();
            m_imedctrlControl.Stop();
            // Wait a bit... It apparently takes some time to stop IMediaControl
            Thread.Sleep(1000);
        }
    }
    catch (Exception ex)
    {
        // Trace
        Trace.WriteLine(ex);
    }
    finally
    {
        // Clean up
        this.Release();
    }
}
/* TIVO Files Pin Mapping (pin name between ||) (NOTE: XXXX changes from each machine and AC3 changes if the audio codec changes)
 * Audio -> Source Pin |Output| -> MainConcept MPEG DeMultiplexer |Input| |AC3 (PID XXXX @ Prog# 1)| -> Dump |Input|
 * Video -> Source Pin |Output| -> MainConcept MPEG DeMultiplexer |Input| |Video (PID XXXX @ Prog# 1)| -> Dump |Input|
 */
/// <summary>
/// Builds the DirectShow extraction graph for the source file: adds the source filter,
/// special-cases TiVO files (decrypting demux chain), then walks every output pin of the
/// source/demux filter and connects a dump filter for each video/audio/subtitle stream
/// selected by <c>_extractMediaType</c>. Dump file paths are recorded in
/// <c>_VideoPart</c>/<c>_AudioParts</c>/<c>SubtitleParts</c>.
/// NOTE(review): failed HRESULTs go through checkHR(); presumably it throws — confirm.
/// </summary>
public void BuildGraph()
{
    int hr;
    IntPtr fetched = IntPtr.Zero;   // passed as IntPtr.Zero: caller does not need the fetched count
    IntPtr fetched2 = IntPtr.Zero;
    IEnumPins FilterPins;
    IPin[] pins = new IPin[1];
    string PinID;

    // TiVO Directshow filters are only accessible through userspace otherwise decryption fails, so if we are
    // running the engine as a service (instead of command line) we should prompt the user.
    // NOTE(review): this only logs an error and continues — it does not abort the build.
    if ((_Ext == "tivo") && GlobalDefs.IsEngineRunningAsService)
    {
        _jobLog.WriteEntry(this, "You need to start MCEBuddy engine as a Command line program. TiVO Desktop Directshow decryption filters do not work with a Windows Service.", Log.LogEntryType.Error);
    }

    // Create the source filter for dvrms or wtv or TIVO (will automatically connect to TIVODecryptorTag in source itself)
    _jobLog.WriteEntry(this, "Loading file using DirectShow source filter", Log.LogEntryType.Debug);
    hr = _gb.AddSourceFilter(_SourceFile, "Source Filter", out _SourceF);
    checkHR(hr);

    // If this is a TIVO file, while the source filter automatically decrypts the inputs we need to connect
    // the MPEG demultiplexer to get the audio and video output pins
    if (_Ext == "tivo")
    {
        IPin PinOut, PinIn;
        IntPtr ptr;
        PinInfo demuxPinInfo;
        List<IBaseFilter> filterList = new List<IBaseFilter>();

        // Check if the source filter is a TiVO source filter (otherwise sometimes it tries to use the normal
        // source filter which will fail since the stream is encrypted)
        string vendorInfo;
        FilterInfo filterInfo;
        _SourceF.QueryFilterInfo(out filterInfo);
        _SourceF.QueryVendorInfo(out vendorInfo);
        _jobLog.WriteEntry(this, "TiVO Source filter loaded by Directshow -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);
        if (vendorInfo == null || !vendorInfo.ToLower().Contains("tivo"))
        {
            string exception = "";

            // Check if you are running 64Bit MCEBuddy, TiVO needs 32bit MCEBuddy since TiVO directshow dll
            // are 32bit and can only be loaded by 32bit processes
            if (IntPtr.Size == 8)
            {
                exception += "You need to run 32bit MCEBuddy, TiVO Directshow fiters cannot be accessed by a 64bit program.";
            }
            else
            {
                exception += "TiVO Desktop installation not detected by Windows DirectShow.";
            }

            throw new Exception(exception); // Get out of here and let the parent know something is wrong
        }

        hr = _SourceF.FindPin("Output", out PinOut); // Get the Source filter pinOut |Output|
        checkHR(hr);

        // When TIVO desktop is installed, Render automatically builds the filter graph with the necessary
        // demuxing filters - we cannot manually add the MainConcept demux filter since the class isn't
        // registered but somehow Render is able to find it and load it (along with other redundant filters
        // like DTV, audio etc which we need to remove)
        _jobLog.WriteEntry(this, "DirectShow building TiVO filter chain", Log.LogEntryType.Debug);
        hr = _gb.Render(PinOut);
        checkHR(hr);
        hr = PinOut.ConnectedTo(out ptr); // Find out which input Pin (Mainconcept Demux filter) the output of the Source Filter is connected to
        checkHR(hr);
        PinIn = (IPin)Marshal.GetObjectForIUnknown(ptr);
        hr = PinIn.QueryPinInfo(out demuxPinInfo); // Get the mainconcept demux filter from the pin
        checkHR(hr);
        demuxPinInfo.filter.QueryFilterInfo(out filterInfo);
        demuxPinInfo.filter.QueryVendorInfo(out vendorInfo);
        _jobLog.WriteEntry(this, "Checking downstream TiVO filter chain starting with TiVO Demux filter -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);

        // Get the list of all downstream (redundant) filters (like DTV, Audio, video render etc) from the
        // demux filter that were added by the automatic Render function above (if there are no downstream
        // filters, then TIVO desktop is not installed)
        if (!GetFilterChain(demuxPinInfo.filter, PinDirection.Output, filterList))
        {
            throw new Exception("Unable to get TIVO filter chain");
        }

        // Now remove all the filters in the chain downstream after the demux filter from the graph builder
        // (we don't need them, we will add our own filters later)
        _jobLog.WriteEntry(this, "Removing redundant filters from TiVO filter chain", Log.LogEntryType.Debug);
        foreach (IBaseFilter filter in filterList)
        {
            filter.QueryFilterInfo(out filterInfo);
            filter.QueryVendorInfo(out vendorInfo);
            _jobLog.WriteEntry(this, "Removing filter -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);
            _gb.RemoveFilter(filter);
            Marshal.FinalReleaseComObject(filter); // Release the COM object
        }

        // Now the TIVO MainConcept Demux Filter is our new "Source" filter
        _SourceF = demuxPinInfo.filter;
    }

    // TODO: We need to find a way to insert a filter which can allow us to select audio streams
    // (e.g. LAV filter, currently it only allows us access to the default audio stream and not multiple audio streams)

    // Cycle through pins, connecting as appropriate
    hr = _SourceF.EnumPins(out FilterPins);
    checkHR(hr);
    while (FilterPins.Next(pins.Length, pins, fetched) == 0)
    {
        // Buffer holds one IntPtr: Next() writes a pointer to an AMMediaType structure into it
        IntPtr ptypes = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(IntPtr)));
        AMMediaType mtypes;
        IEnumMediaTypes enummtypes;
        IntPtr ptrEnum;
        pins[0].EnumMediaTypes(out ptrEnum);
        enummtypes = (IEnumMediaTypes)Marshal.GetObjectForIUnknown(ptrEnum);
        while (enummtypes.Next(1, ptypes, fetched2) == 0)
        {
            /* Extract Audio, Video or Subtitle streams -> References:
             * http://nate.deepcreek.org.au/svn/DigitalWatch/trunk/bin/MediaTypes.txt
             * http://msdn.microsoft.com/en-us/library/ms932033.aspx
             * https://sourceforge.net/p/tsubget/home/Dumping%20a%20Stream/
             * http://msdn.microsoft.com/en-us/library/windows/desktop/dd695343(v=vs.85).aspx
             * http://msdn.microsoft.com/en-us/library/windows/desktop/dd390660(v=vs.85).aspx
             * http://msdn.microsoft.com/en-us/library/windows/desktop/dd407354(v=vs.85).aspx
             * http://whrl.pl/RcRv5p (extracting Teletext from WTV/DVRMS)
             */
            // Dereference: ptypes holds a pointer to the AM_MEDIA_TYPE structure
            IntPtr ptrStructure = Marshal.ReadIntPtr(ptypes);
            mtypes = (AMMediaType)Marshal.PtrToStructure(ptrStructure, typeof(AMMediaType));
            if ((mtypes.majorType == MediaType.Video) || (mtypes.majorType == MediaType.Audio) || (mtypes.majorType == MediaType.Mpeg2PES) || (mtypes.majorType == MediaType.Stream) || (mtypes.majorType == MediaType.AuxLine21Data) || (mtypes.majorType == MediaType.VBI) || (mtypes.majorType == MediaType.MSTVCaption) || (mtypes.majorType == MediaType.DTVCCData) || (mtypes.majorType == MediaType.Mpeg2Sections && mtypes.subType == MediaSubType.None && mtypes.formatType == FormatType.None))
            {
                string DumpFileName = "";

                if ((mtypes.majorType == MediaType.Video) && ((_extractMediaType & ExtractMediaType.Video) != 0)) // Video
                {
                    DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_VIDEO");
                    _VideoPart = DumpFileName;
                    _jobLog.WriteEntry(this, "Found Video stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug);
                }
                else if (((mtypes.majorType == MediaType.Audio) || // Audio types https://msdn.microsoft.com/en-us/library/windows/desktop/dd390676(v=vs.85).aspx
                    ((mtypes.majorType == MediaType.Mpeg2PES) && ((mtypes.subType == MediaSubType.DolbyAC3) || (mtypes.subType == MediaSubType.DTS) || (mtypes.subType == MediaSubType.DvdLPCMAudio) || (mtypes.subType == MediaSubType.Mpeg2Audio))) ||
                    ((mtypes.majorType == MediaType.Stream) && ((mtypes.subType == MediaSubType.DolbyAC3) || (mtypes.subType == MediaSubType.MPEG1Audio) || (mtypes.subType == MediaSubType.Mpeg2Audio) || (mtypes.subType == MediaSubType.DolbyDDPlus) || (mtypes.subType == MediaSubType.MpegADTS_AAC) || (mtypes.subType == MediaSubType.MpegLOAS)))
                    ) && ((_extractMediaType & ExtractMediaType.Audio) != 0))
                {
                    // Multiple audio streams possible: suffix with the running count
                    DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_AUDIO" + AudioParts.Count.ToString());
                    _AudioParts.Add(DumpFileName);
                    _jobLog.WriteEntry(this, "Found Audio stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug);
                }
                else if ((_extractMediaType & ExtractMediaType.Subtitle) != 0) // Subtitles
                {
                    DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_SUBTITLE" + SubtitleParts.Count.ToString());
                    SubtitleParts.Add(DumpFileName);
                    _jobLog.WriteEntry(this, "Found Subtitle stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug);
                }

                if (!String.IsNullOrWhiteSpace(DumpFileName)) // If we are asked to extract something
                {
                    hr = pins[0].QueryId(out PinID);
                    ConnectDecryptedDump(PinID, DumpFileName);
                }
            }
            else
            {
                // Debug - looking for more subtitle types (very poorly documented by Microsoft)
                Guid type = mtypes.majorType;
                Guid subtype = mtypes.subType;
                Guid formattyype = mtypes.formatType;
            }
        }
        Marshal.FreeCoTaskMem(ptypes); // Free up the memory
    }
}
/// <summary>
/// Capture worker: binds the device moniker in <c>this.source</c> to a source filter,
/// wires it through a SampleGrabber (RGB24) into a rendered graph, runs the graph and
/// forwards frames to a <see cref="Grabber"/> callback until <c>stopEvent</c> is signalled.
/// All COM objects are released in the finally block regardless of outcome.
/// </summary>
public void WorkerThread()
{
    Grabber pCallback = new Grabber(this);
    // objects (kept separately from the typed interfaces so they can be released once at the end)
    object o = null;
    object ppvResult = null;
    object obj4 = null;
    // interfaces
    IGraphBuilder builder = null;
    IBaseFilter pFilter = null;
    IBaseFilter filter2 = null;
    ISampleGrabber grabber2 = null;
    IMediaControl control = null;
    try
    {
        // Create the filter graph manager
        Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (typeFromCLSID == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }
        o = Activator.CreateInstance(typeFromCLSID);
        builder = (IGraphBuilder)o;

        // Resolve the device moniker (display name in this.source) to a source filter
        UCOMIBindCtx ppbc = null;
        UCOMIMoniker ppmk = null;
        int pchEaten = 0;
        if (Win32.CreateBindCtx(0, out ppbc) == 0)
        {
            if (Win32.MkParseDisplayName(ppbc, this.source, ref pchEaten, out ppmk) == 0)
            {
                Guid gUID = typeof(IBaseFilter).GUID;
                ppmk.BindToObject(null, null, ref gUID, out ppvResult);
                Marshal.ReleaseComObject(ppmk);
                ppmk = null;
            }
            Marshal.ReleaseComObject(ppbc);
            ppbc = null;
        }
        if (ppvResult == null)
        {
            throw new ApplicationException("Failed creating device object for moniker");
        }
        pFilter = (IBaseFilter)ppvResult;

        // Create the sample grabber (exposed both as ISampleGrabber and IBaseFilter)
        typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (typeFromCLSID == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }
        obj4 = Activator.CreateInstance(typeFromCLSID);
        grabber2 = (ISampleGrabber)obj4;
        filter2 = (IBaseFilter)obj4;

        // Build the graph: source -> grabber, forcing RGB24 video at the grabber input
        builder.AddFilter(pFilter, "source");
        builder.AddFilter(filter2, "grabber");
        AMMediaType pmt = new AMMediaType { majorType = MediaType.Video, subType = MediaSubType.RGB24 };
        grabber2.SetMediaType(pmt);
        if (builder.Connect(DSTools.GetOutPin(pFilter, 0), DSTools.GetInPin(filter2, 0)) < 0)
        {
            throw new ApplicationException("Failed connecting filters");
        }

        // Read back the negotiated frame size for the callback
        if (grabber2.GetConnectedMediaType(pmt) == 0)
        {
            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(pmt.formatPtr, typeof(VideoInfoHeader));
            pCallback.Width = header.BmiHeader.Width;
            pCallback.Height = header.BmiHeader.Height;
            pmt.Dispose();
        }

        // Render the grabber output and configure callback-per-sample mode
        builder.Render(DSTools.GetOutPin(filter2, 0));
        grabber2.SetBufferSamples(false);
        grabber2.SetOneShot(false);
        grabber2.SetCallback(pCallback, 1);
        ((IVideoWindow)o).put_AutoShow(false); // suppress the default video window
        control = (IMediaControl)o;
        control.Run();

        // Poll for the stop signal
        while (!this.stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        control.StopWhenReady();
    }
    catch (Exception)
    {
        // NOTE(review): all failures are silently swallowed — the caller gets no error signal
    }
    finally
    {
        control = null;
        builder = null;
        pFilter = null;
        filter2 = null;
        grabber2 = null;
        if (o != null)
        {
            Marshal.ReleaseComObject(o);
            o = null;
        }
        if (ppvResult != null)
        {
            Marshal.ReleaseComObject(ppvResult);
            ppvResult = null;
        }
        if (obj4 != null)
        {
            Marshal.ReleaseComObject(obj4);
            obj4 = null;
        }
    }
}
/// <summary>
/// Determines the video frame format of a stream-buffer (WTV/DVR-MS) file by building a
/// temporary playback graph (source -> decrypt -> MPEG decoder -> null renderer), briefly
/// running it, then reconnecting the decoder output and reading its negotiated media type.
/// </summary>
/// <param name="pathToFile">Path of the recording to probe.</param>
/// <returns>The <see cref="VideoInfoHeader2"/> of the connected video pin, or null if the
/// format is not VideoInfo2 or no connected input pin is found.</returns>
public static VideoInfoHeader2 GetSBEFrameSize(string pathToFile)
{
    int hr = 0;
    IGraphBuilder graph = null;
    IBaseFilter capFilter = null;
    IBaseFilter nRender = null;
    try
    {
        graph = (IGraphBuilder)new FilterGraph();
        hr = graph.AddSourceFilter(pathToFile, "Source", out capFilter);
        DsError.ThrowExceptionForHR(hr);
#if DEBUG
        // Register the graph in the Running Object Table so GraphEdit can inspect it
        using (DsROTEntry rot = new DsROTEntry(graph))
        {
#endif
        IPin vPin = null;
        IBaseFilter dec = null;
        IPin sgIn = null;
        IBaseFilter mpegDec = null;
        try
        {
            // Decryption + null video renderer so the graph can run without a display
            dec = (IBaseFilter)new DTFilter();
            hr = graph.AddFilter(dec, "Decrypt");
            DsError.ThrowExceptionForHR(hr);
            nRender = (IBaseFilter)new NullRenderer();
            hr = graph.AddFilter((IBaseFilter)nRender, "Video Null Renderer");
            DsError.ThrowExceptionForHR(hr);

            // PBDA decrypter is only needed in the graph; we drop our reference immediately
            IBaseFilter dec1 = FilterDefinition.AddToFilterGraph(FatAttitude.WTVTranscoder.FilterDefinitions.Decrypt.DTFilterPBDA, ref graph, "Decrypt1");
            if (dec1 != null)
            {
                Marshal.ReleaseComObject(dec1);
            }
            dec1 = null;
            mpegDec = FilterDefinition.AddToFilterGraph(FatAttitude.WTVTranscoder.FilterDefinitions.Video.VideoDecoderMpeg, ref graph, "MS MPEG Decoder");
            sgIn = DsFindPin.ByDirection(mpegDec, PinDirection.Input, 0);

            // Find the first source output pin carrying video that connects to the decoder
            IEnumPins ppEnum;
            IPin[] pPins = new IPin[1];
            hr = capFilter.EnumPins(out ppEnum);
            DsError.ThrowExceptionForHR(hr);
            try
            {
                while (ppEnum.Next(1, pPins, IntPtr.Zero) == 0)
                {
                    IEnumMediaTypes emtDvr = null;
                    AMMediaType[] amtDvr = new AMMediaType[1];
                    try
                    {
                        pPins[0].EnumMediaTypes(out emtDvr);
                        hr = emtDvr.Next(1, amtDvr, IntPtr.Zero);
                        DsError.ThrowExceptionForHR(hr);
                        if (amtDvr[0].majorType == MediaType.Video)
                        {
                            if (graph.Connect(pPins[0], sgIn) >= 0)
                            {
                                vPin = pPins[0]; // keep a reference; released in the outer finally
                                break;
                            }
                        }
                        if (pPins[0] != null)
                        {
                            Marshal.ReleaseComObject(pPins[0]);
                        }
                    }
                    finally
                    {
                        if (emtDvr != null)
                        {
                            Marshal.ReleaseComObject(emtDvr);
                        }
                        DsUtils.FreeAMMediaType(amtDvr[0]);
                    }
                }
            }
            finally
            {
                if (ppEnum != null)
                {
                    Marshal.ReleaseComObject(ppEnum);
                }
            }
            FilterGraphTools.RenderPin(graph, mpegDec, "Video Output 1");
        }
        finally
        {
            if (vPin != null)
            {
                Marshal.ReleaseComObject(vPin);
            }
            if (dec != null)
            {
                Marshal.ReleaseComObject(dec);
            }
            if (sgIn != null)
            {
                Marshal.ReleaseComObject(sgIn);
            }
            if (mpegDec != null)
            {
                Marshal.ReleaseComObject(mpegDec);
            }
        }

        // Briefly run the graph so the decoder negotiates its real output format
        EventCode ec;
        IMediaControl mControl = graph as IMediaControl;
        IMediaEvent mEvent = graph as IMediaEvent;
        hr = mControl.Pause();
        DsError.ThrowExceptionForHR(hr);
        hr = mControl.Run();
        DsError.ThrowExceptionForHR(hr);
        hr = mEvent.WaitForCompletion(1000, out ec);
        //DsError.ThrowExceptionForHR(hr); // timeout here is expected; deliberately not checked
        hr = mControl.Pause();
        DsError.ThrowExceptionForHR(hr);
        hr = mControl.Stop();
        DsError.ThrowExceptionForHR(hr);

        // Re-render into a fresh null renderer and read the connection's media type
        IPin mpgOut = null;
        sgIn = null;
        AMMediaType mt = new AMMediaType();
        try
        {
            sgIn = DsFindPin.ByDirection(nRender, PinDirection.Input, 0);
            if (sgIn != null)
            {
                hr = sgIn.ConnectedTo(out mpgOut);
                DsError.ThrowExceptionForHR(hr);
                hr = graph.RemoveFilter(nRender);
                DsError.ThrowExceptionForHR(hr);
                Marshal.ReleaseComObject(nRender);
                nRender = null;
                nRender = (IBaseFilter)new NullRenderer();
                hr = graph.AddFilter((IBaseFilter)nRender, "Video Null Renderer");
                DsError.ThrowExceptionForHR(hr);
                hr = graph.Render(mpgOut);
                DsError.ThrowExceptionForHR(hr);
                hr = mpgOut.ConnectionMediaType(mt);
                DsError.ThrowExceptionForHR(hr);
                if (mt.formatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 vih = (VideoInfoHeader2)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader2));
                    return (vih);
                }
            }
        }
        finally
        {
            DsUtils.FreeAMMediaType(mt);
            if (mpgOut != null)
            {
                Marshal.ReleaseComObject(mpgOut);
            }
            if (sgIn != null)
            {
                Marshal.ReleaseComObject(sgIn);
            }
        }
#if DEBUG
        }
#endif
    }
    finally
    {
        if (nRender != null)
        {
            Marshal.ReleaseComObject(nRender);
        }
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (graph != null)
        {
            // Drain every outstanding reference on the graph object
            while (Marshal.ReleaseComObject(graph) > 0)
            {
                ;
            }
        }
    }
    return (null);
}
/// <summary>
/// Worker thread that captures the images: builds a webcam -> SampleGrabber (RGB32) graph,
/// publishes the negotiated frame size to <c>_capGrabber</c>, runs the graph with frames
/// delivered via callback, and blocks until <c>_stopSignal</c> is set. All graph state is
/// stored in fields and torn down by <c>Release()</c> in the finally block.
/// </summary>
private void RunWorker()
{
    try
    {
        // Create the main graph
        _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

        // Create the webcam source
        _sourceObject = FilterInfo.CreateFilter(_monikerString);

        // Create the grabber
        _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        _grabberObject = _grabber as IBaseFilter;

        // Add the source and grabber to the main graph
        _graph.AddFilter(_sourceObject, "source");
        _graph.AddFilter(_grabberObject, "grabber");

        using (AMMediaType mediaType = new AMMediaType())
        {
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            _grabber.SetMediaType(mediaType);

            if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
            {
                if (_grabber.GetConnectedMediaType(mediaType) == 0)
                {
                    // During startup, this code can be too fast, so try at least 3 times
                    int retryCount = 0;
                    bool succeeded = false;
                    while ((retryCount < 3) && !succeeded)
                    {
                        // Tried again
                        retryCount++;
                        try
                        {
                            // Retrieve the grabber information
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            _capGrabber.Width = header.BmiHeader.Width;
                            _capGrabber.Height = header.BmiHeader.Height;

                            // Succeeded
                            succeeded = true;
                        }
                        catch (Exception retryException)
                        {
                            // Trace, then back off briefly before retrying
                            Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);
                            Thread.Sleep(50);
                        }
                    }
                }
            }

            // Render the grabber output and deliver every sample via callback (no buffering)
            _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
            _grabber.SetBufferSamples(false);
            _grabber.SetOneShot(false);
            _grabber.SetCallback(_capGrabber, 1);

            // Get the video window and keep it hidden
            IVideoWindow wnd = (IVideoWindow)_graph;
            wnd.put_AutoShow(false);
            wnd = null;

            // Create the control and run
            _control = (IMediaControl)_graph;
            _control.Run();

            // Wait for the stop signal
            while (!_stopSignal.WaitOne(0, true))
            {
                Thread.Sleep(10);
            }

            // Stop when ready
            _control.StopWhenReady();
        }
    }
    catch (Exception ex)
    {
        // Trace
        Trace.WriteLine(ex);
    }
    finally
    {
        // Clean up (releases the COM objects held in fields)
        Release();
    }
}
/// <summary>
/// Configures the DirectShow graph to play the selected video capture
/// device with the selected parameters: builds a capture graph, adds the capture
/// source (by name or device path), optionally configures YUY2 output and sample
/// grabbing, creates a VMR9 renderer and renders the capture pin. On failure the
/// resources are freed and MediaFailed is raised; MediaOpened is raised regardless
/// at the end.
/// </summary>
private void SetupGraph()
{
    /* Clean up any messes left behind */
    FreeResources();

    try
    {
        /* Create a new graph */
        m_graph = (IGraphBuilder)new FilterGraphNoThread();

#if DEBUG
        m_rotEntry = new DsROTEntry(m_graph); // expose the graph to GraphEdit in debug builds
#endif

        /* Create a capture graph builder to help
         * with rendering a capture graph */
        var captureGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

        /* Set our filter graph to the capture graph */
        int hr = captureGraph.SetFiltergraph(m_graph);
        DsError.ThrowExceptionForHR(hr);

        /* Add our capture device source to the graph — the changed-flags decide
         * whether we resolve it by friendly name or by device path */
        if (m_videoCaptureSourceChanged)
        {
            m_captureDevice = AddFilterByName(m_graph, FilterCategory.VideoInputDevice, VideoCaptureSource);
            m_videoCaptureSourceChanged = false;
        }
        else if (m_videoCaptureDeviceChanged)
        {
            m_captureDevice = AddFilterByDevicePath(m_graph, FilterCategory.VideoInputDevice, VideoCaptureDevice.DevicePath);
            m_videoCaptureDeviceChanged = false;
        }

        /* If we have a null capture device, we have an issue */
        if (m_captureDevice == null)
        {
            throw new Exception(string.Format("Capture device {0} not found or could not be created", VideoCaptureSource));
        }

        if (UseYuv && !EnableSampleGrabbing)
        {
            /* Configure the video output pin with our parameters and if it fails
             * then just use the default media subtype*/
            if (!SetVideoCaptureParameters(captureGraph, m_captureDevice, MediaSubType.YUY2))
            {
                SetVideoCaptureParameters(captureGraph, m_captureDevice, Guid.Empty);
            }
        }
        else
        {
            /* Configure the video output pin with our parameters */
            SetVideoCaptureParameters(captureGraph, m_captureDevice, Guid.Empty);
        }

        var rendererType = VideoRendererType.VideoMixingRenderer9;

        /* Creates a video renderer and register the allocator with the base class */
        m_renderer = CreateVideoRenderer(rendererType, m_graph, 1);

        if (rendererType == VideoRendererType.VideoMixingRenderer9)
        {
            var mixer = m_renderer as IVMRMixerControl9;

            if (mixer != null && !EnableSampleGrabbing && UseYuv)
            {
                VMR9MixerPrefs dwPrefs;
                mixer.GetMixingPrefs(out dwPrefs);
                dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                dwPrefs |= VMR9MixerPrefs.RenderTargetYUV; /* Prefer YUV */
                mixer.SetMixingPrefs(dwPrefs);
            }
        }

        if (EnableSampleGrabbing)
        {
            m_sampleGrabber = (ISampleGrabber)new SampleGrabber();
            SetupSampleGrabber(m_sampleGrabber);
            hr = m_graph.AddFilter(m_sampleGrabber as IBaseFilter, "SampleGrabber");
            DsError.ThrowExceptionForHR(hr);
        }

        var videoOutPin = DsFindPin.ByDirection(m_captureDevice, PinDirection.Output, 0);
        if (videoOutPin == null)
        {
            throw new Exception("Could not query the video output pin on source filter");
        }

        /* Intelligently connect the pins in the graph to the renderer */
        hr = m_graph.Render(videoOutPin);
        Marshal.ReleaseComObject(videoOutPin);

        //hr = captureGraph.RenderStream(PinCategory.Capture,
        //                               MediaType.Video,
        //                               m_captureDevice,
        //                               null,
        //                               m_renderer);

        DsError.ThrowExceptionForHR(hr);

        /* Register the filter graph
         * with the base classes */
        SetupFilterGraph(m_graph);

        /* Sets the NaturalVideoWidth/Height */
        SetNativePixelSizes(m_renderer);

        HasVideo = true;

        /* Make sure we Release() this COM reference */
        Marshal.ReleaseComObject(captureGraph);
    }
    catch (Exception ex)
    {
        /* Something got fuct up */
        FreeResources();
        InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
    }

    /* Success */
    // NOTE(review): MediaOpened is invoked even on the failure path above — confirm intended
    InvokeMediaOpened();
}
// Thread entry point
/// <summary>
/// Playback worker for a Windows Media source: builds a WM source -> SampleGrabber (RGB24)
/// graph, renders it, runs until the stop event fires or playback completes, and releases
/// all COM objects. The outer loop restarts the graph after a normal completion; it exits
/// permanently on failure or when <c>stopEvent</c> is signalled.
/// </summary>
public void WorkerThread()
{
    bool failed = false;

    // grabber
    Grabber grabber = new Grabber(this);

    // objects
    object graphObj = null;
    object sourceObj = null;
    object grabberObj = null;

    // interfaces
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sg = null;
    IFileSourceFilter fileSource = null;
    IMediaControl mc = null;
    IMediaEventEx mediaEvent = null;
    int code, param1, param2;

    while ((!failed) && (!stopEvent.WaitOne(0, true)))
    {
        try
        {
            // Get type for filter graph
            Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
            if (srvType == null)
            {
                throw new ApplicationException("Failed creating filter graph");
            }

            // create filter graph
            graphObj = Activator.CreateInstance(srvType);
            graph = (IGraphBuilder)graphObj;

            // Get type for windows media source filter
            srvType = Type.GetTypeFromCLSID(Clsid.WindowsMediaSource);
            if (srvType == null)
            {
                throw new ApplicationException("Failed creating WM source");
            }

            // create windows media source filter
            sourceObj = Activator.CreateInstance(srvType);
            sourceBase = (IBaseFilter)sourceObj;

            // Get type for sample grabber
            srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
            if (srvType == null)
            {
                throw new ApplicationException("Failed creating sample grabber");
            }

            // create sample grabber
            grabberObj = Activator.CreateInstance(srvType);
            sg = (ISampleGrabber)grabberObj;
            grabberBase = (IBaseFilter)grabberObj;

            // add source filter to graph
            graph.AddFilter(sourceBase, "source");
            graph.AddFilter(grabberBase, "grabber");

            // set media type: force RGB24 video at the grabber
            AMMediaType mt = new AMMediaType();
            mt.majorType = MediaType.Video;
            mt.subType = MediaSubType.RGB24;
            sg.SetMediaType(mt);

            // load file
            fileSource = (IFileSourceFilter)sourceObj;
            fileSource.Load(this.source, null);

            // connect pins
            if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
            {
                throw new ApplicationException("Failed connecting filters");
            }

            // get media type (negotiated frame size for the callback)
            if (sg.GetConnectedMediaType(mt) == 0)
            {
                VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
                grabber.Width = vih.BmiHeader.Width;
                grabber.Height = vih.BmiHeader.Height;
                mt.Dispose();
            }

            // render
            graph.Render(DSTools.GetOutPin(grabberBase, 0));

            // configure grabber: callback per sample, no buffering
            sg.SetBufferSamples(false);
            sg.SetOneShot(false);
            sg.SetCallback(grabber, 1);

            // window: keep the default video window hidden
            IVideoWindow win = (IVideoWindow)graphObj;
            win.put_AutoShow(false);
            win = null;

            // get events interface
            mediaEvent = (IMediaEventEx)graphObj;

            // get media control
            mc = (IMediaControl)graphObj;

            // run
            mc.Run();

            while (!stopEvent.WaitOne(0, true))
            {
                Thread.Sleep(100);

                // get an event
                if (mediaEvent.GetEvent(out code, out param1, out param2, 0) == 0)
                {
                    // release params
                    mediaEvent.FreeEventParams(code, param1, param2);

                    // end of stream: leave the inner loop (outer loop rebuilds and replays)
                    if (code == (int)EventCode.Complete)
                    {
                        break;
                    }
                }
            }

            mc.StopWhenReady();
        }
        // catch any exceptions
        catch (Exception e)
        {
            System.Diagnostics.Debug.WriteLine("----: " + e.Message);
            failed = true;
        }
        // finalization block
        finally
        {
            // release all objects
            mediaEvent = null;
            mc = null;
            fileSource = null;
            graph = null;
            sourceBase = null;
            grabberBase = null;
            sg = null;

            if (graphObj != null)
            {
                Marshal.ReleaseComObject(graphObj);
                graphObj = null;
            }
            if (sourceObj != null)
            {
                Marshal.ReleaseComObject(sourceObj);
                sourceObj = null;
            }
            if (grabberObj != null)
            {
                Marshal.ReleaseComObject(grabberObj);
                grabberObj = null;
            }
        }
    }
}
/// <summary>
/// Adds the named audio renderer to the graph, optionally making it the graph's reference
/// clock. Two phases: a direct attempt first; if that fails, verify the renderer exists,
/// remove any other audio renderer already in the graph, then add the requested one.
/// </summary>
/// <param name="graphBuilder">Graph to add the renderer to.</param>
/// <param name="strFilterName">Friendly name of the audio renderer (case-insensitive).</param>
/// <param name="setAsReferenceClock">If true, set the new renderer as the graph sync source.</param>
/// <returns>The added renderer, or null if it was not added (not found, add failed, or an
/// identical renderer was already present).</returns>
public static IBaseFilter AddAudioRendererToGraph(IGraphBuilder graphBuilder, string strFilterName, bool setAsReferenceClock)
{
    // ---- Phase 1: straightforward add of the requested renderer ----
    try
    {
        IPin pinOut = null; // NOTE(review): never assigned in this phase, so the Render branch below is dead
        IBaseFilter NewFilter = null;
        IEnumFilters enumFilters;
        HResult hr = new HResult(graphBuilder.EnumFilters(out enumFilters));
        Log.Info("DirectShowUtils: First try to insert new audio renderer {0} ", strFilterName);
        // next add the new one...
        foreach (Filter filter in Filters.AudioRenderers)
        {
            if (String.Compare(filter.Name, strFilterName, true) == 0)
            {
                Log.Info("DirectShowUtils: Found audio renderer");
                NewFilter = (IBaseFilter)Marshal.BindToMoniker(filter.MonikerString);
                hr.Set(graphBuilder.AddFilter(NewFilter, strFilterName));
                if (hr < 0)
                {
                    Log.Error("DirectShowUtils: unable to add filter:{0} to graph", strFilterName);
                    NewFilter = null;
                }
                else
                {
                    Log.Debug("DirectShowUtils: added filter:{0} to graph", strFilterName);
                    if (pinOut != null)
                    {
                        hr.Set(graphBuilder.Render(pinOut));
                        if (hr == 0)
                        {
                            Log.Info(" pinout rendererd");
                        }
                        else
                        {
                            Log.Error(" failed: pinout render");
                        }
                    }
                    if (setAsReferenceClock)
                    {
                        hr.Set((graphBuilder as IMediaFilter).SetSyncSource(NewFilter as IReferenceClock));
                        if (hr != 0)
                        {
                            Log.Warn("setAsReferenceClock sync source " + hr.ToDXString());
                        }
                    }
                    return NewFilter;
                }
            } //if (String.Compare(filter.Name,strFilterName,true) ==0)
        } //foreach (Filter filter in filters.AudioRenderers)
        if (NewFilter == null)
        {
            Log.Error("DirectShowUtils: failed filter {0} not found", strFilterName);
        }
    }
    catch {} // best-effort: fall through to the slower removal-based path below

    // ---- Phase 2: remove existing audio renderers first, then add the requested one ----
    Log.Info("DirectShowUtils: First try to insert new audio renderer {0} failed ", strFilterName);
    try
    {
        IPin pinOut = null; // may be set by FindSourcePinOf() when an old renderer is removed
        IBaseFilter NewFilter = null;
        Log.Info("add filter:{0} to graph clock:{1}", strFilterName, setAsReferenceClock);
        //check first if audio renderer exists!
        bool bRendererExists = false;
        foreach (Filter filter in Filters.AudioRenderers)
        {
            if (String.Compare(filter.Name, strFilterName, true) == 0)
            {
                bRendererExists = true;
                Log.Info("DirectShowUtils: found renderer - {0}", filter.Name);
            }
        }
        if (!bRendererExists)
        {
            Log.Error("FAILED: audio renderer:{0} doesnt exists", strFilterName);
            return null;
        }
        // first remove all audio renderers
        bool bAllRemoved = false;
        bool bNeedAdd = true;
        IEnumFilters enumFilters;
        HResult hr = new HResult(graphBuilder.EnumFilters(out enumFilters));
        if (hr >= 0 && enumFilters != null)
        {
            int iFetched;
            enumFilters.Reset();
            while (!bAllRemoved)
            {
                IBaseFilter[] pBasefilter = new IBaseFilter[2];
                hr.Set(enumFilters.Next(1, pBasefilter, out iFetched));
                if (hr < 0 || iFetched != 1 || pBasefilter[0] == null)
                {
                    break;
                }
                // Compare this graph filter's CLSID against every registered audio renderer
                foreach (Filter filter in Filters.AudioRenderers)
                {
                    Guid classId1;
                    Guid classId2;
                    pBasefilter[0].GetClassID(out classId1);
                    //Log.Info("Filter Moniker string - " + filter.Name);
                    if (filter.Name == "ReClock Audio Renderer")
                    {
                        Log.Warn(
                            "Reclock is installed - if this method fails, reinstall and regsvr32 /u reclock and then uninstall");
                        // return null;
                    }
                    try
                    {
                        // Bind a temporary instance only to learn the renderer's CLSID
                        NewFilter = (IBaseFilter)Marshal.BindToMoniker(filter.MonikerString);
                        if (NewFilter == null)
                        {
                            Log.Info("NewFilter = null");
                            continue;
                        }
                    }
                    catch (Exception e)
                    {
                        Log.Info("Exception in BindToMoniker({0}): {1}", filter.MonikerString, e.Message);
                        continue;
                    }
                    NewFilter.GetClassID(out classId2);
                    ReleaseComObject(NewFilter);
                    NewFilter = null;
                    if (classId1.Equals(classId2))
                    {
                        if (filter.Name == strFilterName)
                        {
                            // Requested renderer is already in the graph — keep it, no add needed
                            Log.Info("filter already in graph");
                            if (setAsReferenceClock)
                            {
                                hr.Set((graphBuilder as IMediaFilter).SetSyncSource(pBasefilter[0] as IReferenceClock));
                                if (hr != 0)
                                {
                                    Log.Warn("setAsReferenceClock sync source " + hr.ToDXString());
                                }
                            }
                            ReleaseComObject(pBasefilter[0]);
                            pBasefilter[0] = null;
                            bNeedAdd = false;
                            break;
                        }
                        else
                        {
                            // A different audio renderer is present — remove it, remembering its
                            // upstream pin so the new renderer can be rendered from it
                            Log.Info("remove " + filter.Name + " from graph");
                            pinOut = FindSourcePinOf(pBasefilter[0]);
                            graphBuilder.RemoveFilter(pBasefilter[0]);
                            bAllRemoved = true;
                            break;
                        }
                    } //if (classId1.Equals(classId2))
                } //foreach (Filter filter in filters.AudioRenderers)
                if (pBasefilter[0] != null)
                {
                    ReleaseComObject(pBasefilter[0]);
                }
            } //while(!bAllRemoved)
            ReleaseComObject(enumFilters);
        } //if (hr>=0 && enumFilters!=null)
        Log.Info("DirectShowUtils: Passed removing audio renderer");
        if (!bNeedAdd)
        {
            return null;
        }
        // next add the new one...
        foreach (Filter filter in Filters.AudioRenderers)
        {
            if (String.Compare(filter.Name, strFilterName, true) == 0)
            {
                Log.Info("DirectShowUtils: Passed finding Audio Renderer");
                NewFilter = (IBaseFilter)Marshal.BindToMoniker(filter.MonikerString);
                hr.Set(graphBuilder.AddFilter(NewFilter, strFilterName));
                if (hr < 0)
                {
                    Log.Error("failed:unable to add filter:{0} to graph", strFilterName);
                    NewFilter = null;
                }
                else
                {
                    Log.Debug("added filter:{0} to graph", strFilterName);
                    // Reconnect the pin that fed the removed renderer, if any
                    if (pinOut != null)
                    {
                        hr.Set(graphBuilder.Render(pinOut));
                        if (hr == 0)
                        {
                            Log.Info(" pinout rendererd");
                        }
                        else
                        {
                            Log.Error(" failed: pinout render");
                        }
                    }
                    if (setAsReferenceClock)
                    {
                        hr.Set((graphBuilder as IMediaFilter).SetSyncSource(NewFilter as IReferenceClock));
                        if (hr != 0)
                        {
                            Log.Warn("setAsReferenceClock sync source " + hr.ToDXString());
                        }
                    }
                    return NewFilter;
                }
            } //if (String.Compare(filter.Name,strFilterName,true) ==0)
        } //foreach (Filter filter in filters.AudioRenderers)
        if (NewFilter == null)
        {
            Log.Error("failed filter:{0} not found", strFilterName);
        }
    }
    catch (Exception ex)
    {
        Log.Error("DirectshowUtil. Failed to add filter:{0} to graph :{1} {2} {3}", strFilterName, ex.Message, ex.Source, ex.StackTrace);
    }
    return null;
}
/// <summary>
/// Renders every output pin of <paramref name="filter"/> into the graph, letting the
/// graph builder intelligently complete each downstream chain.
/// </summary>
/// <param name="graphBuilder">Graph the filter lives in.</param>
/// <param name="filter">Filter whose output pins are rendered.</param>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
/// <exception cref="COMException">Pin enumeration or rendering fails (via DsError).</exception>
public static void RenderOutputPins(IGraphBuilder graphBuilder, IBaseFilter filter)
{
    // Fix: the original checked 'filter' for null twice; the duplicate was dead code.
    if (graphBuilder == null)
    {
        throw new ArgumentNullException(nameof(graphBuilder));
    }
    if (filter == null)
    {
        throw new ArgumentNullException(nameof(filter));
    }

    IEnumPins enumPins;
    var pins = new IPin[1];
    // IntPtr.Zero: we don't need the fetched count; Next() returning 0 signals one pin delivered
    IntPtr fetched = IntPtr.Zero;

    int hr = filter.EnumPins(out enumPins);
    DsError.ThrowExceptionForHR(hr);

    try
    {
        while (enumPins.Next(pins.Length, pins, fetched) == 0)
        {
            try
            {
                PinDirection direction;
                pins[0].QueryDirection(out direction);

                if (direction == PinDirection.Output)
                {
                    hr = graphBuilder.Render(pins[0]);
                    DsError.ThrowExceptionForHR(hr);
                }
            }
            finally
            {
                // Release each pin even if Render throws, so the enumerator keeps no leaks
                Marshal.ReleaseComObject(pins[0]);
            }
        }
    }
    finally
    {
        Marshal.ReleaseComObject(enumPins);
    }
}
/// <summary>
/// Renders every output pin of <paramref name="baseFilter"/> that is not yet connected,
/// logging the filter/pin names being attempted. A null filter is silently ignored;
/// individual render failures are logged, not thrown.
/// </summary>
/// <param name="graphBuilder">Graph the filter lives in.</param>
/// <param name="baseFilter">Filter whose unconnected output pins should be rendered; may be null.</param>
public static void RenderUnconnectedOutputPins(IGraphBuilder graphBuilder, IBaseFilter baseFilter)
{
    if (baseFilter == null)
    {
        return;
    }

    int fetched;
    IEnumPins pinEnum;
    int hr = baseFilter.EnumPins(out pinEnum);
    DsError.ThrowExceptionForHR(hr);
    if (hr == 0 && pinEnum != null)
    {
        pinEnum.Reset();
        IPin[] pins = new IPin[1];
        while (pinEnum.Next(1, pins, out fetched) == 0 && fetched > 0)
        {
            PinDirection pinDir;
            pins[0].QueryDirection(out pinDir);
            if (pinDir == PinDirection.Output && !HasConnection(pins[0]))
            {
                FilterInfo i;
                PinInfo pinInfo;
                // Fix: removed the dead local 'pinName' (it was assigned but never read)
                if (baseFilter.QueryFilterInfo(out i) == 0)
                {
                    if (pins[0].QueryPinInfo(out pinInfo) == 0)
                    {
                        Log.Debug("Filter: {0} - try to connect: {1}", i.achName, pinInfo.name);
                        DsUtils.FreePinInfo(pinInfo);
                    }
                }
                // FilterInfo holds a graph reference that must be released
                // NOTE(review): ReleaseComObject here is the unqualified project helper,
                // presumably null-safe — confirm it tolerates a null pGraph
                ReleaseComObject(i.pGraph);
                hr = graphBuilder.Render(pins[0]);
                if (hr != 0)
                {
                    Log.Debug(" - failed");
                }
            }
            ReleaseComObject(pins[0]);
        }
        ReleaseComObject(pinEnum);
    }
}
/// <summary>
/// Worker thread: builds a file-source -> SampleGrabber (RGB24) graph for <c>fileName</c>,
/// probing source output pins until one connects to the grabber, optionally renders the
/// grabber output (skipped when <c>preventFreezing</c> is set), runs until completion or
/// the stop event, then releases all COM objects and raises <c>PlayingFinished</c> with
/// the reason playback ended.
/// </summary>
private void WorkerThread( )
{
    ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;

    // grabber
    Grabber grabber = new Grabber(this);

    // objects
    object graphObject = null;
    object grabberObject = null;

    // interfaces
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;
    IMediaEventEx mediaEvent = null;

    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;

        // create source device's object
        graph.AddSourceFilter(fileName, "source", out sourceBase);
        if (sourceBase == null)
        {
            throw new ApplicationException("Failed creating source filter");
        }

        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // create sample grabber
        grabberObject = Activator.CreateInstance(type);
        sampleGrabber = (ISampleGrabber)grabberObject;
        grabberBase = (IBaseFilter)grabberObject;

        // add grabber filters to graph
        graph.AddFilter(grabberBase, "grabber");

        // set media type: force RGB24 video at the grabber
        AMMediaType mediaType = new AMMediaType( );
        mediaType.MajorType = MediaType.Video;
        mediaType.SubType = MediaSubType.RGB24;
        sampleGrabber.SetMediaType(mediaType);

        // connect pins
        int pinToTry = 0;

        IPin inPin = Tools.GetInPin(grabberBase, 0);
        IPin outPin = null;

        // find output pin acceptable by sample grabber
        while (true)
        {
            outPin = Tools.GetOutPin(sourceBase, pinToTry);

            if (outPin == null)
            {
                Marshal.ReleaseComObject(inPin);
                throw new ApplicationException("Did not find acceptable output video pin in the given source");
            }

            if (graph.Connect(outPin, inPin) < 0)
            {
                // this pin can't feed the grabber — release it and try the next one
                Marshal.ReleaseComObject(outPin);
                outPin = null;
                pinToTry++;
            }
            else
            {
                break;
            }
        }

        Marshal.ReleaseComObject(outPin);
        Marshal.ReleaseComObject(inPin);

        // get media type (negotiated frame size for the callback)
        if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            mediaType.Dispose( );
        }

        // let's do rendering, if we don't need to prevent freezing
        if (!preventFreezing)
        {
            // render pin
            graph.Render(Tools.GetOutPin(grabberBase, 0));

            // configure video window (hidden)
            IVideoWindow window = (IVideoWindow)graphObject;
            window.put_AutoShow(false);
            window = null;
        }

        // configure sample grabber: callback per sample, no buffering
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback(grabber, 1);

        // disable clock, if someone requested it (frames are then delivered as fast as possible)
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)graphObject;
            mediaFilter.SetSyncSource(null);
        }

        // get media control
        mediaControl = (IMediaControl)graphObject;

        // get media events' interface
        mediaEvent = (IMediaEventEx)graphObject;
        IntPtr p1, p2;
        DsEvCode code;

        // run
        mediaControl.Run( );

        do
        {
            if (mediaEvent != null)
            {
                if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                {
                    mediaEvent.FreeEventParams(code, p1, p2);

                    if (code == DsEvCode.Complete)
                    {
                        reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                        break;
                    }
                }
            }
        }
        while (!stopEvent.WaitOne(100, false));

        mediaControl.Stop( );
    }
    catch (Exception exception)
    {
        // provide information to clients
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
        }
    }
    finally
    {
        // release all objects
        graph = null;
        grabberBase = null;
        sampleGrabber = null;
        mediaControl = null;
        mediaEvent = null;

        if (graphObject != null)
        {
            Marshal.ReleaseComObject(graphObject);
            graphObject = null;
        }
        if (sourceBase != null)
        {
            Marshal.ReleaseComObject(sourceBase);
            sourceBase = null;
        }
        if (grabberObject != null)
        {
            Marshal.ReleaseComObject(grabberObject);
            grabberObject = null;
        }
    }

    if (PlayingFinished != null)
    {
        PlayingFinished(this, reasonToStop);
    }
}
/// <summary>
/// Builds a DirectShow filter graph for <paramref name="filename"/> and starts playback
/// in this window. The graph is assembled manually: an EVR video renderer and an audio
/// renderer are added first, any user-preferred decoders for the file extension are
/// pre-inserted, a source filter is added (falling back to the WM ASF Reader), and then
/// every unconnected source output pin is rendered. Afterwards optional WMA multichannel
/// and WMV DXVA tweaks are applied, DirectShow control interfaces are cached, the graph
/// is pre-rolled (paused), streaming content is buffered, and the graph is run.
/// Also (re)creates the commercial-skip file watcher and restores any saved bookmark.
/// If the EVR cannot be added, playback is aborted and an error dialog is shown.
/// </summary>
/// <param name="filename">Full path (or URL) of the media file; an empty string is a no-op.</param>
private void PlayMovieInWindow(string filename)
{
    WindowsMediaLib.IWMReaderAdvanced2 wmReader = null;
    IBaseFilter sourceFilter = null;
    try
    {
        FileLogger.Log("PlayMovieInWindow: {0}", filename);
        lastJump = 0;
        int hr = 0;

        // Nothing to play; leave all state untouched.
        if (filename == string.Empty) return;

        this.graphBuilder = (IGraphBuilder)new FilterGraph();
        FileLogger.Log("PlayMovieInWindow: Create Graph");

        // Add the Enhanced Video Renderer by CLSID. If this fails (null) we bail out
        // to the else-branch at the bottom and show an error dialog.
        this.evrRenderer = FilterGraphTools.AddFilterFromClsid(this.graphBuilder, new Guid("{FA10746C-9B63-4B6C-BC49-FC300EA5F256}"), "EVR");
        if (evrRenderer != null)
        {
            FileLogger.Log("PlayMovieInWindow: Add EVR");
            SetupEvrDisplay();
            //#if DEBUG
            // Optionally publish the graph in the Running Object Table so it can be
            // inspected with GraphEdit/GraphStudio.
            if (ps.PublishGraph)
                rot = new DsROTEntry(this.graphBuilder);
            //#endif

            // Install a site on the graph builder so FilterBlocker can veto filters
            // during the automatic Render() below.
            IObjectWithSite grfSite = graphBuilder as IObjectWithSite;
            if (grfSite != null)
                grfSite.SetSite(new FilterBlocker(filename));

            string fileExt = Path.GetExtension(filename).ToLower();

            // Pre-add user-preferred decoders for this extension. Each entry is
            // "ext;filter;filter;..." where a filter is either a CLSID (GUID form)
            // or a registered filter name. Adding them up-front biases Intelligent
            // Connect toward these filters when pins are rendered later.
            if (ps.PreferredDecoders != null)
            {
                foreach (string pa in ps.PreferredDecoders)
                {
                    string[] pvA = pa.Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
                    if (pvA[0].ToLower() == fileExt)
                    {
                        for (int i = 1; i < pvA.Length; i++)
                        {
                            string strFilter = pvA[i].Trim();
                            IBaseFilter filter = null;
                            try
                            {
                                // GUID-shaped entries are added by CLSID, anything else by name.
                                if (Regex.IsMatch(strFilter, @"{?\w{8}-\w{4}-\w{4}-\w{4}-\w{12}}?"))
                                    filter = FilterGraphTools.AddFilterFromClsid(graphBuilder, new Guid(strFilter), strFilter);
                                else
                                    filter = FilterGraphTools.AddFilterByName(graphBuilder, FilterCategory.LegacyAmFilterCategory, strFilter);
                                if (filter != null)
                                {
                                    FileLogger.Log("Added {0} to the graph", strFilter);
                                }
                                else
                                    FileLogger.Log("{0} not added to the graph", strFilter);
                            }
                            finally
                            {
                                // The graph holds its own reference; release ours immediately.
                                if (filter != null)
                                    Marshal.ReleaseComObject(filter);
                                filter = null;
                            }
                        }
                    }
                }
            }

            // Have the graph builder construct the appropriate graph automatically
            //hr = this.graphBuilder.RenderFile(filename, null);

            // Pick the audio renderer: either the user-configured CLSID or the default
            // held in m_audioRendererClsid.
            if (ps.UseCustomAudioRenderer)
            {
                m_audioRendererClsid = new Guid(ps.CustomAudioRender);
            }
            audioRenderer = FilterGraphTools.AddFilterFromClsid(graphBuilder, m_audioRendererClsid, "Audio Renderer");

            //IAVSyncClock wtf = audioRenderer as IAVSyncClock;
            //double cap;
            //hr = wtf.GetBias(out cap);
            //IMPAudioSettings arSett = audioRenderer as IMPAudioSettings;
            //if (arSett != null)
            //{
            //    AC3Encoding ac3Mode;
            //    hr = arSett.GetAC3EncodingMode(out ac3Mode);
            //    SpeakerConfig sc;
            //    hr = arSett.GetSpeakerConfig(out sc);
            //    AUDCLNT_SHAREMODE sm;
            //    hr = arSett.GetWASAPIMode(out sm);
            //    bool em;
            //    hr = arSett.GetUseWASAPIEventMode(out em);
            //    /*DeviceDefinition[] */IntPtr dc;
            //    //int count;
            //    //hr = arSett.GetAvailableAudioDevices(out dc, out count);
            //    //DsError.ThrowExceptionForHR(hr);
            //    ////DeviceDefinition[] dd = new DeviceDefinition[count];
            //    //AudioDeviceDefinition dd = (AudioDeviceDefinition)Marshal.PtrToStructure(dc, typeof(AudioDeviceDefinition));
            //    //if (dc != null)
            //    //    Marshal.ReleaseComObject(dc);
            //    hr = arSett.SetAudioDeviceById(null);
            //    //arSett.SetSpeakerMatchOutput(true);
            //    arSett.SetUseWASAPIEventMode(true);
            //    arSett.SetUseFilters((int)MPARUseFilters.ALL);
            //    arSett.SetAllowBitStreaming(true);
            //    arSett.SetAC3EncodingMode(AC3Encoding.DISABLED);
            //    arSett.SetUseTimeStretching(false);
            //}

            // Configure the MP audio renderer, if the chosen renderer supports the
            // IMPAudioRendererConfig interface. The Get* calls appear to be purely
            // diagnostic (values read but only logged implicitly via hr); the Set*
            // calls apply the desired WASAPI/bitstreaming configuration.
            IMPAudioRendererConfig arSett = audioRenderer as IMPAudioRendererConfig;
            if (arSett != null)
            {
                int ac3Mode;
                hr = arSett.GetInt(MPARSetting.AC3_ENCODING, out ac3Mode);
                int sc;
                hr = arSett.GetInt(MPARSetting.SPEAKER_CONFIG, out sc);
                int sm;
                hr = arSett.GetInt(MPARSetting.WASAPI_MODE, out sm);
                bool em;
                hr = arSett.GetBool(MPARSetting.WASAPI_EVENT_DRIVEN, out em);
                // NOTE(review): dc is declared but never used here (its uses are
                // commented out below) — candidate for removal.
                /*DeviceDefinition[] */
                IntPtr dc;
                //int count;
                //hr = arSett.GetAvailableAudioDevices(out dc, out count);
                //DsError.ThrowExceptionForHR(hr);
                ////DeviceDefinition[] dd = new DeviceDefinition[count];
                //AudioDeviceDefinition dd = (AudioDeviceDefinition)Marshal.PtrToStructure(dc, typeof(AudioDeviceDefinition));
                //if (dc != null)
                //    Marshal.ReleaseComObject(dc);
                hr = arSett.SetString(MPARSetting.SETTING_AUDIO_DEVICE, ps.AudioPlaybackDevice);
                //arSett.SetSpeakerMatchOutput(true);
                arSett.SetBool(MPARSetting.WASAPI_EVENT_DRIVEN, true);
                arSett.SetInt(MPARSetting.USE_FILTERS, (int)MPARUseFilters.ALL);
                arSett.SetBool(MPARSetting.ALLOW_BITSTREAMING, true);
                arSett.SetInt(MPARSetting.AC3_ENCODING, (int)AC3Encoding.DISABLED);
                arSett.SetBool(MPARSetting.ENABLE_TIME_STRETCHING, false);
            }

            //try
            //{
            // Add the source filter for the file. On failure, fall back to loading it
            // through the WM ASF Reader (and keep its IWMReaderAdvanced2 for the
            // streaming-buffer wait further down).
            hr = graphBuilder.AddSourceFilter(filename, "Source", out sourceFilter);
            if (hr < 0)
            {
                //if it doesn't work before failing try to load it with the WMV reader
                sourceFilter = (IBaseFilter)new WMAsfReader();
                hr = graphBuilder.AddFilter(sourceFilter, "WM/ASF Reader");
                DsError.ThrowExceptionForHR(hr);
                hr = ((IFileSourceFilter)sourceFilter).Load(filename, null);
                DsError.ThrowExceptionForHR(hr);
                wmReader = sourceFilter as WindowsMediaLib.IWMReaderAdvanced2;
            }

            // Render every unconnected output pin of the source filter. The pin is
            // re-queried after each Render because connecting one pin can change the
            // set of unconnected pins; each pin reference is released in finally.
            IPin outPin = DsFindPin.ByConnectionStatus(sourceFilter, PinConnectedStatus.Unconnected, 0);
            while (outPin != null)
            {
                try
                {
                    hr = graphBuilder.Render(outPin);
                    DsError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    if (outPin != null)
                        Marshal.ReleaseComObject(outPin);
                    outPin = null;
                }
                outPin = DsFindPin.ByConnectionStatus(sourceFilter, PinConnectedStatus.Unconnected, 0);
            }

            // Optional WMA multichannel fix: the WMAudio Decoder DMO only outputs
            // multichannel audio when its "_HIRESOUTPUT" property is set before its
            // output pin is connected. If it is currently false we must disconnect
            // the decoder's output, tear down the downstream audio chain, set the
            // property, and re-render.
            if (ps.MultiChannelWMA)
            {
                FileLogger.Log("Set multichannel mode for WMA");
                IBaseFilter wmaDec = FilterGraphTools.FindFilterByName(graphBuilder, "WMAudio Decoder DMO");
                if (wmaDec != null)
                {
                    try
                    {
                        //http://msdn.microsoft.com/en-us/library/aa390550(VS.85).aspx
                        IPropertyBag bag = wmaDec as IPropertyBag;
                        if (bag != null)
                        {
                            object pVar;
                            hr = bag.Read("_HIRESOUTPUT", out pVar, null);
                            DsError.ThrowExceptionForHR(hr);
                            bool bVar = (bool)pVar;
                            FileLogger.Log("_HIRESOUTPUT = {0}", bVar);
                            if (!bVar)
                            {
                                IPin wmaOut = DsFindPin.ByDirection(wmaDec, PinDirection.Output, 0);
                                IPin cPin = null;
                                try
                                {
                                    hr = wmaOut.ConnectedTo(out cPin);
                                    DsError.ThrowExceptionForHR(hr);
                                    if (cPin != null) //cpin should never be null at this point, but lets be safe
                                    {
                                        hr = wmaOut.Disconnect();
                                        DsError.ThrowExceptionForHR(hr);

                                        // Walk the downstream chain starting at the filter on the
                                        // connected pin, remembering each filter's CLSID (except the
                                        // DirectSound renderer) and removing it from the graph, so the
                                        // same chain can be re-added and re-rendered below.
                                        List<Guid> oldFilters = new List<Guid>();
                                        IBaseFilter oFilt = FilterGraphTools.GetFilterFromPin(cPin);
                                        try
                                        {
                                            while (oFilt != null)
                                            {
                                                IBaseFilter cFilter = null;
                                                try
                                                {
                                                    Guid clsid;
                                                    hr = oFilt.GetClassID(out clsid);
                                                    DsError.ThrowExceptionForHR(hr);
                                                    if (clsid != DSOUND_RENDERER)
                                                    {
                                                        oldFilters.Add(clsid);
                                                        // Advance to the next filter downstream before
                                                        // removing the current one.
                                                        cFilter = FilterGraphTools.GetConnectedFilter(oFilt, PinDirection.Output, 0);
                                                    }
                                                    hr = graphBuilder.RemoveFilter(oFilt);
                                                    DsError.ThrowExceptionForHR(hr);
                                                }
                                                finally
                                                {
                                                    if (oFilt != null)
                                                        Marshal.ReleaseComObject(oFilt);
                                                    oFilt = null;
                                                }
                                                oFilt = cFilter;
                                            }
                                        }
                                        finally
                                        {
                                            if (oFilt != null)
                                                Marshal.ReleaseComObject(oFilt);
                                            oFilt = null;
                                        }

                                        // Re-add the remembered filters (by CLSID) so the re-render
                                        // below can reuse them; release our reference right away.
                                        foreach (Guid addFilt in oldFilters)
                                        {
                                            IBaseFilter addMe = FilterGraphTools.AddFilterFromClsid(graphBuilder, addFilt, addFilt.ToString());
                                            if (addMe != null)
                                                Marshal.ReleaseComObject(addMe);
                                        }
                                    }

                                    // Now that the pin is disconnected, enable hi-res (multichannel)
                                    // output and rebuild the audio chain from the decoder's output.
                                    pVar = true;
                                    hr = bag.Write("_HIRESOUTPUT", ref pVar);
                                    DsError.ThrowExceptionForHR(hr);
                                    hr = graphBuilder.Render(wmaOut);
                                    DsError.ThrowExceptionForHR(hr);
                                }
                                finally
                                {
                                    if (wmaOut != null)
                                        Marshal.ReleaseComObject(wmaOut);
                                    if (cPin != null)
                                        Marshal.ReleaseComObject(cPin);
                                }
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        // Best effort: a failure here leaves the (working) stereo graph intact.
                        FileLogger.Log("Error setting multichannel mode for WMA: {0}", ex.Message);
                    }
                    finally
                    {
                        // Drain every reference we accumulated on the decoder.
                        while (Marshal.ReleaseComObject(wmaDec) > 0) ;
                    }
                }
            }
            //}
            //finally
            //{
            //    if (sourceFilter != null)
            //        Marshal.ReleaseComObject(sourceFilter);
            //}

            // Optional DXVA tweak for the WMVideo Decoder DMO. NOTE(review): the actual
            // property write is commented out, so this block currently only creates and
            // discards a PropVariant — it has no effect beyond logging.
            if (ps.DXVAWMV)
            {
                FileLogger.Log("Set DXVA for WMV");
                IBaseFilter wmvDec = FilterGraphTools.FindFilterByName(graphBuilder, "WMVideo Decoder DMO");
                if (wmvDec != null)
                {
                    try
                    {
                        MediaFoundation.Misc.IPropertyStore config = wmvDec as MediaFoundation.Misc.IPropertyStore;
                        if (config != null)
                        {
                            MediaFoundation.Misc.PropVariant pv = new MediaFoundation.Misc.PropVariant();
                            //config.GetValue(MediaFoundation.Misc.WMVConst.MFPKEY_DXVA_ENABLED, pv);
                        }
                    }
                    catch (Exception ex)
                    {
                        FileLogger.Log("Error setting DXVA mode for WMV: {0}", ex.Message);
                    }
                    finally
                    {
                        while (Marshal.ReleaseComObject(wmvDec) > 0) ;
                    }
                }
            }

            SetEvrVideoMode();

            // QueryInterface for DirectShow interfaces
            this.mediaControl = (IMediaControl)this.graphBuilder;
            this.mediaEventEx = (IMediaEventEx)this.graphBuilder;
            this.mediaSeeking = (IMediaSeeking)this.graphBuilder;
            this.mediaPosition = (IMediaPosition)this.graphBuilder;

            // Query for video interfaces, which may not be relevant for audio files
            //this.videoWindow = this.graphBuilder as IVideoWindow;
            //this.basicVideo = this.graphBuilder as IBasicVideo;

            // Query for audio interfaces, which may not be relevant for video-only files
            this.basicAudio = this.graphBuilder as IBasicAudio;

            // Is this an audio-only file (no video component)?
            CheckVisibility();

            // Have the graph signal event via window callbacks for performance
            hr = this.mediaEventEx.SetNotifyWindow(this.Handle, WM.GRAPH_NOTIFY, IntPtr.Zero);
            DsError.ThrowExceptionForHR(hr);

            if (!this.isAudioOnly)
            {
                // Setup the video window
                //hr = this.videoWindow.put_Owner(this.Handle);
                //DsError.ThrowExceptionForHR(hr);
                //this.evrDisplay.SetVideoWindow(this.Handle);
                //hr = this.videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings | WindowStyle.ClipChildren);
                //DsError.ThrowExceptionForHR(hr);
                hr = InitVideoWindow();//1, 1);
                DsError.ThrowExceptionForHR(hr);
                GetFrameStepInterface();
            }
            else
            {
                // Initialize the default player size and enable playback menu items
                hr = InitPlayerWindow();
                DsError.ThrowExceptionForHR(hr);
                EnablePlaybackMenu(true, MediaType.Audio);
            }

            // Complete window initialization
            //CheckSizeMenu(menuFileSizeNormal);
            //this.isFullScreen = false;
            this.currentPlaybackRate = 1.0;
            UpdateMainTitle();
            this.Activate();

            //pre-roll the graph
            hr = this.mediaControl.Pause();
            DsError.ThrowExceptionForHR(hr);

            // For streamed WM content, poll the reader until buffering reports 100%.
            // NOTE(review): sleepFor is computed (presumably pcnsBuffering is in some
            // sub-millisecond unit — confirm) but never used; the loop always sleeps
            // a fixed 100 ms per iteration. This blocks the calling (UI?) thread while
            // buffering — verify that is acceptable.
            if (wmReader != null)
            {
                WindowsMediaLib.PlayMode pMode;
                hr = wmReader.GetPlayMode(out pMode);
                DsError.ThrowExceptionForHR(hr);
                if (pMode == WindowsMediaLib.PlayMode.Streaming)
                {
                    int pdwPercent = 0;
                    long pcnsBuffering;
                    while (pdwPercent < 100)
                    {
                        hr = wmReader.GetBufferProgress(out pdwPercent, out pcnsBuffering);
                        DsError.ThrowExceptionForHR(hr);
                        if (pdwPercent >= 100)
                            break;
                        int sleepFor = Convert.ToInt32(pcnsBuffering / 1000);
                        Thread.Sleep(100);
                    }
                }
            }

            // Run the graph to play the media file
            hr = this.mediaControl.Run();
            DsError.ThrowExceptionForHR(hr);

            // (Re)create the watcher for the commercial-skip data file (DTB XML or EDL,
            // depending on settings), load any existing data now, and reload on change.
            if (commWatcher != null)
                commWatcher.Dispose();
            string commPath = string.Empty;
            if (ps.UseDtbXml)
            {
                commWatcher = new FileSystemWatcher(Commercials.XmlDirectory, Commercials.GetXmlFilename(filename));
                commPath = Path.Combine(Commercials.XmlDirectory, Commercials.GetXmlFilename(filename));
            }
            else
            {
                commWatcher = new FileSystemWatcher(Path.GetDirectoryName(filename), Commercials.GetEdlFilename(filename));
                commPath = Path.Combine(Path.GetDirectoryName(filename), Commercials.GetEdlFilename(filename));
            }
            ReadComm(commPath);
            commWatcher.Changed += new FileSystemEventHandler(commWatcher_Changed);
            commWatcher.Created += new FileSystemEventHandler(commWatcher_Changed);
            //commWatcher.NotifyFilter = NotifyFilters.LastWrite | NotifyFilters.Size;
            commWatcher.EnableRaisingEvents = true;

            // Jump to a previously saved position, if one exists.
            MoveToBookmark();

            this.currentState = PlayState.Running;
            if (isFullScreen)
                tmMouseMove.Enabled = true;
        }
        else
        {
            // EVR could not be added — playback cannot proceed.
            //MessageBox.Show("EVR cannot be loaded on this PC");
            using (EPDialog ed = new EPDialog())
                ed.ShowDialog("Error", "The Enhanced Video Renderer cannot be loaded", 20, this);
        }
    }
    finally
    {
        //if (wmReader != null)
        //    Marshal.ReleaseComObject(wmReader);
        // The graph keeps the source filter alive; drain our local references.
        if (sourceFilter != null)
            while (Marshal.ReleaseComObject(sourceFilter) > 0) ;
    }
}