/// <summary>
/// Builds a DirectShow playback graph for the given media file: creates the
/// filter graph, queries its control/seeking/position interfaces, adds a
/// VMR9 renderer in windowless mode and renders the file. On failure the
/// partially built graph is torn down and an error dialog is shown.
/// </summary>
/// <param name="fileName">Path of the media file to render.</param>
private void BuildGraph(string fileName)
{
    int hr = 0;
    try
    {
        // The FilterGraph object exposes all four control interfaces we need.
        graphBuilder = (IFilterGraph2)new FilterGraph();
        mediaControl = (IMediaControl)graphBuilder;
        mediaSeeking = (IMediaSeeking)graphBuilder;
        mediaPosition = (IMediaPosition)graphBuilder;

        // VMR9 must be added and configured (windowless) before RenderFile
        // so the video stream connects to it.
        vmr9 = (IBaseFilter)new VideoMixingRenderer9();
        ConfigureVMR9InWindowlessMode();
        hr = graphBuilder.AddFilter(vmr9, "Video Mixing Renderer 9");
        DsError.ThrowExceptionForHR(hr);

        hr = graphBuilder.RenderFile(fileName, null);
        DsError.ThrowExceptionForHR(hr);
    }
    catch (Exception e)
    {
        // Release any partially built graph before reporting the failure.
        CloseInterfaces();
        // Fix: corrected misspelling "occured" -> "occurred" in the message.
        MessageBox.Show("An error occurred during the graph building : \r\n\r\n" + e.Message,
                        "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Demo entry point: builds a playback graph for a hard-coded file
/// (c:\test.mp3), prints the name and vendor info of every filter DirectShow
/// pulled into the graph, then waits for a key press.
/// NOTE(review): the path is hard-coded, COM objects are never released and
/// the empty catch silently ignores QueryVendorInfo failures — acceptable for
/// a throwaway sample only.
/// </summary>
public static void Run()
{
    var fg = FilterGraph.Create();
    var mc = (IMediaControl)fg;  // not used below; kept alive with the graph
    var me = (IMediaEvent)fg;    // not used below; kept alive with the graph
    fg.RenderFile("c:\\test.mp3", IntPtr.Zero);
    IEnumFilters ief;
    var filters = new IBaseFilter[8];  // assumes at most 8 filters — TODO confirm
    fg.EnumFilters(out ief);
    int fetched;
    ief.Next(8, filters, out fetched);
    for (int i = 0; i < fetched; i++)
    {
        var ibf = filters[i];
        FilterInfo fi;
        ibf.QueryFilterInfo(out fi);
        string vendorInfo = "";
        // Not every filter provides vendor info; ignore failures (best effort).
        try
        {
            ibf.QueryVendorInfo(out vendorInfo);
        }
        catch (Exception) { }
        Console.WriteLine(fi.Name + " " + vendorInfo);
    }
    Console.ReadLine();
}
/// <summary>
/// Creates the object that implements the IQuality interface.
/// Probes the supplied filters for quality-control interfaces in order of
/// preference: ICodecAPI, then IVideoEncoder, then the deprecated
/// IEncoderAPI. Returns null when none of them is available.
/// </summary>
public static IQuality createQualityControl(Configuration configuration, IBaseFilter filterVideoEncoder,
                                            IBaseFilter filterCapture, IBaseFilter filterMultiplexer,
                                            IBaseFilter filterVideoCompressor)
{
    // Preferred: the modern ICodecAPI interface.
    ICodecAPI codec = checkCodecAPI(filterVideoEncoder, filterCapture, filterMultiplexer, filterVideoCompressor);
    if (codec != null)
        return new CodecAPIControl(configuration, codec);

    // Next best: IVideoEncoder.
    IVideoEncoder encoder = checkVideoEncoder(filterVideoEncoder, filterCapture, filterMultiplexer,
                                              filterVideoCompressor);
    if (encoder != null)
        return new VideoEncoderControl(configuration, encoder);

    // Last resort: the deprecated IEncoderAPI.
#pragma warning disable 618,612
    IEncoderAPI legacy = checkEncoderAPI(filterVideoEncoder, filterCapture, filterMultiplexer,
                                         filterVideoCompressor);
    if (legacy != null)
        return new EncoderAPIControl(configuration, legacy);
#pragma warning restore 618,612

    return null;
}
#pragma warning disable 618,612
/// <summary>
/// Returns the first supplied filter that exposes the deprecated IEncoderAPI
/// interface, probing in the order: video encoder, capture, multiplexer,
/// video compressor. Null when none of them implements it.
/// </summary>
private static IEncoderAPI checkEncoderAPI(IBaseFilter filterVideoEncoder, IBaseFilter filterCapture,
                                           IBaseFilter filterMultiplexer, IBaseFilter filterVideoCompressor)
{
    // "as" on a null reference yields null, so the original's explicit null
    // checks collapse into a null-coalescing cascade with identical results.
    return (filterVideoEncoder as IEncoderAPI)
           ?? (filterCapture as IEncoderAPI)
           ?? (filterMultiplexer as IEncoderAPI)
           ?? (filterVideoCompressor as IEncoderAPI);
}
/// <summary>
/// Walks every filter in the graph and renders any unconnected output pin
/// (typically the audio stream left over after video rendering), hooking up
/// the sample grabber after each successful render.
/// </summary>
protected virtual void SetupAudio()
{
    int hr;
    IEnumFilters enumFilters;
    hr = _graph.EnumFilters(out enumFilters);
    DsError.ThrowExceptionForHR(hr);
    try
    {
        IBaseFilter[] filters = new IBaseFilter[1];
        // IntPtr.Zero: we do not need the fetched count back (the original
        // passed an uninitialized IntPtr, which is the same zero value).
        while (enumFilters.Next(1, filters, IntPtr.Zero) == 0)
        {
            IBaseFilter filter = filters[0];
            IPin unconnectedPin = DsFindPin.ByConnectionStatus(filter, PinConnectedStatus.Unconnected, 0);
            if (unconnectedPin != null)
            {
                try
                {
                    PinDirection direction;
                    hr = unconnectedPin.QueryDirection(out direction);
                    DsError.ThrowExceptionForHR(hr);
                    if (direction == PinDirection.Output)
                    {
                        hr = _graph.Render(unconnectedPin);
                        DsError.ThrowExceptionForHR(hr);
                        SetupSampleGrabber();
                    }
                }
                finally
                {
                    // Fix: pins returned by DsFindPin are AddRef'ed and were
                    // previously leaked.
                    Marshal.ReleaseComObject(unconnectedPin);
                }
            }
            // Fix: release the enumerated filter reference (the graph keeps
            // its own reference, so the filter itself stays alive).
            Marshal.ReleaseComObject(filter);
        }
    }
    finally
    {
        // Fix: the filter enumerator was previously leaked.
        Marshal.ReleaseComObject(enumFilters);
    }
}
/// <summary>
/// Connects a compatible output pin of <paramref name="up"/> to an input pin
/// of <paramref name="down"/>, trying every output/input pin pair in order
/// until one connects.
/// </summary>
/// <returns>0 when a connection was made, -1 when no pin pair could connect.</returns>
public static int ConnectFilter(IGraphBuilder graph, IBaseFilter up, IBaseFilter down)
{
    IPin pinSrc = null;
    int i = 0;
    while ((pinSrc = DsFindPin.ByDirection(up, PinDirection.Output, i++)) != null)
    {
        IPin pinDest = null;
        int j = 0;
        while ((pinDest = DsFindPin.ByDirection(down, PinDirection.Input, j++)) != null)
        {
            try
            {
                ConnectFilters(graph, pinSrc, pinDest, true);
            }
            catch (Exception)  // Fix: exception variable was unused.
            {
                // This pair did not connect; release the input pin and try the next one.
                Marshal.FinalReleaseComObject(pinDest);
                continue;
            }
            // Success. (Fix: original comment here was mojibake — garbled
            // Chinese for "success".)
            Marshal.FinalReleaseComObject(pinSrc);
            Marshal.FinalReleaseComObject(pinDest);
            return 0;
        }
        Marshal.FinalReleaseComObject(pinSrc);
    }
    return -1;
}
/// <summary>
/// Initializes a new instance of the <see cref="GenericATSC"/> class.
/// Detects QAM support by querying Set-capability of the modulation-type
/// property on the demodulator property set of the tuner's
/// "MPEG2 Transport" pin.
/// </summary>
/// <param name="tunerFilter">The tuner filter.</param>
public GenericATSC(IBaseFilter tunerFilter)
{
    IPin pin = DsFindPin.ByName(tunerFilter, "MPEG2 Transport");
    // NOTE(review): the pin reference is retained via _propertySet when the
    // cast succeeds, but is never explicitly released on the failure paths —
    // verify against the class's Dispose logic.
    if (pin != null)
    {
        _propertySet = pin as IKsPropertySet;
        if (_propertySet != null)
        {
            KSPropertySupport supported;
            _propertySet.QuerySupported(guidBdaDigitalDemodulator, (int)BdaDigitalModulator.MODULATION_TYPE,
                                        out supported);
            if ((supported & KSPropertySupport.Set) != 0)
            {
                Log.Log.Debug("GenericATSC: QAM capable card found!");
                _isGenericATSC = true;
                // Unmanaged scratch buffers reused by later property-set calls.
                _tempValue = Marshal.AllocCoTaskMem(1024);
                _tempInstance = Marshal.AllocCoTaskMem(1024);
            }
            else
            {
                Log.Log.Debug("GenericATSC: QAM card NOT found!");
                _isGenericATSC = false;
            }
        }
    }
    else
        Log.Log.Info("GenericATSC: tuner pin not found!");
}
/// <summary>
/// Applies the currently selected video resolution to the capture device's
/// output pin via IAMStreamConfig.SetFormat.
/// </summary>
/// <returns>The HRESULT of SetFormat, or -99999 when the selected resolution
/// index is out of range.</returns>
public int Set(IBaseFilter pCaptureDevice, string strCaptureVideoOut)
{
    // Guard clause: bail out when the selected index has no matching entry.
    if (_class.Var.VideoResolutionIndex >= _class.Resolution.List.Count)
    {
        _class.Debug.Log("[0] [ERR] cant find resolution " + _class.Var.VideoResolutionIndex);
        return -99999;
    }

    _class.Debug.Log("[3] set resolution " + _class.Resolution.List[_class.Var.VideoResolutionIndex]);
    _class.Graph.Resolution = _class.Resolution.Type[_class.Var.VideoResolutionIndex];

    // Ask the capture pin to switch to the chosen format.
    var streamConfig = (IAMStreamConfig)_class.GraphPin.GetPin(pCaptureDevice, strCaptureVideoOut);
    var hr = streamConfig.SetFormat(_class.Resolution.Type[_class.Var.VideoResolutionIndex]);

    if (hr == 0)
    {
        _class.Debug.Log("[OK] Set resolution " + _class.Resolution.List[_class.Var.VideoResolutionIndex]);
        _class.Var.CurrentResolution = _class.Var.VideoResolutionIndex;
        _class.Var.CurrentResByName = _class.Resolution.List[_class.Var.VideoResolutionIndex];
        // Strip a trailing "[...]" annotation from the display name, if any.
        if (_class.Var.CurrentResByName.IndexOf('[') > -1)
            _class.Var.CurrentResByName =
                _class.Var.CurrentResByName.Substring(0, _class.Var.CurrentResByName.IndexOf('['));
    }
    else
    {
        _class.Debug.Log("[NG] Can't set resolution " + _class.Resolution.List[_class.Var.VideoResolutionIndex]);
        _class.Debug.Log("-> " + DsError.GetErrorText(hr));
    }
    return hr;
}
/// <summary>
/// Initializes a new instance of the <see cref="ViXSATSC"/> class.
/// Detects a ViXS card by querying Set-capability of the modulation-type
/// property on the ViXS tuner-extension property set.
/// </summary>
/// <param name="tunerFilter">The tuner filter.</param>
public ViXSATSC(IBaseFilter tunerFilter)
{
    IPin pin = DsFindPin.ByName(tunerFilter, "MPEG2 Transport");
    if (pin != null)
    {
        // NOTE(review): the property set is queried on the tuner FILTER, not
        // on the pin just found — the pin only gates the probe and is never
        // released. Sibling classes (e.g. GenericATSC) query the pin itself;
        // confirm which is intended for ViXS hardware.
        _propertySet = tunerFilter as IKsPropertySet;
        if (_propertySet != null)
        {
            KSPropertySupport supported;
            _propertySet.QuerySupported(guidViXSTunerExtention, (int)BdaDigitalModulator.MODULATION_TYPE,
                                        out supported);
            if ((supported & KSPropertySupport.Set) != 0)
            {
                Log.Log.Debug("ViXS ATSC: DVB-S card found!");
                // Unmanaged scratch buffer reused by later property-set calls.
                _tempValue = Marshal.AllocCoTaskMem(1024);
                _isViXSATSC = true;
            }
            else
            {
                Log.Log.Debug("ViXS ATSC: card NOT found!");
                _isViXSATSC = false;
            }
        }
    }
    else
        Log.Log.Info("ViXS ATSC: could not find MPEG2 Transport pin!");
}
/// <summary>
/// Get filter's pin.
/// </summary>
///
/// <param name="filter">Filter to get pin of.</param>
/// <param name="dir">Pin's direction.</param>
/// <param name="num">Pin's number (zero based, counted per direction).</param>
///
/// <returns>Returns filter's pin or null when no such pin exists. The caller
/// is responsible for releasing the returned pin.</returns>
///
public static IPin GetPin( IBaseFilter filter, PinDirection dir, int num )
{
    IPin[] pin = new IPin[1];
    IEnumPins pinsEnum = null;

    // enum filter pins
    if ( filter.EnumPins( out pinsEnum ) == 0 )
    {
        try
        {
            PinDirection pinDir;
            int n;

            // get next pin
            while ( pinsEnum.Next( 1, pin, out n ) == 0 )
            {
                // query pin's direction
                pin[0].QueryDirection( out pinDir );

                if ( pinDir == dir )
                {
                    // count down pins of the requested direction until the
                    // requested index is reached
                    if ( num == 0 )
                        return pin[0];
                    num--;
                }

                Marshal.ReleaseComObject( pin[0] );
                pin[0] = null;
            }
        }
        finally
        {
            // Fix: the pin enumerator was previously leaked on every call.
            Marshal.ReleaseComObject( pinsEnum );
        }
    }
    return null;
}
/// <summary>
/// Initializes the TBS DVB-S2 handler: bails out unless the tuner name
/// contains "TBS", then probes the BDA tuner-extension property set on the
/// tuner's first input pin for NBC (DVB-S2) parameter support.
/// </summary>
/// <param name="tunerFilter">The tuner's DirectShow filter.</param>
/// <param name="tuner">Tuner description used for the name check.</param>
internal TBSDVBS2Handler(IBaseFilter tunerFilter, Tuner tuner)
{
    if (!tuner.Name.ToUpperInvariant().Contains("TBS"))
        return;
    IPin pin = DsFindPin.ByDirection(tunerFilter, PinDirection.Input, 0);
    // NOTE(review): the pin is retained through propertySet when the cast
    // succeeds but never explicitly released otherwise — verify disposal.
    if (pin != null)
    {
        propertySet = pin as IKsPropertySet;
        if (propertySet != null)
        {
            KSPropertySupport supported;
            reply = propertySet.QuerySupported(bdaTunerExtensionProperties,
                                               (int)BdaTunerExtension.KSPROPERTY_BDA_NBC_PARAMS, out supported);
            if (reply == 0)
            {
                // Capable when either Get or Set of the NBC parameters works.
                dvbs2Capable = (supported & KSPropertySupport.Get) == KSPropertySupport.Get ||
                               (supported & KSPropertySupport.Set) == KSPropertySupport.Set;
                if (dvbs2Capable)
                {
                    // Prefer Set when supported; otherwise fall back to Get.
                    useSet = (supported & KSPropertySupport.Set) == KSPropertySupport.Set;
                    useGet = !useSet;
                }
            }
        }
    }
}
/// <summary>
/// Inserts a "Smart Tee" filter into the capture graph so a preview leg can
/// be split off, and connects the current upstream device to its input.
/// Always writes the tee's pin names into strPreviewIn/strPreviewOut; on a
/// successful connection the strDevice/strPinOut/pRen ref parameters are
/// rebased so subsequent graph building continues from the tee's "Preview"
/// output. On failure those three are left untouched (no preview).
/// </summary>
public void createSmartTee(ref string strPreviewIn, ref string strPreviewOut, ref string strDevice,
                           ref string strPinOut, ref IBaseFilter pRen)
{
    int hr = 0;
    _class.Debug.Log("");
    _class.Debug.Log("Creating SmartTee Preview Filter");
    IBaseFilter pSmartTee2 = (IBaseFilter)new DirectShowLib.SmartTee();
    hr = _class.Graph.CaptureGraph.AddFilter(pSmartTee2, "Smart Tee");
    _class.Debug.Log(DsError.GetErrorText(hr));
    _class.Debug.Log("");
    // Log the tee's pins and pick its "Input" / "Preview" pin names.
    _class.GraphPin.ListPin(pSmartTee2);
    strPreviewIn = _class.GraphPin.AssumePinIn("Input");
    strPreviewOut = _class.GraphPin.AssumePinOut("Preview");
    _class.Debug.Log("");
    _class.Debug.Log("*** Connect " + strDevice + " (" + strPinOut + ") to SmartTee Preview Filter (" +
                     strPreviewIn + ")");
    hr = _class.Graph.CaptureGraph.ConnectDirect(_class.GraphPin.GetPin(pRen, strPinOut),
                                                 _class.GraphPin.GetPin(pSmartTee2, strPreviewIn), null);
    if (hr == 0)
    {
        _class.Debug.Log("[OK] Connected " + strDevice + " to SmartTee Preview Filter");
        // Rebase the graph-building state onto the tee.
        strDevice = "SmartTee Preview Filter";
        pRen = pSmartTee2;
        strPinOut = strPreviewOut;
    }
    else
    {
        _class.Debug.Log("[NG] cant Connect " + strDevice +
                         " to Preview Filter. Attempting to continue without preview");
        _class.Debug.Log("-> " + DsError.GetErrorText(hr));
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="Hauppauge"/> class.
/// Detects a Hauppauge DVB-S card by querying Set-capability of the DiSEqC
/// property on the BDA tuner-extension property set of the tuner's first
/// input pin.
/// </summary>
/// <param name="tunerFilter">The tuner filter.</param>
public Hauppauge(IBaseFilter tunerFilter)
{
    IPin pin = DsFindPin.ByDirection(tunerFilter, PinDirection.Input, 0);
    if (pin != null)
    {
        _propertySet = pin as IKsPropertySet;
        if (_propertySet != null)
        {
            KSPropertySupport supported;
            _propertySet.QuerySupported(BdaTunerExtentionProperties, (int)BdaTunerExtension.KSPROPERTY_BDA_DISEQC,
                                        out supported);
            if ((supported & KSPropertySupport.Set) != 0)
            {
                Log.Log.Debug("Hauppauge: DVB-S card found!");
                _isHauppauge = true;
                // Unmanaged scratch buffers for later DiSEqC property calls;
                // presumably freed in Dispose() — TODO confirm.
                _ptrDiseqc = Marshal.AllocCoTaskMem(1024);
                _tempValue = Marshal.AllocCoTaskMem(1024);
                _tempInstance = Marshal.AllocCoTaskMem(1024);
            }
            else
            {
                Log.Log.Debug("Hauppauge: DVB-S card NOT found!");
                _isHauppauge = false;
                // Release anything acquired so far.
                Dispose();
            }
        }
    }
    else
        Log.Log.Info("Hauppauge: tuner pin not found!");
}
/// <summary>
/// Stores the capture filter and, when one was supplied, probes the Twinhan
/// proprietary interface to detect DVB-S2 capability.
/// </summary>
public TwinhanDVBS2Handler(IBaseFilter filter)
{
    captureFilter = filter;
    if (filter == null)
        return;
    dvbs2Capable = checkTwinhanInterface();
}
/// <summary>
/// Adds an "AVI Decompressor" filter to the capture graph and connects the
/// current upstream device to its input. Always writes the decompressor's
/// pin names into strAVIin/strAVIout; on a successful connection the
/// strDevice/strPinOut/pRen ref parameters are rebased so graph building
/// continues from the decompressor's output.
/// </summary>
public void Create(ref string strAVIin, ref string strAVIout, ref string strDevice, ref string strPinOut,
                   ref IBaseFilter pRen)
{
    var hr = 0;
    _class.Debug.Log("");
    _class.Debug.Log("Creating AVI renderer");
    var pAviDecompressor = (IBaseFilter) new AVIDec();
    hr = _class.Graph.CaptureGraph.AddFilter(pAviDecompressor, "AVI Decompressor");
    _class.Debug.Log("-> " + DsError.GetErrorText(hr));
    // Log the decompressor's pins and pick its "XForm" in/out pin names.
    _class.GraphPin.ListPin(pAviDecompressor);
    strAVIin = _class.GraphPin.AssumePinIn("XForm");
    strAVIout = _class.GraphPin.AssumePinOut("XForm");
    _class.Debug.Log("");
    _class.Debug.Log("*** Connect " + strDevice + " (" + strPinOut + ") to AVI Decompressor (" + strAVIin + ")");
    hr = _class.Graph.CaptureGraph.ConnectDirect(_class.GraphPin.GetPin(pRen, strPinOut),
                                                 _class.GraphPin.GetPin(pAviDecompressor, strAVIin), null);
    if (hr == 0)
    {
        _class.Debug.Log("[OK] Connected " + strDevice + " to AVI Decompressor");
        // Rebase the graph-building state onto the decompressor.
        pRen = pAviDecompressor;
        strDevice = "AVI Decompressor";
        strPinOut = strAVIout;
    }
    else
    {
        _class.Debug.Log("[FAIL] Can't connected " + strDevice + " to AVI Decompressor. May interrupt operation");
    }
}
//Initializes the Hauppauge interfaces
/// <summary>
/// Constructor: Require the Hauppauge capture filter, and the deviceid for the card to be passed in
/// </summary>
public Hauppauge(IBaseFilter filter, string tuner)
{
    try
    {
        //Don't create the class if we don't have any filter;
        if (filter == null)
        {
            return;
        }
        //Load Library
        // NOTE(review): LoadLibrary may return IntPtr.Zero when the dll is
        // missing; the failure would only surface as an exception from the
        // delegate lookups below, swallowed by the outer catch — confirm intended.
        hauppaugelib = LoadLibrary("hauppauge.dll");
        //Get Proc addresses, and set the delegates for each function
        IntPtr procaddr = GetProcAddress(hauppaugelib, "Init");
        _Init = (Init)Marshal.GetDelegateForFunctionPointer(procaddr, typeof (Init));
        procaddr = GetProcAddress(hauppaugelib, "DeInit");
        _DeInit = (DeInit)Marshal.GetDelegateForFunctionPointer(procaddr, typeof (DeInit));
        procaddr = GetProcAddress(hauppaugelib, "IsHauppauge");
        _IsHauppauge = (IsHauppauge)Marshal.GetDelegateForFunctionPointer(procaddr, typeof (IsHauppauge));
        procaddr = GetProcAddress(hauppaugelib, "SetVidBitRate");
        _SetVidBitRate = (SetVidBitRate)Marshal.GetDelegateForFunctionPointer(procaddr, typeof (SetVidBitRate));
        procaddr = GetProcAddress(hauppaugelib, "GetVidBitRate");
        _GetVidBitRate = (GetVidBitRate)Marshal.GetDelegateForFunctionPointer(procaddr, typeof (GetVidBitRate));
        procaddr = GetProcAddress(hauppaugelib, "SetAudBitRate");
        _SetAudBitRate = (SetAudBitRate)Marshal.GetDelegateForFunctionPointer(procaddr, typeof (SetAudBitRate));
        procaddr = GetProcAddress(hauppaugelib, "GetAudBitRate");
        _GetAudBitRate = (GetAudBitRate)Marshal.GetDelegateForFunctionPointer(procaddr, typeof (GetAudBitRate));
        procaddr = GetProcAddress(hauppaugelib, "SetStreamType");
        _SetStreamType = (SetStreamType)Marshal.GetDelegateForFunctionPointer(procaddr, typeof (SetStreamType));
        procaddr = GetProcAddress(hauppaugelib, "GetStreamType");
        _GetStreamType = (GetStreamType)Marshal.GetDelegateForFunctionPointer(procaddr, typeof (GetStreamType));
        procaddr = GetProcAddress(hauppaugelib, "SetDNRFilter");
        _SetDNRFilter = (SetDNRFilter)Marshal.GetDelegateForFunctionPointer(procaddr, typeof (SetDNRFilter));
        //Hack
        //The following is strangely necessary when using delegates instead of P/Invoke - linked to MP using utf-8
        //Hack
        // NOTE(review): the tuner id is encoded as UTF-32 bytes but decoded as
        // UTF-16 ("Unicode") — the encodings intentionally do not match per the
        // "Hack" comments above; verify against the native Init() expectation.
        byte[] encodedstring = Encoding.UTF32.GetBytes(tuner);
        string card = Encoding.Unicode.GetString(encodedstring);
        hr = new HResult(_Init(filter, card));
        Log.Log.WriteFile("Hauppauge Quality Control Initializing " + hr.ToDXString());
    }
    catch (Exception ex)
    {
        Log.Log.WriteFile("Hauppauge Init failed " + ex.Message);
    }
}
/// <summary>
/// Connects the first output pin of the network provider filter to the
/// first input pin of the tuner filter.
/// </summary>
/// <returns>true when both pins were found and connected successfully.</returns>
private static bool ConnectFilter(IFilterGraph2 graphBuilder, IBaseFilter networkFilter, IBaseFilter tunerFilter)
{
    IPin pinOut = DsFindPin.ByDirection(networkFilter, PinDirection.Output, 0);
    IPin pinIn = DsFindPin.ByDirection(tunerFilter, PinDirection.Input, 0);
    try
    {
        // Fix: guard against missing pins — the original passed nulls straight
        // into Connect().
        if (pinOut == null || pinIn == null)
            return false;
        int hr = graphBuilder.Connect(pinOut, pinIn);
        return (hr == 0);
    }
    finally
    {
        // Fix: pins returned by DsFindPin are AddRef'ed and were previously leaked.
        if (pinOut != null)
            Marshal.ReleaseComObject(pinOut);
        if (pinIn != null)
            Marshal.ReleaseComObject(pinIn);
    }
}
/// <summary>
/// Creates a new description. (Translated from German.)
/// </summary>
/// <param name="direction">The transfer direction of the endpoint.</param>
/// <param name="name">The unique name of the endpoint.</param>
/// <param name="filter">The filter this endpoint belongs to.</param>
public PinInfo( PinDirection direction, string name, IBaseFilter filter )
{
    // Remember all
    // GetComInterfaceForObject AddRef's the filter and stores the raw
    // interface pointer; whoever owns this PinInfo must release it.
    Filter = Marshal.GetComInterfaceForObject( filter, typeof( IBaseFilter ) );
    Direction = direction;
    Name = name;
}
/// <summary>
/// Marks the tuner as DVB-S2 capable when it is recognised as a TechnoTrend
/// device; unknown devices leave the handler inactive.
/// </summary>
internal TechnoTrendDVBS2Handler(IBaseFilter tunerFilter)
{
    if (getDeviceType(tunerFilter) == deviceCategory.UNKNOWN)
        return;
    dvbs2Capable = true;
}
/// <summary>
/// Wraps a DirectShow base filter: captures its filter info and enumerates
/// the streams it exposes.
/// </summary>
public Filter(IBaseFilter filter)
{
    BaseFilter = filter;
    BaseFilter.QueryFilterInfo(out GetFilterInfo);
    Streams = new ArrayList();
    getStreams();
}
/// <summary>
/// Renders the timeline's audio to a WAV file, optionally through the given
/// audio compressor, then moves the renderer into the Initialized state.
/// </summary>
/// <param name="timeline">Timeline whose audio is rendered.</param>
/// <param name="outputFile">Destination WAV file path.</param>
/// <param name="audioCompressor">Optional compressor filter (may be null).</param>
/// <param name="mediaType">Optional format applied to the compressor.</param>
/// <param name="audioParticipants">Callbacks invoked during rendering.</param>
public WavFileRenderer(ITimeline timeline, string outputFile, IBaseFilter audioCompressor, AMMediaType mediaType,
                       ICallbackParticipant[] audioParticipants)
    : base(timeline)
{
    RenderToWavDest(outputFile, audioCompressor, mediaType, audioParticipants);
    ChangeState(RendererState.Initialized);
}
/// <summary>
/// Adds a filter to the underlying graph builder.
/// </summary>
/// <returns>The HRESULT from IGraphBuilder.AddFilter, or MS_E_HANDLE when
/// this wrapper has no valid graph.</returns>
public int AddFilter(string filterName, IBaseFilter pFilter)
{
    // Guard clause: no usable graph means we cannot add anything.
    if (!IsValid || _pGB == null)
        return MSStatus.MS_E_HANDLE;
    return _pGB.AddFilter(pFilter, filterName);
}
/// <summary>
/// Builds and runs the graph that renders the timeline's audio through an
/// optional compressor into a WAV writer and on to a file sink.
/// </summary>
/// <param name="outputFile">Destination WAV file path; must not be null.</param>
/// <param name="audioCompressor">Optional compressor filter; may be null.</param>
/// <param name="mediaType">Optional format applied to the compressor.</param>
/// <param name="audioParticipants">Callbacks invoked during rendering.</param>
/// <exception cref="SplicerException">No audio stream exists or the output
/// file name is null.</exception>
private void RenderToWavDest(
    string outputFile,
    IBaseFilter audioCompressor,
    AMMediaType mediaType,
    ICallbackParticipant[] audioParticipants)
{
    // Ownership of the compressor passes to the cleanup list immediately.
    if (audioCompressor != null) Cleanup.Add(audioCompressor);
    int hr;
    if (FirstAudioGroup == null)
    {
        throw new SplicerException(Resources.ErrorNoAudioStreamToRender);
    }
    if (outputFile == null)
    {
        throw new SplicerException(Resources.ErrorInvalidOutputFileName);
    }
    // Contains useful routines for creating the graph
    var graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    Cleanup.Add(graphBuilder);
    try
    {
        hr = graphBuilder.SetFiltergraph(Graph);
        DESError.ThrowExceptionForHR(hr);
        IBaseFilter wavDestFilter = StandardFilters.RenderWavDestination(Cleanup, Graph);
        IBaseFilter fileSink = StandardFilters.RenderFileDestination(Cleanup, Graph, outputFile);
        try
        {
            RenderGroups(graphBuilder, audioCompressor, null, wavDestFilter, audioParticipants, null);
            FilterGraphTools.ConnectFilters(Graph, wavDestFilter, fileSink, true);
            // if supplied, apply the media type to the filter
            if (mediaType != null)
            {
                FilterGraphTools.SetFilterFormat(mediaType, audioCompressor);
            }
            // presumably detaches the reference clock so rendering is not
            // throttled to real time — TODO confirm DisableClock semantics
            DisableClock();
        }
        finally
        {
            if (wavDestFilter != null) Marshal.ReleaseComObject(wavDestFilter);
            if (fileSink != null) Marshal.ReleaseComObject(fileSink);
        }
    }
    finally
    {
        Marshal.ReleaseComObject(graphBuilder);
    }
}
/// <summary>
/// Initialize a new instance of the TechnoTrendDiseqcHandler class.
/// Opens the TechnoTrend BDA API handle for recognised devices and records
/// whether the card can be driven.
/// </summary>
/// <param name="tunerFilter">tunerfilter</param>
public TechnoTrendDiseqcHandler(IBaseFilter tunerFilter)
{
    deviceCategory category = getDeviceType(tunerFilter);
    if (category == deviceCategory.UNKNOWN)
        return;
    // Fix: reuse the category determined above instead of probing the driver
    // a second time with another getDeviceType call.
    handle = bdaapiOpenHWIdx(category, getDeviceID(tunerFilter));
    // bdaapiOpenHWIdx signals failure with a handle value of -1.
    cardCapable = (handle.ToInt32() != -1);
}
///<summary>
/// WinTV CI control
///</summary>
///<param name="winTvUsbCIFilter">WinTV CI filter</param>
public WinTvCiModule(IBaseFilter winTvUsbCIFilter)
{
    _winTvUsbCIFilter = winTvUsbCIFilter;
    // Store the callback delegates in fields — presumably to keep them alive
    // for the native side so they are not garbage collected; confirm against
    // how the CI API registers them.
    cbOnAPDU = OnAPDU;
    cbOnStatus = OnStatus;
    cbOnCamInfo = OnCamInfo;
    cbOnCloseMMI = OnMMIClosed;
    MMI = new DVB_MMI_Handler("WinTvCI");
    // callbacks are set on first access
}
/// <summary>
/// Renders the timeline to an AVI file using the optional video and audio
/// compressors, then moves the renderer into the Initialized state.
/// </summary>
/// <param name="timeline">Timeline to render.</param>
/// <param name="outputFile">Destination AVI file path.</param>
/// <param name="videoCompressor">Optional video compressor filter.</param>
/// <param name="audioCompressor">Optional audio compressor filter.</param>
/// <param name="videoParticipants">Callbacks invoked for video samples.</param>
/// <param name="audioParticipants">Callbacks invoked for audio samples.</param>
public AviFileRenderer(ITimeline timeline, string outputFile, IBaseFilter videoCompressor,
                       IBaseFilter audioCompressor, ICallbackParticipant[] videoParticipants,
                       ICallbackParticipant[] audioParticipants)
    : base(timeline)
{
    RenderToAVI(outputFile, videoCompressor, audioCompressor, videoParticipants, audioParticipants);
    ChangeState(RendererState.Initialized);
}
/// <summary>
/// Captures a selectable video output format for a pin: the owning filter,
/// the pin itself, the format's index and the frame geometry/rate taken from
/// the given VideoInfoHeader.
/// </summary>
public VideoOutPinConfiguration( IBaseFilter filter, IPin pin, int format_id, VideoInfoHeader header )
{
    this.filter = filter;
    this.pin = pin;
    this.width = header.BmiHeader.Width;
    this.height = header.BmiHeader.Height;
    // AvgTimePerFrame is a frame duration; 10,000,000 / duration = fps
    // (100ns units per DirectShow convention — TODO confirm).
    // NOTE(review): no guard for AvgTimePerFrame == 0 — a format without a
    // frame time would throw DivideByZeroException here.
    this.fps = 10000000 / header.AvgTimePerFrame;
    this.format_id = format_id;
}
/// <summary>
/// Finds a pin on the filter matching the requested direction and media
/// type, trying three strategies in order: by pin category, by preferred
/// name, and finally by plain enumeration over all pins.
/// </summary>
/// <returns>The matching pin (caller must release it), or null.</returns>
public static IPin FindPin(IBaseFilter filter, PinDirection direction, Guid mediaType, Guid pinCategory,
                           string preferredName)
{
    // 1) Search by category, walking every pin of that category.
    if (Guid.Empty != pinCategory)
    {
        int idx = 0;
        do
        {
            IPin pinByCategory = DsFindPin.ByCategory(filter, pinCategory, idx);
            if (pinByCategory != null)
            {
                if (IsMatchingPin(pinByCategory, direction, mediaType))
                    return PrintInfoAndReturnPin(filter, pinByCategory, direction, mediaType, pinCategory,
                                                 "found by category");
                Marshal.ReleaseComObject(pinByCategory);
            }
            else
                break;
            idx++;
        } while (true);
    }
    // 2) Search by the preferred pin name.
    if (!string.IsNullOrEmpty(preferredName))
    {
        IPin pinByName = DsFindPin.ByName(filter, preferredName);
        if (pinByName != null)
        {
            if (IsMatchingPin(pinByName, direction, mediaType))
                return PrintInfoAndReturnPin(filter, pinByName, direction, mediaType, pinCategory, "found by name");
            // Fix: release only when a pin was actually returned. The original
            // released unconditionally and threw ArgumentNullException from
            // Marshal.ReleaseComObject(null) whenever no pin with the preferred
            // name existed.
            Marshal.ReleaseComObject(pinByName);
        }
    }
    // 3) Enumerate all pins and match by direction and media type.
    IEnumPins pinsEnum;
    IPin[] pins = new IPin[1];
    int hr = filter.EnumPins(out pinsEnum);
    DsError.ThrowExceptionForHR(hr);
    try
    {
        while (pinsEnum.Next(1, pins, IntPtr.Zero) == 0)
        {
            IPin pin = pins[0];
            if (pin != null)
            {
                if (IsMatchingPin(pin, direction, mediaType))
                    return PrintInfoAndReturnPin(filter, pin, direction, mediaType, pinCategory,
                                                 "found by direction and media type");
                Marshal.ReleaseComObject(pin);
            }
        }
    }
    finally
    {
        // Fix: the pin enumerator was previously leaked.
        Marshal.ReleaseComObject(pinsEnum);
    }
    return null;
}
/// <summary>
/// Initializes a new instance of the <see cref="AnalogSubChannel"/> class.
/// Wires the sub channel to the card, audio handler and file-writer filter,
/// and registers a channel with the recorder.
/// </summary>
internal AnalogSubChannel(TvCardAnalog card, int subchnnelId, TvAudio tvAudio, bool hasTeletext,
                          IBaseFilter mpFileWriter)
{
    _card = card;
    _hasTeletext = hasTeletext;
    _tvAudio = tvAudio;
    _mpFileWriter = mpFileWriter;
    _mpRecord = (IMPRecord)_mpFileWriter;
    // AddChannel writes an id into _subChannelId via ref...
    _mpRecord.AddChannel(ref _subChannelId);
    // ...which is immediately overwritten with the caller-supplied id.
    // NOTE(review): the id allocated by AddChannel is discarded — confirm the
    // recorder really keys channels by the caller-supplied id.
    _subChannelId = subchnnelId;
}
/// <summary> Initialize collection with property pages from existing graph. </summary>
internal PropertyPageCollection(
    ICaptureGraphBuilder2 graphBuilder,
    IBaseFilter videoDeviceFilter, IBaseFilter audioDeviceFilter,
    IBaseFilter videoCompressorFilter, IBaseFilter audioCompressorFilter,
    SourceCollection videoSources, SourceCollection audioSources)
{
    // All discovery is delegated to addFromGraph; this constructor only
    // forwards the graph and its filters/sources.
    addFromGraph(graphBuilder,
                 videoDeviceFilter, audioDeviceFilter,
                 videoCompressorFilter, audioCompressorFilter,
                 videoSources, audioSources);
}
/// <summary>
/// Initialize the collection with the sources exposed by the given device
/// filter in the capture graph.
/// </summary>
internal SourceCollection(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter, bool isVideoDevice)
{
    addFromGraph(graphBuilder, deviceFilter, isVideoDevice);
}
/// <summary>
/// Create a new filter graph and add filters (devices, compressors, misc),
/// but leave the filters unconnected. Call RenderGraph()
/// to connect the filters.
/// </summary>
void CreateGraph()
{
    //Skip if already created
    if ((int)_actualGraphState < (int)GraphState.Created)
    {
        // Make a new filter graph
        _graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Uuid.Clsid.FilterGraph, true));

        // Get the Capture Graph Builder
        var clsid = Uuid.Clsid.CaptureGraphBuilder2;
        var riid = typeof(ICaptureGraphBuilder2).GUID;
        _captureGraphBuilder = (ICaptureGraphBuilder2)Workaround.CreateDsInstance(ref clsid, ref riid);

        // Link the CaptureGraphBuilder to the filter graph
        var hr = _captureGraphBuilder.SetFiltergraph(_graphBuilder);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        var comType = Type.GetTypeFromCLSID(Uuid.Clsid.SampleGrabber);
        if (comType == null)
        {
            throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
        }
        var comObj = Activator.CreateInstance(comType);
        _sampGrabber = (ISampleGrabber)comObj;
        // The sample grabber is also a filter; keep its IBaseFilter view.
        _baseGrabFlt = (IBaseFilter)_sampGrabber;

        var media = new AMMediaType();

        // Get the video device and add it to the filter graph
        if (VideoDevice != null)
        {
            _videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);
            hr = _graphBuilder.AddFilter(_videoDeviceFilter, "Video Capture Device");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            // Ask the grabber for 32-bit RGB video frames.
            media.majorType = Uuid.MediaType.Video;
            media.subType = Uuid.MediaSubType.Rgb32; //RGB24;
            media.formatType = Uuid.FormatType.VideoInfo;
            media.temporalCompression = true; //New
            hr = _sampGrabber.SetMediaType(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            hr = _graphBuilder.AddFilter(_baseGrabFlt, "Grabber");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }

        // Retrieve the stream control interface for the video device
        // FindInterface will also add any required filters
        // (WDM devices in particular may need additional
        // upstream filters to function).
        // Try looking for an interleaved media type
        var cat = Uuid.PinCategory.Capture;
        var med = Uuid.MediaType.Interleaved;
        var iid = typeof(IAMStreamConfig).GUID;
        hr = _captureGraphBuilder.FindInterface(ref cat, ref med, _videoDeviceFilter, ref iid, out var o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            med = Uuid.MediaType.Video;
            hr = _captureGraphBuilder.FindInterface(ref cat, ref med, _videoDeviceFilter, ref iid, out o);
            if (hr != 0)
            {
                // ReSharper disable once RedundantAssignment
                o = null;
            }
        }
        //VideoStreamConfig = o as IAMStreamConfig;

        // Retreive the media control interface (for starting/stopping graph)
        _mediaControl = (IMediaControl)_graphBuilder;

        // Reload any video crossbars
        //if (videoSources != null) videoSources.Dispose();
        videoSources = null;

        // NOTE(review): nothing in this method visibly assigns media.formatPtr;
        // PtrToStructure on a zero pointer would throw — confirm whether
        // SetMediaType/AMMediaType populate formatPtr as a side effect, and
        // whether callers guarantee VideoDevice != null.
        _videoInfoHeader = Marshal.PtrToStructure <VideoInfoHeader>(media.formatPtr);
        Marshal.FreeCoTaskMem(media.formatPtr);
        media.formatPtr = IntPtr.Zero;

        // Buffer every sample, run continuously, no managed callback.
        hr = _sampGrabber.SetBufferSamples(true);
        if (hr == 0)
        {
            hr = _sampGrabber.SetOneShot(false);
        }
        if (hr == 0)
        {
            hr = _sampGrabber.SetCallback(null, 0);
        }
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
    }
    // Update the state now that we are done
    _actualGraphState = GraphState.Created;
}
/// <summary>
/// Convenience wrapper: returns the filter's output pin with the given index.
/// </summary>
public static IPin GetOutPin(IBaseFilter filter, int num) =>
    GetPin(filter, PinDirection.Output, num);
/// <summary>
/// Worker thread: builds a DirectShow capture graph for the configured
/// device moniker (source -> sample grabber), optionally applies the desired
/// frame size/rate, runs the graph and pumps frames through the Grabber
/// callback until stopEvent is signalled, then releases every COM object and
/// raises PlayingFinished.
/// </summary>
/// <param name="runGraph">When false only the graph is built and capability
/// information collected; the graph is never run.</param>
private void WorkerThread(bool runGraph)
{
    // grabber
    Grabber grabber = new Grabber(this);

    // objects
    object captureGraphObject = null;
    object graphObject = null;
    object grabberObject = null;

    // interfaces
    ICaptureGraphBuilder2 captureGraph = null;
    IFilterGraph2 graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;

    try
    {
        // get type of capture graph builder
        Type type = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2);
        if (type == null)
        {
            throw new ApplicationException("Failed creating capture graph builder");
        }

        // create capture graph builder
        captureGraphObject = Activator.CreateInstance(type);
        captureGraph = (ICaptureGraphBuilder2)captureGraphObject;

        // get type of filter graph
        type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IFilterGraph2)graphObject;

        // set filter graph to the capture graph builder
        captureGraph.SetFiltergraph((IGraphBuilder)graph);

        // create source device's object
        sourceObject = FilterInfo.CreateFilter(deviceMoniker);
        if (sourceObject == null)
        {
            throw new ApplicationException("Failed creating device object for moniker");
        }

        // get base filter interface of source device
        sourceBase = (IBaseFilter)sourceObject;

        // get type of sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // create sample grabber
        grabberObject = Activator.CreateInstance(type);
        sampleGrabber = (ISampleGrabber)grabberObject;
        grabberBase = (IBaseFilter)grabberObject;

        // add source and grabber filters to graph
        graph.AddFilter(sourceBase, "source");
        graph.AddFilter(grabberBase, "grabber");

        // set media type: request RGB24 video frames
        AMMediaType mediaType = new AMMediaType( );
        mediaType.MajorType = MediaType.Video;
        mediaType.SubType = MediaSubType.RGB24;
        sampleGrabber.SetMediaType(mediaType);

        // configure sample grabber: callback-driven, continuous capture
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback(grabber, 1);

        // check if it is required to change capture settings
        if ((desiredFrameRate != 0) || ((desiredFrameSize.Width != 0) && (desiredFrameSize.Height != 0)))
        {
            object streamConfigObject;
            // get stream configuration object
            captureGraph.FindInterface(PinCategory.Capture, MediaType.Video, sourceBase,
                                       typeof(IAMStreamConfig).GUID, out streamConfigObject);
            if (streamConfigObject != null)
            {
                IAMStreamConfig streamConfig = (IAMStreamConfig)streamConfigObject;
                if (videoCapabilities == null)
                {
                    // get all video capabilities (best effort)
                    try
                    {
                        videoCapabilities = AForge.Video.DirectShow.VideoCapabilities.FromStreamConfig(streamConfig);
                    }
                    catch { }
                }
                // get current format
                streamConfig.GetFormat(out mediaType);
                VideoInfoHeader infoHeader =
                    (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                // change frame size if required
                if ((desiredFrameSize.Width != 0) && (desiredFrameSize.Height != 0))
                {
                    infoHeader.BmiHeader.Width = desiredFrameSize.Width;
                    infoHeader.BmiHeader.Height = desiredFrameSize.Height;
                }
                // change frame rate if required (frame duration in 100ns units)
                if (desiredFrameRate != 0)
                {
                    infoHeader.AverageTimePerFrame = 10000000 / desiredFrameRate;
                }
                // copy the media structure back
                Marshal.StructureToPtr(infoHeader, mediaType.FormatPtr, false);
                // set the new format
                streamConfig.SetFormat(mediaType);
                mediaType.Dispose( );
            }
        }
        else
        {
            if (videoCapabilities == null)
            {
                object streamConfigObject;
                // get stream configuration object
                captureGraph.FindInterface(PinCategory.Capture, MediaType.Video, sourceBase,
                                           typeof(IAMStreamConfig).GUID, out streamConfigObject);
                if (streamConfigObject != null)
                {
                    IAMStreamConfig streamConfig = (IAMStreamConfig)streamConfigObject;
                    // get all video capabilities (best effort)
                    try
                    {
                        videoCapabilities = AForge.Video.DirectShow.VideoCapabilities.FromStreamConfig(streamConfig);
                    }
                    catch { }
                }
            }
        }

        if (runGraph)
        {
            // render source device on sample grabber
            captureGraph.RenderStream(PinCategory.Capture, MediaType.Video, sourceBase, null, grabberBase);

            // get media type of the established connection so the grabber
            // knows the actual frame dimensions
            if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
            {
                VideoInfoHeader vih =
                    (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                grabber.Width = vih.BmiHeader.Width;
                grabber.Height = vih.BmiHeader.Height;
                mediaType.Dispose( );
            }

            // get media control
            mediaControl = (IMediaControl)graphObject;

            // run
            mediaControl.Run( );

            // Poll for the stop signal; frames arrive on the grabber callback.
            while (!stopEvent.WaitOne(0, true))
            {
                Thread.Sleep(100);
                if (needToDisplayPropertyPage)
                {
                    needToDisplayPropertyPage = false;
                    try
                    {
                        // retrieve ISpecifyPropertyPages interface of the device
                        ISpecifyPropertyPages pPropPages = (ISpecifyPropertyPages)sourceObject;
                        // get property pages from the property bag
                        CAUUID caGUID;
                        pPropPages.GetPages(out caGUID);
                        // get filter info
                        FilterInfo filterInfo = new FilterInfo(deviceMoniker);
                        // create and display the OlePropertyFrame
                        Win32.OleCreatePropertyFrame(parentWindowForPropertyPage, 0, 0, filterInfo.Name, 1,
                                                     ref sourceObject, caGUID.cElems, caGUID.pElems, 0, 0,
                                                     IntPtr.Zero);
                        // release COM objects
                        Marshal.FreeCoTaskMem(caGUID.pElems);
                    }
                    catch { }
                }
            }
            mediaControl.StopWhenReady( );
        }
    }
    catch (Exception exception)
    {
        // provide information to clients
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
        }
    }
    finally
    {
        // release all objects
        captureGraph = null;
        graph = null;
        sourceBase = null;
        grabberBase = null;
        sampleGrabber = null;
        mediaControl = null;
        if (graphObject != null)
        {
            Marshal.ReleaseComObject(graphObject);
            graphObject = null;
        }
        if (sourceObject != null)
        {
            Marshal.ReleaseComObject(sourceObject);
            sourceObject = null;
        }
        if (grabberObject != null)
        {
            Marshal.ReleaseComObject(grabberObject);
            grabberObject = null;
        }
        if (captureGraphObject != null)
        {
            Marshal.ReleaseComObject(captureGraphObject);
            captureGraphObject = null;
        }
    }
    if (PlayingFinished != null)
    {
        PlayingFinished(this, ReasonToFinishPlaying.StoppedByUser);
    }
}
/// <summary>
/// Completely dismantles the capture graph: de-renders streams, detaches every
/// filter from the graph, disposes managed helpers, releases all COM wrappers,
/// and resets the state machine to <c>GraphState.Null</c>.
/// </summary>
protected void destroyGraph()
{
    // Best effort: tear down any rendered streams first; failures are ignored
    // so the rest of the cleanup still runs.
    try { this.derenderGraph(); } catch { }

    this.graphState = GraphState.Null;
    this.isCaptureRendered = false;
    this.isPreviewRendered = false;

    // Drop the Running Object Table registration (GraphEdit debugging aid).
    if (this.rotCookie != 0) { DsROT.RemoveGraphFromRot(ref this.rotCookie); this.rotCookie = 0; }

    // First pass: detach the filters from the graph so the graph drops its
    // own references before the RCWs are released further below.
    if (this.muxFilter != null) { this.graphBuilder.RemoveFilter(this.muxFilter); }
    if (this.baseGrabFlt != null) { this.graphBuilder.RemoveFilter(this.baseGrabFlt); }
    if (this.videoCompressorFilter != null) { this.graphBuilder.RemoveFilter(this.videoCompressorFilter); }
    if (this.audioCompressorFilter != null) { this.graphBuilder.RemoveFilter(this.audioCompressorFilter); }
    if (this.videoDeviceFilter != null) { this.graphBuilder.RemoveFilter(this.videoDeviceFilter); }
    if (this.audioDeviceFilter != null) { this.graphBuilder.RemoveFilter(this.audioDeviceFilter); }

    // Dispose managed helper objects that wrap capture sub-interfaces.
    if (this.videoSources != null) { this.videoSources.Dispose(); }
    this.videoSources = null;
    if (this.audioSources != null) { this.audioSources.Dispose(); }
    this.audioSources = null;
    if (this.propertyPages != null) { this.propertyPages.Dispose(); }
    this.propertyPages = null;
    if (this.tuner != null) { this.tuner.Dispose(); }
    this.tuner = null;

    // Second pass: release the runtime-callable wrappers themselves.
    if (this.graphBuilder != null) { Marshal.ReleaseComObject(this.graphBuilder); }
    this.graphBuilder = null;
    if (this.captureGraphBuilder != null) { Marshal.ReleaseComObject(this.captureGraphBuilder); }
    this.captureGraphBuilder = null;
    if (this.muxFilter != null) { Marshal.ReleaseComObject(this.muxFilter); }
    this.muxFilter = null;
    if (this.baseGrabFlt != null) { Marshal.ReleaseComObject(this.baseGrabFlt); }
    this.baseGrabFlt = null;
    if (this.fileWriterFilter != null) { Marshal.ReleaseComObject(this.fileWriterFilter); }
    this.fileWriterFilter = null;
    if (this.videoDeviceFilter != null) { Marshal.ReleaseComObject(this.videoDeviceFilter); }
    this.videoDeviceFilter = null;
    if (this.audioDeviceFilter != null) { Marshal.ReleaseComObject(this.audioDeviceFilter); }
    this.audioDeviceFilter = null;
    if (this.videoCompressorFilter != null) { Marshal.ReleaseComObject(this.videoCompressorFilter); }
    this.videoCompressorFilter = null;
    if (this.audioCompressorFilter != null) { Marshal.ReleaseComObject(this.audioCompressorFilter); }
    this.audioCompressorFilter = null;

    // These were obtained by casting graphBuilder; clearing the references
    // is sufficient, the underlying object was released above.
    this.mediaControl = null;
    this.videoWindow = null;

    // Force a collection so remaining RCWs are finalized promptly.
    GC.Collect();
}
/// <summary>
/// Initializes a creator that exposes an already-constructed DirectShow filter.
/// </summary>
/// <param name="name">Display name forwarded to the base creator.</param>
/// <param name="baseFilter">The pre-built filter instance stored in <c>BaseFilter</c>.</param>
/// <param name="postAction">Optional callback forwarded to the base creator; may be null.</param>
public BaseFilterCreator(string name, IBaseFilter baseFilter, Action<IBaseFilter> postAction = null)
    : base(name, postAction)
    => BaseFilter = baseFilter;
/// <summary>
/// Displays the native property page of a DirectShow filter or pin. When the
/// object does not implement <c>ISpecifyPropertyPages</c>, falls back to the
/// VfW compressor configuration dialog (if supported).
/// </summary>
/// <param name="filter_or_pin">An IBaseFilter or IPin whose property page should be shown; null is ignored.</param>
/// <param name="hwndOwner">Handle of the window that owns the modal property frame.</param>
internal static void _DisplayPropertyPage(object filter_or_pin, IntPtr hwndOwner)
{
    if (filter_or_pin == null)
    {
        return;
    }

    // Get the ISpecifyPropertyPages for the filter.
    ISpecifyPropertyPages pProp = filter_or_pin as ISpecifyPropertyPages;
    int hr = 0;

    if (pProp == null)
    {
        // If the filter doesn't implement ISpecifyPropertyPages, try
        // displaying IAMVfwCompressDialogs instead.
        IAMVfwCompressDialogs compressDialog = filter_or_pin as IAMVfwCompressDialogs;
        if (compressDialog != null)
        {
            hr = compressDialog.ShowDialog(VfwCompressDialogs.Config, IntPtr.Zero);
            DsError.ThrowExceptionForHR(hr);
        }
        return;
    }

    // Derive a dialog caption from the filter/pin name.
    string caption = string.Empty;
    if (filter_or_pin is IBaseFilter)
    {
        IBaseFilter as_filter = filter_or_pin as IBaseFilter;
        FilterInfo filterInfo;
        hr = as_filter.QueryFilterInfo(out filterInfo);
        DsError.ThrowExceptionForHR(hr);
        caption = filterInfo.achName;
        // QueryFilterInfo AddRefs the owning graph; release it or it leaks.
        if (filterInfo.pGraph != null)
        {
            Marshal.ReleaseComObject(filterInfo.pGraph);
        }
    }
    else if (filter_or_pin is IPin)
    {
        IPin as_pin = filter_or_pin as IPin;
        PinInfo pinInfo;
        hr = as_pin.QueryPinInfo(out pinInfo);
        DsError.ThrowExceptionForHR(hr);
        caption = pinInfo.name;
        // NOTE(review): QueryPinInfo also AddRefs pinInfo.filter and it is
        // never released here (original behavior) — confirm whether the
        // library's pin-info free helper should be called.
    }

    // Get the property pages from the property bag.
    DsCAUUID caGUID;
    hr = pProp.GetPages(out caGUID);
    DsError.ThrowExceptionForHR(hr);

    try
    {
        // Create and display the modal OlePropertyFrame.
        object oDevice = (object)filter_or_pin;
        hr = NativeMethodes.OleCreatePropertyFrame(hwndOwner, 0, 0, caption, 1, ref oDevice, caGUID.cElems, caGUID.pElems, 0, 0, IntPtr.Zero);
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // BUGFIX: free the page CLSID array and release the
        // ISpecifyPropertyPages reference even when OleCreatePropertyFrame
        // fails; the original leaked both on the error path.
        Marshal.FreeCoTaskMem(caGUID.pElems);
        Marshal.ReleaseComObject(pProp);
    }
}
/// <summary>
/// Builds a capture graph for the first enumerated video input device, hooks a
/// sample grabber callback into it, renders the preview into the
/// "PART_VideoPanel" WinForms panel and starts the graph.
/// </summary>
private void CaptureVideo()
{
    // NOTE(review): retVal receives every HRESULT below but is never checked;
    // a failing step is silently ignored — confirm whether DsError-style
    // checking should be added.
    int retVal;

    // Create the filter graph and the capture graph builder, then attach them.
    graph = (IGraphBuilder) new FilterGraph();
    capture = (ICaptureGraphBuilder2) new CaptureGraphBuilder();
    IMediaControl control = (IMediaControl)graph;
    IMediaEventEx eventEx = (IMediaEventEx)graph;
    retVal = capture.SetFiltergraph(graph);

    // Take the first video input device found and add it to the graph.
    Dictionary<string, IMoniker> devices = EnumDevices(Clsid.VideoInputDeviceCategory);
    IMoniker moniker = devices.First().Value;
    object obj = null;
    moniker.BindToObject(null, null, typeof(IBaseFilter).GUID, out obj);
    IBaseFilter baseFilter = (IBaseFilter)obj;
    retVal = graph.AddFilter(baseFilter, devices.First().Key);

    // Create the SampleGrabber filter by its well-known CLSID and ask it for
    // RGB24 video frames.
    Guid CLSID_SampleGrabber = new Guid("{C1F400A0-3F08-11D3-9F0B-006008039E37}");
    IBaseFilter grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(CLSID_SampleGrabber)) as IBaseFilter;
    var media = new AMMediaType();
    media.MajorType = MediaType.Video;
    media.SubType = MediaSubType.RGB24;
    media.FormatType = FormatType.VideoInfo;
    retVal = ((ISampleGrabber)grabber).SetMediaType(media);

    // Read the device's current format to size the callback buffers.
    object configObj;
    retVal = capture.FindInterface(PinCategory.Capture, MediaType.Video, baseFilter, typeof(IAMStreamConfig).GUID, out configObj);
    IAMStreamConfig config = (IAMStreamConfig)configObj;
    AMMediaType pmt;
    retVal = config.GetFormat(out pmt);
    var header = (VideoInfoHeader)Marshal.PtrToStructure(pmt.FormatPtr, typeof(VideoInfoHeader));
    var width = header.BmiHeader.Width;
    var height = header.BmiHeader.Height;
    // Stride assumes tightly packed rows (width * bytes-per-pixel).
    var stride = width * (header.BmiHeader.BitCount / 8);
    callback = new SampleGrabberCallback() { Width = width, Height = height, Stride = stride };
    retVal = ((ISampleGrabber)grabber).SetCallback(callback, 0);

    // Add the grabber and let the capture builder wire up the preview chain.
    retVal = graph.AddFilter(grabber, "SampleGrabber");
    // NOTE(review): these pins are looked up but only used by the
    // commented-out manual-connect experiments below.
    IPin output = GetPin(baseFilter, p => p.Name == "Capture");
    IPin input = GetPin(grabber, p => p.Name == "Input");
    IPin preview = GetPin(grabber, p => p.Name == "Output");
    //retVal = graph.ConnectDirect(output, input, pmt);
    //retVal = graph.Connect(output, input);
    retVal = capture.RenderStream(PinCategory.Preview, MediaType.Video, baseFilter, grabber, null);

    // Host the video window inside the WinForms panel embedded in this view.
    //var wih = new WindowInteropHelper(this);
    var panel = FindName("PART_VideoPanel") as System.Windows.Forms.Panel;
    IVideoWindow window = (IVideoWindow)graph;
    retVal = window.put_Owner(panel.Handle);
    retVal = window.put_WindowStyle(WindowStyles.WS_CHILD | WindowStyles.WS_CLIPCHILDREN);
    retVal = window.SetWindowPosition(0, 0, (int)panel.ClientSize.Width, (int)panel.ClientSize.Height);
    retVal = window.put_MessageDrain(panel.Handle);
    retVal = window.put_Visible(-1); //OATRUE

    // Start streaming.
    retVal = control.Run();
}
/// <summary>
/// Builds the still-image capture graph for the grabber and starts it.
/// Graph shape: capture device -> (optional Smart Tee when no still pin
/// exists) -> sample grabber on the still/preview pin, plus the default video
/// renderer on the capture pin, displayed in <paramref name="hControl"/>.
/// </summary>
/// <param name="dev">Video capture device used as the source filter.</param>
/// <param name="iWidth">Requested frame width, or 0 to keep the device default.</param>
/// <param name="iHeight">Requested frame height, or 0 to keep the device default.</param>
/// <param name="iBPP">Requested bits per pixel, or 0 to keep the device default.</param>
/// <param name="hControl">Control that hosts the preview video window.</param>
private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP, Control hControl)
{
    int hr;

    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    IPin pCaptureOut = null;
    IPin pSampleIn = null;
    IPin pRenderIn = null;

    // Get the graphbuilder object
    m_FilterGraph = new FilterGraph() as IFilterGraph2;
    try
    {
#if DEBUG
        m_rot = new DsROTEntry(m_FilterGraph);
#endif
        // add the video input device
        hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
        DsError.ThrowExceptionForHR(hr);

        // Find the still pin
        m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0);

        // Didn't find one.  Is there a preview pin?
        if (m_pinStill == null)
        {
            m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
        }

        // Still haven't found one.  Need to put a splitter in so we have
        // one stream to capture the bitmap from, and one to display.  Ok, we
        // don't *have* to do it that way, but we are going to anyway.
        if (m_pinStill == null)
        {
            IPin pRaw = null;
            IPin pSmart = null;

            // There is no still pin
            m_VidControl = null;

            // Add a splitter
            IBaseFilter iSmartTee = (IBaseFilter)new SmartTee();
            try
            {
                hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee");
                DsError.ThrowExceptionForHR(hr);

                // Find the capture pin from the video device and the
                // input pin for the splitter, and connect them
                pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
                pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0);
                hr = m_FilterGraph.Connect(pRaw, pSmart);
                DsError.ThrowExceptionForHR(hr);

                // Now set the capture and still pins (from the splitter)
                m_pinStill = DsFindPin.ByName(iSmartTee, "Preview");
                pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture");

                // If any of the default config items are set, perform the config
                // on the actual video device (rather than the splitter)
                if (iHeight + iWidth + iBPP > 0)
                {
                    SetConfigParms(pRaw, iWidth, iHeight, iBPP);
                }
            }
            finally
            {
                // BUGFIX: the original guarded these releases with unrelated
                // reference comparisons (pRaw != pSmart, pRaw != iSmartTee),
                // which could pass null to ReleaseComObject and throw from
                // inside this finally block. Release each reference iff it
                // was actually obtained.
                if (pRaw != null)
                {
                    Marshal.ReleaseComObject(pRaw);
                }
                if (pSmart != null)
                {
                    Marshal.ReleaseComObject(pSmart);
                }
                // The graph holds its own reference after AddFilter, so the
                // local wrapper can always be released.
                Marshal.ReleaseComObject(iSmartTee);
            }
        }
        else
        {
            // Get a control pointer (used in Click())
            m_VidControl = capFilter as IAMVideoControl;
            pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);

            // If any of the default config items are set
            if (iHeight + iWidth + iBPP > 0)
            {
                SetConfigParms(m_pinStill, iWidth, iHeight, iBPP);
            }
        }

        // Get the SampleGrabber interface
        sampGrabber = new SampleGrabber() as ISampleGrabber;

        // Configure the sample grabber
        IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter;
        ConfigureSampleGrabber(sampGrabber);
        pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);

        // Get the default video renderer
        IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter;
        hr = m_FilterGraph.AddFilter(pRenderer, "Renderer");
        DsError.ThrowExceptionForHR(hr);
        pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0);

        // Add the sample grabber to the graph
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        // Connection order differs depending on whether the device exposes
        // IAMVideoControl (kept exactly as in the original).
        if (m_VidControl == null)
        {
            // Connect the Still pin to the sample grabber
            hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
            DsError.ThrowExceptionForHR(hr);

            // Connect the capture pin to the renderer
            hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
            DsError.ThrowExceptionForHR(hr);
        }
        else
        {
            // Connect the capture pin to the renderer
            hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
            DsError.ThrowExceptionForHR(hr);

            // Connect the Still pin to the sample grabber
            hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
            DsError.ThrowExceptionForHR(hr);
        }

        // Learn the video properties
        SaveSizeInfo(sampGrabber);
        ConfigVideoWindow(hControl);

        // Start the graph
        IMediaControl mediaCtrl = m_FilterGraph as IMediaControl;
        hr = mediaCtrl.Run();
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // Release local COM references; the graph keeps the filters alive.
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
            sampGrabber = null;
        }
        if (pCaptureOut != null)
        {
            Marshal.ReleaseComObject(pCaptureOut);
            pCaptureOut = null;
        }
        if (pRenderIn != null)
        {
            Marshal.ReleaseComObject(pRenderIn);
            pRenderIn = null;
        }
        if (pSampleIn != null)
        {
            Marshal.ReleaseComObject(pSampleIn);
            pSampleIn = null;
        }
    }
}
/// <summary>
/// Builds the still-image capture graph for the grabber (windowless overload)
/// and starts it. Same structure as the hControl overload but without the
/// preview window configuration.
/// </summary>
/// <param name="dev">Video capture device used as the source filter.</param>
/// <param name="iWidth">Requested frame width, or 0 to keep the device default.</param>
/// <param name="iHeight">Requested frame height, or 0 to keep the device default.</param>
/// <param name="iBPP">Requested bits per pixel, or 0 to keep the device default.</param>
private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP)
{
    int hr;

    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    IPin pCaptureOut = null;
    IPin pSampleIn = null;
    IPin pRenderIn = null;

    m_FilterGraph = new FilterGraph() as IFilterGraph2;
    try
    {
        // Add the video input device to the graph.
        hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
        DsError.ThrowExceptionForHR(hr);

        // Prefer a dedicated still pin; fall back to the preview pin.
        m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0);
        if (m_pinStill == null)
        {
            m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
        }

        // No still/preview pin at all: insert a Smart Tee so one branch can
        // feed the grabber and the other the renderer.
        if (m_pinStill == null)
        {
            IPin pRaw = null;
            IPin pSmart = null;

            m_VidControl = null;

            IBaseFilter iSmartTee = (IBaseFilter)new SmartTee();
            try
            {
                hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee");
                DsError.ThrowExceptionForHR(hr);

                // Connect the device's capture pin to the splitter input.
                pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
                pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0);
                hr = m_FilterGraph.Connect(pRaw, pSmart);
                DsError.ThrowExceptionForHR(hr);

                // Use the splitter's outputs as still and capture pins.
                m_pinStill = DsFindPin.ByName(iSmartTee, "Preview");
                pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture");

                // Apply config overrides on the actual device pin.
                if (iHeight + iWidth + iBPP > 0)
                {
                    SetConfigParms(pRaw, iWidth, iHeight, iBPP);
                }
            }
            finally
            {
                // BUGFIX: the original guarded these releases with unrelated
                // reference comparisons (pRaw != pSmart, pRaw != iSmartTee),
                // which could pass null to ReleaseComObject and throw from
                // inside this finally block. Release each reference iff it
                // was actually obtained.
                if (pRaw != null)
                {
                    Marshal.ReleaseComObject(pRaw);
                }
                if (pSmart != null)
                {
                    Marshal.ReleaseComObject(pSmart);
                }
                // The graph holds its own reference after AddFilter.
                Marshal.ReleaseComObject(iSmartTee);
            }
        }
        else
        {
            // Keep a video-control pointer (used for still triggering).
            m_VidControl = capFilter as IAMVideoControl;
            pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);

            if (iHeight + iWidth + iBPP > 0)
            {
                SetConfigParms(m_pinStill, iWidth, iHeight, iBPP);
            }
        }

        // Create and configure the sample grabber.
        sampGrabber = new SampleGrabber() as ISampleGrabber;
        IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter;
        ConfigureSampleGrabber(sampGrabber);
        pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);

        // Add the default video renderer.
        IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter;
        hr = m_FilterGraph.AddFilter(pRenderer, "Renderer");
        //hr = m_FilterGraph.AddFilter(null, "Null render");
        DsError.ThrowExceptionForHR(hr);
        pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0);

        // Add the sample grabber to the graph.
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        // Connection order differs depending on whether the device exposes
        // IAMVideoControl (kept exactly as in the original).
        if (m_VidControl == null)
        {
            hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
            DsError.ThrowExceptionForHR(hr);
            hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
            DsError.ThrowExceptionForHR(hr);
        }
        else
        {
            hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
            DsError.ThrowExceptionForHR(hr);
            hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
            DsError.ThrowExceptionForHR(hr);
        }

        // Cache frame size / format information, then start the graph.
        SaveSizeInfo(sampGrabber);
        IMediaControl mediaCtrl = m_FilterGraph as IMediaControl;
        hr = mediaCtrl.Run();
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // Release local COM references; the graph keeps the filters alive.
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
            sampGrabber = null;
        }
        if (pCaptureOut != null)
        {
            Marshal.ReleaseComObject(pCaptureOut);
            pCaptureOut = null;
        }
        if (pRenderIn != null)
        {
            Marshal.ReleaseComObject(pRenderIn);
            pRenderIn = null;
        }
        if (pSampleIn != null)
        {
            Marshal.ReleaseComObject(pSampleIn);
            pSampleIn = null;
        }
    }
}
/// <summary>
/// Builds and starts the live-preview capture graph: capture device ->
/// sample grabber (frame callback) -> preview renderer, registered in the
/// Running Object Table for GraphEdit debugging.
/// </summary>
public void CaptureVideo()
{
    pictureBox1.Image = null;
    int hr = 0;

    IBaseFilter sourceFilter = null;
    ISampleGrabber sampleGrabber = null;

    try
    {
        // Get DirectShow interfaces
        GetInterfaces();

        // Attach the filter graph to the capture graph
        hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
        DsError.ThrowExceptionForHR(hr);

        // Use the system device enumerator and class enumerator to find
        // a video capture/preview device, such as a desktop USB video camera.
        sourceFilter = FindCaptureDevice();

        // Add Capture filter to graph.
        hr = this.graphBuilder.AddFilter(sourceFilter, "Video Capture");
        DsError.ThrowExceptionForHR(hr);

        // Initialize SampleGrabber.
        sampleGrabber = new SampleGrabber() as ISampleGrabber;

        // Configure SampleGrabber. Add preview callback.
        ConfigureSampleGrabber(sampleGrabber);

        // Add SampleGrabber to graph.
        hr = this.graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "Frame Callback");
        DsError.ThrowExceptionForHR(hr);

        // Configure preview settings (fps / size fields of this instance).
        SetConfigParams(this.captureGraphBuilder, sourceFilter, _previewFPS, _previewWidth, _previewHeight);

        // Render the preview through the sample grabber.
        hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, sourceFilter, (sampleGrabber as IBaseFilter), null);
        DsError.ThrowExceptionForHR(hr);

        SaveSizeInfo(sampleGrabber);

        // Set video window style and position
        SetupVideoWindow();

        // Add our graph to the running object table, which will allow
        // the GraphEdit application to "spy" on our graph
        rot = new DsROTEntry(this.graphBuilder);

        // Start previewing video data
        hr = this.mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);
    }
    catch
    {
        // NOTE(review): the original exception is discarded here, which hides
        // the failure cause — consider logging it.
        MessageBox.Show("An unrecoverable error has occurred.");
    }
    finally
    {
        // Release local COM references; the graph keeps the filters alive.
        if (sourceFilter != null)
        {
            Marshal.ReleaseComObject(sourceFilter);
            sourceFilter = null;
        }
        if (sampleGrabber != null)
        {
            Marshal.ReleaseComObject(sampleGrabber);
            sampleGrabber = null;
        }
    }
}
/// <summary>
/// Builds the frame-grabbing graph for a media file: renders the file, removes
/// the automatically-inserted video renderer, splices the sample grabber in
/// after the AVI splitter if needed, and terminates the chain with a null
/// renderer so frames can be pulled without being displayed.
/// </summary>
/// <param name="FileName">Path of the media file to build the graph for.</param>
private void SetupGraph(string FileName)
{
    int hr;

    // Get the graphbuilder object and its control/seeking/event views.
    this.graphBuilder = new FilterGraph() as IGraphBuilder;
    this.mediaControl = this.graphBuilder as IMediaControl;
    this.mediaSeeking = this.graphBuilder as IMediaSeeking;
    this.mediaEvent = this.graphBuilder as IMediaEvent;

    // Get the SampleGrabber interface and configure it.
    this.sampleGrabber = new SampleGrabber() as ISampleGrabber;
    this.sampleGrabberFilter = sampleGrabber as IBaseFilter;
    ConfigureSampleGrabber(sampleGrabber);

    // Add the frame grabber to the graph.
    hr = graphBuilder.AddFilter(sampleGrabberFilter, "Ds.NET Sample Grabber");
    DsError.ThrowExceptionForHR(hr);

    // Add the AVI splitter so the automatic render can route through it.
    IBaseFilter aviSplitter = new AviSplitter() as IBaseFilter;
    hr = graphBuilder.AddFilter(aviSplitter, "Splitter");
    DsError.ThrowExceptionForHR(hr);

    // Have the graph builder construct its appropriate graph automatically.
    hr = this.graphBuilder.RenderFile(FileName, null);
    DsError.ThrowExceptionForHR(hr);
#if DEBUG
    m_rot = new DsROTEntry(graphBuilder);
#endif
    // Remove the video renderer filter the automatic render inserted.
    IBaseFilter defaultVideoRenderer = null;
    graphBuilder.FindFilterByName("Video Renderer", out defaultVideoRenderer);
    // BUGFIX: FindFilterByName leaves the out parameter null when no video
    // renderer was built (e.g. for audio-only files); the original passed
    // that null straight to RemoveFilter and also leaked the renderer's COM
    // reference when it did exist.
    if (defaultVideoRenderer != null)
    {
        graphBuilder.RemoveFilter(defaultVideoRenderer);
        Marshal.ReleaseComObject(defaultVideoRenderer);
    }

    // Disconnect anything that is connected to the output of the sample grabber.
    IPin iPinSampleGrabberOut = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Output, 0);
    IPin iPinVideoIn;
    hr = iPinSampleGrabberOut.ConnectedTo(out iPinVideoIn);
    if (hr == 0)
    {
        // Disconnect the sample grabber output from the attached filters.
        hr = iPinVideoIn.Disconnect();
        DsError.ThrowExceptionForHR(hr);
        hr = iPinSampleGrabberOut.Disconnect();
        DsError.ThrowExceptionForHR(hr);
    }
    else
    {
        // The automatic render did not include the sample grabber; splice it
        // in after the AVI splitter instead.
        IPin iPinAVISplitterOut = DsFindPin.ByDirection(aviSplitter, PinDirection.Output, 0);
        IPin iPinAVISplitterIn;
        hr = iPinAVISplitterOut.ConnectedTo(out iPinAVISplitterIn);
        DsError.ThrowExceptionForHR(hr);
        hr = iPinAVISplitterOut.Disconnect();
        DsError.ThrowExceptionForHR(hr);
        hr = iPinAVISplitterIn.Disconnect();
        DsError.ThrowExceptionForHR(hr);

        // Connect the avi splitter output to the sample grabber.
        IPin iPinSampleGrabberIn = DsFindPin.ByDirection(sampleGrabberFilter, PinDirection.Input, 0);
        hr = graphBuilder.Connect(iPinAVISplitterOut, iPinSampleGrabberIn);
        DsError.ThrowExceptionForHR(hr);
    }

    // Add the null renderer to the graph and terminate the grabber with it.
    nullrenderer = new NullRenderer() as IBaseFilter;
    hr = graphBuilder.AddFilter(nullrenderer, "Null renderer");
    DsError.ThrowExceptionForHR(hr);
    IPin iPinNullRendererIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);
    hr = graphBuilder.Connect(iPinSampleGrabberOut, iPinNullRendererIn);
    DsError.ThrowExceptionForHR(hr);

    // Read and cache the image sizes, then set up frame stepping.
    SaveSizeInfo(sampleGrabber);
    this.GetFrameStepInterface();
    // NOTE(review): the IPin references above are never released (original
    // behavior); the graph itself keeps the filters alive regardless.
}
/// <summary>
/// Prepares ffdshow-based subtitle rendering for the given graph: evicts the
/// competing subtitle filters, locates the ffdshow video decoder, and wires up
/// the FFDShowAPI wrapper.
/// </summary>
/// <param name="graphBuilder">Graph to search for the ffdshow decoder.</param>
/// <param name="filename">Media file name (currently unused by this method).</param>
/// <returns>true when an ffdshow video decoder was found; otherwise false.</returns>
public bool LoadSubtitles(IGraphBuilder graphBuilder, string filename)
{
    LoadSettings();

    // DirectVobSub would otherwise claim the subtitle pin; take it out first.
    DirectVobSubUtil.RemoveFromGraph(graphBuilder);

    // The InternalScriptRenderer also grabs the subtitle pin; remove it too.
    IBaseFilter scriptRenderer = null;
    DirectShowUtil.FindFilterByClassID(graphBuilder, ClassId.InternalScriptRenderer, out scriptRenderer);
    if (scriptRenderer != null)
    {
        graphBuilder.RemoveFilter(scriptRenderer);
        DirectShowUtil.ReleaseComObject(scriptRenderer);
    }

    // Locate the ffdshow video decoder, trying the plain, DXVA and raw
    // variants in that order.
    IBaseFilter ffdshowFilter = null;
    DirectShowUtil.FindFilterByClassID(graphBuilder, FFDShowAPI.FFDShowVideoGuid, out ffdshowFilter);
    if (ffdshowFilter == null)
    {
        DirectShowUtil.FindFilterByClassID(graphBuilder, FFDShowAPI.FFDShowVideoDXVAGuid, out ffdshowFilter);
    }
    if (ffdshowFilter == null)
    {
        DirectShowUtil.FindFilterByClassID(graphBuilder, FFDShowAPI.FFDShowVideoRawGuid, out ffdshowFilter);
    }
    if (ffdshowFilter == null)
    {
        return false;
    }

    ffdshowAPI = new FFDShowAPI((object)ffdshowFilter);

    IffdshowDec decoder = ffdshowFilter as IffdshowDec;
    if (decoder == null)
    {
        Log.Error("FFdshow interfaces not found. Try to update FFDShow");
    }
    else
    {
        Log.Info("FFdshow interfaces found");
    }

    // Subtitles stay disabled when selection is off; otherwise honor autoShow.
    Enable = selectionOff ? false : autoShow;
    return true;
}
/// <summary>
/// Applies frame-rate, size and subtype overrides to the capture pin's stream
/// format via the device's <c>IAMStreamConfig</c> interface.
/// </summary>
/// <param name="capGraph">Capture graph builder used to locate IAMStreamConfig.</param>
/// <param name="capFilter">Capture source filter whose output format is changed.</param>
/// <param name="iFrameRate">Frames per second to request, or 0 to keep the current rate.</param>
/// <param name="iWidth">Frame width to request, or 0 to keep the current width.</param>
/// <param name="iHeight">Frame height to request, or 0 to keep the current height.</param>
/// <param name="subType">Media subtype to force, or Guid.Empty to keep the current one.</param>
/// <exception cref="Exception">Thrown when IAMStreamConfig cannot be obtained.</exception>
private void SetConfigParams(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iFrameRate, int iWidth, int iHeight, Guid subType)
{
    int hr;
    object config;
    AMMediaType mediaType;

    // Find the stream config interface
    hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out config);

    IAMStreamConfig videoStreamConfig = config as IAMStreamConfig;
    if (videoStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }

    // Get the existing format block
    hr = videoStreamConfig.GetFormat(out mediaType);
    DsError.ThrowExceptionForHR(hr);

    try
    {
        // copy out the videoinfoheader
        VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(mediaType.formatPtr, videoInfoHeader);

        // if overriding the framerate, set the frame rate
        if (iFrameRate > 0)
        {
            // AvgTimePerFrame is expressed in 100 ns units: 10,000,000 / fps.
            videoInfoHeader.AvgTimePerFrame = 10000000 / iFrameRate;
        }

        // if overriding the width, set the width
        if (iWidth > 0)
        {
            videoInfoHeader.BmiHeader.Width = iWidth;
        }

        // if overriding the Height, set the Height
        if (iHeight > 0)
        {
            videoInfoHeader.BmiHeader.Height = iHeight;
        }

        // WORKAROUND If subtype is different from previous configured type, set new one.
        if (subType != Guid.Empty && subType != mediaType.subType)
        {
            mediaType.subType = subType;
        }

        // Copy the media structure back
        Marshal.StructureToPtr(videoInfoHeader, mediaType.formatPtr, false);

        // Set the new format
        hr = videoStreamConfig.SetFormat(mediaType);
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // BUGFIX: free the AM_MEDIA_TYPE even when SetFormat (or the marshal
        // calls) throw; the original leaked the format block on that path.
        DsUtils.FreeAMMediaType(mediaType);
        mediaType = null;
    }
    // NOTE(review): the IAMStreamConfig reference obtained via FindInterface
    // is never released here (original behavior) — confirm whether it should be.
}
// Thread entry point
/// <summary>
/// Worker loop: builds a graph (device moniker -> sample grabber, RGB24),
/// renders the grabber output, starts the graph, then polls stopEvent every
/// 100 ms until asked to stop. All COM objects are released on exit.
/// </summary>
public void WorkerThread()
{
    // grabber
    Grabber grabber = new Grabber(this);

    // objects
    object graphObj = null;
    object sourceObj = null;
    object grabberObj = null;

    // interfaces
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sg = null;
    IMediaControl mc = null;

    try
    {
        // Get type for filter graph
        Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (srvType == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // create filter graph
        graphObj = Activator.CreateInstance(srvType);
        graph = (IGraphBuilder)graphObj;

        // ----
        UCOMIBindCtx bindCtx = null;
        UCOMIMoniker moniker = null;
        int n = 0;

        // create bind context
        if (Win32.CreateBindCtx(0, out bindCtx) == 0)
        {
            // convert moniker`s string to a moniker
            if (Win32.MkParseDisplayName(bindCtx, source, ref n, out moniker) == 0)
            {
                // get device base filter
                Guid filterId = typeof(IBaseFilter).GUID;
                moniker.BindToObject(null, null, ref filterId, out sourceObj);

                Marshal.ReleaseComObject(moniker);
                moniker = null;
            }
            Marshal.ReleaseComObject(bindCtx);
            bindCtx = null;
        }
        // ----

        if (sourceObj == null)
        {
            throw new ApplicationException("Failed creating device object for moniker");
        }
        sourceBase = (IBaseFilter)sourceObj;

        // Get type for sample grabber
        srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (srvType == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // create sample grabber
        grabberObj = Activator.CreateInstance(srvType);
        sg = (ISampleGrabber)grabberObj;
        grabberBase = (IBaseFilter)grabberObj;

        // add source filter to graph
        graph.AddFilter(sourceBase, "source");
        graph.AddFilter(grabberBase, "grabber");

        // set media type: request uncompressed RGB24 video frames
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Video;
        mt.subType = MediaSubType.RGB24;
        sg.SetMediaType(mt);

        // connect pins
        if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
        {
            throw new ApplicationException("Failed connecting filters");
        }

        // get media type: read back the negotiated frame dimensions
        if (sg.GetConnectedMediaType(mt) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
            System.Diagnostics.Debug.WriteLine("width = " + vih.BmiHeader.Width + ", height = " + vih.BmiHeader.Height);
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            mt.Dispose();
        }

        // render
        graph.Render(DSTools.GetOutPin(grabberBase, 0));

        // configure the grabber: no buffering, continuous grabbing,
        // per-buffer callback (method 1 = BufferCB)
        sg.SetBufferSamples(false);
        sg.SetOneShot(false);
        sg.SetCallback(grabber, 1);

        // window: keep the auto-created video window hidden
        IVideoWindow win = (IVideoWindow)graphObj;
        win.put_AutoShow(false);
        win = null;

        // get media control
        mc = (IMediaControl)graphObj;

        // run
        mc.Run();

        // Poll for the stop signal every 100 ms.
        while (!stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        mc.StopWhenReady();
    }
    // catch any exceptions
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("----: " + e.Message);
    }
    // finalization block
    finally
    {
        // release all objects
        mc = null;
        graph = null;
        sourceBase = null;
        grabberBase = null;
        sg = null;

        if (graphObj != null)
        {
            Marshal.ReleaseComObject(graphObj);
            graphObj = null;
        }
        if (sourceObj != null)
        {
            Marshal.ReleaseComObject(sourceObj);
            sourceObj = null;
        }
        if (grabberObj != null)
        {
            Marshal.ReleaseComObject(grabberObj);
            grabberObj = null;
        }
    }
}
// Native entry point that initializes the custom EVR presenter for the given
// EVR filter and D3D device, returning the presenter instance handle through
// presenterInstance. NOTE(review): the [DllImport] attribute is expected to
// precede this declaration (not visible in this chunk).
private static extern int EvrInit(IEVRPresentCallback callback, uint dwD3DDevice, IBaseFilter evrFilter, IntPtr monitor, out IntPtr presenterInstance);
/// <summary>
/// Internal. Displays a property page for a filter.
/// </summary>
/// <param name="filter">The filter for which to display a property page.</param>
/// <param name="hwndOwner">The window handle to parent the property page.</param>
internal static void DisplayPropertyPageFilter(IBaseFilter filter, IntPtr hwndOwner)
    => _DisplayPropertyPage(filter, hwndOwner);
/// <summary>
/// Creates the filter graph and capture graph builder, instantiates the sample
/// grabber, adds the configured video/audio device and compressor filters, and
/// caches the stream-config and tuner interfaces. Advances the state machine
/// to <c>GraphState.Created</c>; does nothing if already past that state.
/// </summary>
/// <exception cref="ArgumentException">Neither a video nor an audio device is set.</exception>
/// <exception cref="NotImplementedException">The DirectShow SampleGrabber is not registered.</exception>
protected void createGraph()
{
    System.Type typeFromCLSID = null;
    object obj2 = null;

    if ((this.videoDevice == null) && (this.audioDevice == null))
    {
        throw new ArgumentException("The video and/or audio device have not been set. Please set one or both to valid capture devices.\n");
    }

    if (this.graphState < GraphState.Created)
    {
        object obj3;
        GC.Collect();

        // Create the filter graph and (via the DsBugWO workaround helper)
        // the capture graph builder, then attach them.
        this.graphBuilder = (IGraphBuilder)Activator.CreateInstance(System.Type.GetTypeFromCLSID(Clsid.FilterGraph, true));
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid gUID = typeof(ICaptureGraphBuilder2).GUID;
        this.captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref gUID);

        // Create the sample grabber.
        typeFromCLSID = System.Type.GetTypeFromCLSID(Clsid.SampleGrabber, true);
        if (typeFromCLSID == null)
        {
            throw new NotImplementedException("DirectShow SampleGrabber not installed/registered");
        }
        obj2 = Activator.CreateInstance(typeFromCLSID);
        this.sampGrabber = (ISampleGrabber)obj2;
        obj2 = null;

        int errorCode = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
        if (errorCode < 0)
        {
            Marshal.ThrowExceptionForHR(errorCode);
        }

        // Ask the grabber for uncompressed RGB24 video.
        AMMediaType pmt = new AMMediaType();
        pmt.majorType = MediaType.Video;
        pmt.subType = MediaSubType.RGB24;
        pmt.formatType = FormatType.VideoInfo;
        errorCode = this.sampGrabber.SetMediaType(pmt);
        if (errorCode < 0)
        {
            Marshal.ThrowExceptionForHR(errorCode);
        }

        // Add the video device (and with it the grabber) to the graph.
        if (this.VideoDevice != null)
        {
            this.videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(this.VideoDevice.MonikerString);
            errorCode = this.graphBuilder.AddFilter(this.videoDeviceFilter, "Video Capture Device");
            if (errorCode < 0)
            {
                Marshal.ThrowExceptionForHR(errorCode);
            }
            this.mediaEvt = (IMediaEventEx)this.graphBuilder;
            this.baseGrabFlt = (IBaseFilter)this.sampGrabber;
            errorCode = this.graphBuilder.AddFilter(this.baseGrabFlt, "DS.NET Grabber");
            if (errorCode < 0)
            {
                Marshal.ThrowExceptionForHR(errorCode);
            }
        }

        // Add the audio device, if configured.
        if (this.AudioDevice != null)
        {
            this.audioDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(this.AudioDevice.MonikerString);
            errorCode = this.graphBuilder.AddFilter(this.audioDeviceFilter, "Audio Capture Device");
            if (errorCode < 0)
            {
                Marshal.ThrowExceptionForHR(errorCode);
            }
        }

        // Add the compressors, if configured.
        if (this.VideoCompressor != null)
        {
            this.videoCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(this.VideoCompressor.MonikerString);
            errorCode = this.graphBuilder.AddFilter(this.videoCompressorFilter, "Video Compressor");
            if (errorCode < 0)
            {
                Marshal.ThrowExceptionForHR(errorCode);
            }
        }
        if (this.AudioCompressor != null)
        {
            this.audioCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(this.AudioCompressor.MonikerString);
            errorCode = this.graphBuilder.AddFilter(this.audioCompressorFilter, "Audio Compressor");
            if (errorCode < 0)
            {
                Marshal.ThrowExceptionForHR(errorCode);
            }
        }

        // Look up IAMStreamConfig on the video capture pin: try Interleaved
        // (DV-style devices) first, then plain Video.
        Guid capture = PinCategory.Capture;
        Guid interleaved = MediaType.Interleaved;
        Guid riid = typeof(IAMStreamConfig).GUID;
        if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj3) != 0)
        {
            interleaved = MediaType.Video;
            if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj3) != 0)
            {
                obj3 = null;
            }
        }
        this.videoStreamConfig = obj3 as IAMStreamConfig;

        // Same lookup for the audio capture pin.
        obj3 = null;
        capture = PinCategory.Capture;
        interleaved = MediaType.Audio;
        riid = typeof(IAMStreamConfig).GUID;
        if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.audioDeviceFilter, ref riid, out obj3) != 0)
        {
            obj3 = null;
        }
        this.audioStreamConfig = obj3 as IAMStreamConfig;

        this.mediaControl = (IMediaControl)this.graphBuilder;

        // Reset cached helper collections; they are rebuilt lazily for the
        // newly created graph.
        if (this.videoSources != null)
        {
            this.videoSources.Dispose();
        }
        this.videoSources = null;
        if (this.audioSources != null)
        {
            this.audioSources.Dispose();
        }
        this.audioSources = null;
        if (this.propertyPages != null)
        {
            this.propertyPages.Dispose();
        }
        this.propertyPages = null;
        this.videoCaps = null;
        this.audioCaps = null;

        // Look up the TV tuner interface (Interleaved first, then Video).
        obj3 = null;
        capture = PinCategory.Capture;
        interleaved = MediaType.Interleaved;
        riid = typeof(IAMTVTuner).GUID;
        if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj3) != 0)
        {
            interleaved = MediaType.Video;
            if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj3) != 0)
            {
                obj3 = null;
            }
        }
        IAMTVTuner tuner = obj3 as IAMTVTuner;
        if (tuner != null)
        {
            this.tuner = new Tuner(tuner);
        }

        this.graphState = GraphState.Created;
    }
}
/// <summary>
/// Builds a preview capture graph for the given device: capture source ->
/// sample grabber (invoking <paramref name="grabberCallback"/>) -> null
/// renderer, with the requested frame rate/size/subtype applied. The graph is
/// built but not started (the Run call is commented out).
/// </summary>
/// <param name="device">Capture device to open.</param>
/// <param name="iFrameRate">Requested frame rate, or 0 to keep the device default.</param>
/// <param name="iWidth">Requested frame width, or 0 to keep the device default.</param>
/// <param name="iHeight">Requested frame height, or 0 to keep the device default.</param>
/// <param name="grabberCallback">Callback that receives grabbed samples.</param>
/// <param name="subType">Media subtype to request, or Guid.Empty for the current one.</param>
/// <returns>The last HRESULT produced before an error or completion.</returns>
public int CaptureVideo(DsDevice device, int iFrameRate, int iWidth, int iHeight, ISampleGrabberCB grabberCallback, Guid subType)
{
    int hr = 0;

    IBaseFilter sourceFilter = null;
    IBaseFilter renderFilter = null;

    try
    {
        // Get DirectShow interfaces
        hr = OpenInterfaces();

        // Attach the filter graph to the capture graph
        hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
        DsError.ThrowExceptionForHR(hr);

        // Use the system device enumerator and class enumerator to find
        // a video capture/preview device, such as a desktop USB video camera.
        sourceFilter = SelectCaptureDevice(device);

        // Add Capture filter to graph.
        hr = this.graphBuilder.AddFilter(sourceFilter, "DirectShowCam");
        DsError.ThrowExceptionForHR(hr);

        // Configure preview settings.
        SetConfigParams(this.captureGraphBuilder, sourceFilter, iFrameRate, iWidth, iHeight, subType);

        // Initialize SampleGrabber.
        sampleGrabber = new SampleGrabber() as ISampleGrabber;

        // Configure SampleGrabber. Add preview callback.
        ConfigureSampleGrabber(sampleGrabber, subType, grabberCallback);

        // Add SampleGrabber to graph.
        hr = this.graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "Frame Callback");
        DsError.ThrowExceptionForHR(hr);

        // Add the Null Render to the filter graph.
        // NOTE(review): this AddFilter HRESULT is overwritten by RenderStream
        // below without being checked.
        renderFilter = new NullRenderer() as IBaseFilter;
        hr = this.graphBuilder.AddFilter(renderFilter, "NullRenderer");

        // Render the preview through the grabber into the null renderer.
        hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, sourceFilter, (sampleGrabber as IBaseFilter), renderFilter);
        //hr = this.captureGraphBuilder.RenderStream( PinCategory.Preview, MediaType.Video, sourceFilter, (sampleGrabber as IBaseFilter), null );
        DsError.ThrowExceptionForHR(hr);

        SaveSizeInfo(sampleGrabber);

        // Add our graph to the running object table, which will allow
        // the GraphEdit application to "spy" on our graph
        rot = new DsROTEntry(this.graphBuilder);

        // Start previewing video data
        //hr = this.mediaControl.Run();
        //DsError.ThrowExceptionForHR( hr );
    }
    catch
    {
        // NOTE(review): failures are swallowed here and the last hr is
        // returned as-is — confirm whether callers expect an error code.
        ////MessageBox.Show( "An unrecoverable error has occurred." );
    }
    finally
    {
        // Release local COM references; the graph keeps the filters alive.
        if (sourceFilter != null)
        {
            Marshal.ReleaseComObject(sourceFilter);
            sourceFilter = null;
        }
        if (sampleGrabber != null)
        {
            Marshal.ReleaseComObject(sampleGrabber);
            sampleGrabber = null;
        }
    }
    return(hr);
}
/// <summary>
/// Wraps the supplied DirectShow filter and performs the common initialisation.
/// </summary>
/// <param name="baseFilter">The filter this instance wraps.</param>
public BaseFilterEx(IBaseFilter baseFilter)
{
    BaseFilter = baseFilter;
    Init();
}
/// <summary>
/// Creates the DirectShow graph for RTSP playback and acquires all COM interfaces:
/// RTSP source -> MPEG-2 demultiplexer -> (configured codecs / audio renderer / VMR9),
/// optionally wiring the DVB subtitle filter to extra demux output pins.
/// Returns true when the graph was built successfully, false otherwise
/// (failures are logged and partially-built state is cleaned up).
/// </summary>
protected bool GetInterfaces()
{
    VMR9Util.g_vmr9 = null;
    if (IsRadio == false)
    {
        Vmr9 = VMR9Util.g_vmr9 = new VMR9Util();
        // switch back to directx fullscreen mode
        Log.Info("RTSPPlayer: Enabling DX9 exclusive mode");
        GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
        GUIWindowManager.SendMessage(msg);
    }
    //Type comtype = null;
    //object comobj = null;
    // NOTE(review): rect is populated from the main form but never used below — confirm dead code.
    DsRect rect = new DsRect();
    rect.top = 0;
    rect.bottom = GUIGraphicsContext.form.Height;
    rect.left = 0;
    rect.right = GUIGraphicsContext.form.Width;
    try
    {
        graphBuilder = (IGraphBuilder) new FilterGraph();
        Log.Info("RTSPPlayer: add source filter");
        // For video, VMR9 must be in the graph before any rendering happens.
        if (IsRadio == false)
        {
            bool AddVMR9 = VMR9Util.g_vmr9 != null && VMR9Util.g_vmr9.AddVMR9(graphBuilder);
            if (!AddVMR9)
            {
                Log.Error("RTSPPlayer:Failed to add VMR9 to graph");
                return (false);
            }
            VMR9Util.g_vmr9.Enable(false);
        }
        _mpegDemux = (IBaseFilter) new MPEG2Demultiplexer();
        graphBuilder.AddFilter(_mpegDemux, "MPEG-2 Demultiplexer");
        _rtspSource = (IBaseFilter) new RtpSourceFilter();
        int hr = graphBuilder.AddFilter((IBaseFilter)_rtspSource, "RTSP Source Filter");
        if (hr != 0)
        {
            Log.Error("RTSPPlayer:unable to add RTSP source filter:{0:X}", hr);
            return (false);
        }
        // add preferred video & audio codecs
        Log.Info("RTSPPlayer: add video/audio codecs");
        string strVideoCodec = "";
        string strAudioCodec = "";
        string strAudiorenderer = "";
        int intFilters = 0; // FlipGer: count custom filters
        string strFilters = ""; // FlipGer: collect custom filters
        string postProcessingFilterSection = "mytv";
        // Codec/renderer names come from the user's configuration; the section
        // depends on whether we are playing a movie or live TV.
        using (Settings xmlreader = new MPSettings())
        {
            if (_mediaType == g_Player.MediaType.Video)
            {
                strVideoCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2videocodec", "");
                strAudioCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2audiocodec", "");
                strAudiorenderer = xmlreader.GetValueAsString("movieplayer", "audiorenderer", "Default DirectSound Device");
                postProcessingFilterSection = "movieplayer";
            }
            else
            {
                strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
                strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
                strAudiorenderer = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
                postProcessingFilterSection = "mytv";
            }
            enableDvbSubtitles = xmlreader.GetValueAsBool("tvservice", "dvbsubtitles", false);
            // FlipGer: load infos for custom filters ("filter0", "filter1", ... until "undefined")
            int intCount = 0;
            while (xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") != "undefined")
            {
                if (xmlreader.GetValueAsBool(postProcessingFilterSection, "usefilter" + intCount.ToString(), false))
                {
                    strFilters += xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") + ";";
                    intFilters++;
                }
                intCount++;
            }
        }
        // NOTE(review): extension is computed but never used below — confirm dead code.
        string extension = Path.GetExtension(m_strCurrentFile).ToLowerInvariant();
        if (IsRadio == false)
        {
            if (strVideoCodec.Length > 0)
            {
                DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
            }
        }
        if (strAudioCodec.Length > 0)
        {
            DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
        }
        if (enableDvbSubtitles == true)
        {
            try
            {
                _subtitleFilter = SubtitleRenderer.GetInstance().AddSubtitleFilter(graphBuilder);
                SubtitleRenderer.GetInstance().SetPlayer(this);
                dvbSubRenderer = SubtitleRenderer.GetInstance();
            }
            catch (Exception e)
            {
                Log.Error(e);
            }
        }
        Log.Debug("Is subtitle fitler null? {0}", (_subtitleFilter == null));
        // FlipGer: add custom filters to graph
        string[] arrFilters = strFilters.Split(';');
        for (int i = 0; i < intFilters; i++)
        {
            DirectShowUtil.AddFilterToGraph(graphBuilder, arrFilters[i]);
        }
        if (strAudiorenderer.Length > 0)
        {
            audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(graphBuilder, strAudiorenderer, false);
        }
        Log.Info("RTSPPlayer: load:{0}", m_strCurrentFile);
        IFileSourceFilter interfaceFile = (IFileSourceFilter)_rtspSource;
        if (interfaceFile == null)
        {
            Log.Error("RTSPPlayer:Failed to get IFileSourceFilter");
            return (false);
        }
        //Log.Info("RTSPPlayer: open file:{0}",filename);
        hr = interfaceFile.Load(m_strCurrentFile, null);
        if (hr != 0)
        {
            Log.Error("RTSPPlayer:Failed to open file:{0} :0x{1:x}", m_strCurrentFile, hr);
            return (false);
        }

        #region connect rtspsource->demux

        Log.Info("RTSPPlayer:connect rtspsource->mpeg2 demux");
        IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_rtspSource, PinDirection.Output, 0);
        if (pinTsOut == null)
        {
            Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
            return (false);
        }
        IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
        if (pinDemuxIn == null)
        {
            // NOTE(review): message is copy-pasted from the branch above — this is
            // actually the demux INPUT pin that was not found; pinTsOut also leaks here.
            Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
            return (false);
        }
        hr = graphBuilder.Connect(pinTsOut, pinDemuxIn);
        if (hr != 0)
        {
            // NOTE(review): both pins leak on this failure path (released only on success).
            Log.Info("RTSPPlayer:failed to connect rtspsource->mpeg2 demux:{0:X}", hr);
            return (false);
        }
        DirectShowUtil.ReleaseComObject(pinTsOut);
        DirectShowUtil.ReleaseComObject(pinDemuxIn);

        #endregion

        #region render demux output pins

        if (IsRadio)
        {
            // Radio: only render demux output pins that advertise an audio media type.
            Log.Info("RTSPPlayer:render audio demux outputs");
            IEnumPins enumPins;
            _mpegDemux.EnumPins(out enumPins);
            IPin[] pins = new IPin[2];
            int fetched = 0;
            while (enumPins.Next(1, pins, out fetched) == 0)
            {
                if (fetched != 1)
                {
                    break;
                }
                PinDirection direction;
                pins[0].QueryDirection(out direction);
                if (direction == PinDirection.Input)
                {
                    continue;
                }
                IEnumMediaTypes enumMediaTypes;
                pins[0].EnumMediaTypes(out enumMediaTypes);
                AMMediaType[] mediaTypes = new AMMediaType[20];
                int fetchedTypes;
                enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
                for (int i = 0; i < fetchedTypes; ++i)
                {
                    if (mediaTypes[i].majorType == MediaType.Audio)
                    {
                        graphBuilder.Render(pins[0]);
                        break;
                    }
                }
            }
        }
        else
        {
            // Video: render every demux output pin.
            Log.Info("RTSPPlayer:render audio/video demux outputs");
            IEnumPins enumPins;
            _mpegDemux.EnumPins(out enumPins);
            IPin[] pins = new IPin[2];
            int fetched = 0;
            while (enumPins.Next(1, pins, out fetched) == 0)
            {
                if (fetched != 1)
                {
                    break;
                }
                PinDirection direction;
                pins[0].QueryDirection(out direction);
                if (direction == PinDirection.Input)
                {
                    continue;
                }
                graphBuilder.Render(pins[0]);
            }
        }

        #endregion

        // Connect DVB subtitle filter pins in the graph:
        // create Pcr/Subtitle/PMT output pins on the demux and wire each to the
        // matching input pin on the subtitle filter. Failures are logged only.
        if (_mpegDemux != null && enableDvbSubtitles == true)
        {
            IMpeg2Demultiplexer demuxer = _mpegDemux as IMpeg2Demultiplexer;
            hr = demuxer.CreateOutputPin(GetTSMedia(), "Pcr", out _pinPcr);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinPcr OK");
                IPin pDemuxerPcr = DsFindPin.ByName(_mpegDemux, "Pcr");
                IPin pSubtitlePcr = DsFindPin.ByName(_subtitleFilter, "Pcr");
                hr = graphBuilder.Connect(pDemuxerPcr, pSubtitlePcr);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinPcr in demuxer:{0:X}", hr);
            }
            hr = demuxer.CreateOutputPin(GetTSMedia(), "Subtitle", out _pinSubtitle);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinSubtitle OK");
                IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "Subtitle");
                IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "In");
                hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinSubtitle in demuxer:{0:X}", hr);
            }
            hr = demuxer.CreateOutputPin(GetTSMedia(), "PMT", out _pinPMT);
            if (hr == 0)
            {
                Log.Info("RTSPPlayer:_pinPMT OK");
                IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "PMT");
                IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "PMT");
                hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
            }
            else
            {
                Log.Info("RTSPPlayer:Failed to create _pinPMT in demuxer:{0:X}", hr);
            }
        }
        if (IsRadio == false)
        {
            if (!VMR9Util.g_vmr9.IsVMR9Connected)
            {
                //VMR9 is not supported, switch to overlay
                Log.Info("RTSPPlayer: vmr9 not connected");
                _mediaCtrl = null;
                Cleanup();
                return (false);
            }
            VMR9Util.g_vmr9.SetDeinterlaceMode();
        }
        // All of these are additional interface views of the same filter-graph object.
        _mediaCtrl = (IMediaControl)graphBuilder;
        mediaEvt = (IMediaEventEx)graphBuilder;
        _mediaSeeking = (IMediaSeeking)graphBuilder;
        mediaPos = (IMediaPosition)graphBuilder;
        basicAudio = graphBuilder as IBasicAudio;
        //DirectShowUtil.SetARMode(graphBuilder,AspectRatioMode.Stretched);
        DirectShowUtil.EnableDeInterlace(graphBuilder);
        if (VMR9Util.g_vmr9 != null)
        {
            m_iVideoWidth = VMR9Util.g_vmr9.VideoWidth;
            m_iVideoHeight = VMR9Util.g_vmr9.VideoHeight;
        }
        // Prefer the audio renderer as the graph reference clock when one was added.
        if (audioRendererFilter != null)
        {
            Log.Info("RTSPPlayer9:set reference clock");
            IMediaFilter mp = graphBuilder as IMediaFilter;
            IReferenceClock clock = audioRendererFilter as IReferenceClock;
            hr = mp.SetSyncSource(null);
            hr = mp.SetSyncSource(clock);
            Log.Info("RTSPPlayer9:set reference clock:{0:X}", hr);
        }
        Log.Info("RTSPPlayer: graph build successfull");
        return (true);
    }
    catch (Exception ex)
    {
        Error.SetError("Unable to play movie", "Unable build graph for VMR9");
        Log.Error("RTSPPlayer:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
        CloseInterfaces();
        return (false);
    }
}
/// <summary>
/// Recursively reconnects all connected output pins of <paramref name="filter"/>
/// (and of every downstream filter) through the graph builder. Used to force the
/// graph to renegotiate connections after a format change.
/// </summary>
/// <param name="graphBuilder">IGraphBuilder that owns the graph.</param>
/// <param name="filter">Current IBaseFilter in graph to start from.</param>
static void ReConnectAll(IGraphBuilder graphBuilder, IBaseFilter filter)
{
    IEnumPins pinEnum;
    FilterInfo info = FilterGraphTools.QueryFilterInfoAndFree(filter);
    int hr = filter.EnumPins(out pinEnum);
    if ((hr != 0) || (pinEnum == null))
    {
        // Nothing to enumerate; note that nothing has been allocated yet.
        return;
    }
    ServiceRegistration.Get <ILogger>().Info("got pins");
    // FIX: allocate after the enum-pins check and free in a finally block —
    // previously this buffer leaked whenever EnumPins failed or the loop threw.
    IntPtr ptrFetched = Marshal.AllocCoTaskMem(4);
    try
    {
        IPin[] pins = new IPin[1];
        int iFetched;
        int iPinNo = 0;
        do
        {
            // Get the next pin
            iPinNo++;
            hr = pinEnum.Next(1, pins, ptrFetched);
            // In case of error stop the pin enumeration
            if (hr != 0)
            {
                break;
            }
            iFetched = Marshal.ReadInt32(ptrFetched);
            if (iFetched == 1 && pins[0] != null)
            {
                PinInfo pinInfo;
                hr = pins[0].QueryPinInfo(out pinInfo);
                if (hr == 0)
                {
                    ServiceRegistration.Get <ILogger>().Info("  got pin#{0}:{1}", iPinNo - 1, pinInfo.name);
                    FilterGraphTools.FreePinInfo(pinInfo);
                }
                else
                {
                    ServiceRegistration.Get <ILogger>().Info("  got pin:?");
                }
                PinDirection pinDir;
                pins[0].QueryDirection(out pinDir);
                if (pinDir == PinDirection.Output)
                {
                    IPin other;
                    hr = pins[0].ConnectedTo(out other);
                    if (hr == 0 && other != null)
                    {
                        // Reconnect this output pin, then recurse into the filter
                        // on the other side of the connection.
                        ServiceRegistration.Get <ILogger>().Info("Reconnecting {0}:{1}", info.achName, pinInfo.name);
                        hr = graphBuilder.Reconnect(pins[0]);
                        if (hr != 0)
                        {
                            ServiceRegistration.Get <ILogger>().Warn("Reconnect failed: {0}:{1}, code: 0x{2:x}", info.achName, pinInfo.name, hr);
                        }
                        PinInfo otherPinInfo;
                        other.QueryPinInfo(out otherPinInfo);
                        ReConnectAll(graphBuilder, otherPinInfo.filter);
                        FilterGraphTools.FreePinInfo(otherPinInfo);
                        Marshal.ReleaseComObject(other);
                    }
                }
                Marshal.ReleaseComObject(pins[0]);
            }
            else
            {
                ServiceRegistration.Get <ILogger>().Info("no pins?");
                break;
            }
        }
        while (iFetched == 1);
    }
    finally
    {
        FilterGraphTools.TryRelease(ref pinEnum);
        Marshal.FreeCoTaskMem(ptrFetched);
    }
}
/// <summary>
/// Applies the desired media subtype (and any configured frame rate / width /
/// height overrides from the camFPS/camWidth/camHeight fields) to the first
/// output pin of the given capture device via IAMStreamConfig.
/// </summary>
/// <param name="dsDevice">The capture device whose format is changed.</param>
/// <param name="formatGUID">The media subtype GUID to request.</param>
public void SetVideoFormat(DsDevice dsDevice, Guid formatGUID)
{
    int hr;
    IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter capFilter = null;
    IPin pPin = null;
    AMMediaType mediaType = null;
    try
    {
        // add the video input device
        hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        pPin = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
        IAMStreamConfig videoStreamConfig = pPin as IAMStreamConfig;
        // FIX: was an unchecked cast-and-dereference; a pin without
        // IAMStreamConfig previously raised NullReferenceException.
        if (videoStreamConfig == null)
        {
            throw new InvalidOperationException("Output pin does not support IAMStreamConfig");
        }

        // Get the existing format block.
        hr = videoStreamConfig.GetFormat(out mediaType);
        DsError.ThrowExceptionForHR(hr);

        // Copy out the VIDEOINFOHEADER so we can edit it.
        VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(mediaType.formatPtr, videoInfoHeader);

        // Apply optional overrides from the instance configuration.
        if (camFPS > 0)
        {
            // AvgTimePerFrame is in 100 ns units.
            videoInfoHeader.AvgTimePerFrame = 10000000 / camFPS;
        }
        if (camWidth > 0)
        {
            videoInfoHeader.BmiHeader.Width = camWidth;
        }
        if (camHeight > 0)
        {
            videoInfoHeader.BmiHeader.Height = camHeight;
        }

        // Copy the edited structure back and set the requested subtype.
        Marshal.StructureToPtr(videoInfoHeader, mediaType.formatPtr, false);
        mediaType.subType = formatGUID;

        // Set the new format.
        hr = videoStreamConfig.SetFormat(mediaType);
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // FIX: the media type returned by GetFormat was previously leaked when
        // any call above threw; free it on every path.
        if (mediaType != null)
        {
            DsUtils.FreeAMMediaType(mediaType);
            mediaType = null;
        }
        // FIX: ReleaseComObject(null) throws ArgumentNullException — guard it,
        // and also release the filter and graph that were previously leaked.
        if (pPin != null)
        {
            Marshal.ReleaseComObject(pPin);
            pPin = null;
        }
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
        }
    }
}
/// <summary>
/// Background worker: builds a graph (Windows Media source -> SampleGrabber ->
/// renderer), runs it, and polls for media events every 100 ms until either the
/// stop event is signalled or the stream completes. Frames are delivered to the
/// Grabber callback. On any exception the outer loop exits; all COM objects are
/// released in the finally block on every iteration.
/// </summary>
public void WorkerThread()
{
    bool flag = false; // set to true on error to leave the outer retry loop
    Grabber pCallback = new Grabber(this);
    object o = null;      // filter graph RCW
    object obj3 = null;   // WM source RCW
    object obj4 = null;   // sample grabber RCW
    IGraphBuilder builder = null;
    IBaseFilter pFilter = null;
    IBaseFilter filter2 = null;
    ISampleGrabber grabber2 = null;
    IFileSourceFilter filter3 = null;
    IMediaControl control = null;
    IMediaEventEx ex = null;
    while (!flag && !this.stopEvent.WaitOne(0, true))
    {
        try
        {
            try
            {
                // Instantiate the three COM objects by CLSID.
                Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }
                o = Activator.CreateInstance(typeFromCLSID);
                builder = (IGraphBuilder)o;
                typeFromCLSID = Type.GetTypeFromCLSID(Clsid.WindowsMediaSource);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating WM source");
                }
                obj3 = Activator.CreateInstance(typeFromCLSID);
                pFilter = (IBaseFilter)obj3;
                typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }
                obj4 = Activator.CreateInstance(typeFromCLSID);
                grabber2 = (ISampleGrabber)obj4;
                filter2 = (IBaseFilter)obj4;
                builder.AddFilter(pFilter, "source");
                builder.AddFilter(filter2, "grabber");
                // Request RGB24 video from the grabber.
                AMMediaType pmt = new AMMediaType
                {
                    majorType = MediaType.Video,
                    subType = MediaSubType.RGB24
                };
                grabber2.SetMediaType(pmt);
                filter3 = (IFileSourceFilter)obj3;
                filter3.Load(this.source, null);
                // Connect source output -> grabber input.
                if (builder.Connect(DSTools.GetOutPin(pFilter, 0), DSTools.GetInPin(filter2, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }
                // Read the negotiated frame size so the callback knows the dimensions.
                if (grabber2.GetConnectedMediaType(pmt) == 0)
                {
                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(pmt.formatPtr, typeof(VideoInfoHeader));
                    pCallback.Width = header.BmiHeader.Width;
                    pCallback.Height = header.BmiHeader.Height;
                    pmt.Dispose();
                }
                builder.Render(DSTools.GetOutPin(filter2, 0));
                grabber2.SetBufferSamples(false);
                grabber2.SetOneShot(false);
                grabber2.SetCallback(pCallback, 1); // 1 = BufferCB mode
                // Hide the video window the Render call may have created.
                IVideoWindow window = (IVideoWindow)o;
                window.put_AutoShow(false);
                window = null;
                ex = (IMediaEventEx)o;
                control = (IMediaControl)o;
                control.Run();
                // Poll for media events until stop is requested or the stream ends.
                while (!this.stopEvent.WaitOne(0, true))
                {
                    int num;   // event code
                    int num2;  // event param 1
                    int num3;  // event param 2
                    Thread.Sleep(100);
                    if (ex.GetEvent(out num, out num2, out num3, 0) == 0)
                    {
                        ex.FreeEventParams(num, num2, num3);
                        // Event code 1 ends playback — presumably EC_COMPLETE; confirm.
                        if (num == 1)
                        {
                            break;
                        }
                    }
                }
                control.StopWhenReady();
            }
            catch (Exception)
            {
                // Any failure aborts the outer retry loop.
                flag = true;
            }
            continue;
        }
        finally
        {
            // Clear the interface views first, then release the three RCWs that
            // actually own the COM references.
            ex = null;
            control = null;
            filter3 = null;
            builder = null;
            pFilter = null;
            filter2 = null;
            grabber2 = null;
            if (o != null)
            {
                Marshal.ReleaseComObject(o);
                o = null;
            }
            if (obj3 != null)
            {
                Marshal.ReleaseComObject(obj3);
                obj3 = null;
            }
            if (obj4 != null)
            {
                Marshal.ReleaseComObject(obj4);
                obj4 = null;
            }
        }
    }
}
/// <summary>
/// Wraps the supplied DirectShow filter, performs the common initialisation,
/// and assigns a display name.
/// </summary>
/// <param name="baseFilter">The filter this instance wraps.</param>
/// <param name="name">Display name for the wrapped filter.</param>
public BaseFilterEx(IBaseFilter baseFilter, string name) : this(baseFilter)
{
    Name = name;
}
/// <summary>
/// Tears down the RTSP playback graph: stops the VMR9 renderer, detaches the
/// event-notification window, removes and releases every filter, and finally
/// switches the GUI back to windowed mode. Does nothing when there is no graph
/// or the VMR9 is already in the middle of stopping.
/// </summary>
private void Cleanup()
{
    if (graphBuilder == null || (VMR9Util.g_vmr9 != null && VMR9Util.g_vmr9.isCurrentStopping))
    {
        return;
    }
    int hr;
    Log.Info("RTSPPlayer:cleanup DShow graph");
    try
    {
        // Stop media control through the VMR9 helper and disable rendering.
        if (VMR9Util.g_vmr9 != null)
        {
            VMR9Util.g_vmr9.Vmr9MediaCtrl(_mediaCtrl);
            VMR9Util.g_vmr9.Enable(false);
        }
        // Stop graph-event notifications to our window.
        if (mediaEvt != null)
        {
            hr = mediaEvt.SetNotifyWindow(IntPtr.Zero, WM_GRAPHNOTIFY, IntPtr.Zero);
        }
        // Detach the video window from the GUI (not needed for madVR).
        videoWin = graphBuilder as IVideoWindow;
        if (videoWin != null && GUIGraphicsContext.VideoRenderer != GUIGraphicsContext.VideoRendererType.madVR)
        {
            videoWin.put_Owner(IntPtr.Zero);
            videoWin.put_Visible(OABool.False);
        }
        // These are all interface views of graphBuilder; clearing the
        // references is sufficient, the graph itself is released below.
        _mediaCtrl = null;
        mediaEvt = null;
        _mediaSeeking = null;
        mediaPos = null;
        basicAudio = null;
        basicVideo = null;
        videoWin = null;
        SubEngine.GetInstance().FreeSubtitles();
        if (graphBuilder != null)
        {
            DirectShowUtil.RemoveFilters(graphBuilder);
            if (_rotEntry != null)
            {
                _rotEntry.SafeDispose();
                _rotEntry = null;
            }
            DirectShowUtil.FinalReleaseComObject(graphBuilder);
            graphBuilder = null;
        }
        if (VMR9Util.g_vmr9 != null)
        {
            VMR9Util.g_vmr9.SafeDispose();
            VMR9Util.g_vmr9 = null;
        }
        GUIGraphicsContext.form.Invalidate(true);
        _state = PlayState.Init;
        // Release the filters we created ourselves.
        if (_mpegDemux != null)
        {
            Log.Info("cleanup mpegdemux");
            DirectShowUtil.FinalReleaseComObject(_mpegDemux);
            _mpegDemux = null;
        }
        if (_rtspSource != null)
        {
            Log.Info("cleanup _rtspSource");
            DirectShowUtil.FinalReleaseComObject(_rtspSource);
            _rtspSource = null;
        }
        if (_subtitleFilter != null)
        {
            DirectShowUtil.FinalReleaseComObject(_subtitleFilter);
            _subtitleFilter = null;
            if (this.dvbSubRenderer != null)
            {
                this.dvbSubRenderer.SetPlayer(null);
            }
            this.dvbSubRenderer = null;
        }
        if (vobSub != null)
        {
            Log.Info("cleanup vobSub");
            DirectShowUtil.FinalReleaseComObject(vobSub);
            vobSub = null;
        }
    }
    catch (Exception ex)
    {
        // Best effort: restore the GUI for madVR and drop the VMR9 anyway.
        if (VMR9Util.g_vmr9 != null)
        {
            VMR9Util.g_vmr9.RestoreGuiForMadVr();
            VMR9Util.g_vmr9.SafeDispose();
        }
        Log.Error("RTSPPlayer: Exception while cleanuping DShow graph - {0} {1}", ex.Message, ex.StackTrace);
    }
    //switch back to directx windowed mode
    Log.Info("RTSPPlayer: Disabling DX9 exclusive mode");
    GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 0, 0, null);
    GUIWindowManager.SendMessage(msg);
}
/// <summary>
/// Constructs a <see cref="GraphRebuilder"/> instance.
/// </summary>
/// <param name="graphBuilder">The IGraphBuilder that originally created the graph.</param>
/// <param name="fileSource">The input source filter (usually IFileSourceFilter).</param>
/// <param name="afterRebuild">Action to be invoked once a rebuild is done.</param>
public GraphRebuilder(IGraphBuilder graphBuilder, IBaseFilter fileSource, Action afterRebuild)
{
    _afterRebuild = afterRebuild;
    _fileSource = fileSource;
    _graphBuilder = graphBuilder;
}
/// <summary>
/// Completely tears down the filter graph and releases every associated
/// COM resource. Safe to call even when the graph is partially built.
/// </summary>
void DestroyGraph()
{
    // Derendering stops the graph and releases the preview window; it also
    // destroys half of the graph, which is unnecessary but harmless here.
    // Errors are deliberately ignored.
    try
    {
        DerenderGraph();
    }
    catch
    {
    }

    // Reset the state as early as possible: later code depends on it being
    // correct, and we want it updated even if a release below fails.
    _actualGraphState = GraphState.Null;
    _isPreviewRendered = false;

    // Remove our filters from the graph before releasing it. Normally this
    // is redundant, but the Nvidia WDM video driver cannot be reused by this
    // application unless it is explicitly removed. Errors ignored.
    if (_graphBuilder != null)
    {
        if (_videoCompressorFilter != null)
        {
            _graphBuilder.RemoveFilter(_videoCompressorFilter);
        }
        if (_videoDeviceFilter != null)
        {
            _graphBuilder.RemoveFilter(_videoDeviceFilter);
        }

        Marshal.ReleaseComObject(_graphBuilder);
        _graphBuilder = null;
    }

    if (_captureGraphBuilder != null)
    {
        Marshal.ReleaseComObject(_captureGraphBuilder);
    }
    _captureGraphBuilder = null;

    if (_videoDeviceFilter != null)
    {
        Marshal.ReleaseComObject(_videoDeviceFilter);
    }
    _videoDeviceFilter = null;

    if (_videoCompressorFilter != null)
    {
        Marshal.ReleaseComObject(_videoCompressorFilter);
    }
    _videoCompressorFilter = null;

    // These interfaces are copies of graphBuilder — no explicit release.
    _mediaControl = null;
    _videoWindow = null;

    // Collect any unmanaged objects we have not released explicitly.
    GC.Collect();
}
/// <summary>
/// When a video source is selected, enumerates that device's supported stream
/// capabilities via IAMStreamConfig and fills the resolution combo box with the
/// available modes, restoring the previously saved selection if any.
/// </summary>
private void CMB_videosources_SelectedIndexChanged(object sender, EventArgs e)
{
    if (MainV2.MONO)
    {
        return;
    }
    int hr;
    int count;
    int size;
    object o;
    IBaseFilter capFilter = null;
    ICaptureGraphBuilder2 capGraph = null;
    AMMediaType media = null;
    VideoInfoHeader v;
    VideoStreamConfigCaps c;
    var modes = new List <GCSBitmapInfo>();

    // Get the ICaptureGraphBuilder2
    capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    var m_FilterGraph = (IFilterGraph2) new FilterGraph();
    // NOTE(review): capGraph, m_FilterGraph and capFilter are never released
    // in this handler — COM references accumulate on every selection change.
    DsDevice[] capDevices;
    capDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);

    // Add the video device
    hr = m_FilterGraph.AddSourceFilterForMoniker(capDevices[CMB_videosources.SelectedIndex].Mon, null, "Video input", out capFilter);
    try
    {
        DsError.ThrowExceptionForHR(hr);
    }
    catch (Exception ex)
    {
        CustomMessageBox.Show("Can not add video source\n" + ex);
        return;
    }

    // Find the stream config interface
    hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);
    DsError.ThrowExceptionForHR(hr);
    var videoStreamConfig = o as IAMStreamConfig;
    if (videoStreamConfig == null)
    {
        CustomMessageBox.Show("Failed to get IAMStreamConfig");
        return;
    }

    hr = videoStreamConfig.GetNumberOfCapabilities(out count, out size);
    DsError.ThrowExceptionForHR(hr);
    // Scratch buffer for the VideoStreamConfigCaps each GetStreamCaps call fills.
    var TaskMemPointer = Marshal.AllocCoTaskMem(size);
    for (var i = 0; i < count; i++)
    {
        var ptr = IntPtr.Zero; // NOTE(review): unused local
        // NOTE(review): every 'media' returned here except the last is leaked,
        // and the HRESULT is not checked before dereferencing formatPtr.
        hr = videoStreamConfig.GetStreamCaps(i, out media, TaskMemPointer);
        v = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        c = (VideoStreamConfigCaps)Marshal.PtrToStructure(TaskMemPointer, typeof(VideoStreamConfigCaps));
        modes.Add(new GCSBitmapInfo(v.BmiHeader.Width, v.BmiHeader.Height, c.MaxFrameInterval, c.VideoStandard.ToString(), media));
    }
    Marshal.FreeCoTaskMem(TaskMemPointer);
    // NOTE(review): this frees the last 'media', which is still referenced by
    // the last GCSBitmapInfo in 'modes' — verify GCSBitmapInfo copies what it needs.
    DsUtils.FreeAMMediaType(media);

    CMB_videoresolutions.DataSource = modes;

    // Restore the previously saved resolution selection, if any.
    if (Settings.Instance["video_options"] != "" && CMB_videosources.Text != "")
    {
        try
        {
            CMB_videoresolutions.SelectedIndex = Settings.Instance.GetInt32("video_options");
        }
        catch
        {
        } // ignore bad entries
    }
}
/// <summary>
/// Shuts down capture: frees the unmanaged sample buffer, stops the graph,
/// and releases every DirectShow object this instance owns.
/// </summary>
private void CloseInterfaces()
{
    // Free the unmanaged buffer first (best effort, errors ignored).
    try
    {
        if (m_handle != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(m_handle);
            m_handle = IntPtr.Zero;
        }
    }
    catch (Exception)
    {
    }

    // Stop playback before any teardown.
    try
    {
        if (mediaControl != null)
        {
            int hr = mediaControl.Stop();
            mediaControl = null;
        }
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex);
    }

#if DEBUG
    if (m_rot != null)
    {
        m_rot.Dispose();
    }
#endif

    // These are interface views of the graph — clearing the reference suffices.
    if (this.mediaSeeking != null)
    {
        this.mediaSeeking = null;
    }
    if (this.frameStep != null)
    {
        this.frameStep = null;
    }

    // Release the COM objects we created ourselves.
    if (this.sampleGrabber != null)
    {
        Marshal.ReleaseComObject(this.sampleGrabber);
        this.sampleGrabber = null;
    }
    if (nullrenderer != null)
    {
        Marshal.ReleaseComObject(nullrenderer);
        nullrenderer = null;
    }
    if (graphBuilder != null)
    {
        Marshal.ReleaseComObject(graphBuilder);
        graphBuilder = null;
    }

    // Encourage prompt finalization of any remaining RCWs.
    GC.Collect();
}