/// <summary>
/// Creates the filter: instantiates a new SampleGrabber, configures it and
/// wires the capture callback.
/// </summary>
/// <returns>New filter instance, exposed as <see cref="IBaseFilter"/>.</returns>
public IBaseFilter Build()
{
    ISampleGrabber grabber = (ISampleGrabber)new SampleGrabber();
    sampleGrabber = grabber;

    // Apply media-type/buffering configuration, then route samples to the callback.
    ConfigSampleGrabber(grabber);
    grabber.SetCallback(filter, SampleGrabberCallbackMethod);

    return (IBaseFilter)grabber;
}
/// <summary>
/// Default constructor for <see cref="SampleGrabberHelper"/> class.
/// </summary>
/// <param name="sampleGrabber">Pointer to COM-interface ISampleGrabber.</param>
/// <param name="buffer_samples_of_current_frame">Flag means should helper store (buffer) samples of current frame or not.</param>
public SampleGrabberHelper(ISampleGrabber sampleGrabber, bool buffer_samples_of_current_frame)
{
    m_bBufferSamplesOfCurrentFrame = buffer_samples_of_current_frame;
    m_SampleGrabber = sampleGrabber;

    // Start unsignaled so the grabber callback ignores incoming images
    // until a picture is explicitly requested.
    m_PictureReady = new ManualResetEvent(false);
}
/// <summary>
/// Stops the graph and releases every DirectShow interface held by this
/// instance. All failures are deliberately ignored: teardown is best-effort.
/// </summary>
public void CloseInterfaces()
{
    int hr;
    try
    {
        // Stop playback first so filters are idle before release.
        if (mediaCtrl != null)
        {
            hr = mediaCtrl.Stop();
            mediaCtrl = null;
        }

        // Hide the video window and detach it from its owner window
        // before dropping the reference.
        if (videoWin != null)
        {
            hr = videoWin.put_Visible(DsHlp.OAFALSE);
            hr = videoWin.put_Owner(IntPtr.Zero);
            videoWin = null;
        }

        // baseGrabFlt is only cleared, not released — presumably it is the
        // same COM object as sampGrabber (released below); verify against
        // the code that created it.
        baseGrabFlt = null;
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
        }
        sampGrabber = null;

        if (capGraph != null)
        {
            Marshal.ReleaseComObject(capGraph);
        }
        capGraph = null;

        if (graphBuilder != null)
        {
            Marshal.ReleaseComObject(graphBuilder);
        }
        graphBuilder = null;

        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        capFilter = null;

        // Dispose the enumerated capture devices as well.
        if (capDevices != null)
        {
            foreach (DsDevice d in capDevices)
            {
                d.Dispose();
            }
            capDevices = null;
        }
    }
    catch (Exception) {} // best-effort cleanup: ignore any failure
}
/// <summary>
/// Builds the capture graph: creates the filter graph and capture-graph
/// builder, adds the device's source filter and a SampleGrabber, applies any
/// requested frame-rate/size configuration, renders the capture stream
/// through the grabber, and records the negotiated size information.
/// </summary>
private void InitCaptureGraph()
{
    mGraphBuilder = (IFilterGraph2) new FilterGraph();
    mMediaControl = (IMediaControl)mGraphBuilder;

    ISampleGrabber sampleGrabber = null;
    IBaseFilter captureFilter = null;
    ICaptureGraphBuilder2 captureGraph = null;
    try
    {
        captureGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
        sampleGrabber = (ISampleGrabber) new SampleGrabber();

        // Attach the capture-graph builder to the filter graph.
        int hr = captureGraph.SetFiltergraph(mGraphBuilder);
        DsError.ThrowExceptionForHR(hr);

        // Add the selected capture device as the video source.
        hr = mGraphBuilder.AddSourceFilterForMoniker(mDevice.Mon, null, "Video input", out captureFilter);
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter baseGrabberFilter = (IBaseFilter)sampleGrabber;
        ConfigureSampleGrabber(sampleGrabber);

        hr = mGraphBuilder.AddFilter(baseGrabberFilter, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        // Only touch the stream configuration when the caller requested a
        // specific frame rate or frame size (all three default to 0 otherwise).
        if (mFrameRate + mHeight + mWidth > 0)
        {
            InitConfigParams(captureGraph, captureFilter);
        }

        // Connect capture pin -> sample grabber (no explicit renderer given).
        hr = captureGraph.RenderStream(PinCategory.Capture, MediaType.Video, captureFilter, null, baseGrabberFilter);
        DsError.ThrowExceptionForHR(hr);

        // Record the connected format details from the grabber (see SaveSizeInfo).
        SaveSizeInfo(sampleGrabber);
    }
    finally
    {
        // Release the local COM references; the graph keeps its own.
        if (captureFilter != null)
        {
            Marshal.ReleaseComObject(captureFilter);
            captureFilter = null;
        }
        if (sampleGrabber != null)
        {
            Marshal.ReleaseComObject(sampleGrabber);
            sampleGrabber = null;
        }
        if (captureGraph != null)
        {
            Marshal.ReleaseComObject(captureGraph);
            captureGraph = null;
        }
    }
}
/// <summary>
/// Opens the media file, builds a graph with a SampleGrabber, reads the
/// stream's dimensions/FPS/duration into _mediaInfo, then snapshots one
/// frame per frame-interval across the whole duration into _frames,
/// reporting progress through ReportProgressHandler.
/// </summary>
private void Grab()
{
    try
    {
        _mediaInfo = new MediaInfo();
        double fps, length;

        // MediaDet supplies frame rate and stream length up front.
        _mediaDet = (IMediaDet) new MediaDet();
        _mediaDet.put_Filename(_fileName);
        _mediaDet.get_FrameRate(out fps);
        _mediaDet.get_StreamLength(out length);

        _graphBuilder = (IGraphBuilder) new FilterGraph();
        _sampleGrabber = (ISampleGrabber) new SampleGrabber();
        ConfigSampleGrabber(this._sampleGrabber, fps, length);
        this._graphBuilder.AddFilter((IBaseFilter)_sampleGrabber, "SampleGrabber");
        DsError.ThrowExceptionForHR(this._graphBuilder.RenderFile(_fileName, null));
        _basicVideo = this._graphBuilder as IBasicVideo;

        // The grabber must have connected with a VIDEOINFOHEADER format,
        // otherwise the frame buffer cannot be interpreted below.
        AMMediaType media = new AMMediaType();
        this._sampleGrabber.GetConnectedMediaType(media);
        if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
        {
            throw new Exception("Format type incorrect");
        }

        // Seconds between two consecutive frames.
        double interval = 1 / fps;
        int videoWidth, videoHeight, videoStride;
        this._basicVideo.GetVideoSize(out videoWidth, out videoHeight);
        VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        // Stride = width * bytes-per-pixel; assumes rows are not padded —
        // TODO confirm for widths that are not a multiple of 4.
        videoStride = videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);

        this._mediaInfo.FPS = fps;
        this._mediaInfo.Duration = length;
        this._mediaInfo.MediaWidth = videoWidth;
        this._mediaInfo.MediaHeight = videoHeight;
        this._mediaInfo.MediaStride = videoStride;
        this._mediaInfo.MediaBitCount = videoInfoHeader.BmiHeader.BitCount;
        DsUtils.FreeAMMediaType(media);
        media = null;

        // Walk the stream one frame-interval at a time, snapshotting each frame.
        for (double i = 0; i < length; i = i + interval)
        {
            Bitmap bitmap = SnapShot(i);
            _frames.Add(new Frames(i, bitmap, i.ToString()));
            if (ReportProgressHandler != null)
            {
                ReportProgressHandler(i);
            }
        }
    }
    // NOTE(review): every failure (bad file, fps == 0 making interval
    // infinite, snapshot errors) is silently swallowed here, and _mediaDet
    // is never released — consider logging plus ReleaseComObject in a finally.
    catch (Exception ee) { }
}
/// <summary>
/// Creates the filter graph and the SampleGrabber (restricted to RGB24
/// video), adds the grabber to the graph, and caches the control/event
/// interfaces. Only runs when the graph has not been created yet; sets
/// graphState to Created on success.
/// </summary>
protected void createGraph()
{
    System.Type typeFromCLSID = null;
    object obj2 = null;
    if (this.graphState < GraphState.Created)
    {
        // Encourage cleanup of any previously released graph before rebuilding.
        GC.Collect();

        this.graphBuilder = (IGraphBuilder)Activator.CreateInstance(System.Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

        typeFromCLSID = System.Type.GetTypeFromCLSID(Clsid.SampleGrabber, true);
        if (typeFromCLSID == null)
        {
            throw new NotImplementedException("DirectShow SampleGrabber not installed/registered");
        }
        obj2 = Activator.CreateInstance(typeFromCLSID);
        this.sampGrabber = (ISampleGrabber)obj2;
        obj2 = null;

        // Restrict the grabber to RGB24 video frames.
        AMMediaType pmt = new AMMediaType();
        pmt.majorType = MediaType.Video;
        pmt.subType = MediaSubType.RGB24;
        pmt.formatType = FormatType.VideoInfo;
        int errorCode = this.sampGrabber.SetMediaType(pmt);
        if (errorCode < 0)
        {
            Marshal.ThrowExceptionForHR(errorCode);
        }

        this.mediaEvt = (IMediaEventEx)this.graphBuilder;
        this.baseGrabFlt = (IBaseFilter)this.sampGrabber;
        errorCode = this.graphBuilder.AddFilter(this.baseGrabFlt, "DS.NET Grabber");
        if (errorCode < 0)
        {
            Marshal.ThrowExceptionForHR(errorCode);
        }

        // NOTE(review): the remainder looks like the decompiled residue of
        // FindInterface calls — the Guid locals are never used, obj3 is never
        // assigned, so audioStreamConfig always ends up null. Verify against
        // the original implementation before relying on audio configuration.
        Guid capture = PinCategory.Capture;
        Guid interleaved = MediaType.Interleaved;
        Guid gUID = typeof(IAMStreamConfig).GUID;
        if (errorCode != 0)
        {
            Guid video = MediaType.Video;
        }
        object obj3 = null;
        Guid guid5 = PinCategory.Capture;
        Guid audio = MediaType.Audio;
        Guid guid7 = typeof(IAMStreamConfig).GUID;
        this.audioStreamConfig = obj3 as IAMStreamConfig;
        this.mediaControl = (IMediaControl)this.graphBuilder;
        this.videoCaps = null;
        this.audioCaps = null;
        obj3 = null;
        Guid guid8 = PinCategory.Capture;
        Guid guid9 = MediaType.Interleaved;
        Guid guid10 = typeof(IAMTVTuner).GUID;

        this.graphState = GraphState.Created;
    }
}
/// <summary>
/// Stops the sample-analyzer thread, removes the audio sample grabber from
/// the graph and releases it, then clears all analysis buffers (VU meter,
/// waveform, spectrogram) and resets the configured state.
/// </summary>
protected void ReleaseAudioSampleGrabber()
{
    try
    {
        if (sampleAnalyzerMustStop != null)
        {
            sampleAnalyzerMustStop.Set(); // This will cause the thread to stop
        }
        if (sampleAnalyzerThread != null)
        {
            // Give the analyzer a short grace period; never block forever.
            sampleAnalyzerThread.Join(200);
        }

        IBaseFilter filter = sampleGrabber as IBaseFilter;
        if (filter != null)
        {
            // Detach the grabber from the graph before releasing it.
            IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
            if (graphBuilder != null)
            {
                int hr = graphBuilder.RemoveFilter(filter);
                DsError.ThrowExceptionForHR(hr);
            }

            Marshal.ReleaseComObject(filter);
            sampleGrabber = null;
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex);
    }

    // Remove the graph from the Running Object Table (debug aid).
    if (rotEntry != null)
    {
        rotEntry.Dispose();
        rotEntry = null;
    }

    // Drop analysis data under the corresponding locks so concurrent
    // readers never observe a torn state.
    lock (_vuLock) { _vuMeterData = null; }
    lock (_waveformLock) { _waveformData = null; }
    lock (_spectrogramLock) { _spectrogramData = null; }

    _actualAudioFormat = null;
    sampleGrabberConfigured.Reset();
}
/// <summary>
/// Builds the DirectShow capture graph for the first enumerated video input
/// device: allocates the frame buffers and texture, configures a
/// SampleGrabber for RGB24 video with BufferCB callbacks into this instance,
/// renders the preview stream, starts the update thread and runs the graph.
/// </summary>
protected void Initialize()
{
    FrameReady = false;

    // CPU-side buffers for the frame formats this component exposes.
    frame = new Texture2D(GraphicsDevice, Width, Height, false, SurfaceFormat.Color);
    FrameBGR = new byte[(Width * Height) * 3];
    FrameRGBA = new byte[(Width * Height) * 4];
    FrameGrayscale = new byte[(Width * Height)];
    FrameHalfGrayscale = new byte[(Width / 2 * Height / 2)];
    FrameQuarterGrayscale = new byte[(Width / 4 * Height / 4)];

    GraphBuilder = (IGraphBuilder) new FilterGraph();
    CaptureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    MediaControl = (IMediaControl)GraphBuilder;
    CaptureGraphBuilder.SetFiltergraph(GraphBuilder);

    // Locate the capture device whose enumeration result matches DEVICE_ID.
    object VideoInputObject = null;
    IBaseFilter VideoInput = null;
    IEnumMoniker classEnum;
    ICreateDevEnum devEnum = (ICreateDevEnum) new CreateDevEnum();
    devEnum.CreateClassEnumerator(FilterCategory.VideoInputDevice, out classEnum, 0);
    Marshal.ReleaseComObject(devEnum);
    if (classEnum != null)
    {
        IMoniker[] moniker = new IMoniker[1];
        if (classEnum.Next(moniker.Length, moniker, IntPtr.Zero) == DEVICE_ID)
        {
            Guid iid = typeof(IBaseFilter).GUID;
            moniker[0].BindToObject(null, null, ref iid, out VideoInputObject);
        }
        // NOTE(review): if Next() produced no moniker, moniker[0] is null and
        // this ReleaseComObject call throws — confirm a device always exists.
        Marshal.ReleaseComObject(moniker[0]);
        Marshal.ReleaseComObject(classEnum);
        VideoInput = (IBaseFilter)VideoInputObject;
    }

    if (VideoInput != null)
    {
        isRunning = true;

        // Grabber restricted to RGB24 video; callback mode 1 selects BufferCB.
        SampleGrabber = new SampleGrabber() as ISampleGrabber;
        GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
        AMMediaType Type = new AMMediaType()
        {
            majorType = MediaType.Video,
            subType = MediaSubType.RGB24,
            formatType = FormatType.VideoInfo
        };
        SampleGrabber.SetMediaType(Type);
        GraphBuilder.AddFilter(VideoInput, "Camera");
        SampleGrabber.SetBufferSamples(false);
        SampleGrabber.SetOneShot(false);
        // NOTE(review): the result is discarded and the grabber is not yet
        // connected at this point, so this call cannot return useful data —
        // verify whether it can simply be removed.
        SampleGrabber.GetConnectedMediaType(new AMMediaType());
        SampleGrabber.SetCallback((ISampleGrabberCB)this, 1);

        CaptureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, VideoInput, null, SampleGrabber as IBaseFilter);

        // Start the consumer thread before running the graph.
        UpdateThread = new Thread(UpdateBuffer);
        UpdateThread.Start();
        MediaControl.Run();

        // The graph holds its own reference to the source filter.
        Marshal.ReleaseComObject(VideoInput);
    }
}
/// <summary>
/// Stops the graph and releases every DirectShow interface held by this
/// instance. Errors are swallowed: disposal is best-effort.
/// </summary>
public void Dispose()
{
    int hr;
    try
    {
        // Stop playback before releasing anything.
        if (mediaCtrl != null)
        {
            hr = mediaCtrl.Stop();
            mediaCtrl = null;
        }
        if (mediaEvt != null)
        {
            // Interface view only — the underlying object is the graph,
            // released further below; just clear the reference.
            mediaEvt = null;
        }
        // baseGrabFlt is only cleared — presumably the same COM object as
        // sampGrabber, which is released below (verify where it is created).
        baseGrabFlt = null;
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
        }
        sampGrabber = null;
        if (capGraph != null)
        {
            Marshal.ReleaseComObject(capGraph);
        }
        capGraph = null;
        if (graphBuilder != null)
        {
            Marshal.ReleaseComObject(graphBuilder);
        }
        graphBuilder = null;
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        capFilter = null;
        // Dispose the enumerated capture devices as well.
        if (capDevices != null)
        {
            foreach (DsDevice d in capDevices)
            {
                d.Dispose();
            }
            capDevices = null;
        }
    }
    catch (Exception) { } // best-effort cleanup: ignore any failure
}
/// <summary>
/// Where the magic happens.
/// Builds a graph with all the necessary filters, including a sample
/// grabber, to render video from an RTSP video source. The manual
/// pin-to-pin wiring (lines marked //*FYJ) has been superseded by the
/// single RenderStream call at the end, which inserts any needed
/// converters automatically.
/// </summary>
/// <param name="pGraph">The actual graph</param>
/// <param name="srcFile1">The URL of the RTSP stream</param>
private void BuildGraph(IGraphBuilder pGraph, string srcFile1)
{
    // Reset our progress-tracking properties.
    Filter = false;
    Sample = false;
    Render = false;
    Running = false;
    FirstFrame = string.Empty;

    // Graph builder.
    var pBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    int hr = pBuilder.SetFiltergraph(pGraph);
    CheckHr(hr, "Can't SetFiltergraph");

    // Add RTSP source filter.
    var pRTSPFilter2 = CreateSourceFilter(pGraph, srcFile1);
    Filter = true;

    // Colorspace converters — disabled; RenderStream inserts what it needs.
    //*FYJ var pColorSpaceConverter = CreateColorSpace(pGraph);
    //*FYJ var pColorSpaceConverter2 = CreateColorSpace(pGraph);

    // Add SampleGrabber.
    var pSampleGrabber = CreateSampleGrabber(pGraph);

    // Add Video Renderer.
    var pVideoRenderer = CreateVideoRenderer(pGraph);

    // Manual wiring kept for reference (source -> converter -> grabber ->
    // converter 2 -> renderer), replaced by RenderStream below.
    //*FYJ hr = pGraph.ConnectDirect(GetPin(pRTSPFilter2, "Out"), GetPin(pColorSpaceConverter, "Input"), null);
    //*FYJ CheckHr(hr, "Can't connect RTSP Filter and Color space converter");
    //*FYJ Color = true;
    //*FYJ hr = pGraph.ConnectDirect(GetPin(pColorSpaceConverter, "XForm Out"), GetPin(pSampleGrabber, "Input"), null);
    //*FYJ CheckHr(hr, "Can't connect RTSP Filter and Color Space Converter and Sample Grabber");
    //*FYJ hr = pGraph.ConnectDirect(GetPin(pSampleGrabber, "Output"), GetPin(pColorSpaceConverter2, "Input"), null);
    //*FYJ CheckHr(hr, "Can't connect RTSP Filter and Color Space Converter and Sample Grabber and Color converter 2");
    //*FYJ Sample = true;
    //*FYJ hr = pGraph.ConnectDirect(GetPin(pColorSpaceConverter2, "XForm Out"), GetPin(pVideoRenderer, "VMR Input0"), null);
    //*FYJ CheckHr(hr, "Can't connect RTSP Filter and Color Space Converter and Sample Grabber and Color converter and video render");
    //*FYJ Render = true;

    // Route source -> grabber -> renderer through the capture-graph builder.
    // NOTE(review): unlike the calls above, this HRESULT is not checked.
    pBuilder.RenderStream(null, null, pRTSPFilter2, pSampleGrabber, pVideoRenderer);
    _grabber = pSampleGrabber as ISampleGrabber;
    Trace("Graph Complete");
}
/// <summary>
/// Configures the sample grabber: routes samples to this instance's
/// SampleCB callback and limits capture to a single sample (one-shot).
/// </summary>
/// <param name="sampGrabber">Grabber interface to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Callback mode 0 selects SampleCB.
    int hr = sampGrabber.SetCallback(this, 0);
    Marshal.ThrowExceptionForHR(hr);

    // Deliver exactly one sample, then stop.
    hr = sampGrabber.SetOneShot(true);
    Marshal.ThrowExceptionForHR(hr);
}
/// <summary>
/// Creates the form that displays samples grabbed by the given filter,
/// registers itself on the filter, and starts a 1-second refresh timer.
/// </summary>
/// <param name="sg">Grabber whose samples are shown.</param>
/// <param name="f">Filter that owns the grabber.</param>
public SampleGrabberForm(ISampleGrabber sg, Filter f)
{
    sampleGrabber = sg;
    filter = f;
    // Let the filter find its way back to this form.
    f.sampleGrabberForm = this;
    cb = new SampleGrabberCallback();

    InitializeComponent();

    // Refresh once per second.
    timer.Interval = 1000;
    timer.Tick += new EventHandler(timer_Tick);
    timer.Start();

    Text = "Samples grabbed by " + f.Name;
}
/// <summary>
/// Closes and releases all used interfaces.
/// </summary>
public void CloseInterfaces()
{
    if (VMRenderer != null)
    {
        Marshal.ReleaseComObject(VMRenderer);
        VMRenderer = null;
        // Interface views of the renderer object just released.
        WindowlessCtrl = null;
        IMFVideoDisplayControl = null;
        MixerBitmap = null;
    }

    if (Processer != null)
    {
        Marshal.ReleaseComObject(Processer);
        // BUGFIX: this previously assigned AudioRender = null, which left a
        // dangling Processer reference AND prevented the block below from
        // ever releasing the audio renderer (COM leak).
        Processer = null;
    }

    if (AudioRender != null)
    {
        Marshal.ReleaseComObject(AudioRender);
        AudioRender = null;
    }

    if (FilterGraph != null)
    {
        Marshal.ReleaseComObject(FilterGraph);
        FilterGraph = null;
        // MediaControl is a view of the graph object just released.
        MediaControl = null;
    }

    if (SmartTee != null)
    {
        Marshal.ReleaseComObject(SmartTee);
        SmartTee = null;
    }

    if (SampleGrabber != null)
    {
        Marshal.ReleaseComObject(SampleGrabber);
        SampleGrabber = null;
        // SampleGrabberFilter is the IBaseFilter view of the same object.
        SampleGrabberFilter = null;
    }

    if (CaptureFilter != null)
    {
        Marshal.ReleaseComObject(CaptureFilter);
        CaptureFilter = null;
    }

    if (Crossbar != null)
    {
        Marshal.ReleaseComObject(Crossbar);
        Crossbar = null;
    }
}
/// <summary>
/// Releases the graph builder, grabber, media type and unmanaged sample
/// buffer. Idempotent: subsequent calls are no-ops. Each release is
/// individually guarded so one failure cannot skip the remaining cleanup.
/// </summary>
public void Dispose()
{
    if (bDisposed)
    {
        return;
    }

    try
    {
        if (builder != null)
        {
            Marshal.ReleaseComObject(builder);
            builder = null;
        }
    }
    catch { }

    try
    {
        if (grabber != null)
        {
            Marshal.ReleaseComObject(grabber);
            grabber = null;
        }
    }
    catch { }

    try
    {
        if (mediaType != null)
        {
            // Frees the format block owned by the media type.
            DsUtils.FreeAMMediaType(mediaType);
            mediaType = null;
        }
    }
    catch { }

    try
    {
        if (samplePtr != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(samplePtr);
            // BUGFIX: reset the pointer so no dangling value survives the
            // free (previously only the bDisposed flag guarded reuse).
            samplePtr = IntPtr.Zero;
        }
    }
    catch { }

    GC.SuppressFinalize(this);
    bDisposed = true;
}
/// <summary>
/// Context-menu handler: when the right-clicked filter is a SampleGrabber,
/// shows its media-type dialog, then clears the right-click selection.
/// </summary>
private void SetSGMediaType(object sender, EventArgs e)
{
    if (rightClickedFilter != null)
    {
        ISampleGrabber grabber = rightClickedFilter.BaseFilter as ISampleGrabber;
        if (grabber != null)
        {
            using (var dialog = new MediaTypeForm(grabber))
            {
                dialog.ShowDialog();
            }
        }
    }
    // Selection is consumed regardless of whether a dialog was shown.
    rightClickedFilter = null;
}
/// <summary>
/// Builds the capture graph for the given camera device: binds the device
/// moniker to a filter, adds a SampleGrabber and a NullRenderer, applies the
/// requested resolution and renders capture -> grabber -> null renderer.
/// Also caches the camera's IAMCameraControl / IAMVideoProcAmp interfaces.
/// </summary>
/// <param name="device">Capture device to open.</param>
/// <param name="iWidth">Requested frame width.</param>
/// <param name="iHeight">Requested frame height.</param>
public void InitDevice(DsDevice device, int iWidth, int iHeight)
{
    int hr;
    object camDevice;
    Guid iid = typeof(IBaseFilter).GUID;
    device.Mon.BindToObject(null, null, ref iid, out camDevice);
    IBaseFilter camFilter = camDevice as IBaseFilter;

    // Camera property interfaces (null when the device does not support them).
    m_CameraControl = camFilter as IAMCameraControl;
    m_VideoControl = camFilter as IAMVideoProcAmp;

    ISampleGrabber sampGrabber = null;
    graphBuilder = (IGraphBuilder) new FilterGraph();

    //Create the Capture Graph Builder
    ICaptureGraphBuilder2 captureGraphBuilder = null;
    captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

    // Attach the filter graph to the capture graph
    hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder);
    DsError.ThrowExceptionForHR(hr);

    //Add the Video input device to the graph
    hr = graphBuilder.AddFilter(camFilter, "WebCam" + deviceNumber);
    DsError.ThrowExceptionForHR(hr);

    // Configure the sample grabber
    sampGrabber = new SampleGrabber() as ISampleGrabber;
    ConfigureSampleGrabber(sampGrabber);
    IBaseFilter sampGrabberBF = sampGrabber as IBaseFilter;

    //Add the Video compressor filter to the graph
    hr = graphBuilder.AddFilter(sampGrabberBF, "SampleGrabber" + deviceNumber);
    DsError.ThrowExceptionForHR(hr);

    // Frames end in the grabber callback, so terminate the chain with a
    // NullRenderer instead of an on-screen window.
    // NOTE(review): this AddFilter HRESULT is not checked.
    IBaseFilter nullRender = new NullRenderer() as IBaseFilter;
    graphBuilder.AddFilter(nullRender, "NullRenderer" + deviceNumber);

    InitResolution(captureGraphBuilder, camFilter, iWidth, iHeight);

    hr = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, camDevice, sampGrabberBF, nullRender);
    DsError.ThrowExceptionForHR(hr);

    // Record the connected format details (see SaveSizeInfo).
    SaveSizeInfo(sampGrabber);

    // Release local references; the graph keeps its own.
    Marshal.ReleaseComObject(sampGrabber);
    Marshal.ReleaseComObject(captureGraphBuilder);
}
/// <summary>
/// create the used COM components and get the interfaces.
/// Creates the filter graph, capture graph builder and sample grabber from
/// their CLSIDs and caches the media-control/event and grabber-filter
/// interface views. Throws GoblinException when anything fails.
/// </summary>
private void GetInterfaces()
{
    Type comType = null;
    object comObj = null;
    String errMsg = "";
    try
    {
        comType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (comType == null)
        {
            throw new NotImplementedException(@"DirectShow FilterGraph not installed/registered!");
        }
        comObj = Activator.CreateInstance(comType);
        graphBuilder = (IGraphBuilder)comObj;
        comObj = null; // ownership transferred to the field

        // Created through DsBugWO — presumably an interop workaround for
        // CaptureGraphBuilder2 creation; see that helper for details.
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        comObj = DsBugWO.CreateDsInstance(ref clsid, ref riid);
        capGraph = (ICaptureGraphBuilder2)comObj;
        comObj = null;

        comType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (comType == null)
        {
            throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
        }
        comObj = Activator.CreateInstance(comType);
        sampGrabber = (ISampleGrabber)comObj;
        comObj = null;

        // Secondary interface views of the objects created above.
        mediaCtrl = (IMediaControl)graphBuilder;
        mediaEvt = (IMediaEventEx)graphBuilder;
        baseGrabFlt = (IBaseFilter)sampGrabber;
    }
    catch (Exception ee)
    {
        errMsg = "Could not get interfaces\r\n" + ee.Message;
    }
    finally
    {
        // Release a partially-created object if an exception interrupted us.
        if (comObj != null)
        {
            Marshal.ReleaseComObject(comObj);
        }
        comObj = null;
    }
    if (errMsg.Length > 0)
    {
        throw new GoblinException(errMsg);
    }
}
/// <summary>
/// Configures the sample grabber: restricts it to RGB24 video and enables
/// buffering of the current sample.
/// </summary>
/// <param name="sampGrabber">The grabber interface to configure.</param>
private void ConfigSampleGrabber(ISampleGrabber sampGrabber)
{
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;

    // BUGFIX: the original configured the 'this.sampleGrabber' field instead
    // of the grabber passed as a parameter, and discarded the HRESULT.
    int hr = sampGrabber.SetMediaType(media);
    DsUtils.FreeAMMediaType(media);
    media = null;
    DsError.ThrowExceptionForHR(hr);

    hr = sampGrabber.SetBufferSamples(true);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Common routine used by RenderTo*
/// </summary>
/// <param name="icgb">ICaptureGraphBuilder2 to use</param>
/// <param name="pCallback">Callback to use (or null)</param>
/// <param name="sType">string to use in creating filter graph object descriptions</param>
/// <param name="pPin">Pin to connect from</param>
/// <param name="ibfCompressor">Compressor to use, or null for none</param>
/// <param name="pOutput">Endpoint (renderer or file writer) to connect to</param>
protected void RenderHelper(ICaptureGraphBuilder2 icgb, CallbackHandler pCallback, string sType, IPin pPin, IBaseFilter ibfCompressor, IBaseFilter pOutput)
{
    int hr;
    IBaseFilter ibfSampleGrabber = null;

    try
    {
        // If no callback was provided, don't create a samplegrabber
        if (pCallback != null)
        {
            ISampleGrabber isg = (ISampleGrabber) new SampleGrabber();
            ibfSampleGrabber = (IBaseFilter)isg;
            _dc.Add(ibfSampleGrabber); // track for later disposal

            // Callback mode 1 selects BufferCB.
            hr = isg.SetCallback(pCallback, 1);
            DESError.ThrowExceptionForHR(hr);

            hr = _graph.AddFilter(ibfSampleGrabber, sType + " sample grabber");
            DESError.ThrowExceptionForHR(hr);
        }

        // If a compressor was provided, add it to the graph and connect it up
        if (ibfCompressor != null)
        {
            // Connect the pin: pPin -> grabber -> compressor -> output.
            // NOTE(review): this branch uses ibfSampleGrabber even when
            // pCallback was null and no grabber was created — confirm callers
            // never combine a compressor with a null callback.
            hr = _graph.AddFilter(ibfCompressor, sType + " Compressor");
            DESError.ThrowExceptionForHR(hr);

            FilterGraphTools.ConnectFilters(_graph, pPin, ibfSampleGrabber, true);
            FilterGraphTools.ConnectFilters(_graph, ibfSampleGrabber, ibfCompressor, true);
            FilterGraphTools.ConnectFilters(_graph, ibfCompressor, pOutput, true);
        }
        else
        {
            // Just connect the SampleGrabber (if any); RenderStream accepts
            // a null intermediate filter.
            hr = icgb.RenderStream(null, null, pPin, ibfSampleGrabber, pOutput);
            DESError.ThrowExceptionForHR(hr);
        }
    }
    finally
    {
        // Drop the local reference; the graph (and _dc) hold their own.
        if (ibfSampleGrabber != null)
        {
            Marshal.ReleaseComObject(ibfSampleGrabber);
        }
    }
}
/// <summary> create the used COM components and get the interfaces. </summary>
/// <returns>true when every interface was obtained; false after showing an
/// error dialog when any creation or cast failed.</returns>
bool GetInterfaces()
{
    Type comType = null;
    object comObj = null;
    try
    {
        comType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (comType == null)
        {
            throw new NotImplementedException(@"DirectShow FilterGraph not installed/registered!");
        }
        comObj = Activator.CreateInstance(comType);
        graphBuilder = (IGraphBuilder)comObj;
        comObj = null; // ownership transferred to the field

        // Created through DsBugWO — presumably an interop workaround for
        // CaptureGraphBuilder2 creation; see that helper for details.
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        comObj = DsBugWO.CreateDsInstance(ref clsid, ref riid);
        capGraph = (ICaptureGraphBuilder2)comObj;
        comObj = null;

        comType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (comType == null)
        {
            throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
        }
        comObj = Activator.CreateInstance(comType);
        sampGrabber = (ISampleGrabber)comObj;
        comObj = null;

        // Secondary interface views of the objects created above.
        baseGrabFlt = (IBaseFilter)sampGrabber;
        mediaCtrl = (IMediaControl)graphBuilder;
        videoWin = (IVideoWindow)graphBuilder;
        mediaEvt = (IMediaEventEx)graphBuilder;
        return(true);
    }
    catch (Exception ee)
    {
        MessageBox.Show("Could not get interfaces\r\n" + ee.Message, "DirectShow.NET", MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return(false);
    }
    finally
    {
        // Release a partially-created object if an exception interrupted us.
        if (comObj != null)
        {
            Marshal.ReleaseComObject(comObj);
        }
        comObj = null;
    }
}
/// <summary>
/// Creates the capture-graph builder, filter graph and sample grabber,
/// then attaches the builder to the graph (registering the graph in the
/// Running Object Table for debug builds).
/// </summary>
public BWF2WavConverter()
{
    icgb = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    gb = (IFilterGraph2) new FilterGraph();
    sg = (ISampleGrabber) new SampleGrabber();

#if DEBUG
    // Make the graph visible to GraphEdit via the Running Object Table.
    m_rot = new DsROTEntry(gb);
#endif

    int hr = icgb.SetFiltergraph(gb);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Builds the basic graph: a FilterGraph containing a SampleGrabber that is
/// restricted to RGB24 video, then starts the seconds readout.
/// </summary>
public MainForm()
{
    InitializeComponent();

    graphbuilder = (IGraphBuilder)new FilterGraph();
    samplegrabber = (ISampleGrabber)new SampleGrabber();
    graphbuilder.AddFilter((IBaseFilter)samplegrabber, "samplegrabber");

    // The media type lives in a field; it constrains the grabber to RGB24 video.
    mt = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };
    samplegrabber.SetMediaType(mt);

    PrintSeconds();
}
/// <summary>Clean up the video resources</summary>
private void CloseInterfaces()
{
    // Halt playback and detach from the host window first.
    Stop();
    AttachToWindow(IntPtr.Zero);
    PlayState = EPlayState.Cleanup;

    // Signal the media-event thread to shut down.
    lock (m_shutdown_lock)
        m_shutdown = true;
    if (m_event != null)
    {
        m_event.Set(); // Release the thread
    }
    m_event = null;

    // Wait for the thread to end
    if (m_media_event_thread != null)
    {
        m_media_event_thread.Join();
    }
    m_media_event_thread = null;

    if (m_samp_grabber != null)
    {
        Marshal.ReleaseComObject(m_samp_grabber);
    }

    // Presumably views of the graph object (released further below);
    // only the references are cleared here.
    m_media_ctrl = null;
    m_media_position = null;
    m_samp_grabber = null;
    m_video_window = null;
    m_basic_video = null;
    m_basic_audio = null;

#if DEBUG
    // Remove the graph from the Running Object Table (debug aid).
    if (m_ds_rot != null)
    {
        m_ds_rot.Dispose();
    }
    m_ds_rot = null;
#endif

    if (m_filter_graph != null)
    {
        Marshal.ReleaseComObject(m_filter_graph);
    }
    m_filter_graph = null;

    // Force finalizers so released COM wrappers go away promptly.
    GC.Collect();
    GC.WaitForPendingFinalizers();
}
// Build the capture graph for grabber and renderer.</summary>
// (Control to show video in, Filename to play)
// Creates the filter graph, adds the file source and a configured
// SampleGrabber, renders the stream through the grabber to the default
// renderer, then sets up the video window and control/event interfaces.
private void SetupGraph(Control hWin, string FileName)
{
    int hr;

    m_FilterGraph = new FilterGraph() as IFilterGraph2;
    ICaptureGraphBuilder2 icgb2 = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;

    try
    {
        // Attach the capture-graph builder to the filter graph.
        hr = icgb2.SetFiltergraph(m_FilterGraph);
        DsError.ThrowExceptionForHR(hr);

        // Add the media file as the source filter.
        IBaseFilter sourceFilter = null;
        hr = m_FilterGraph.AddSourceFilter(FileName, FileName, out sourceFilter);
        DsError.ThrowExceptionForHR(hr);

        // Get the SampleGrabber interface
        m_sampGrabber = (ISampleGrabber) new SampleGrabber();
        IBaseFilter baseGrabFlt = (IBaseFilter)m_sampGrabber;

        // Configure the Sample Grabber
        ConfigureSampleGrabber(m_sampGrabber);

        // Add it to the filter
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        // Connect the pieces together, use the default renderer
        hr = icgb2.RenderStream(null, null, sourceFilter, baseGrabFlt, null);
        DsError.ThrowExceptionForHR(hr);

        // Configure the Video Window
        IVideoWindow videoWindow = m_FilterGraph as IVideoWindow;
        ConfigureVideoWindow(videoWindow, hWin);

        // Grab some other interfaces
        m_mediaEvent = m_FilterGraph as IMediaEvent;
        m_mediaCtrl = m_FilterGraph as IMediaControl;
    }
    finally
    {
        // The builder is only needed while constructing the graph.
        if (icgb2 != null)
        {
            Marshal.ReleaseComObject(icgb2);
            icgb2 = null;
        }
    }
}
/// <summary>
/// Returns the media type currently connected on the named SampleGrabber
/// filter, or null when the filter is missing, has no base filter, is not a
/// grabber, or is not connected.
/// </summary>
/// <param name="realname">Exact filter name to look up.</param>
public AMMediaType SampleGrabberMediaType(string realname)
{
    Filter flt = filters.Find(delegate(Filter f) { return(f.Name == realname); });
    if (flt == null || flt.BaseFilter == null)
    {
        return(null);
    }

    ISampleGrabber grabber = flt.BaseFilter as ISampleGrabber;
    if (grabber == null)
    {
        return(null);
    }

    // Successful HRESULTs are non-negative.
    AMMediaType mt = new AMMediaType();
    if (grabber.GetConnectedMediaType(mt) < 0)
    {
        return(null);
    }
    return(mt);
}
/// <summary>
/// Restricts the sample grabber to RGB24 video samples.
/// </summary>
/// <param name="sb">The grabber interface to configure.</param>
private static void ConfigSampleGrabber(ISampleGrabber sb)
{
    // set the media type
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };

    // that's the call to the ISampleGrabber interface.
    // BUGFIX: the HRESULT was previously discarded; a failure here would
    // otherwise only surface later as a confusing connection error.
    int hr = sb.SetMediaType(media);
    DsUtils.FreeAMMediaType(media);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Configures the SampleGrabber: accept only RGB24 video and deliver
/// frames through this instance's BufferCB callback.
/// </summary>
/// <param name="sampGrabber">The grabber interface to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    AMMediaType mediaType = new AMMediaType();
    mediaType.majorType = MediaType.Video;
    mediaType.subType = MediaSubType.RGB24;
    mediaType.formatType = FormatType.VideoInfo;

    int hr = sampGrabber.SetMediaType(mediaType);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(mediaType);

    // Callback mode 1 selects BufferCB.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Asks the graph to stop (when a control exists) and drops all graph
/// references so the underlying objects can be reclaimed.
/// </summary>
private void Cleanup()
{
    // Stop when ready
    if (_control != null)
    {
        _control.StopWhenReady();
    }

    // Clear every cached reference.
    _graph = null;
    _sourceObject = null;
    _grabberObject = null;
    _grabber = null;
    _capGrabber = null;
    _control = null;
}
/// <summary>
/// Creates the COM components used for file playback — filter graph,
/// sample grabber and smart tee — and caches the control/window/grabber
/// interface views. Wraps any failure in a plain Exception.
/// </summary>
void GetFilePlayInterfaces()
{
    Type comType = null;
    object comObj = null;
    try
    {
        comType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (comType == null)
        {
            throw new NotImplementedException(@"DirectShow FilterGraph not installed/registered!");
        }
        comObj = Activator.CreateInstance(comType);
        graphBuilder = (IGraphBuilder)comObj;
        comObj = null; // ownership transferred to the field

        comType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (comType == null)
        {
            throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
        }
        comObj = Activator.CreateInstance(comType);
        sampGrabber = (ISampleGrabber)comObj;
        comObj = null;

        comType = Type.GetTypeFromCLSID(Clsid.SmartTee);
        if (comType == null)
        {
            // NOTE(review): message says "Video Renderer" but the CLSID is
            // the SmartTee — likely a copy/paste slip; confirm intent.
            throw new NotImplementedException(@"Video Renderer not installed/registered!");
        }
        comObj = Activator.CreateInstance(comType);
        smartTee = (IBaseFilter)comObj;
        comObj = null;

        // Secondary interface views of the objects created above.
        mediaCtrl = (IMediaControl)graphBuilder;
        videoWin = (IVideoWindow)graphBuilder;
        baseGrabFlt = (IBaseFilter)sampGrabber;
    }
    catch (Exception ee)
    {
        throw new Exception("Could not get interfaces\r\n" + ee.Message);
    }
    finally
    {
        // Release a partially-created object if an exception interrupted us.
        if (comObj != null)
        {
            Marshal.ReleaseComObject(comObj);
        }
        comObj = null;
    }
}
/// <summary>
/// Releases every graph COM object and clears all grabber/callback
/// references, returning the instance to its unconfigured state.
/// </summary>
public void Reset()
{
    // Interface views of the graph; just drop the references.
    mediaControl = null;
    mediaEvent = null;

    Util.ReleaseComObject(ref graph);

    // Video branch.
    Util.ReleaseComObject(ref videoGrabberFilter);
    videoGrabber = null;
    Util.ReleaseComObject(ref videoNullFilter);
    videoGrabberCB = null;

    // Audio branch.
    Util.ReleaseComObject(ref audioGrabberFilter);
    audioGrabber = null;
    Util.ReleaseComObject(ref audioNullFilter);
    audioGrabberCB = null;
}
/// <summary>
/// Adds SampleGrabber for screenshot making.
/// </summary>
private void AddFilter_SampleGrabber()
{
    // Create the grabber and keep both interface views of the same COM object.
    SampleGrabber = new SampleGrabber() as ISampleGrabber;
    SampleGrabberFilter = SampleGrabber as IBaseFilter;

    // Delegate configuration to the helper; frame buffering disabled.
    _pSampleGrabberHelper = new SampleGrabberHelper(SampleGrabber, false);
    _pSampleGrabberHelper.ConfigureMode();

    // Insert the grabber into the filter graph.
    int hr = FilterGraph.AddFilter(SampleGrabberFilter, "Sample Grabber");
    DsError.ThrowExceptionForHR(hr);
}
//
// Configure the SampleGrabber filter of the graph: accept the raw byte
// stream (MEDIATYPE_Stream — "works with stream somehow", per the original
// author) and deliver samples to this instance's BufferCB callback.
//
void ConfigSampleGrabber(ISampleGrabber sampGrabber)
{
    AMMediaType media;

    // set the media type. works with "stream" somehow...
    media = new AMMediaType();
    media.majorType = MediaType.Stream;
    //media.subType = MediaSubType.WAVE;
    //media.formatType = FormatType.WaveEx;

    // BUGFIX: the original configured the 'sg' field and ignored the
    // 'sampGrabber' parameter entirely; configure the grabber passed in.
    sampGrabber.SetMediaType(media);
    DsUtils.FreeAMMediaType(media);
    media = null;

    // set BufferCB as the desired Callback function (mode 1 = BufferCB)
    sampGrabber.SetCallback(this, 1);
}
/// <summary>
/// Stops playback and releases every DirectShow object this player holds
/// (renderer, capture device, sample grabber, graph), raising MediaClosed
/// once the graph itself is gone.
/// </summary>
protected override void FreeResources()
{
    /* We run the StopInternal() to avoid any
     * Dispatcher VeryifyAccess() issues */
    StopInternal();

    /* Let's clean up the base
     * class's stuff first */
    base.FreeResources();

#if DEBUG
    // Remove the graph from the Running Object Table (debug aid).
    if (m_rotEntry != null)
    {
        m_rotEntry.Dispose();
    }
    m_rotEntry = null;
#endif
    if (m_videoFrame != null)
    {
        m_videoFrame.Dispose();
        m_videoFrame = null;
    }
    // FinalReleaseComObject drops the RCW's reference count to zero at once.
    if (m_renderer != null)
    {
        Marshal.FinalReleaseComObject(m_renderer);
        m_renderer = null;
    }
    if (m_captureDevice != null)
    {
        Marshal.FinalReleaseComObject(m_captureDevice);
        m_captureDevice = null;
    }
    if (m_sampleGrabber != null)
    {
        Marshal.FinalReleaseComObject(m_sampleGrabber);
        m_sampleGrabber = null;
    }
    if (m_graph != null)
    {
        Marshal.FinalReleaseComObject(m_graph);
        m_graph = null;

        // Only report closure when there actually was an open graph.
        InvokeMediaClosed(new EventArgs());
    }
}
/// <summary>
/// Builds the core capture objects: the filter graph, its control/event
/// interfaces, and a SampleGrabber configured for RGB24 video with BufferCB
/// callbacks routed to this form, then lists the available capture devices.
/// </summary>
public CaptureForm()
{
    InitializeComponent();

    graph_builder = (IGraphBuilder)new FilterGraph();
    media_control = (IMediaControl)graph_builder;
    events = (IMediaEventEx)graph_builder;

    grabber = (ISampleGrabber)new SampleGrabber();
    AMMediaType media_type = new AMMediaType();
    media_type.majorType = MediaType.Video;
    media_type.subType = MediaSubType.RGB24;
    grabber.SetMediaType( media_type );
    // BUGFIX: the AMMediaType was never freed, leaking its format block.
    DsUtils.FreeAMMediaType(media_type);

    // Callback mode 1 selects BufferCB.
    grabber.SetCallback( this, 1 );

    // Populate the device combo with the available video capture devices.
    cbDevices.Items.AddRange( GetDevices( FilterCategory.VideoInputDevice ) );
}
/// <summary>
/// Worker thread that captures the images
/// Builds the webcam graph (source -> grabber, RGB32), reads the frame size
/// from the connected media type (with retries, since the connection can
/// race at startup), starts the graph, then polls until the stop signal is
/// set and finally releases everything.
/// </summary>
private void RunWorker()
{
    try
    {
        // Create the main graph
        m_igrphbldGraph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

        // Create the webcam source
        m_sourceObject = FilterInfo.CreateFilter(m_sMonikerString);

        // Create the grabber
        m_isplGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        m_grabberObject = m_isplGrabber as IBaseFilter;

        // Add the source and grabber to the main graph
        m_igrphbldGraph.AddFilter(m_sourceObject, "source");
        m_igrphbldGraph.AddFilter(m_grabberObject, "grabber");

        using (AMMediaType mediaType = new AMMediaType())
        {
            // Request 32-bit RGB video from the grabber.
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            m_isplGrabber.SetMediaType(mediaType);

            if (m_igrphbldGraph.Connect(m_sourceObject.GetPin(PinDirection.Output, 0), m_grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
            {
                if (m_isplGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    // During startup, this code can be too fast, so try at least 3 times
                    int retryCount = 0;
                    bool succeeded = false;
                    while ((retryCount < 3) && !succeeded)
                    {
                        // Tried again
                        retryCount++;
                        try
                        {
                            // Retrieve the grabber information
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            m_grbrCapGrabber.Width = header.BmiHeader.Width;
                            m_grbrCapGrabber.Height = header.BmiHeader.Height;

                            // Succeeded
                            succeeded = true;
                        }
                        catch (Exception retryException) // exception only signals "retry"; intentionally unused
                        {
                            // Trace
                            Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                            // Sleep
                            Thread.Sleep(50);
                        }
                    }
                }
            }

            // Render the grabber's output (adds a default renderer) and put
            // the grabber into streaming-callback mode (1 = BufferCB).
            m_igrphbldGraph.Render(m_grabberObject.GetPin(PinDirection.Output, 0));
            m_isplGrabber.SetBufferSamples(false);
            m_isplGrabber.SetOneShot(false);
            m_isplGrabber.SetCallback(m_grbrCapGrabber, 1);

            // Get the video window; keep the default renderer window hidden.
            IVideoWindow wnd = (IVideoWindow)m_igrphbldGraph;
            wnd.put_AutoShow(false);
            wnd = null;

            // Create the control and run
            m_imedctrlControl = (IMediaControl)m_igrphbldGraph;
            m_imedctrlControl.Run();

            // Wait for the stop signal
            while (!m_rstevStopSignal.WaitOne(0, true))
            {
                Thread.Sleep(10);
            }

            // Stop when ready
            // _control.StopWhenReady();
            m_imedctrlControl.Stop();

            // Wait a bit... It apparently takes some time to stop IMediaControl
            Thread.Sleep(1000);
        }
    }
    catch (Exception ex)
    {
        // Trace
        Trace.WriteLine(ex);
    }
    finally
    {
        // Clean up
        this.Release();
    }
}
/// <summary> Set the options on the sample grabber </summary>
/// <param name="sampGrabber">Grabber to configure: RGB24 video, frames
/// delivered through this instance's BufferCB callback.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Restrict connections to 24-bit RGB video.
    AMMediaType mediaType = new AMMediaType();
    mediaType.majorType = MediaType.Video;
    mediaType.subType = MediaSubType.RGB24;
    mediaType.formatType = FormatType.VideoInfo;

    int hr = sampGrabber.SetMediaType(mediaType);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(mediaType);

    // Callback mode 1 selects BufferCB rather than SampleCB.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Stops playback and releases all DirectShow resources held by this instance
/// (ROT entry, video frame, renderer, capture device, sample grabber, graph).
/// </summary>
protected override void FreeResources()
{
    /* We run the StopInternal() to avoid any
     * Dispatcher VeryifyAccess() issues */
    StopInternal();

    /* Let's clean up the base
     * class's stuff first */
    base.FreeResources();

#if DEBUG
    // The Running Object Table entry only exists in debug builds.
    if (m_rotEntry != null)
        m_rotEntry.Dispose();

    m_rotEntry = null;
#endif
    if (m_videoFrame != null)
    {
        m_videoFrame.Dispose();
        m_videoFrame = null;
    }

    // FinalReleaseComObject drops the RCW's reference count to zero in one call.
    if (m_renderer != null)
    {
        Marshal.FinalReleaseComObject(m_renderer);
        m_renderer = null;
    }
    if (m_captureDevice != null)
    {
        Marshal.FinalReleaseComObject(m_captureDevice);
        m_captureDevice = null;
    }
    if (m_sampleGrabber != null)
    {
        Marshal.FinalReleaseComObject(m_sampleGrabber);
        m_sampleGrabber = null;
    }
    if (m_graph != null)
    {
        Marshal.FinalReleaseComObject(m_graph);
        m_graph = null;

        // Only raise "media closed" when a graph actually existed.
        InvokeMediaClosed(new EventArgs());
    }
}
/// <summary>
/// Releases the capture device
/// </summary>
/// <remarks>
/// Signals the worker thread to stop, waits for it to exit, and clears all
/// graph references. Previously this slept 100 ms and then unconditionally
/// called Thread.Abort(), which could tear the thread down mid-cleanup; now
/// the thread is joined first and Abort() is only a last resort.
/// </remarks>
private void Release()
{
    if (m_thrdWorker != null)
    {
        // Yes, stop via the event (guard: the signal may already be cleared).
        if (m_rstevStopSignal != null)
        {
            m_rstevStopSignal.Set();
        }

        // Join the worker thread so we only continue when it exits.
        // Joining the current thread would deadlock, so skip the wait when the
        // worker itself runs this cleanup from its finally block.
        if (!ReferenceEquals(Thread.CurrentThread, m_thrdWorker))
        {
            if (!m_thrdWorker.Join(2000))
            {
                // Last resort, matching the old behaviour when the thread
                // refuses to stop. NOTE: Thread.Abort is unsafe/obsolete.
                m_thrdWorker.Abort();
            }
        }

        // Dispose of the thread.
        m_thrdWorker = null;
    }

    // Clear the event
    if (m_rstevStopSignal != null)
    {
        m_rstevStopSignal.Close();
        m_rstevStopSignal = null;
    }

    // Clean up: drop the graph/filter references so the COM objects can go away.
    m_igrphbldGraph = null;
    m_sourceObject = null;
    m_grabberObject = null;
    m_isplGrabber = null;
    m_grbrCapGrabber = null;
    m_imedctrlControl = null;
}
/// <summary> Configures the sample grabber to deliver RGB24 video via SampleCB. </summary>
private void SetupSampleGrabber(ISampleGrabber sampleGrabber)
{
    // Ask for uncompressed RGB24 video frames.
    DirectShowLib.AMMediaType format = new DirectShowLib.AMMediaType();
    format.majorType = MediaType.Video;
    format.subType = MediaSubType.RGB24;
    format.formatType = FormatType.VideoInfo;

    int result = sampleGrabber.SetMediaType(format);
    DsUtils.FreeAMMediaType(format);
    DsError.ThrowExceptionForHR(result);

    // 0 = SampleCB callback (this class implements ISampleGrabberCB).
    result = sampleGrabber.SetCallback(this, 0);
    DsError.ThrowExceptionForHR(result);
}
/// <summary>
/// Releases all graph resources held by this instance. (Original: 解放 = "release")
/// </summary>
public virtual void Dispose()
{
    // Best-effort stop; failures during teardown are deliberately ignored.
    try
    {
        Stop();
    }
    catch (Exception)
    {
    }

    // Reset the callback sink and detach the frame-notification handler
    // so no further frames are delivered.
    SampleGrabberCB.FrameSize = new Size(0, 0);
    SampleGrabberCB.Notify -= SampleGrabberCB_Notify;

    // Drop filter/pin references (no explicit COM release for these here).
    CaptureFilter = null;
    CaptureOutPin = null;
    SampleGrabber = null;
    SampleGrabberInPin = null;
    SampleGrabberOutPin = null;
    Renderer = null;
    RendererInPin = null;

    // Explicitly release the COM interfaces this class owns.
    if (Seeking != null)
        Marshal.ReleaseComObject(Seeking);
    Seeking = null;

    if (GraphBuilder != null)
        Marshal.ReleaseComObject(GraphBuilder);
    GraphBuilder = null;
}
/// <summary>
/// Reads the media type negotiated on the sample grabber and stores the frame
/// geometry (videoWidth, videoHeight, stride).
/// </summary>
/// <param name="sampGrabber">Connected sample grabber to query.</param>
/// <exception cref="NotSupportedException">The connected format is not VIDEOINFO.</exception>
private void SaveSizeInfo(ISampleGrabber sampGrabber)
{
    AMMediaType media = new AMMediaType();
    int hr = sampGrabber.GetConnectedMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    // Fix: the unmanaged format block was leaked when the format check threw.
    // Free it in a finally so every exit path releases it.
    try
    {
        if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
        {
            throw new NotSupportedException("Unknown Grabber Media Format");
        }

        // Marshal the VIDEOINFOHEADER out of the unmanaged format block.
        VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        videoWidth = videoInfoHeader.BmiHeader.Width;
        videoHeight = videoInfoHeader.BmiHeader.Height;
        // Bytes per row = width * bytes-per-pixel.
        stride = videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);
    }
    finally
    {
        DsUtils.FreeAMMediaType(media);
    }
}
// Save the size parameters for use in SnapShot
private void SaveSizeInfo(ISampleGrabber sampGrabber)
{
    // Ask the SampleGrabber for the media type that was actually connected.
    AMMediaType connectedType = new AMMediaType();
    int hr = sampGrabber.GetConnectedMediaType(connectedType);
    DsError.ThrowExceptionForHR(hr);

    try
    {
        bool isVideoInfo = connectedType.formatType == FormatType.VideoInfo;
        if (!isVideoInfo || connectedType.formatPtr == IntPtr.Zero)
        {
            throw new NotSupportedException("Unknown Grabber Media Format");
        }

        // Marshal the VIDEOINFOHEADER out of the unmanaged format block.
        VideoInfoHeader header = new VideoInfoHeader();
        Marshal.PtrToStructure(connectedType.formatPtr, header);

        // Record the frame geometry for later snapshot captures.
        m_videoWidth = header.BmiHeader.Width;
        m_videoHeight = header.BmiHeader.Height;
        m_stride = header.BmiHeader.ImageSize / m_videoHeight;
        m_ImageSize = header.BmiHeader.ImageSize;
    }
    finally
    {
        // The format block is unmanaged memory and must always be freed.
        DsUtils.FreeAMMediaType(connectedType);
    }
}
/// <summary>
/// Stops the graph (via CloseInterfaces) and releases every capture-related
/// resource: the cached bitmap, the grabber/builder/filter COM objects, the
/// optional ROT entry, and the crossbar reference.
/// </summary>
public void CloseResources()
{
    CloseInterfaces();

    // NOTE(review): lock (this) is an anti-pattern — external code holding a
    // reference can take the same lock; consider a private lock object.
    lock (this)
    {
        if (latestBitmap != null)
        {
            latestBitmap.Dispose();
            latestBitmap = null;
        }

        if (samplGrabber != null)
        {
            Marshal.ReleaseComObject(samplGrabber);
            samplGrabber = null;
        }

        if (capBuilder != null)
        {
            Marshal.ReleaseComObject(capBuilder);
            capBuilder = null;
        }

        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
            capFilter = null;
        }

        // The ROT entry only exists when graph debugging is enabled.
        if (Settings.Default.VideoGraphDebugMode)
        {
            if (rot != null)
            {
                rot.Dispose();
                rot = null;
            }
        }

        crossbar = null;
    }
}
/// <summary>
/// Builds the DirectShow capture graph for the given device and format, then
/// initialises the native processing pipeline (camera, AAV recording, and
/// optionally OCR or timestamp preservation).
/// </summary>
/// <param name="dev">Capture device to build the graph around.</param>
/// <param name="runOCR">When true, configures native OCR processing.</param>
/// <param name="selectedFormat">Requested video format.</param>
/// <param name="iFrameRate">In/out: frame rate actually negotiated by the graph.</param>
/// <param name="iWidth">In/out: frame width actually negotiated.</param>
/// <param name="iHeight">In/out: frame height actually negotiated.</param>
public void SetupGraph(DsDevice dev, bool runOCR, VideoFormatHelper.SupportedVideoFormat selectedFormat, ref float iFrameRate, ref int iWidth, ref int iHeight)
{
    try
    {
        filterGraph = (IFilterGraph2)new FilterGraph();
        mediaCtrl = filterGraph as IMediaControl;

        capBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
        samplGrabber = (ISampleGrabber)new SampleGrabber();

        int hr = capBuilder.SetFiltergraph(filterGraph);
        DsError.ThrowExceptionForHR(hr);

        // Register the graph in the Running Object Table in debug mode so it
        // can be inspected with GraphEdit.
        if (Settings.Default.VideoGraphDebugMode)
        {
            if (rot != null)
            {
                rot.Dispose();
                rot = null;
            }
            rot = new DsROTEntry(filterGraph);
        }

        SetupGraphInternal(dev, selectedFormat, ref iFrameRate, ref iWidth, ref iHeight);

        // Now that sizes are fixed/known, store the sizes
        SaveSizeInfo(samplGrabber);

        crossbar = CrossbarHelper.SetupTunerAndCrossbar(capBuilder, capFilter);

        // Reusable bitmap for the grabbed frames, matching the negotiated size.
        latestBitmap = new Bitmap(iWidth, iHeight, PixelFormat.Format24bppRgb);
        fullRect = new Rectangle(0, 0, latestBitmap.Width, latestBitmap.Height);

        // Hand the negotiated parameters and user settings to the native layer.
        NativeHelpers.SetupCamera(
            Settings.Default.CameraModel,
            iWidth, iHeight,
            Settings.Default.HorizontalFlip,
            Settings.Default.VerticalFlip,
            Settings.Default.IsIntegrating,
            (float)Settings.Default.MinSignatureDiffRatio,
            (float)Settings.Default.MinSignatureDiff,
            Settings.Default.GammaDiff,
            Settings.Default.ForceNewFrameOnLockedRate,
            dev.Name,
            selectedFormat.AsSerialized(),
            selectedFormat.FrameRate);

        NativeHelpers.SetupAav(Settings.Default.RecordStatusSectionOnly ? AavImageLayout.StatusSectionOnly : Settings.Default.AavImageLayout, Settings.Default.AavCompression);

        ocrEnabled = false;
        string errorMessage;

        if (runOCR)
        {
            OcrConfiguration ocrConfig = OcrSettings.Instance[Settings.Default.SelectedOcrConfiguration];

            // Basic metrics must succeed before full OCR is enabled.
            errorMessage = NativeHelpers.SetupBasicOcrMetrix(ocrConfig);
            if (errorMessage != null && callbacksObject != null)
                callbacksObject.OnError(-1, errorMessage);
            else
            {
                NativeHelpers.SetupOcr(ocrConfig);
                ocrEnabled = true;
            }
        }
        else
        {
            errorMessage = NativeHelpers.SetupTimestampPreservation(false, 0, 0);
            if (errorMessage != null && callbacksObject != null)
                callbacksObject.OnError(-1, errorMessage);
        }
    }
    catch
    {
        // Any failure tears down everything built so far before rethrowing.
        CloseResources();

        if (callbacksObject != null)
            callbacksObject.OnError(-1, "Error initialising the camera. The selected video mode may not be supported by the camera.");

        throw;
    }
}
/// <summary>
/// Creates the capture-side COM objects (filter graph manager, capture graph
/// builder, sample grabber), resolves the stream-config interface of the
/// selected device, and primes the grabber for RGB24 buffered grabbing.
/// </summary>
private void InitCaptureInterface()
{
    // release com object (useless here but can't hurt)
    Cleanup(true);

    this.fmc = new FilgraphManagerClass();

    // create the cg object and add the filter graph to it
    Type t = Type.GetTypeFromCLSID(CLSID_CaptureGraphBuilder2);
    this.icgb = (ICaptureGraphBuilder2)Activator.CreateInstance(t);

    t = Type.GetTypeFromCLSID(CLSID_SampleGrabber);
    this.isg = (ISampleGrabber)Activator.CreateInstance(t);

    // source filter (the capture device)
    // NOTE(review): assumes SourceFilterList order matches the combo box items — confirm.
    this.sf = (IBaseFilter)this.SourceFilterList[this.cbxDevice.SelectedIndex];
    // sample grabber filter
    this.sgf = (IBaseFilter)this.isg;

    // Find the IAMStreamConfig interface on the device's capture pin.
    object o = null;
    this.icgb.RemoteFindInterface(ref PIN_CATEGORY_CAPTURE, ref MEDIATYPE_Video, sf, ref IID_IAMStreamConfig, out o);
    this.iamsc = (IAMStreamConfig)o;

    // set sample grabber media type
    this.SGMediaType = new _AMMediaType();
    this.SGMediaType.majortype = MEDIATYPE_Video;
    this.SGMediaType.subtype = MEDIASUBTYPE_RGB24;
    this.SGMediaType.formattype = FORMAT_VideoInfo;
    this.isg.SetMediaType(ref SGMediaType);

    // Continuous capture (0 = not one-shot) with the last buffer retained (1).
    this.isg.SetOneShot(0);
    this.isg.SetBufferSamples(1);
}
/// <summary>
/// Reads the audio media type negotiated on the sample grabber and stores the
/// channel count, sample rate, and bits per sample.
/// </summary>
/// <param name="sampGrabber">Connected sample grabber to query.</param>
/// <exception cref="NotSupportedException">The connected format is not WAVEFORMATEX.</exception>
private void SaveSizeInfo(ISampleGrabber sampGrabber)
{
    AMMediaType media = new AMMediaType();
    int hr = sampGrabber.GetConnectedMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    // Fix: the unmanaged format block was leaked when the format check threw.
    // Free it in a finally so every exit path releases it.
    try
    {
        if ((media.formatType != FormatType.WaveEx) || (media.formatPtr == IntPtr.Zero))
        {
            throw new NotSupportedException("Unknown Grabber Audio Format");
        }

        // Marshal the WAVEFORMATEX out of the unmanaged format block.
        WaveFormatEx infoHeader = (WaveFormatEx)Marshal.PtrToStructure(media.formatPtr, typeof(WaveFormatEx));
        m_Channels = infoHeader.nChannels;
        m_SampleRate = infoHeader.nSamplesPerSec;
        m_BitsPerSample = infoHeader.wBitsPerSample;
    }
    finally
    {
        DsUtils.FreeAMMediaType(media);
    }
}
/// <summary> Configures the sample grabber for audio capture via BufferCB. </summary>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Only the major type is pinned; sub/format type are left open so the
    // graph can negotiate the concrete audio format.
    AMMediaType audioType = new AMMediaType
    {
        majorType = MediaType.Audio
    };

    int hr = sampGrabber.SetMediaType(audioType);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(audioType);

    // Configure the samplegrabber: 1 = buffer callback (0 = sample callback).
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary> Shut down capture </summary>
/// <remarks>
/// Frees the unmanaged buffer, stops the graph, and releases all COM objects.
/// Each risky step is wrapped so one failure does not prevent the rest of the
/// teardown from running.
/// </remarks>
private void CloseInterfaces()
{
    int hr;

    // Free the unmanaged sample buffer, ignoring failures during teardown.
    try
    {
        if (m_handle != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(m_handle);
            m_handle = IntPtr.Zero;
        }
    }
    catch (Exception)
    {
    }

    try
    {
        if (mediaControl != null)
        {
            // Stop the graph
            hr = mediaControl.Stop();
            mediaControl = null;
        }
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex);
    }

#if DEBUG
    // Remove the debug-only Running Object Table entry.
    if (m_rot != null)
    {
        m_rot.Dispose();
    }
#endif

    // These are interfaces on the graph — just drop the references.
    if (this.mediaSeeking != null)
        this.mediaSeeking = null;
    if (this.frameStep != null)
        this.frameStep = null;

    // Explicitly release the COM objects this class owns.
    if (this.sampleGrabber != null)
    {
        Marshal.ReleaseComObject(this.sampleGrabber);
        this.sampleGrabber = null;
    }
    if (nullrenderer != null)
    {
        Marshal.ReleaseComObject(nullrenderer);
        nullrenderer = null;
    }
    if (graphBuilder != null)
    {
        Marshal.ReleaseComObject(graphBuilder);
        graphBuilder = null;
    }

    GC.Collect();
}
/// <summary>
/// Closes and releases all used interfaces.
/// </summary>
/// <remarks>
/// Renderer first, then the graph, then the individual filters. Interfaces
/// obtained from a released object (WindowlessCtrl, MixerBitmap, MediaControl,
/// SampleGrabberFilter) are only nulled, not released separately.
/// </remarks>
public void CloseInterfaces()
{
    if (VMRenderer != null)
    {
        Marshal.ReleaseComObject(VMRenderer);
        VMRenderer = null;
        // Interfaces on the renderer — just drop the references.
        WindowlessCtrl = null;
        MixerBitmap = null;
    }

    if (FilterGraph != null)
    {
        Marshal.ReleaseComObject(FilterGraph);
        FilterGraph = null;
        // MediaControl is an interface on the graph.
        MediaControl = null;
    }

    if (SmartTee != null)
    {
        Marshal.ReleaseComObject(SmartTee);
        SmartTee = null;
    }

    if (SampleGrabber != null)
    {
        Marshal.ReleaseComObject(SampleGrabber);
        SampleGrabber = null;
        SampleGrabberFilter = null;
    }

    if (CaptureFilter != null)
    {
        Marshal.ReleaseComObject(CaptureFilter);
        CaptureFilter = null;
    }

    if (Crossbar != null)
    {
        Marshal.ReleaseComObject(Crossbar);
        Crossbar = null;
    }
}
// Shut down capture
//
// Transitions the graph state machine to Exiting, wakes any waiting worker
// thread, stops the graph, and releases the COM objects.
private void CloseInterfaces()
{
    int hr;

    // NOTE(review): lock (this) is an anti-pattern (external code can take the
    // same lock); consider a private lock object.
    lock (this)
    {
        if (m_State != GraphState.Exiting)
        {
            m_State = GraphState.Exiting;

            // Release the thread (if the thread was started)
            if (m_mre != null)
            {
                m_mre.Set();
            }
        }

        if (m_mediaCtrl != null)
        {
            // Stop the graph
            hr = m_mediaCtrl.Stop();
            m_mediaCtrl = null;
        }

        if (m_sampGrabber != null)
        {
            Marshal.ReleaseComObject(m_sampGrabber);
            m_sampGrabber = null;
        }

        if (m_FilterGraph != null)
        {
            Marshal.ReleaseComObject(m_FilterGraph);
            m_FilterGraph = null;
        }
    }
    GC.Collect();
}
/// <summary> Configures the sample grabber for continuous RGB24 buffer capture. </summary>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Request uncompressed Video/RGB24 frames.
    AMMediaType format = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };

    // Continuous capture: no retained buffer, not one-shot.
    sampGrabber.SetBufferSamples(false);
    sampGrabber.SetOneShot(false);

    int hr = sampGrabber.SetMediaType(format);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(format);

    // Configure the samplegrabber: 1 = BufferCB callback.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Build the capture graph for grabber and renderer.
/// (Control to show video in, Filename to play)
/// </summary>
private void SetupGraph(string FileName)
{
    int hr;

    // Get the graphbuilder object
    m_FilterGraph = new FilterGraph() as IFilterGraph2;

    // Get a ICaptureGraphBuilder2 to help build the graph
    ICaptureGraphBuilder2 icgb2 = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;

    try
    {
        // Link the ICaptureGraphBuilder2 to the IFilterGraph2
        hr = icgb2.SetFiltergraph(m_FilterGraph);
        DsError.ThrowExceptionForHR(hr);

        // Add the filters necessary to render the file. This function will
        // work with a number of different file types.
        IBaseFilter sourceFilter = null;
        hr = m_FilterGraph.AddSourceFilter(FileName, FileName, out sourceFilter);
        DsError.ThrowExceptionForHR(hr);

        // Get the SampleGrabber interface
        m_sampGrabber = (ISampleGrabber)new SampleGrabber();
        IBaseFilter baseGrabFlt = (IBaseFilter)m_sampGrabber;

        // Configure the Sample Grabber
        ConfigureSampleGrabber(m_sampGrabber);

        // Add it to the filter
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        // Add the null renderer to the graph (frames are consumed via the
        // grabber callback, nothing is displayed).
        IBaseFilter nullrenderer = new NullRenderer() as IBaseFilter;
        hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer");
        DsError.ThrowExceptionForHR(hr);

        // Connect the pieces together, use the default renderer
        hr = icgb2.RenderStream(null, null, sourceFilter, baseGrabFlt, nullrenderer);
        DsError.ThrowExceptionForHR(hr);

        // Now that the graph is built, read the dimensions of the bitmaps we'll be getting
        SaveSizeInfo(m_sampGrabber);

        // Grab some other interfaces
        m_mediaEvent = m_FilterGraph as IMediaEvent;
        m_mediaCtrl = m_FilterGraph as IMediaControl;

        // NOTE(review): sourceFilter's COM reference is never released here;
        // consider Marshal.ReleaseComObject(sourceFilter) after RenderStream.
    }
    finally
    {
        if (icgb2 != null)
        {
            Marshal.ReleaseComObject(icgb2);
            icgb2 = null;
        }
    }

#if DEBUG
    // Double check to make sure we aren't releasing something
    // important.
    GC.Collect();
    GC.WaitForPendingFinalizers();
#endif
}
/// <summary>
/// Configures the DirectShow graph to play the selected video capture
/// device with the selected parameters
/// </summary>
/// <remarks>
/// Builds: capture device -> (optional sample grabber) -> VMR9 preview, with an
/// optional ASF file sink plus the first available audio device when a file
/// name is set. On any failure all resources are freed and MediaFailed is
/// raised; on success MediaOpened is raised.
/// </remarks>
private void SetupGraph()
{
    /* Clean up any messes left behind */
    FreeResources();

    try
    {
        /* Create a new graph */
        m_graph = (IGraphBuilder)new FilterGraphNoThread();

#if DEBUG
        m_rotEntry = new DsROTEntry(m_graph);
#endif

        /* Create a capture graph builder to help
         * with rendering a capture graph */
        var graphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

        /* Set our filter graph to the capture graph */
        int hr = graphBuilder.SetFiltergraph(m_graph);
        DsError.ThrowExceptionForHR(hr);

        /* Add our capture device source to the graph */
        if (m_videoCaptureSourceChanged)
        {
            m_captureDevice = AddFilterByName(m_graph, FilterCategory.VideoInputDevice, VideoCaptureSource);
            m_videoCaptureSourceChanged = false;
        }
        else if (m_videoCaptureDeviceChanged)
        {
            m_captureDevice = AddFilterByDevicePath(m_graph, FilterCategory.VideoInputDevice, VideoCaptureDevice.DevicePath);
            m_videoCaptureDeviceChanged = false;
        }

        /* If we have a null capture device, we have an issue */
        if (m_captureDevice == null)
            throw new Exception(string.Format("Capture device {0} not found or could not be created", VideoCaptureSource));

        if (UseYuv && !EnableSampleGrabbing)
        {
            /* Configure the video output pin with our parameters and if it fails
             * then just use the default media subtype*/
            if (!SetVideoCaptureParameters(graphBuilder, m_captureDevice, MediaSubType.YUY2))
                SetVideoCaptureParameters(graphBuilder, m_captureDevice, Guid.Empty);
        }
        else
            /* Configure the video output pin with our parameters */
            SetVideoCaptureParameters(graphBuilder, m_captureDevice, Guid.Empty);

        var rendererType = VideoRendererType.VideoMixingRenderer9;

        /* Creates a video renderer and register the allocator with the base class */
        m_renderer = CreateVideoRenderer(rendererType, m_graph, 1);

        if (rendererType == VideoRendererType.VideoMixingRenderer9)
        {
            var mixer = m_renderer as IVMRMixerControl9;

            if (mixer != null && !EnableSampleGrabbing && UseYuv)
            {
                VMR9MixerPrefs dwPrefs;
                mixer.GetMixingPrefs(out dwPrefs);
                dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;

                /* Prefer YUV */
                mixer.SetMixingPrefs(dwPrefs);
            }
        }

        if (EnableSampleGrabbing)
        {
            m_sampleGrabber = (ISampleGrabber)new SampleGrabber();
            SetupSampleGrabber(m_sampleGrabber);
            hr = m_graph.AddFilter(m_sampleGrabber as IBaseFilter, "SampleGrabber");
            DsError.ThrowExceptionForHR(hr);
        }

        IBaseFilter mux = null;
        IFileSinkFilter sink = null;

        // Optional recording path: ASF file sink plus audio from the first device.
        if (!string.IsNullOrEmpty(this.m_fileName))
        {
            hr = graphBuilder.SetOutputFileName(MediaSubType.Asf, this.m_fileName, out mux, out sink);
            DsError.ThrowExceptionForHR(hr);

            hr = graphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, m_captureDevice, null, mux);
            DsError.ThrowExceptionForHR(hr);

            // use the first audio device
            var audioDevices = DsDevice.GetDevicesOfCat(FilterCategory.AudioInputDevice);

            if (audioDevices.Length > 0)
            {
                var audioDevice = AddFilterByDevicePath(m_graph, FilterCategory.AudioInputDevice, audioDevices[0].DevicePath);

                hr = graphBuilder.RenderStream(PinCategory.Capture, MediaType.Audio, audioDevice, null, mux);
                DsError.ThrowExceptionForHR(hr);
            }
        }

        hr = graphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, m_captureDevice, null, m_renderer);
        DsError.ThrowExceptionForHR(hr);

        /* Register the filter graph
         * with the base classes */
        SetupFilterGraph(m_graph);

        /* Sets the NaturalVideoWidth/Height */
        SetNativePixelSizes(m_renderer);

        HasVideo = true;

        /* Make sure we Release() this COM reference */
        if (mux != null)
        {
            Marshal.ReleaseComObject(mux);
        }
        if (sink != null)
        {
            Marshal.ReleaseComObject(sink);
        }

        Marshal.ReleaseComObject(graphBuilder);
    }
    catch (Exception ex)
    {
        /* Something got fuct up */
        FreeResources();
        InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
    }

    /* Success */
    InvokeMediaOpened();
}
/// <summary>
/// Reads the media type negotiated on the sample grabber and stores the frame
/// geometry (width, height, stride) and the average time per frame.
/// </summary>
/// <param name="sampGrabber">Connected sample grabber to query.</param>
/// <exception cref="NotSupportedException">The connected format is not VIDEOINFO.</exception>
private void SaveSizeInfo(ISampleGrabber sampGrabber)
{
    // Get the media type from the SampleGrabber
    AMMediaType media = new AMMediaType();
    int hr = sampGrabber.GetConnectedMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    // Fix: the unmanaged format block was leaked when the format check threw.
    // Free it in a finally so every exit path releases it.
    try
    {
        if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
        {
            throw new NotSupportedException("Unknown Grabber Media Format");
        }

        // Grab the size info from the VIDEOINFOHEADER.
        VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        m_videoWidth = videoInfoHeader.BmiHeader.Width;
        m_videoHeight = videoInfoHeader.BmiHeader.Height;
        // Bytes per row = width * bytes-per-pixel.
        m_stride = m_videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);
        m_avgtimeperframe = videoInfoHeader.AvgTimePerFrame;
    }
    finally
    {
        DsUtils.FreeAMMediaType(media);
    }
}
/// <summary>
/// Releases all capture resources held by this instance. (Original: 解放 = "release")
/// </summary>
public virtual void Dispose()
{
    // Best-effort stop; failures during teardown are deliberately ignored.
    try
    {
        Stop();
    }
    catch (Exception)
    {
    }

    DeviceName = "";

    // Reset the callback sink and detach the frame-notification handler
    // so no further frames are delivered.
    SampleGrabberCB.FrameSize = new Size(0, 0);
    SampleGrabberCB.Notify -= SampleGrabberCB_Notify;

    // Drop filter references (no explicit COM release for these here).
    CaptureFilter = null;
    CaptureOutPin = null;
    SampleGrabber = null;
    Renderer = null;
    Mux = null;
    Sync = null;

    // Explicitly release the COM builder objects this class owns.
    if (CaptureBuilder != null)
        Marshal.ReleaseComObject(CaptureBuilder);
    CaptureBuilder = null;

    if (GraphBuilder != null)
        Marshal.ReleaseComObject(GraphBuilder);
    GraphBuilder = null;
}
/// <summary>
/// Builds the capture graph. (Original: グラフの生成 = "create the graph")
/// </summary>
/// <param name="output_file">Output file path (出力ファイル); when empty the
/// stream is rendered to a null renderer instead of a file.</param>
public virtual void Setup(string output_file)
{
    this.Dispose();
    try
    {
        CxDSCameraParam param = this.Param;

        // Graph builder.
        // CoCreateInstance
        GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));

        #region Add filters.
        // Video capture (input) filter.
        IBaseFilter capture = CreateVideoCapture(param);
        if (capture == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(capture, "CaptureFilter");
        IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        this.CaptureFilter = capture;
        this.CaptureOutPin = capture_out;

        // Sample grabber.
        IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
        if (grabber == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(grabber, "SampleGrabber");
        this.SampleGrabber = (ISampleGrabber)grabber;
        #endregion

        #region Capture builder:
        {
            int hr = 0;
            CaptureBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_CaptureGraphBuilder2)));
            hr = CaptureBuilder.SetFiltergraph(GraphBuilder);

            if (string.IsNullOrEmpty(output_file))
            {
                // Null renderer (no file output requested).
                IBaseFilter renderer = null;
                renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
                if (renderer == null)
                    throw new System.IO.IOException();
                this.GraphBuilder.AddFilter(renderer, "Renderer");
                this.Renderer = renderer;

#if true
                // Use ICaptureGraphBuilder2.RenderStream instead of IGraphBuilder.Connect.
                // fig) [capture]-out->-in-[sample grabber]-out->-in-[null render]
                hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, renderer);
#else
                // Get the pins.
                IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
                IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);

                // Connect the pins.
                GraphBuilder.Connect(capture_out, grabber_in);
                GraphBuilder.Connect(grabber_out, renderer_in);

                // Store the pins.
                //SampleGrabberInPin = grabber_in;
                //SampleGrabberOutPin = grabber_out;
                //RendererInPin = renderer_in;
#endif
            }
            else
            {
                // File output: AVI mux + file writer via SetOutputFileName.
                IBaseFilter mux = null;
                IFileSinkFilter sync = null;
                hr = CaptureBuilder.SetOutputFileName(new Guid(GUID.MEDIASUBTYPE_Avi), output_file, ref mux, ref sync);
                hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, mux);
                this.Mux = mux;
                this.Sync = sync;
            }
        }
        #endregion

        #region Store: frame size.
        VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
        this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
        this.SampleGrabberCB.FrameSize = new Size(
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
            );
        #endregion

        #region Store: device name.
        try
        {
            if (string.IsNullOrEmpty(param.FilterInfo.Name) == false)
            {
                this.DeviceName = param.FilterInfo.Name;
            }
            else
            {
                // Fall back to looking the device up by index in the
                // video-input-device category.
                int filter_index = param.FilterInfo.Index;
                List<DSLab.CxDSFilterInfo> filters = DSLab.Axi.GetFilterList(DSLab.GUID.CLSID_VideoInputDeviceCategory);
                if (0 <= filter_index && filter_index < filters.Count)
                {
                    this.DeviceName = filters[filter_index].Name;
                }
            }
        }
        catch (System.Exception)
        {
            this.DeviceName = "";
        }
        #endregion

        // DEBUG
#if DEBUG
        DebugPrint(this.GraphBuilder);
#endif
    }
    catch (Exception ex)
    {
        // Tear down anything partially built, then surface the failure.
        this.Dispose();
        throw new DSLab.CxDSException(ex);
    }
}
/// <summary> Configures the sample grabber for RGB24 video with a buffer callback. </summary>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Set the media type to Video/RBG24
    AMMediaType requested = new AMMediaType();
    requested.majorType = MediaType.Video;
    requested.subType = MediaSubType.RGB24;
    requested.formatType = FormatType.VideoInfo;

    int hr = sampGrabber.SetMediaType(requested);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(requested);

    // Configure the samplegrabber: 1 selects the BufferCB callback.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Builds the file-playback graph. (Original: グラフの生成 = "create the graph")
/// </summary>
/// <remarks>
/// Graph: file source -> sample grabber -> null renderer. Also extracts the
/// IMediaSeeking interface for seek support.
/// </remarks>
public virtual void Setup()
{
    this.Dispose();
    try
    {
        // Graph.
        // CoCreateInstance
        GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));

        #region Add filters.
        // File source.
        IBaseFilter capture = null;
        GraphBuilder.AddSourceFilter(SourceFile, "CaptureFilter", ref capture);
        if (capture == null)
            throw new System.IO.IOException();

#if false
        // DMO wrapper filter.
        // https://msdn.microsoft.com/ja-jp/library/cc371140.aspx
        IBaseFilter dmo = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_DMOWrapperFilter)));
        if (dmo != null)
        {
            //// Mpeg4 Decoder DMO
            //// F371728A-6052-4D47-827C-D039335DFE0A
            //// 4A69B442-28BE-4991-969C-B500ADF5D8A8
            //// mpg4decd.dll [C:\Windows\System32, C:\Windows\SysWOW64]

            var idmo = (IDMOWrapperFilter)dmo;
            idmo.Init(new Guid("F371728A-6052-4D47-827C-D039335DFE0A"), new Guid("4A69B442-28BE-4991-969C-B500ADF5D8A8"));
            idmo = null;

            this.GraphBuilder.AddFilter(dmo, "Mpeg4 Decoder DMO");
        }
#endif

#if false
        // Avi Splitter
        IBaseFilter splitter = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVISplitter)));
        if (splitter == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(splitter, "Avi Splitter");

        // Avi Decompressor
        IBaseFilter decompressor = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVIDec)));
        if (decompressor == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(decompressor, "Avi Decompressor");
#endif

        // Sample grabber.
        IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
        if (grabber == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(grabber, "SampleGrabber");

        // Null renderer (frames are consumed via the grabber callback).
        IBaseFilter renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
        if (renderer == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(renderer, "Renderer");
        #endregion

        #region Get the pins.
        IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
        IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);
        #endregion

        #region Connect the pins.
        GraphBuilder.Connect(capture_out, grabber_in);
        GraphBuilder.Connect(grabber_out, renderer_in);
        #endregion

        #region Store: interfaces.
        CaptureFilter = capture;
        CaptureOutPin = capture_out;
        SampleGrabber = (ISampleGrabber)grabber;
        SampleGrabberInPin = grabber_in;
        SampleGrabberOutPin = grabber_out;
        Renderer = renderer;
        RendererInPin = renderer_in;
        #endregion

        #region Store: frame size.
        VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
        this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
        this.SampleGrabberCB.FrameSize = new Size(
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
            );
        #endregion

        #region Extract interfaces:
        {
            DSLab.IGraphBuilder graph = this.GraphBuilder;
            DSLab.IEnumFilters filters = null;
            DSLab.IBaseFilter filter = null;
            int fetched = 0;
            int hr = graph.EnumFilters(ref filters);
            while (filters.Next(1, ref filter, ref fetched) == (int)DSLab.HRESULT.S_OK)
            {
                if (fetched == 0)
                    break;
                if (filter is DSLab.IMediaSeeking)
                {
                    // Keep this filter: it provides seek support.
                    Seeking = (DSLab.IMediaSeeking)filter;
                }
                else
                {
                    // Release the filter.
                    Marshal.ReleaseComObject(filter);
                    filter = null;
                }
            }
            // Release the enumerator.
            Marshal.ReleaseComObject(filters);
        }
        #endregion

        // DEBUG
#if DEBUG
        DebugPrint(this.GraphBuilder);
#endif
    }
    catch (Exception ex)
    {
        throw new DSLab.CxDSException(ex);
    }
}
/// <summary>
/// Releases the capture device
/// </summary>
private void Release()
{
    // Stop the thread
    // NOTE(review): only the reference is dropped here; the worker thread is
    // assumed to exit on its own (e.g. via the stop signal) — confirm with callers.
    Worker = null;

    // Clear the event
    if (StopSignal != null)
    {
        StopSignal.Close();
        StopSignal = null;
    }

    // Clean up: drop all graph-related references so the COM objects can be collected.
    Graph = null;
    SourceObject = null;
    GrabberObject = null;
    Grabber = null;
    CapGrabber = null;
    Control = null;
}