// Builds a DirectShow filter graph that decodes an AVI file and exposes
// RGB32 frames through a SampleGrabber filter, with no on-screen window.
// <param name="filename">Path of the video file to render.</param>
// <param name="playSpeed">Playback rate; divided by 20.0 before SetRate.</param>
public CAviDS(string filename, double playSpeed)
{
    builder = new FilterGraph() as IGraphBuilder;
    grabber = new SampleGrabber() as ISampleGrabber;
    // Request decompressed RGB32 video on the grabber's input pin.
    mediaType = new AMMediaType();
    mediaType.majorType = MediaType.Video;
    mediaType.subType = MediaSubType.RGB32;
    mediaType.formatType = FormatType.VideoInfo;
    DsError.ThrowExceptionForHR(grabber.SetMediaType(mediaType));
    DsError.ThrowExceptionForHR(builder.AddFilter(grabber as IBaseFilter, "Sample Grabber(DTXMania)"));
    DsError.ThrowExceptionForHR(builder.RenderFile(filename, null));
    // Terminate the grabber's output with a null renderer so no real
    // video renderer (and no window) is required.
    CDirectShow.ConnectNullRendererFromSampleGrabber(builder, grabber as IBaseFilter);
    if (builder is IVideoWindow videoWindow)
    {
        videoWindow.put_AutoShow(OABool.False);
    }
    // Read back the negotiated format to learn the real frame dimensions.
    DsError.ThrowExceptionForHR(grabber.GetConnectedMediaType(mediaType));
    videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));
    nWidth = videoInfo.BmiHeader.Width;
    nHeight = videoInfo.BmiHeader.Height;
    seeker = builder as IMediaSeeking;
    DsError.ThrowExceptionForHR(seeker.GetDuration(out nMediaLength));
    // NOTE(review): playSpeed is scaled by 1/20 — presumably callers pass a
    // value where 20 means normal speed; confirm against the call sites.
    DsError.ThrowExceptionForHR(seeker.SetRate(playSpeed / 20.0));
    control = builder as IMediaControl;
    filter = builder as IMediaFilter;
    grabber.SetBufferSamples(BufferThem: true);
    // Prime the graph: run then pause so the first frame is decoded and ready.
    Run();
    Pause();
    bPlaying = false;
    bPause = false;
    // NOTE(review): mediaType (and its formatPtr) is never freed in this
    // method — verify DsUtils.FreeAMMediaType is called in Dispose, else it leaks.
}
/// <summary>
/// Configures the sample grabber's accepted media type and registers a
/// buffer callback for incoming samples.
/// </summary>
/// <param name="sampleGrabber">Grabber filter to configure.</param>
/// <param name="subType">Desired video subtype; MediaSubType.Null selects RGB24.</param>
/// <param name="grabberCallback">Callback receiving frames via BufferCB.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampleGrabber, Guid subType, ISampleGrabberCB grabberCallback)
{
    // WORKAROUND: a plain USB web camera needs RGB24 when no subtype was requested.
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = (subType != MediaSubType.Null) ? subType : MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };

    int hr = sampleGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // 1 selects ISampleGrabberCB.BufferCB (0 would select SampleCB).
    hr = sampleGrabber.SetCallback(grabberCallback, 1);
    DsError.ThrowExceptionForHR(hr);
}
// OK handler: translate the selected combo-box names back into their GUID
// values via reflection over the MediaType / MediaSubType constant holders,
// push the resulting media type to the grabber and stream config, then close.
private void OnOK(object sender, EventArgs e)
{
    var mt = new AMMediaType();
    string strMajor = comboMajorType.SelectedItem.ToString();
    string strSub = comboSubType.SelectedItem.ToString();

    // Look the constants up by field name; leave the GUID at its default
    // when no field with that name exists (same outcome as before).
    FieldInfo majorField = typeof(MediaType).GetField(strMajor);
    if (majorField != null)
    {
        mt.majorType = (Guid)majorField.GetValue(null);
    }
    FieldInfo subField = typeof(MediaSubType).GetField(strSub);
    if (subField != null)
    {
        mt.subType = (Guid)subField.GetValue(null);
    }

    // Apply to whichever interfaces are available.
    if (sampleGrabber != null)
    {
        sampleGrabber.SetMediaType(mt);
    }
    if (streamConfig != null)
    {
        streamConfig.SetFormat(mt);
    }

    DsUtils.FreeAMMediaType(mt);
    Close();
}
// Configures the grabber: RGB8 for single-byte pixels, RGB24 otherwise,
// with frames delivered through ISampleGrabberCB.BufferCB.
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = (bytes_per_pixel == 1) ? MediaSubType.RGB8 : MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };

    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // 1 routes samples through BufferCB.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Configures the grabber mode (media type, callback wiring, optional buffering).
/// </summary>
public void ConfigureMode()
{
    // The grabber should only accept decompressed Video/RGB24 samples.
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };
    int hr = m_SampleGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);
    media = null;

    // SnapshotNextFrame path. ISampleGrabber::SetCallback is deprecated but
    // remains the documented mechanism; 1 selects BufferCB.
    // http://msdn.microsoft.com/en-us/library/windows/desktop/dd376992%28v=vs.85%29.aspx
    hr = m_SampleGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);

    // SnapshotCurrentFrame path: optionally keep a copy of the latest buffer.
    // SetBufferSamples is likewise deprecated:
    // http://msdn.microsoft.com/en-us/windows/dd376991
    if (m_bBufferSamplesOfCurrentFrame)
    {
        hr = m_SampleGrabber.SetBufferSamples(true);
        DsError.ThrowExceptionForHR(hr);
    }
}
/// <summary>
/// Creates the sample grabber.
/// </summary>
/// <returns>
/// The created sample grabber, or null if instantiation failed.
/// </returns>
internal virtual ISampleGrabber CreateSampleGrabber()
{
    ISampleGrabber grabber = (ISampleGrabber)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_SampleGrabber)));
    if (grabber != null)
    {
        // Input format for the sample grabber filter.
        // SetMediaType specifies the required media type:
        // http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
        // * Not every member of AM_MEDIA_TYPE needs to be filled in.
        // * By default the sample grabber has no preferred media type.
        // * Call this before building the filter graph so the grabber is
        //   connected to the correct filters.
        // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
        // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
        AM_MEDIA_TYPE media_type = new AM_MEDIA_TYPE();
        media_type.majortype = new Guid(GUID.MEDIATYPE_Video);     // Video
        media_type.subtype = new Guid(GUID.MEDIASUBTYPE_RGB24);    // RGB24
        media_type.formattype = new Guid(GUID.FORMAT_VideoInfo);   // VideoInfo
        grabber.SetMediaType(media_type);
        grabber.SetBufferSamples(false);   // Sample copying disabled.
        grabber.SetOneShot(false);         // One-shot mode disabled.
        //grabber.SetCallback(this.SampleGrabberCB, 0);  // 0: would request SampleCB.
        grabber.SetCallback(this.SampleGrabberCB, 1);    // 1: request BufferCB.
        SampleGrabberCB.Notify += SampleGrabberCB_Notify;
    }
    return(grabber);
}
/// <summary>Set the options on the sample grabber.</summary>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Request decompressed Video/RGB24 frames.
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };
    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // Deliver frames via BufferCB (flag 1) rather than SampleCB.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);

    // Buffering / one-shot modes intentionally left at their defaults:
    //hr = this.sampleGrabber.SetBufferSamples(true);
    //DsError.ThrowExceptionForHR(hr);
    //hr = this.sampleGrabber.SetOneShot(true);
    //DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Configure the sample grabber.
/// </summary>
/// <param name="sampGrabber">The sample grabber.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Restrict input to decompressed Video/RGB24 samples.
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };
    int hr = sampGrabber.SetMediaType(media);
    if (hr < 0)
    {
        Marshal.ThrowExceptionForHR(hr);
    }
    DsUtils.FreeAMMediaType(media);

    // Deliver frames through BufferCB (flag 1).
    hr = sampGrabber.SetCallback(this, 1);
    if (hr < 0)
    {
        Marshal.ThrowExceptionForHR(hr);
    }
}
/// <summary>
/// Creates a new Video Player. Automatically creates the required Texture2D
/// on the specified GraphicsDevice.
/// </summary>
/// <param name="FileName">The video file to open.</param>
/// <param name="graphicsDevice">XNA graphics device used for the output texture.</param>
public VideoPlayer(string FileName, GraphicsDevice graphicsDevice)
{
    try
    {
        currentState = VideoState.Stopped;
        filename = FileName;
        InitInterfaces();

        // Insert a SampleGrabber into the graph so we can copy decoded frames.
        SampleGrabber sg = new SampleGrabber();
        ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
        DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));

        // Ask for decompressed Video/RGB24 frames.
        AMMediaType mt = new AMMediaType();
        mt.majorType = MEDIATYPE_Video;   // Video
        mt.subType = MEDIASUBTYPE_RGB24;  // RGB24
        mt.formatType = FORMAT_VideoInfo; // VideoInfo
        DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));
        DsUtils.FreeAMMediaType(mt); // FIX: the requested media type was never freed (leaked).

        DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));
        DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
        DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
        DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

        // Keep DirectShow from popping up its own video window.
        IVideoWindow pVideoWindow = (IVideoWindow)gb;
        DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

        // Read the negotiated format for the true frame dimensions and timing.
        AMMediaType MediaType = new AMMediaType();
        DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
        VideoInfoHeader pVideoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);
        videoHeight = pVideoHeader.BmiHeader.Height;
        videoWidth = pVideoHeader.BmiHeader.Width;
        avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
        bitRate = pVideoHeader.BitRate;
        DsUtils.FreeAMMediaType(MediaType); // FIX: the connected media type was never freed.

        DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));

        videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
        bgrData = new byte[(videoHeight * videoWidth) * 3];         // BGR24 format (3 bytes per pixel)
        outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color);
    }
    catch (Exception ex)
    {
        // FIX: preserve the original failure as InnerException instead of
        // discarding it (the old bare catch hid the real cause).
        throw new Exception("Unable to Load or Play the video file", ex);
    }
}
// Inserts an audio SampleGrabber (PCM) plus a NullRenderer into the graph and
// hooks a SampleCB callback. If the grabber cannot be connected to the source,
// it is removed again and audio grabbing is silently disabled (best-effort by
// design — do not convert the early return into a throw).
private void InitAudioGrabber(IBaseFilter sourceF)
{
    audioGrabberFilter = new SampleGrabber() as IBaseFilter;
    if (audioGrabberFilter == null)
    {
        throw new COMException("Cannot create SampleGrabber");
    }
    int hr = graph.AddFilter(audioGrabberFilter, "Audio Sample Grabber");
    DsError.ThrowExceptionForHR(hr);
    audioGrabber = audioGrabberFilter as ISampleGrabber;
    if (audioGrabber == null)
    {
        throw new COMException("Cannot obtain ISampleGrabber");
    }
    {
        // Accept raw PCM audio only.
        AMMediaType mt = new AMMediaType();
        mt.majorType = DirectShowLib.MediaType.Audio;
        mt.subType = DirectShowLib.MediaSubType.PCM;
        hr = audioGrabber.SetMediaType(mt);
        DsError.ThrowExceptionForHR(hr);
        DsUtils.FreeAMMediaType(mt);
    }
    hr = ConnectSampleGrabber(graph, sourceF, audioGrabberFilter);
    if (0 != hr)
    {
        // Cannot connect the audio grabber. Remove the filter from the graph
        // and continue without audio.
        hr = graph.RemoveFilter(audioGrabberFilter);
        DsError.ThrowExceptionForHR(hr);
        Util.ReleaseComObject(ref audioGrabberFilter);
        audioGrabber = null;
        return;
    }
    // Terminate the grabber's output with a NullRenderer so samples keep
    // flowing without an actual audio renderer.
    audioNullFilter = new NullRenderer() as IBaseFilter;
    if (audioNullFilter == null)
    {
        throw new COMException("Cannot create NullRenderer");
    }
    hr = graph.AddFilter(audioNullFilter, "Null Filter");
    DsError.ThrowExceptionForHR(hr);
    hr = Util.ConnectFilters(graph, audioGrabberFilter, audioNullFilter);
    DsError.ThrowExceptionForHR(hr);
    // Receive samples through ISampleGrabberCB.SampleCB.
    audioGrabberCB = new SampleGrabberCB();
    hr = audioGrabber.SetCallback(audioGrabberCB, (int)CBMethod.Sample);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Opens a video file through DirectShow, inserts a SampleGrabber (RGB24)
/// so frames can be copied, records the video's properties in Info, and
/// creates the output Texture2D on the given device.
/// </summary>
/// <param name="filename">The video file to open.</param>
/// <param name="graphicsDevice">XNA graphics device used for the output texture.</param>
public DSVideoPlayer(string filename, GraphicsDevice graphicsDevice)
{
    try
    {
        // Open DirectShow interfaces.
        InitInterfaces();
        Info = new DSVideoInfo();

        // Create a SampleGrabber filter and add it to the FilterGraph.
        SampleGrabber sg = new SampleGrabber();
        ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
        DsError.ThrowExceptionForHR(FG_GraphBuilder.AddFilter((IBaseFilter)sg, "Grabber"));

        // Request decompressed Video/RGB24 frames.
        AMMediaType mt = new AMMediaType();
        mt.majorType = DSVideoInfo.MEDIATYPE_Video;   // Video
        mt.subType = DSVideoInfo.MEDIASUBTYPE_RGB24;  // RGB24
        mt.formatType = DSVideoInfo.FORMAT_VideoInfo; // VideoInfo
        DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));
        DsUtils.FreeAMMediaType(mt); // FIX: the requested media type was never freed (leaked).

        // Construct the rest of the FilterGraph.
        DsError.ThrowExceptionForHR(FG_GraphBuilder.RenderFile(filename, null));
        Info.FileName = filename;

        // SampleGrabber properties: buffer samples, continuous, BufferCB callback.
        DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
        DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
        DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

        // Hide the default video window.
        IVideoWindow pVideoWindow = (IVideoWindow)FG_GraphBuilder;
        DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

        // Capture the negotiated video information.
        AMMediaType MediaType = new AMMediaType();
        DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
        VideoInfoHeader pVideoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);
        Info.Height = pVideoHeader.BmiHeader.Height;
        Info.Width = pVideoHeader.BmiHeader.Width;
        Info.AvgTimePerFrame = pVideoHeader.AvgTimePerFrame;
        Info.BitRate = pVideoHeader.BitRate;
        DsUtils.FreeAMMediaType(MediaType); // FIX: the connected media type was never freed.

        DsError.ThrowExceptionForHR(FG_MediaSeeking.GetDuration(out Info.Duration));

        // Byte arrays holding one frame in each pixel layout.
        videoFrameBytes = new byte[(Info.Height * Info.Width) * 4]; // RGBA format (4 bytes per pixel)
        bgrData = new byte[(Info.Height * Info.Width) * 3];         // BGR24 format (3 bytes per pixel)

        // Output texture matching the video dimensions.
        outputFrame = new Texture2D(graphicsDevice, Info.Width, Info.Height, 1, TextureUsage.None, SurfaceFormat.Color);
    }
    catch (Exception ex)
    {
        // FIX: keep the original exception as InnerException for diagnosis.
        throw new Exception("不能加载或播放该视频: " + ex.Message, ex);
    }
}
// Builds the capture graph for the first enumerated video input device:
// allocates frame buffers, wires a SampleGrabber (RGB24, BufferCB) into the
// preview stream, starts the update thread, and runs the graph.
protected void Initialize()
{
    FrameReady = false;

    // Frame buffers in several layouts (texture, BGR/RGBA, grayscale pyramid).
    frame = new Texture2D(GraphicsDevice, Width, Height, false, SurfaceFormat.Color);
    FrameBGR = new byte[(Width * Height) * 3];
    FrameRGBA = new byte[(Width * Height) * 4];
    FrameGrayscale = new byte[(Width * Height)];
    FrameHalfGrayscale = new byte[(Width / 2 * Height / 2)];
    FrameQuarterGrayscale = new byte[(Width / 4 * Height / 4)];

    GraphBuilder = (IGraphBuilder)new FilterGraph();
    CaptureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    MediaControl = (IMediaControl)GraphBuilder;
    CaptureGraphBuilder.SetFiltergraph(GraphBuilder);

    // Locate the video input device through the system device enumerator.
    object VideoInputObject = null;
    IBaseFilter VideoInput = null;
    IEnumMoniker classEnum;
    ICreateDevEnum devEnum = (ICreateDevEnum)new CreateDevEnum();
    devEnum.CreateClassEnumerator(FilterCategory.VideoInputDevice, out classEnum, 0);
    Marshal.ReleaseComObject(devEnum);
    if (classEnum != null)
    {
        IMoniker[] moniker = new IMoniker[1];
        if (classEnum.Next(moniker.Length, moniker, IntPtr.Zero) == DEVICE_ID)
        {
            Guid iid = typeof(IBaseFilter).GUID;
            moniker[0].BindToObject(null, null, ref iid, out VideoInputObject);
        }
        // FIX: when the enumerator yields no device moniker[0] stays null,
        // and Marshal.ReleaseComObject(null) throws ArgumentNullException.
        if (moniker[0] != null)
        {
            Marshal.ReleaseComObject(moniker[0]);
        }
        Marshal.ReleaseComObject(classEnum);
        VideoInput = (IBaseFilter)VideoInputObject;
    }

    if (VideoInput != null)
    {
        isRunning = true;

        // Grabber accepts Video/RGB24 and reports frames through BufferCB.
        SampleGrabber = new SampleGrabber() as ISampleGrabber;
        GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
        AMMediaType Type = new AMMediaType()
        {
            majorType = MediaType.Video,
            subType = MediaSubType.RGB24,
            formatType = FormatType.VideoInfo
        };
        SampleGrabber.SetMediaType(Type);
        GraphBuilder.AddFilter(VideoInput, "Camera");
        SampleGrabber.SetBufferSamples(false);
        SampleGrabber.SetOneShot(false);
        SampleGrabber.GetConnectedMediaType(new AMMediaType());
        SampleGrabber.SetCallback((ISampleGrabberCB)this, 1); // 1 = BufferCB
        CaptureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, VideoInput, null, SampleGrabber as IBaseFilter);

        // Pump frames on a background thread, then start the graph.
        UpdateThread = new Thread(UpdateBuffer);
        UpdateThread.Start();
        MediaControl.Run();
        Marshal.ReleaseComObject(VideoInput);
    }
}
// Builds the filter graph and adds a sample grabber restricted to RGB24 video.
// The media type is kept in the mt field (not freed here) for later use.
public MainForm()
{
    InitializeComponent();

    graphbuilder = (IGraphBuilder)new FilterGraph();
    samplegrabber = (ISampleGrabber)new SampleGrabber();
    graphbuilder.AddFilter((IBaseFilter)samplegrabber, "samplegrabber");

    mt = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };
    samplegrabber.SetMediaType(mt);

    PrintSeconds();
}
// Configure the sample grabber to accept decompressed Video/RGB24 frames.
private static void ConfigSampleGrabber(ISampleGrabber sb)
{
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;

    // Hand the requested type to the ISampleGrabber interface, then release it.
    sb.SetMediaType(media);
    DsUtils.FreeAMMediaType(media);
}
/// <summary>
/// Configures the SampleGrabber: accept Video/RGB24 and report frames via BufferCB.
/// </summary>
/// <param name="sampGrabber">The grabber filter to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };
    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);
    media = null;

    // 1 routes samples through ISampleGrabberCB.BufferCB.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
// Selects the capture capability matching the requested width/height (falling
// back to stream 0 when nothing matches), applies it to the device, and
// re-uses the resulting media type (forced to RGB24) for the sample grabber.
public void SetResolution(int width, int height)
{
    this.width = width;
    this.height = height;

    // Get the stream-config interface of the capture pin.
    object o;
    CaptureGraphBuilder.FindInterface(PinCategory.Capture, MediaType.Video, VideoInput, typeof(IAMStreamConfig).GUID, out o);
    var videoStreamConfig = o as IAMStreamConfig;

    AMMediaType media;
    int iC = 0, iS = 0;
    videoStreamConfig.GetNumberOfCapabilities(out iC, out iS);
    IntPtr ptr = Marshal.AllocCoTaskMem(iS);
    int streamId = 0;
    var videoInfo = new VideoInfoHeader();
    availableVideoModes.Clear();

    // Walk every capability: collect valid modes and remember the stream id
    // of the first one matching the requested resolution.
    for (int i = 0; i < iC; i++)
    {
        videoStreamConfig.GetStreamCaps(i, out media, ptr);
        Marshal.PtrToStructure(media.formatPtr, videoInfo);
        DsUtils.FreeAMMediaType(media); // FIX: each iteration's AMMediaType was leaked.

        if (videoInfo.BmiHeader.Width != 0 && videoInfo.BmiHeader.Height != 0)
        {
            availableVideoModes.Add(new VideoMode(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height));
        }
        if (videoInfo.BmiHeader.Width != width || videoInfo.BmiHeader.Height != height)
        {
            continue;
        }
        streamId = i;
        selectedVideoMode = availableVideoModes.Last();
        break;
    }

    // Apply the chosen capability to the device.
    videoStreamConfig.GetStreamCaps(streamId, out media, ptr);
    Marshal.PtrToStructure(media.formatPtr, videoInfo);
    int hr = videoStreamConfig.SetFormat(media); // NOTE(review): hr is not checked — confirm this is intentional.
    Marshal.FreeCoTaskMem(ptr);

    // Reuse the capability's media type, forced to RGB24, for the grabber.
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;
    SampleGrabber.SetMediaType(media);
    DsUtils.FreeAMMediaType(media);
}
//
// Configure the SampleGrabber filter of the graph.
//
void ConfigSampleGrabber(ISampleGrabber sampGrabber)
{
    // Set the media type. This source works with the generic "stream" major
    // type; subtype/format are deliberately left open.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Stream;
    //media.subType = MediaSubType.WAVE;
    //media.formatType = FormatType.WaveEx;

    // FIX: configure the grabber that was passed in — the old code ignored
    // the 'sampGrabber' parameter and always touched the 'sg' field instead.
    // (Callers pass the same grabber today, but the parameter is the contract.)
    sampGrabber.SetMediaType(media);
    DsUtils.FreeAMMediaType(media);
    media = null;

    // Deliver samples through ISampleGrabberCB.BufferCB (flag 1).
    sampGrabber.SetCallback(this, 1);
}
// Builds the filter graph, configures a SampleGrabber for RGB24 video with a
// BufferCB callback, and populates the device combo box.
public CaptureForm()
{
    InitializeComponent();

    // Graph plus control/event interfaces all live on the one FilterGraph object.
    graph_builder = (IGraphBuilder)new FilterGraph();
    media_control = (IMediaControl)graph_builder;
    events = (IMediaEventEx)graph_builder;

    // Grabber accepts RGB24 video and reports frames through BufferCB (flag 1).
    grabber = (ISampleGrabber)new SampleGrabber();
    AMMediaType media_type = new AMMediaType();
    media_type.majorType = MediaType.Video;
    media_type.subType = MediaSubType.RGB24;
    grabber.SetMediaType( media_type );
    DsUtils.FreeAMMediaType(media_type); // FIX: the media type was never freed (leaked).
    grabber.SetCallback( this, 1 );

    cbDevices.Items.AddRange( GetDevices( FilterCategory.VideoInputDevice ) );
}
// Creates a SampleGrabber restricted to the given media type, continuous
// (not one-shot), without sample buffering, delivering frames to 'callback'
// through ISampleGrabberCB.BufferCB.
private static SampleGrabber CreateSampleGrabber(Guid majorType, Guid subType, Guid formatType, SampleGrabberGraph.SampleGrabberCallback.BufferCBEventHandler callback)
{
    SampleGrabber sampleGrabber = new SampleGrabber();
    ISampleGrabber grabber = sampleGrabber as ISampleGrabber;

    // Declare the media type the grabber should accept.
    AMMediaType media = new AMMediaType { majorType = majorType, subType = subType, formatType = formatType };
    grabber.SetMediaType(media);
    DsUtils.FreeAMMediaType(media); // FIX: the AMMediaType was never freed (leaked).

    grabber.SetBufferSamples(false);
    grabber.SetOneShot(false);
    grabber.SetCallback(new SampleGrabberCallback() { OnBuffer = callback }, 1); // 0 = Sample, 1 = Buffer
    return(sampleGrabber);
}
// Configures the grabber for Video/RGB24 and routes samples through SampleCB.
private void SetupSampleGrabber(ISampleGrabber sampleGrabber)
{
    var mediaType = new DirectShowLib.AMMediaType();
    mediaType.majorType = MediaType.Video;
    mediaType.subType = MediaSubType.RGB24;
    mediaType.formatType = FormatType.VideoInfo;

    int hr = sampleGrabber.SetMediaType(mediaType);
    DsUtils.FreeAMMediaType(mediaType);
    DsError.ThrowExceptionForHR(hr);

    // 0 selects ISampleGrabberCB.SampleCB (per-sample callback).
    hr = sampleGrabber.SetCallback(this, 0);
    DsError.ThrowExceptionForHR(hr);
}
// Configures the grabber for Video/RGB24; the caller chooses the callback
// route via bufferCB (0 = SampleCB, 1 = BufferCB).
private void ConfigureSampleGrabber(ISampleGrabber sampleGrabber, int bufferCB)
{
    var media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;

    int hr = sampleGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    hr = sampleGrabber.SetCallback(this, bufferCB);
    DsError.ThrowExceptionForHR(hr);
}
// Builds the filter graph, configures a SampleGrabber for RGB24 video with a
// BufferCB callback, and populates the device combo box.
public CaptureForm()
{
    InitializeComponent();

    // Graph plus control/event interfaces all live on the one FilterGraph object.
    graph_builder = (IGraphBuilder) new FilterGraph();
    media_control = (IMediaControl)graph_builder;
    events = (IMediaEventEx)graph_builder;

    // Grabber accepts RGB24 video and reports frames through BufferCB (flag 1).
    grabber = (ISampleGrabber) new SampleGrabber();
    AMMediaType media_type = new AMMediaType();
    media_type.majorType = MediaType.Video;
    media_type.subType = MediaSubType.RGB24;
    grabber.SetMediaType(media_type);
    DsUtils.FreeAMMediaType(media_type); // FIX: the media type was never freed (leaked).
    grabber.SetCallback(this, 1);

    cbDevices.Items.AddRange(GetDevices(FilterCategory.VideoInputDevice));
}
/// <summary>
/// Configures the sample grabber: accept Video/RGB24 and forward frames
/// to a GrabberCB instance through ISampleGrabberCB.BufferCB.
/// </summary>
protected void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;

    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // 1 = deliver through BufferCB.
    hr = sampGrabber.SetCallback(new GrabberCB(this), 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Configures the sample grabber for Video/RGB24 input with a BufferCB callback.
/// </summary>
/// <param name="sampGrabber">Grabber filter to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Only decompressed Video/RGB24 samples should reach the grabber.
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };
    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // 1 = deliver frames through ISampleGrabberCB.BufferCB.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
// Set the options on the sample grabber.
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Restrict the grabber to decompressed Video/RGB24 samples.
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };
    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // Keep a copy of the latest sample so it can be read on demand.
    hr = sampGrabber.SetBufferSamples(true);
    DsError.ThrowExceptionForHR(hr);
}
// Builds the capture graph: SampleGrabber (ARGB32, BufferCB) plus the camera
// source rendered on the preview pin, then releases the source's local ref.
private void InitializeCapture()
{
    graphBuilder = (IGraphBuilder) new FilterGraph();
    mediaControl = (IMediaControl)graphBuilder;
    captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
    DsError.ThrowExceptionForHR(hr);
    IBaseFilter videoInput = GetVideoInputObject();
    if (null != videoInput)
    {
        SetConfigurations(videoInput);
        sampleGrabber = new SampleGrabber() as ISampleGrabber;
        hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Render");
        DsError.ThrowExceptionForHR(hr);
        hr = graphBuilder.AddFilter(videoInput, "Camera");
        DsError.ThrowExceptionForHR(hr);
        // Accept decompressed Video/ARGB32 frames.
        AMMediaType type = new AMMediaType() { majorType = MediaType.Video, subType = MediaSubType.ARGB32, formatType = FormatType.VideoInfo };
        hr = sampleGrabber.SetMediaType(type);
        DsError.ThrowExceptionForHR(hr);
        DsUtils.FreeAMMediaType(type);
        // NOTE(review): the HRESULTs of the next four calls are not checked,
        // and GetConnectedMediaType's result is queried into a throwaway
        // AMMediaType and discarded — confirm both are intentional.
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.GetConnectedMediaType(new AMMediaType());
        sampleGrabber.SetCallback((ISampleGrabberCB)this, 1); // 1 = BufferCB
        hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, videoInput, null, sampleGrabber as IBaseFilter);
        DsError.ThrowExceptionForHR(hr);
        Marshal.ReleaseComObject(videoInput);
    }
}
/// <summary>
/// Configure the sample grabber filter: accept decompressed Video/RGB24
/// frames and deliver them through ISampleGrabberCB.BufferCB.
/// </summary>
/// <param name="grabber">Instance of sample grabber.</param>
private void ConfigureSampleGrabber(ISampleGrabber grabber)
{
    // Media type identifying what this filter accepts: video frames,
    // RGB24 because we expect decompressed data.
    AMMediaType mediaType = new AMMediaType();
    mediaType.majorType = MediaType.Video;
    mediaType.subType = MediaSubType.RGB24;
    mediaType.formatType = FormatType.VideoInfo;

    // FIX: the HRESULTs of SetMediaType/SetCallback were silently ignored;
    // surface COM failures like every other grabber-configuration path does.
    int hr = grabber.SetMediaType(mediaType);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(mediaType);
    mediaType = null;

    // 1 = call ISampleGrabberCB.BufferCB for each frame.
    hr = grabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
// Acquires every graph-level interface (all implemented by the one
// FilterGraph object), creates the VMR9 renderer and a sample grabber
// restricted to RGB24, and registers the graph-notification window message.
public void GetInterfaces()
{
    graph = (IGraphBuilder)(new FilterGraph());
    pGraphBuilder = (ICaptureGraphBuilder2)(new CaptureGraphBuilder2());
    mediaControl = (IMediaControl)graph;
    videoWindow = (IVideoWindow)graph;
    mediaEventEx = (IMediaEventEx)graph;
    renderFilter = (IBaseFilter) new VideoMixingRenderer9();

    pSampleGrabber = new SampleGrabber();
    i_grabber = pSampleGrabber as ISampleGrabber;
    var mediaType = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24
    };
    i_grabber.SetMediaType(mediaType);

    // Send graph notification messages to the control window.
    int hr = mediaEventEx.SetNotifyWindow(IntPtr.Zero, WM_GRAPHNOTIFY, IntPtr.Zero);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(mediaType);
}
/// <summary>
/// Set the options on the sample grabber.
/// </summary>
/// <param name="sampGrabber">The samp grabber.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Request Video/ARGB32 frames (RGB24 was used previously; now obsolete here).
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.ARGB32,
        formatType = FormatType.VideoInfo
    };
    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // Deliver frames via BufferCB (flag 1).
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
// Configures the grabber for audio with the subtype/format deliberately
// left open (WAVE/WaveEx were tried previously and remain disabled).
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    var media = new AMMediaType
    {
        majorType = MediaType.Audio
        /*subType = MediaSubType.WAVE,
          formatType = FormatType.WaveEx*/
    };
    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // 1 = buffer callback (0 would select the per-sample callback).
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>Set up the filter graph for grabbing snapshots.</summary>
public void EnableGrabbing()
{
    // An ICaptureGraphBuilder2 helps wire the sample grabber into m_filter_graph.
    ICaptureGraphBuilder2 icgb2 = null;
    try
    {
        icgb2 = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
        if (icgb2 == null)
        {
            throw new Exception("failed to create direct show CaptureGraphBuilder2");
        }
        DsError.ThrowExceptionForHR(icgb2.SetFiltergraph(m_filter_graph));

        // Create the grabber and restrict it to decompressed Video/RGB24.
        m_samp_grabber = (ISampleGrabber) new SampleGrabber();
        AMMediaType media = new AMMediaType
        {
            majorType = MediaType.Video,
            subType = MediaSubType.RGB24,
            formatType = FormatType.VideoInfo
        };
        try
        {
            DsError.ThrowExceptionForHR(m_samp_grabber.SetMediaType(media));
        }
        finally
        {
            DsUtils.FreeAMMediaType(media);
        }

        // Keep a copy of the most recent sample for snapshot reads.
        DsError.ThrowExceptionForHR(m_samp_grabber.SetBufferSamples(true));

        // Insert the grabber into the graph.
        IBaseFilter grab_filter = (IBaseFilter)m_samp_grabber;
        DsError.ThrowExceptionForHR(m_filter_graph.AddFilter(grab_filter, "DS.NET Grabber"));
    }
    finally
    {
        if (icgb2 != null)
        {
            Marshal.ReleaseComObject(icgb2);
        }
    }
}
// Builds the capture graph for the configured Camera: a SampleGrabber (RGB24,
// BufferCB) plus the camera source rendered on the preview pin, then starts a
// background update thread and runs the graph.
protected void Initialize()
{
    FrameReady = false;
    GraphBuilder = (IGraphBuilder) new FilterGraph();
    CaptureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    MediaControl = (IMediaControl)GraphBuilder;
    CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
    var videoInput = GetVideoInputObjectForCamera(Camera);
    if (videoInput != null)
    {
        isRunning = true;
        SampleGrabber = new SampleGrabber() as ISampleGrabber;
        GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
        // Accept decompressed Video/RGB24 frames.
        var Type = new AMMediaType { majorType = MediaType.Video, subType = MediaSubType.RGB24, formatType = FormatType.VideoInfo };
        SampleGrabber.SetMediaType(Type);
        GraphBuilder.AddFilter(videoInput, "Camera");
        SetCaptureFormat(videoInput);
        SampleGrabber.SetBufferSamples(false);
        SampleGrabber.SetOneShot(false);
        // NOTE(review): the connected media type is queried into a throwaway
        // AMMediaType and discarded — confirm this call is actually needed.
        SampleGrabber.GetConnectedMediaType(new AMMediaType());
        SampleGrabber.SetCallback(this, 1); // 1 = BufferCB
        CaptureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, videoInput, null, SampleGrabber as IBaseFilter);
        // Frame buffers sized from Width/Height (set before Initialize runs).
        frame = new Texture2D(GraphicsDevice, Width, Height, false, SurfaceFormat.Color);
        FrameBGR = new byte[Width * Height * 3];
        FrameRGBA = new byte[Width * Height * 4];
        UpdateThread = new Thread(UpdateBuffer);
        UpdateThread.IsBackground = true;
        UpdateThread.Start();
        MediaControl.Run();
        Marshal.ReleaseComObject(videoInput);
    }
}
// Worker-thread body: builds a source -> grabber graph (RGB32), reads the
// negotiated frame size into capGrabber, renders the grabber output, runs
// the graph until stopSignal is set, then drops all COM references.
void RunWorker()
{
    try
    {
        graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;
        sourceObject = FilterInfo.CreateFilter(deviceMoniker);
        grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        grabberObject = grabber as IBaseFilter;
        graph.AddFilter(sourceObject, "source");
        graph.AddFilter(grabberObject, "grabber");
        using (AMMediaType mediaType = new AMMediaType())
        {
            // Request Video/RGB32 from the grabber.
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            grabber.SetMediaType(mediaType);
            // Connect source output -> grabber input, then read back the
            // negotiated format to learn the frame dimensions.
            if (graph.Connect(sourceObject.GetPin(PinDirection.Output, 0), grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
            {
                if (grabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                    capGrabber.Width = header.BmiHeader.Width;
                    capGrabber.Height = header.BmiHeader.Height;
                }
            }
            graph.Render(grabberObject.GetPin(PinDirection.Output, 0));
            grabber.SetBufferSamples(false);
            grabber.SetOneShot(false);
            grabber.SetCallback(capGrabber, 1); // 1 = BufferCB
            // Suppress the default video window.
            IVideoWindow wnd = (IVideoWindow)graph;
            wnd.put_AutoShow(false);
            wnd = null;
            control = (IMediaControl)graph;
            control.Run();
            // Poll for the stop signal while the graph runs.
            while (!stopSignal.WaitOne(0, true))
            {
                Thread.Sleep(10);
            }
            control.StopWhenReady();
        }
    }
    catch (Exception ex)
    {
        // Best-effort capture loop: failures are only logged.
        System.Diagnostics.Debug.WriteLine(ex);
    }
    finally
    {
        // Drop all COM references so the RCWs can be collected.
        graph = null;
        sourceObject = null;
        grabberObject = null;
        grabber = null;
        capGrabber = null;
        control = null;
    }
}
/// <summary>
/// Builds the audio capture graph (capture device -> sample grabber),
/// negotiates the device's native PCM format, creates the
/// <c>DSAudioSampler</c> from that format and registers it as the
/// grabber's buffer callback. Errors are shown in a message box.
/// </summary>
public void CreateGraph()
{
    try
    {
        int result = 0;

        // Create the filter graph manager.
        graphBuilder = new FilterGraph() as IFilterGraph2;

        // Create the capture graph builder and attach it to the graph.
        captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
        result = captureGraphBuilder.SetFiltergraph(graphBuilder);
        DsError.ThrowExceptionForHR(result);

        // Create the source filter bound to the capture device's moniker.
        captureFilter = null;
        result = graphBuilder.AddSourceFilterForMoniker(
            _capDevice.Mon, null, _capDevice.Name, out captureFilter);
        DsError.ThrowExceptionForHR(result);

        // Create the sample grabber and get its filter interface.
        sampleGrabber = new SampleGrabber() as ISampleGrabber;
        IBaseFilter grabFilter = sampleGrabber as IBaseFilter;

        // Ask the grabber for PCM audio samples.
        AMMediaType amMediaType = new AMMediaType();
        amMediaType.majorType = MediaType.Audio;
        amMediaType.subType = MediaSubType.PCM;
        amMediaType.formatPtr = IntPtr.Zero;
        result = sampleGrabber.SetMediaType(amMediaType);
        DsError.ThrowExceptionForHR(result);
        DsUtils.FreeAMMediaType(amMediaType);

        // Callback registration.
        // FIX: the original discarded SetOneShot's HRESULT and re-checked the
        // stale 'result' from the previous call.
        result = sampleGrabber.SetOneShot(false);
        DsError.ThrowExceptionForHR(result);
        result = sampleGrabber.SetBufferSamples(true);
        DsError.ThrowExceptionForHR(result);

        // Query the device's actual capture format via IAMStreamConfig.
        object o;
        result = captureGraphBuilder.FindInterface(
            DsGuid.FromGuid(PinCategory.Capture),
            DsGuid.FromGuid(MediaType.Audio),
            captureFilter,
            typeof(IAMStreamConfig).GUID, out o);
        DsError.ThrowExceptionForHR(result);
        IAMStreamConfig config = o as IAMStreamConfig;
        AMMediaType media;
        result = config.GetFormat(out media);
        DsError.ThrowExceptionForHR(result);
        WaveFormatEx wf = new WaveFormatEx();
        Marshal.PtrToStructure(media.formatPtr, wf);
        CaptureOption opt = new CaptureOption(wf);
        _sampler = new DSAudioSampler(opt);
        DsUtils.FreeAMMediaType(media);
        Marshal.ReleaseComObject(config);

        // 1 = BufferCB (buffered copies rather than raw media samples).
        result = sampleGrabber.SetCallback(_sampler, 1);
        DsError.ThrowExceptionForHR(result);

        // Add the grabber filter to the graph and connect the capture
        // filter's capture pin to it.
        result = graphBuilder.AddFilter(grabFilter, "Audio Grab Filter");
        DsError.ThrowExceptionForHR(result);
        result = captureGraphBuilder.RenderStream(
            DsGuid.FromGuid(PinCategory.Capture),
            DsGuid.FromGuid(MediaType.Audio),
            captureFilter, null, grabFilter);
        DsError.ThrowExceptionForHR(result);
    }
    catch (Exception ex)
    {
        System.Windows.MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Rewires the existing graph to tap the MPEG-2 transport stream: disconnects
/// the demux/renderers, inserts a sample grabber (capture output -> grabber ->
/// null renderer) and registers <paramref name="grabber"/> as its callback.
/// </summary>
/// <param name="grabber">Callback that receives the TS samples.</param>
/// <param name="methodToCall">Callback selector (0 = SampleCB, 1 = BufferCB).</param>
public void SetUpForTs(ISampleGrabberCB grabber, int methodToCall)
{
    // Detach the current playback chain so the capture pin can be re-routed.
    FilterGraphTools.DisconnectPins(mpeg2Demux);
    //FilterGraphTools.DisconnectPins(demodulator);
    FilterGraphTools.DisconnectPins(audioRenderer);
    FilterGraphTools.DisconnectPins(videoRenderer);
    //graphBuilder.RemoveFilter(audioRenderer);
    //graphBuilder.RemoveFilter(videoRenderer);

    sampleGrabber = (ISampleGrabber)new SampleGrabber();

    // First request a generic MPEG-2 transport stream type.
    // NOTE(review): 'media' is never freed with DsUtils.FreeAMMediaType.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Stream;
    media.subType = MediaSubType.Mpeg2Transport;
    media.formatType = FormatType.MpegStreams;
    sampleGrabber.SetOneShot(false);
    sampleGrabber.SetBufferSamples(true);
    int hr = sampleGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Sample Grabber");

    // Terminate the grabber output in a null renderer (we only want callbacks).
    nullRenderer = (IBaseFilter)new NullRenderer();
    graphBuilder.AddFilter(nullRenderer, "NULL Renderer");

    IPin pinIn = DsFindPin.ByName((IBaseFilter)sampleGrabber, "Input");
    IPin pinOut = DsFindPin.ByDirection(capture, PinDirection.Output, 0);

    // Re-set the grabber media type to the capture pin's own first preferred
    // type so ConnectDirect below cannot fail on a format mismatch.
    // NOTE(review): eMedia and mediaTypes[0] are never released/freed.
    IEnumMediaTypes eMedia;
    pinOut.EnumMediaTypes(out eMedia);
    AMMediaType[] mediaTypes = new AMMediaType[1];
    eMedia.Next(mediaTypes.Length, mediaTypes, IntPtr.Zero);
    hr = sampleGrabber.SetMediaType(mediaTypes[0]);
    DsError.ThrowExceptionForHR(hr);

    pinOut.Disconnect();
    // NOTE(review): 'info' is queried but unused, and its pFilter reference is
    // never released (DsUtils.FreePinInfo) — looks like leftover debug code.
    PinInfo info;
    pinOut.QueryPinInfo(out info);
    hr = graphBuilder.ConnectDirect(pinOut, pinIn, mediaTypes[0]);
    //hr = graphBuilder.Connect(pinOut, pinIn);
    DsError.ThrowExceptionForHR(hr);

    // Release the Pin
    Marshal.ReleaseComObject(pinIn);

    // Grabber output -> null renderer.
    // NOTE(review): the reassigned pinOut is never released.
    pinIn = DsFindPin.ByName(nullRenderer, "In");
    pinOut = DsFindPin.ByName((IBaseFilter)sampleGrabber, "Output");
    hr = graphBuilder.Connect(pinOut, pinIn);
    DsError.ThrowExceptionForHR(hr);
    sampleGrabber.SetCallback(grabber, methodToCall);

    // Release the Pin
    Marshal.ReleaseComObject(pinIn);
    pinIn = null;
}
/// <summary>
/// Builds the video capture graph (camera -> sample grabber on the preview
/// pin), requesting ARGB32 frames and registering this instance as the
/// BufferCB callback.
/// </summary>
private void InitializeCapture()
{
    graphBuilder = (IGraphBuilder)new FilterGraph();
    mediaControl = (IMediaControl)graphBuilder;
    captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
    DsError.ThrowExceptionForHR(hr);

    IBaseFilter videoInput = GetVideoInputObject();
    if (null != videoInput)
    {
        SetConfigurations(videoInput);

        sampleGrabber = new SampleGrabber() as ISampleGrabber;
        hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Render");
        DsError.ThrowExceptionForHR(hr);
        hr = graphBuilder.AddFilter(videoInput, "Camera");
        DsError.ThrowExceptionForHR(hr);

        // Request 32-bit ARGB frames from the grabber.
        AMMediaType type = new AMMediaType() { majorType = MediaType.Video, subType = MediaSubType.ARGB32, formatType = FormatType.VideoInfo };
        hr = sampleGrabber.SetMediaType(type);
        DsError.ThrowExceptionForHR(hr);
        DsUtils.FreeAMMediaType(type);

        // FIX: these configuration HRESULTs were previously discarded.
        hr = sampleGrabber.SetBufferSamples(false);
        DsError.ThrowExceptionForHR(hr);
        hr = sampleGrabber.SetOneShot(false);
        DsError.ThrowExceptionForHR(hr);

        // FIX: the AMMediaType filled by GetConnectedMediaType was discarded
        // and its format block leaked; free it after the call. (As in the
        // original, the call happens before RenderStream and its result is
        // deliberately not checked — the grabber may not be connected yet.)
        AMMediaType connected = new AMMediaType();
        sampleGrabber.GetConnectedMediaType(connected);
        DsUtils.FreeAMMediaType(connected);

        // 1 = BufferCB.
        hr = sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
        DsError.ThrowExceptionForHR(hr);

        hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, videoInput, null, sampleGrabber as IBaseFilter);
        DsError.ThrowExceptionForHR(hr);
        Marshal.ReleaseComObject(videoInput);
    }
}
/// <summary>
/// Configures the sample grabber: requests Video/RGB24 samples and registers
/// this instance for buffer callbacks.
/// </summary>
/// <param name="sampGrabber">The grabber to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Request uncompressed 24-bit RGB video frames.
    var mediaType = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };

    var hr = sampGrabber.SetMediaType(mediaType);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(mediaType);
    mediaType = null;

    // Method 1 selects BufferCB (byte-buffer delivery) rather than SampleCB.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Sets the grabber's accepted media type to Video/RGB24 and hooks this
/// instance up as the buffer callback.
/// </summary>
/// <param name="sampGrabber">The grabber to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;

    // Apply the requested format; the grabber copies it, so free ours after.
    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // 1 = BufferCB: frames arrive as plain byte buffers.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Builds and starts the preview capture graph: capture device -> sample
/// grabber (Video/RGB24) -> null renderer, registers the graph in the ROT
/// and switches the grabber to buffered, continuous mode.
/// </summary>
/// <param name="ctlHandle">Window handle passed to GetInterfaces.</param>
private void CaptureVideo(IntPtr ctlHandle)
{
    int hr = 0;
    IBaseFilter sourceFilter = null;
    try
    {
        // Get DirectShow interfaces
        GetInterfaces(ctlHandle);

        // Attach the filter graph to the capture graph
        hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
        DsError.ThrowExceptionForHR(hr);

        // Use the system device enumerator and class enumerator to find
        // a video capture/preview device, such as a desktop USB video camera.
        sourceFilter = FindCaptureDevice();
        if (sourceFilter == null)
        {
            log("Couldn't find a video input device.");
            return;
        }

        // Add Capture filter to our graph.
        hr = this.graphBuilder.AddFilter(sourceFilter, "Video Capture");
        DsError.ThrowExceptionForHR(hr);

        // Create the grabber and ask it for Video/RGB24 samples.
        this.samplegrabber = (ISampleGrabber)new SampleGrabber();
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Video;
        mt.subType = MediaSubType.RGB24;
        mt.formatType = FormatType.VideoInfo;
        samplegrabber.SetMediaType(mt);
        // FIX: the media type was previously leaked.
        DsUtils.FreeAMMediaType(mt);

        hr = this.graphBuilder.AddFilter((IBaseFilter)samplegrabber, "samplegrabber");
        DsError.ThrowExceptionForHR(hr);

        // Terminate the chain in a null renderer — only callbacks are wanted.
        IBaseFilter nullRenderer = (IBaseFilter)new NullRenderer();
        hr = graphBuilder.AddFilter(nullRenderer, "Null Renderer");
        // FIX: this HRESULT was previously ignored.
        DsError.ThrowExceptionForHR(hr);

        // Render the preview pin on the video capture filter
        // (used instead of this.graphBuilder.RenderFile).
        // A failure is logged rather than thrown, as in the original.
        hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, sourceFilter, (IBaseFilter)samplegrabber, nullRenderer);
        if (hr != 0)
            log(DsError.GetErrorText(hr));

        // Now that the filter has been added to the graph and we have
        // rendered its stream, we can release this reference to the filter.
        Marshal.ReleaseComObject(sourceFilter);

        // Add our graph to the running object table, which will allow
        // the GraphEdit application to "spy" on our graph
        rot = new DsROTEntry(this.graphBuilder);

        // Start previewing video data
        hr = this.mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);

        // Remember current state
        this.currentState = PlayState.Running;
        samplegrabber.SetBufferSamples(true);
        samplegrabber.SetOneShot(false);
    }
    catch (Exception ex)
    {
        // FIX: the original bare catch discarded all failure details.
        log("CaptureVideo failed: " + ex);
        MessageBox.Show("CaptureVideo(ctlHandle) suffered a fatal error: " + ex.Message);
    }
}
/// <summary>
/// Configures the grabber for Video/YUY2 samples and registers this instance
/// as the buffer callback.
/// </summary>
/// <param name="sampGrabber">The grabber to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Request packed-YUV (YUY2) video samples.
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.YUY2,
        formatType = FormatType.VideoInfo
    };

    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);
    media = null;

    // 1 = BufferCB delivery.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary> Set the options on the sample grabber: Video/RGB24 samples,
/// continuous (non-one-shot) delivery with no internal buffering, and this
/// instance as the buffer callback. </summary>
/// <param name="sampGrabber">The grabber to configure.</param>
/// <param name="width">Currently unused — the format-resizing code that
/// consumed it was disabled; kept for interface compatibility.</param>
/// <param name="height">Currently unused; see <paramref name="width"/>.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber, int width, int height)
{
    int hr;
    AMMediaType media = new AMMediaType();

    // Set the media type to Video/RGB24
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;
    hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);
    media = null;

    // FIX: these two HRESULTs were assigned but never checked.
    hr = sampGrabber.SetBufferSamples(false);
    DsError.ThrowExceptionForHR(hr);
    hr = sampGrabber.SetOneShot(false);
    DsError.ThrowExceptionForHR(hr);

    // Configure the samplegrabber callback (1 = BufferCB)
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Requests Video/RGB24 samples from the grabber and wires this instance up
/// as its buffer callback.
/// </summary>
/// <param name="sampGrabber">The grabber to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;

    // The grabber copies the media type; ours is freed immediately after.
    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // 1 = BufferCB (byte-buffer delivery).
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Configures the grabber to accept any Audio-major media type and registers
/// this instance for buffer callbacks.
/// </summary>
/// <param name="sampGrabber">The grabber to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Only the major type is pinned; sub/format type are deliberately left
    // open so the grabber accepts whatever audio format is upstream.
    // (A WAVE/WaveEx restriction was considered and left disabled.)
    var media = new AMMediaType { majorType = MediaType.Audio };

    var hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);
    media = null;

    // Configure the samplegrabber: 1 = buffer callback (0 = sample callback).
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
// --------------------- Private Methods -----------------------

/// <summary>
/// Create a new filter graph and add filters (devices, compressors,
/// misc), but leave the filters unconnected. Call renderGraph()
/// to connect the filters.
/// </summary>
/// <exception cref="ArgumentException">
/// Neither a video nor an audio device has been set.
/// </exception>
/// <exception cref="NotImplementedException">
/// The DirectShow SampleGrabber is not installed/registered.
/// </exception>
protected void createGraph()
{
    Guid cat;
    Guid med;
    int hr;
    Type comType = null;
    object comObj = null;

    // Ensure required properties are set
    if ( videoDevice == null && audioDevice == null )
        throw new ArgumentException( "The video and/or audio device have not been set. Please set one or both to valid capture devices.\n" );

    // Skip if we are already created
    if ( (int)graphState < (int)GraphState.Created )
    {
        // Garbage collect, ensure that previous filters are released
        GC.Collect();

        // Make a new filter graph
        graphBuilder = (IGraphBuilder) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.FilterGraph, true ) );

        // Get the Capture Graph Builder
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (ICaptureGraphBuilder2) DsBugWO.CreateDsInstance( ref clsid, ref riid );

        // sampGrabber, ISampleGrabber to capture frames
        comType=Type.GetTypeFromCLSID( Clsid.SampleGrabber, true );
        if(comType==null)
            throw new NotImplementedException (@"DirectShow SampleGrabber not installed/registered");
        comObj=Activator.CreateInstance( comType );
        sampGrabber = (ISampleGrabber) comObj;
        comObj = null;

        // Link the CaptureGraphBuilder to the filter graph.
        // NOTE(review): hr is assigned but the throw below was deliberately
        // commented out, so failures here are silently ignored.
        hr = captureGraphBuilder.SetFiltergraph( graphBuilder );
        //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // Begin set up of SampGrabber: request Video/RGB24 frames.
        // NOTE(review): 'media' is never freed (DsUtils.FreeAMMediaType) —
        // harmless only while formatPtr stays unallocated; confirm.
        AMMediaType media = new AMMediaType();
        media.majorType= MediaType.Video;
        media.subType = MediaSubType.RGB24;
        media.formatType = FormatType.VideoInfo;
        hr = sampGrabber.SetMediaType( media );
        //if( hr<0 ) Marshal.ThrowExceptionForHR( hr );
        // Finish set up of SampGrabber

        // Add the graph to the Running Object Table so it can be
        // viewed with GraphEdit
#if DEBUG
        DsROT.AddGraphToRot( graphBuilder, out rotCookie );
#endif

        // Get the video device and add it to the filter graph
        if ( VideoDevice != null )
        {
            videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( VideoDevice.MonikerString );
            hr = graphBuilder.AddFilter( videoDeviceFilter, "Video Capture Device" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

            // Add the SampGrabber filter (only when a video device exists)
            mediaEvt = (IMediaEventEx) graphBuilder;
            baseGrabFlt = (IBaseFilter) sampGrabber;
            hr = graphBuilder.AddFilter( baseGrabFlt, "DS.NET Grabber" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the audio device and add it to the filter graph
        if ( AudioDevice != null )
        {
            audioDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( AudioDevice.MonikerString );
            hr = graphBuilder.AddFilter( audioDeviceFilter, "Audio Capture Device" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the video compressor and add it to the filter graph
        if ( VideoCompressor != null )
        {
            videoCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( VideoCompressor.MonikerString );
            hr = graphBuilder.AddFilter( videoCompressorFilter, "Video Compressor" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the audio compressor and add it to the filter graph
        if ( AudioCompressor != null )
        {
            audioCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( AudioCompressor.MonikerString );
            hr = graphBuilder.AddFilter( audioCompressorFilter, "Audio Compressor" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Retrieve the stream control interface for the video device.
        // FindInterface will also add any required upstream filters
        // (WDM devices in particular may need them to function).
        // Try looking for an interleaved media type first (DV devices).
        object o;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        Guid iid = typeof(IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o );
        if ( hr != 0 )
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o );
            if ( hr != 0 )
                o = null;
        }
        videoStreamConfig = o as IAMStreamConfig;

        // Retrieve the stream control interface for the audio device
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Audio ;
        iid = typeof(IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, audioDeviceFilter, ref iid, out o );
        if ( hr != 0 )
            o = null;
        audioStreamConfig = o as IAMStreamConfig;

        // Retreive the media control interface (for starting/stopping graph)
        mediaControl = (IMediaControl) graphBuilder;

        // Reload any video crossbars
        if ( videoSources != null ) videoSources.Dispose();
        videoSources = null;

        // Reload any audio crossbars
        if ( audioSources != null ) audioSources.Dispose();
        audioSources = null;

        // Reload any property pages exposed by filters
        if ( propertyPages != null ) propertyPages.Dispose();
        propertyPages = null;

        // Reload capabilities of video and audio devices (re-queried lazily)
        videoCaps = null;
        audioCaps = null;

        // Retrieve TV Tuner if available (interleaved first, then video)
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        iid = typeof(IAMTVTuner).GUID;
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o );
        if ( hr != 0 )
        {
            med = MediaType.Video ;
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o );
            if ( hr != 0 )
                o = null;
        }
        IAMTVTuner t = o as IAMTVTuner;
        if ( t != null )
            tuner = new Tuner( t );

        /* Disabled experimental VMR9 + SmartTee preview path: created a
           VideoMixingRenderer9 in windowless mode clipped to previewWindow,
           added a SmartTee, and rendered capture -> SmartTee -> {mux, VMR9}.
           See samples\inc\vmrutil.h :: RenderFileToVMR9 for the reference
           implementation. */

        // Update the state now that we are done
        graphState = GraphState.Created;
    }
}
/// <summary>
/// Worker thread that captures the images: builds source -> grabber graph,
/// reads the negotiated frame size (with retries), runs the graph until the
/// stop event is signalled, then stops it and releases everything.
/// </summary>
private void RunWorker()
{
    try
    {
        // Create the main graph
        m_igrphbldGraph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

        // Create the webcam source
        m_sourceObject = FilterInfo.CreateFilter(m_sMonikerString);

        // Create the grabber
        m_isplGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        m_grabberObject = m_isplGrabber as IBaseFilter;

        // Add the source and grabber to the main graph
        m_igrphbldGraph.AddFilter(m_sourceObject, "source");
        m_igrphbldGraph.AddFilter(m_grabberObject, "grabber");

        using (AMMediaType mediaType = new AMMediaType())
        {
            // Request 32-bit RGB frames.
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            m_isplGrabber.SetMediaType(mediaType);

            if (m_igrphbldGraph.Connect(m_sourceObject.GetPin(PinDirection.Output, 0), m_grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
            {
                if (m_isplGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    // During startup, this code can be too fast, so try at least 3 times
                    int retryCount = 0;
                    bool succeeded = false;
                    while ((retryCount < 3) && !succeeded)
                    {
                        // Tried again
                        retryCount++;
                        try
                        {
                            // Retrieve the grabber information (frame size)
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            m_grbrCapGrabber.Width = header.BmiHeader.Width;
                            m_grbrCapGrabber.Height = header.BmiHeader.Height;

                            // Succeeded
                            succeeded = true;
                        }
                        // NOTE(review): retryException is never used (compiler
                        // warning); the failure details are not logged.
                        catch (Exception retryException)
                        {
                            // Trace
                            Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                            // Sleep
                            Thread.Sleep(50);
                        }
                    }
                }
            }

            // Render downstream of the grabber; continuous callback delivery
            // via method 1 (BufferCB), no internal buffering.
            m_igrphbldGraph.Render(m_grabberObject.GetPin(PinDirection.Output, 0));
            m_isplGrabber.SetBufferSamples(false);
            m_isplGrabber.SetOneShot(false);
            m_isplGrabber.SetCallback(m_grbrCapGrabber, 1);

            // Get the video window and suppress the auto-shown renderer window
            IVideoWindow wnd = (IVideoWindow)m_igrphbldGraph;
            wnd.put_AutoShow(false);
            wnd = null;

            // Create the control and run
            m_imedctrlControl = (IMediaControl)m_igrphbldGraph;
            m_imedctrlControl.Run();

            // Wait for the stop signal (polled every 10 ms)
            while (!m_rstevStopSignal.WaitOne(0, true))
            {
                Thread.Sleep(10);
            }

            // Stop when ready
            // _control.StopWhenReady();
            m_imedctrlControl.Stop();

            // Wait a bit... It apparently takes some time to stop IMediaControl
            Thread.Sleep(1000);
        }
    }
    catch (Exception ex)
    {
        // Trace
        Trace.WriteLine(ex);
    }
    finally
    {
        // Clean up (releases the COM objects held in fields)
        this.Release();
    }
}
/* Legacy InitAudioSampleGrabber() removed (commented out): it added the
   sample grabber to the graph with a PCM media type and started the analyzer
   thread, but relied on automatic connection. Superseded by
   InitAudioSampleGrabber_v2 below, which splices the grabber into an
   already-built graph between the ffdshow decoder and the sound device. */

/// <summary>
/// Inserts the audio sample grabber into a running ffdshow-based graph,
/// turning ... -> [ffdshow Audio Decoder] -> [DirectSound Device] into
/// ... -> [ffdshow Audio Decoder] -> [Sample grabber] -> [DirectSound Device],
/// then configures PCM grabbing and starts the sample-analyzer thread.
/// Fails silently (logged) if the ffdshow decoder is not in the graph.
/// </summary>
protected void InitAudioSampleGrabber_v2()
{
    // Get the graph builder
    IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
    if (graphBuilder == null)
        return;

    try
    {
        // Build the sample grabber
        sampleGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(Filters.SampleGrabber, true)) as ISampleGrabber;
        if (sampleGrabber == null)
            return;

        // Add it to the filter graph
        int hr = graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "ProTONE_SampleGrabber_v2");
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter ffdAudioDecoder = null;
        IPin ffdAudioDecoderOutput = null;
        IPin soundDeviceInput = null;
        IPin sampleGrabberInput = null;
        IPin sampleGrabberOutput = null;
        IntPtr pSoundDeviceInput = IntPtr.Zero;

        // When using FFDShow, typically we'll find a ffdshow Audio Decoder
        // connected to the sound device filter:
        //
        //   .. -->[ffdshow Audio Decoder]-->[DirectSound Device]
        //
        // Our audio sample grabber supports only PCM sample input and output;
        // its entire processing is based on this assumption. Thus we insert
        // the grabber between the decoder and the sound device, the only
        // place where PCM samples are guaranteed (the sound device only
        // accepts PCM):
        //
        //   .. -->[ffdshow Audio Decoder]-->[Sample grabber]-->[DirectSound Device]
        //
        // Steps:
        //   1. Locate the ffdshow Audio Decoder in the graph
        //   2. Find its output pin and the pin that it's connected to
        //   3. Locate the input and output pins of sample grabber
        //   4. Disconnect the decoder and its correspondent (sound device input)
        //   5. Connect the decoder to sample grabber input
        //   6. Connect the sample grabber output to sound device input

        // 1. Locate the ffdshow Audio Decoder in the graph
        hr = graphBuilder.FindFilterByName("ffdshow Audio Decoder", out ffdAudioDecoder);
        DsError.ThrowExceptionForHR(hr);

        // 2. Find its output pin and the pin that it's connected to
        hr = ffdAudioDecoder.FindPin("Out", out ffdAudioDecoderOutput);
        DsError.ThrowExceptionForHR(hr);
        hr = ffdAudioDecoderOutput.ConnectedTo(out pSoundDeviceInput);
        DsError.ThrowExceptionForHR(hr);
        // Wrap the raw connected-pin pointer into a managed IPin.
        soundDeviceInput = new DSPin(pSoundDeviceInput).Value;

        // 3. Locate the input and output pins of sample grabber
        hr = (sampleGrabber as IBaseFilter).FindPin("In", out sampleGrabberInput);
        DsError.ThrowExceptionForHR(hr);
        hr = (sampleGrabber as IBaseFilter).FindPin("Out", out sampleGrabberOutput);
        DsError.ThrowExceptionForHR(hr);

        // 4. Disconnect the ffdshow Audio Decoder and its correspondent
        //    (sound device input pin)
        hr = ffdAudioDecoderOutput.Disconnect();
        DsError.ThrowExceptionForHR(hr);
        hr = soundDeviceInput.Disconnect();
        DsError.ThrowExceptionForHR(hr);

        // 5. Connect the ffdshow Audio Decoder to sample grabber input
        hr = graphBuilder.Connect(ffdAudioDecoderOutput, sampleGrabberInput);
        DsError.ThrowExceptionForHR(hr);

        // 6. Connect the sample grabber output to sound device input
        hr = graphBuilder.Connect(sampleGrabberOutput, soundDeviceInput);
        DsError.ThrowExceptionForHR(hr);

        // Configure the grabber for PCM, buffered continuous capture.
        // NOTE(review): unlike the calls above, these HRESULTs are not
        // checked, and mtAudio is never freed (formatPtr is Zero, so the
        // leak is limited to the managed wrapper). Confirm intent.
        AMMediaType mtAudio = new AMMediaType();
        mtAudio.majorType = MediaType.Audio;
        mtAudio.subType = MediaSubType.PCM;
        mtAudio.formatPtr = IntPtr.Zero;
        _actualAudioFormat = null;
        sampleGrabber.SetMediaType(mtAudio);
        sampleGrabber.SetBufferSamples(true);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback(this, 1);

        // Start the analyzer thread that consumes the grabbed samples.
        sampleAnalyzerMustStop.Reset();
        sampleAnalyzerThread = new Thread(new ThreadStart(SampleAnalyzerLoop));
        sampleAnalyzerThread.Priority = ThreadPriority.Highest;
        sampleAnalyzerThread.Start();
    }
    catch (Exception ex)
    {
        Logger.LogException(ex);
    }

    // Register the (possibly modified) graph in the ROT for GraphEdit.
    rotEntry = new DsROTEntry(graphBuilder as IFilterGraph);
}
/// <summary> Configures the grabber for Video/RGB24 samples in continuous,
/// unbuffered mode with this instance as the buffer callback. </summary>
/// <param name="sampGrabber">The grabber to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    AMMediaType media;
    int hr;

    // Set the media type to Video/RGB24
    media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;

    // FIX: these two HRESULTs were silently discarded.
    hr = sampGrabber.SetBufferSamples(false);
    DsError.ThrowExceptionForHR(hr);
    hr = sampGrabber.SetOneShot(false);
    DsError.ThrowExceptionForHR(hr);

    hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);
    media = null;

    // Configure the samplegrabber callback (1 = BufferCB)
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Configures the grabber's media type according to the configured pixel
/// depth (RGB8 for 1 byte/pixel, otherwise RGB24) and registers this
/// instance for buffer callbacks.
/// </summary>
/// <param name="sampGrabber">The grabber to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Pick the subtype that matches the configured pixel depth.
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = bytes_per_pixel == 1 ? MediaSubType.RGB8 : MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };

    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);
    media = null;

    // Configure the samplegrabber: 1 = BufferCB delivery.
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
// --------------------- Private Methods -----------------------

/// <summary>
/// Create a new filter graph and add filters (devices, compressors,
/// misc), but leave the filters unconnected. Call renderGraph()
/// to connect the filters.
/// </summary>
/// <exception cref="ArgumentException">
/// Neither a video nor an audio device has been set.
/// </exception>
/// <exception cref="NotImplementedException">
/// The DirectShow SampleGrabber is not installed/registered.
/// </exception>
protected void createGraph()
{
    Guid cat;
    Guid med;
    int hr;
    Type comType = null;
    object comObj = null;

    // Ensure required properties are set
    if (videoDevice == null && audioDevice == null)
        throw new ArgumentException("The video and/or audio device have not been set. Please set one or both to valid capture devices.\n");

    // Skip if we are already created
    if ((int)graphState < (int)GraphState.Created)
    {
        // Garbage collect, ensure that previous filters are released
        GC.Collect();

        // Make a new filter graph
        graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

        // Get the Capture Graph Builder
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);

        // Link the CaptureGraphBuilder to the filter graph
        hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
        if (hr < 0) Marshal.ThrowExceptionForHR(hr);

        // Create the sample grabber COM object
        comType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (comType == null)
            throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
        comObj = Activator.CreateInstance(comType);
        sampGrabber = (ISampleGrabber)comObj;
        comObj = null;
        baseGrabFlt = (IBaseFilter)sampGrabber;

        // Add the graph to the Running Object Table so it can be
        // viewed with GraphEdit
#if DEBUG
        DsROT.AddGraphToRot(graphBuilder, out rotCookie);
#endif

        AMMediaType media = new AMMediaType();

        // Get the video device and add it to the filter graph
        if (VideoDevice != null)
        {
            videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);
            hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device");
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);

            // Configure the grabber for Video/RGB24 and add it to the graph
            // Console.WriteLine("MediaEnineCheck ==> Inside StartVideoCapture.cs before MediaSudType");
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB24; //Rajib
            media.formatType = FormatType.VideoInfo; // ???
            hr = sampGrabber.SetMediaType(media);
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
            hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }

        // Get the audio device and add it to the filter graph
        if (AudioDevice != null)
        {
            audioDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(AudioDevice.MonikerString);
            hr = graphBuilder.AddFilter(audioDeviceFilter, "Audio Capture Device");
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }

        // Get the video compressor and add it to the filter graph
        if (VideoCompressor != null)
        {
            videoCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(VideoCompressor.MonikerString);
            hr = graphBuilder.AddFilter(videoCompressorFilter, "Video Compressor");
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }

        // Get the audio compressor and add it to the filter graph
        if (AudioCompressor != null)
        {
            audioCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(AudioCompressor.MonikerString);
            hr = graphBuilder.AddFilter(audioCompressorFilter, "Audio Compressor");
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }

        // Retrieve the stream control interface for the video device.
        // FindInterface will also add any required upstream filters
        // (WDM devices in particular may need them to function).
        // Try looking for an interleaved media type first (DV devices).
        object o;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        Guid iid = typeof(IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface(
            ref cat, ref med, videoDeviceFilter, ref iid, out o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
            hr = captureGraphBuilder.FindInterface(
                ref cat, ref med, videoDeviceFilter, ref iid, out o);
            if (hr != 0)
                o = null;
        }
        videoStreamConfig = o as IAMStreamConfig;

        // Retrieve the stream control interface for the audio device
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Audio;
        iid = typeof(IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface(
            ref cat, ref med, audioDeviceFilter, ref iid, out o);
        if (hr != 0)
            o = null;
        audioStreamConfig = o as IAMStreamConfig;

        // Retreive the media control interface (for starting/stopping graph)
        mediaControl = (IMediaControl)graphBuilder;

        // Reload any video crossbars
        if (videoSources != null) videoSources.Dispose();
        videoSources = null;

        // Reload any audio crossbars
        if (audioSources != null) audioSources.Dispose();
        audioSources = null;

        // Reload any property pages exposed by filters
        if (propertyPages != null) propertyPages.Dispose();
        propertyPages = null;

        // Reload capabilities of video and audio devices (re-queried lazily)
        videoCaps = null;
        audioCaps = null;

        // Retrieve TV Tuner if available (interleaved first, then video)
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        iid = typeof(IAMTVTuner).GUID;
        hr = captureGraphBuilder.FindInterface(
            ref cat, ref med, videoDeviceFilter, ref iid, out o);
        if (hr != 0)
        {
            med = MediaType.Video;
            hr = captureGraphBuilder.FindInterface(
                ref cat, ref med, videoDeviceFilter, ref iid, out o);
            if (hr != 0)
                o = null;
        }
        IAMTVTuner t = o as IAMTVTuner;
        if (t != null)
            tuner = new Tuner(t);

        // NOTE(review): BUG — media.formatPtr was never populated.
        // SetMediaType above only *sends* the requested format to the
        // grabber; it does not fill in formatPtr, which is still
        // IntPtr.Zero here, so this PtrToStructure call cannot yield a
        // valid VideoInfoHeader (it should throw ArgumentNullException —
        // confirm). The header should instead be read from
        // sampGrabber.GetConnectedMediaType after the graph is connected.
        videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        Marshal.FreeCoTaskMem(media.formatPtr);
        media.formatPtr = IntPtr.Zero;

        // Configure grabber delivery; only the last HRESULT in the chain
        // is checked (each call runs only if the previous succeeded).
        hr = sampGrabber.SetBufferSamples(false);
        if (hr == 0)
            hr = sampGrabber.SetOneShot(false);
        if (hr == 0)
            hr = sampGrabber.SetCallback(new SampleGrabberCallback(), 1);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Update the state now that we are done
        graphState = GraphState.Created;
    }
}
/// <summary>
/// Worker-thread entry point: builds the capture graph (source -> sample
/// grabber -> renderer), reads the negotiated frame size into _capGrabber,
/// hides the video window, and starts the graph running.
/// On any failure the partially built graph is torn down via Release().
/// </summary>
private void Init()
{
    try
    {
        log.Trace("Start worker thread");

        // Build the core objects: filter graph, webcam source, sample grabber.
        _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;
        _sourceObject = FilterInfo.CreateFilter(_monikerString);
        _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        _grabberObject = _grabber as IBaseFilter;

        // Register both filters with the graph before connecting pins.
        _graph.AddFilter(_sourceObject, "source");
        _graph.AddFilter(_grabberObject, "grabber");

        using (AMMediaType mt = new AMMediaType())
        {
            // Request 32-bit RGB frames from the grabber.
            mt.MajorType = MediaTypes.Video;
            mt.SubType = MediaSubTypes.RGB32;
            _grabber.SetMediaType(mt);

            // Connect source output -> grabber input, then query the format
            // actually negotiated on that connection.
            if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0),
                               _grabberObject.GetPin(PinDirection.Input, 0)) >= 0
                && _grabber.GetConnectedMediaType(mt) == 0)
            {
                // During startup this can race the device; retry up to 3 times.
                for (int attempt = 1; attempt <= 3; attempt++)
                {
                    try
                    {
                        // Pull the frame dimensions out of the video header.
                        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.FormatPtr, typeof(VideoInfoHeader));
                        _capGrabber.Width = vih.BmiHeader.Width;
                        _capGrabber.Height = vih.BmiHeader.Height;
                        break;
                    }
                    catch
                    {
                        log.InfoFormat("Failed to retrieve the grabber information, tried {0} time(s)", attempt);
                        Thread.Sleep(50);
                    }
                }
            }

            // Render the grabber's output and configure it for continuous
            // capture delivered through the BufferCB callback (type 1).
            _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
            _grabber.SetBufferSamples(false);
            _grabber.SetOneShot(false);
            _grabber.SetCallback(_capGrabber, 1);
            log.Trace("_grabber set up");

            // Keep DirectShow's own video window from popping up.
            IVideoWindow videoWindow = (IVideoWindow)_graph;
            videoWindow.put_AutoShow(false);
            videoWindow = null;

            // Start the graph.
            _control = (IMediaControl)_graph;
            _control.Run();
            log.Trace("control runs");
        }
    }
    catch (Exception ex)
    {
        // Best-effort: log and release whatever was constructed so far.
        log.Debug(ex);
        Release();
    }
}