/// <summary>
/// Set the video capabilities.
/// </summary>
/// <param name="bldr">Specifies the capture builder.</param>
/// <param name="flt">Specifies the video filter.</param>
/// <param name="vidCap">Specifies the desired capabilities.</param>
/// <returns><i>true</i> is returned if set, otherwise <i>false</i>.</returns>
/// <remarks>
/// @see http://blog.dvdbuilder.com/setting-video-capture-format-directshow-net
/// </remarks>
private bool setVideoCapabilities(ICaptureGraphBuilder2 bldr, IBaseFilter flt, VideoCapability vidCap)
{
    int hr;
    Guid cat = PinCategory.Capture;
    Guid type = MediaType.Interleaved;
    Guid iid = typeof(IAMStreamConfig).GUID;
    object comObj = null;
    IntPtr pSC = IntPtr.Zero;
    AMMediaType mt = null;

    try
    {
        // Find the stream configuration interface, first on the interleaved stream, then on the video stream.
        hr = bldr.FindInterface(ref cat, ref type, flt, ref iid, out comObj);
        if (hr != 0)
        {
            type = MediaType.Video;
            hr = bldr.FindInterface(ref cat, ref type, flt, ref iid, out comObj);
        }

        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        IAMStreamConfig cfg = comObj as IAMStreamConfig;

        int nCount;
        int nSize;
        hr = cfg.GetNumberOfCapabilities(out nCount, out nSize);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        VideoInfoHeader vih = new VideoInfoHeader();
        VideoStreamConfigCaps vsc = new VideoStreamConfigCaps();
        pSC = Marshal.AllocCoTaskMem(nSize);

        // Walk the stream capabilities looking for a format matching the requested width, height and frame rate.
        for (int i = 0; i < nCount; i++)
        {
            mt = null;

            IntPtr pMT;
            hr = cfg.GetStreamCaps(i, out pMT, pSC);
            if (hr == 0)
            {
                // Copy the media type into a managed object, then free the returned struct
                // (the format block it references is freed separately below).
                mt = Marshal.PtrToStructure<AMMediaType>(pMT);
                Marshal.FreeCoTaskMem(pMT);
                Marshal.PtrToStructure(mt.formatPtr, vih);
                Marshal.PtrToStructure(pSC, vsc);

                // Frame intervals are in 100-nanosecond units, so FPS = 10,000,000 / interval.
                int nMinFps = (int)(10000000 / vsc.MaxFrameInterval);
                int nMaxFps = (int)(10000000 / vsc.MinFrameInterval);

                if ((vih.BmiHeader.Width == vidCap.Width || vidCap.Width == 0) &&
                    (vih.BmiHeader.Height == vidCap.Height || vidCap.Height == 0) &&
                    ((nMinFps <= vidCap.TargetFPS && nMaxFps >= vidCap.TargetFPS) || vidCap.TargetFPS == 0))
                    break;
            }

            if (mt != null)
            {
                Marshal.FreeCoTaskMem(mt.formatPtr);
                mt = null;
            }
        }

        // No capability matched the request.
        if (mt == null)
            return(false);

        hr = cfg.SetFormat(mt);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);
    }
    catch (Exception)
    {
        return(false);
    }
    finally
    {
        if (comObj != null)
            Marshal.ReleaseComObject(comObj);

        if (pSC != IntPtr.Zero)
            Marshal.FreeCoTaskMem(pSC);

        if (mt != null)
            Marshal.FreeCoTaskMem(mt.formatPtr);
    }

    return(true);
}
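// Illustrative helper (an addition, not part of the original class): shows the conversion used
// in setVideoCapabilities above between a DirectShow frame interval, expressed in 100-nanosecond
// units, and frames per second (FPS = 10,000,000 / interval). The helper name and the zero-interval
// guard are assumptions added for illustration only.
private static int frameIntervalToFps(long lFrameInterval100ns)
{
    // Guard against a zero or negative interval reported by the driver.
    if (lFrameInterval100ns <= 0)
        return 0;

    // 10,000,000 units of 100 ns equal one second.
    return (int)(10000000 / lFrameInterval100ns);
}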
/// <summary>
/// Open a new video feed (either web-cam or video file).
/// </summary>
/// <param name="filter">Specifies the web-cam filter to use, or <i>null</i> when opening a video file.</param>
/// <param name="pb">Specifies the output window, or <i>null</i> when running headless and only receiving snapshots.</param>
/// <param name="strFile">Specifies the video file to use, or <i>null</i> when opening a web-cam feed.</param>
/// <param name="vidCap">Optionally specifies the video capabilities to use, or <i>null</i> to ignore and use the default video capabilities.</param>
/// <returns>The duration (if any) is returned, or 0.</returns>
/// <remarks>To get the video capabilities, see the GetVideoCapabilities method.</remarks>
public long Open(Filter filter, PictureBox pb, string strFile, VideoCapability vidCap = null)
{
    int hr;

    if (filter != null && strFile != null)
        throw new ArgumentException("Both the filter and the file are non-null; only one can be used at a time. The filter is used with a web-cam and the file is used with a video file.");

    m_selectedFilter = filter;
    m_graphBuilder = (IFilterGraph2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

    // When using a web-cam, create the moniker for the filter and add the filter to the graph.
    if (strFile == null)
    {
        IMoniker moniker = m_selectedFilter.CreateMoniker();
        m_graphBuilder.AddSourceFilterForMoniker(moniker, null, m_selectedFilter.Name, out m_camFilter);
        Marshal.ReleaseComObject(moniker);
        m_camControl = m_camFilter as IAMCameraControl;

        // Create the capture builder used to build the web-cam filter graph.
        m_captureGraphBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2, true));
        hr = m_captureGraphBuilder.SetFiltergraph(m_graphBuilder as IGraphBuilder);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Add the web-cam filter to the graph.
        hr = m_graphBuilder.AddFilter(m_camFilter, m_selectedFilter.Name);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Set the desired video capabilities (the device keeps its default format on failure).
        if (vidCap != null)
            setVideoCapabilities(m_captureGraphBuilder, m_camFilter, vidCap);
    }
    else
    {
        // Build the graph with the video file.
        hr = m_graphBuilder.RenderFile(strFile, null);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        m_mediaSeek = m_graphBuilder as IMediaSeeking;

        if (pb != null)
            m_videoFrameStep = m_graphBuilder as IVideoFrameStep;
    }

    // Create the sample grabber used to get snapshots.
    m_sampleGrabber = (ISampleGrabber)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
    m_baseGrabFilter = m_sampleGrabber as IBaseFilter;
    m_mediaControl = m_graphBuilder as IMediaControl;

    // When using a target window, get the media event and video window interfaces used with the output window.
    if (pb != null)
    {
        m_mediaEventEx = m_graphBuilder as IMediaEventEx;
        m_videoWindow = m_graphBuilder as IVideoWindow;
    }
    // Otherwise create the null renderer, since no video output is needed (only snapshots are used).
    else
    {
        m_nullRenderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.NullRenderer, true));
    }

    // Add the sample grabber to the filter graph.
    hr = m_graphBuilder.AddFilter(m_baseGrabFilter, "Ds.Lib Grabber");
    if (hr < 0)
        Marshal.ThrowExceptionForHR(hr);

    // Turn off the sample grabber buffers.
    hr = m_sampleGrabber.SetBufferSamples(false);
    if (hr < 0)
        Marshal.ThrowExceptionForHR(hr);

    // Turn off the sample grabber one-shot.
    hr = m_sampleGrabber.SetOneShot(false);
    if (hr < 0)
        Marshal.ThrowExceptionForHR(hr);

    // Turn ON the sample grabber callback (1 = BufferCB) where video data is received.
    hr = m_sampleGrabber.SetCallback(this, 1);
    if (hr < 0)
        Marshal.ThrowExceptionForHR(hr);

    // Set the media format used by the sample grabber.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;
    hr = m_sampleGrabber.SetMediaType(media);
    if (hr < 0)
        Marshal.ThrowExceptionForHR(hr);

    // Connect the web-cam filters and the frame grabber.
    if (m_selectedFilter != null)
    {
        Guid cat;
        Guid med;

        cat = PinCategory.Preview;
        med = MediaType.Video;
        hr = m_captureGraphBuilder.RenderStream(ref cat, ref med, m_camFilter, null, null);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        cat = PinCategory.Capture;
        med = MediaType.Video;
        hr = m_captureGraphBuilder.RenderStream(ref cat, ref med, m_camFilter, null, m_baseGrabFilter);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);
    }
    // Otherwise, insert the frame grabber between the video decoder and its downstream input
    // (optionally replacing the renderer with the null renderer below).
    else
    {
        // Get the video decoder and its pins.
        m_videoFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Video Decoder", false);

        IPin pOutput;
        hr = Utility.GetPin(m_videoFilter, PinDirection.Output, out pOutput);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        IPin pInput;
        hr = pOutput.ConnectedTo(out pInput);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        PinInfo pinInfo;
        hr = pInput.QueryPinInfo(out pinInfo);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Get the sample grabber pins.
        IPin pGrabInput;
        hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Input, out pGrabInput);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        IPin pGrabOutput;
        hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Output, out pGrabOutput);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Disconnect the source filter output and the input it is connected to.
        hr = pOutput.Disconnect();
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        hr = pInput.Disconnect();
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Connect the source output to the grabber input.
        hr = m_graphBuilder.Connect(pOutput, pGrabInput);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // When rendering video output, connect the grabber output to the original downstream input that the source was connected to.
        if (m_nullRenderer == null)
        {
            hr = m_graphBuilder.Connect(pGrabOutput, pInput);
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
        }

        Marshal.ReleaseComObject(pOutput);
        Marshal.ReleaseComObject(pInput);
        Marshal.ReleaseComObject(pGrabInput);
        Marshal.ReleaseComObject(pGrabOutput);
    }

    // Remove sound filters.
    IBaseFilter soundFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Audio Decoder", false);
    if (soundFilter != null)
    {
        hr = m_graphBuilder.RemoveFilter(soundFilter);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        Marshal.ReleaseComObject(soundFilter);
    }

    soundFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Sound", false);
    if (soundFilter != null)
    {
        hr = m_graphBuilder.RemoveFilter(soundFilter);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        Marshal.ReleaseComObject(soundFilter);
    }

    // When using a headless (no video rendering) setup, connect the null renderer to the sample grabber.
    if (m_nullRenderer != null)
    {
        // Add the null renderer.
        hr = m_graphBuilder.AddFilter(m_nullRenderer, "Null Renderer");
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Get the sample grabber output pin.
        IPin pGrabOutput;
        hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Output, out pGrabOutput);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Get the null renderer input pin.
        IPin pInput;
        hr = Utility.GetPin(m_nullRenderer, PinDirection.Input, out pInput);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Disconnect the sample grabber pin.
        hr = pGrabOutput.Disconnect();
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Connect the grabber output to the null renderer.
        hr = m_graphBuilder.Connect(pGrabOutput, pInput);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        Marshal.ReleaseComObject(pInput);
        Marshal.ReleaseComObject(pGrabOutput);

        // Remove the video renderer, for it is no longer needed.
        IBaseFilter ivideorender = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Video Renderer");
        if (ivideorender != null)
        {
            m_graphBuilder.RemoveFilter(ivideorender);
            Marshal.ReleaseComObject(ivideorender);
        }
    }

    // Get the sample grabber media settings and video header.
    media = new AMMediaType();
    hr = m_sampleGrabber.GetConnectedMediaType(media);
    if (hr < 0)
        Marshal.ThrowExceptionForHR(hr);

    if ((media.formatType != FormatType.VideoInfo &&
         media.formatType != FormatType.WaveEx &&
         media.formatType != FormatType.MpegVideo) ||
        media.formatPtr == IntPtr.Zero)
        throw new Exception("Media grabber format is unknown.");

    // Get the video header with frame sizing information.
    m_videoInfoHeader = Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)) as VideoInfoHeader;
    Marshal.FreeCoTaskMem(media.formatPtr);
    media.formatPtr = IntPtr.Zero;

    // If we are rendering video output, set up the video window (which requires a message pump).
    if (m_videoWindow != null)
    {
        // Set up the video window.
        hr = m_videoWindow.put_Owner(pb.Handle);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        hr = m_videoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Resize the window to fill the target picture box.
        hr = m_videoWindow.SetWindowPosition(0, 0, pb.Width, pb.Height);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        hr = m_videoWindow.put_Visible(DsHlp.OATRUE);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Subscribe to the picture box size changed event.
        pb.SizeChanged += Pb_SizeChanged;
    }

    // Start the capturing.
    hr = m_mediaControl.Run();
    if (hr < 0)
        Marshal.ThrowExceptionForHR(hr);

    // When using a video file, immediately pause at the start.
    if (strFile != null)
    {
        hr = m_mediaControl.Pause();
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);
    }

    // When using a media file, save the video file's duration.
    if (m_mediaSeek != null)
    {
        hr = m_mediaSeek.GetDuration(out m_lDuration);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);
    }

    m_bConnected = true;

    return(m_lDuration);
}
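// Illustrative usage sketch (an addition, not part of the original class): opens a web-cam feed
// at a requested 640x480 @ 30 FPS when a filter is given, otherwise opens a video file. The
// VideoCapability default constructor and settable Width/Height/TargetFPS members are assumptions
// inferred from setVideoCapabilities above; the method name 'openExample' is also an assumption.
private long openExample(Filter filter, PictureBox pb, string strVideoFile)
{
    if (filter != null)
    {
        // Request a specific capture format; a value of 0 accepts any width, height or frame rate.
        VideoCapability vidCap = new VideoCapability();
        vidCap.Width = 640;
        vidCap.Height = 480;
        vidCap.TargetFPS = 30;

        // Open the web-cam feed (no file); returns 0, for a web-cam feed has no duration.
        return Open(filter, pb, null, vidCap);
    }

    // Open the video file instead; returns the file's duration.
    return Open(null, pb, strVideoFile);
}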