Example #1
        public void PauseCapture()
        {
            int hr;

            // Are we capturing?
            if (m_captureDevice != null)
            {
                // disconnect segments
                hr = m_pBridge.BridgeGraphs(null, null);
                DsError.ThrowExceptionForHR(hr);

            // pause the capture graph
                IMediaControl pMC = (IMediaControl)m_pCaptureGraph;

                hr = pMC.Pause();
                DsError.ThrowExceptionForHR(hr);

                // disable capture stream (to save resources)
                IAMStreamControlBridge pSC = (IAMStreamControlBridge)m_pCapOutput;

                pSC.StartAt(NEVER, 0); // Ignore any error

                // m_Capturing = false;
            }
        }
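
A matching resume path would undo these steps in reverse order: re-enable the capture stream, run the capture graph, and reconnect the two graph segments through the bridge controller. The sketch below is not part of the original sample: it assumes the class also holds the bridge sink filter inserted into the capture graph (m_pSourceGraphSinkFilter) and the matching source filter of the downstream graph (m_pRenderGraphSourceFilter), and that IAMStreamControlBridge.StartAt(null, 0) means "start immediately", as it does for IAMStreamControl.

        public void ResumeCapture()
        {
            int hr;

            // Are we capturing?
            if (m_captureDevice != null)
            {
                // re-enable capture stream
                // (assumption: a null start time means "start now", as with IAMStreamControl)
                IAMStreamControlBridge pSC = (IAMStreamControlBridge)m_pCapOutput;
                pSC.StartAt(null, 0); // Ignore any error

                // run the capture graph again
                IMediaControl pMC = (IMediaControl)m_pCaptureGraph;
                hr = pMC.Run();
                DsError.ThrowExceptionForHR(hr);

                // reconnect segments (m_pRenderGraphSourceFilter is a hypothetical field:
                // the bridge source filter inserted into the downstream graph)
                hr = m_pBridge.BridgeGraphs(m_pSourceGraphSinkFilter, m_pRenderGraphSourceFilter);
                DsError.ThrowExceptionForHR(hr);
            }
        }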
Example #2
        /// <summary>
        /// Configures the DirectShow graph to play the selected video capture
        /// device with the selected parameters
        /// </summary>
        private void SetupGraph()
        {
            /* Clean up any messes left behind */
            FreeResources();

            try
            {
                logger.Info("Graph Setup");
                /* Create the bridge controller and register a single video stream */
                m_pBridge = (IGMFBridgeController) new GMFBridgeController();

                int hr = m_pBridge.AddStream(true, eFormatType.MuxInputs, true);
                DsError.ThrowExceptionForHR(hr);

                /* Create a new graph */
                m_graph = (IGraphBuilder) new FilterGraphNoThread();

                #if DEBUG
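                /* Register the graph in the Running Object Table so it can be
                 * inspected with GraphEdit/GraphStudio while debugging */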
                m_rotEntry = new DsROTEntry(m_graph);
                #endif

                /* Create a capture graph builder to help
                 * with rendering a capture graph */
                var graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                /* Insert the GMFBridge sink filter into this graph; a second
                 * (playback/recording) graph can attach to it later via BridgeGraphs */
                hr = m_pBridge.InsertSinkFilter(m_graph, out m_pSourceGraphSinkFilter);
                DsError.ThrowExceptionForHR(hr);

                logger.Info("VideoCaptureSource:" + VideoCaptureSource);
                if (VideoCaptureDevice != null)
                {
                    logger.Info("VideoCaptureDevice.DevicePath:" + VideoCaptureDevice.DevicePath);
                }

                /* Add our capture device source to the graph */
                if (m_videoCaptureSourceChanged)
                {
                    m_captureDevice = AddFilterByName(m_graph,
                                                      FilterCategory.VideoInputDevice,
                                                      VideoCaptureSource);

                    m_videoCaptureSourceChanged = false;
                }
                else if (m_videoCaptureDeviceChanged)
                {
                    m_captureDevice = AddFilterByDevicePath(m_graph,
                                                            FilterCategory.VideoInputDevice,
                                                            VideoCaptureDevice.DevicePath);

                    m_videoCaptureDeviceChanged = false;
                }



                /* If we have a null capture device, we have an issue */
                if (m_captureDevice == null)
                {
                    throw new Exception(string.Format("Capture device {0} not found or could not be created", VideoCaptureSource));
                }

                /* Look for a crossbar on the capture device (present on some analog
                 * tuners); the interface is not used further here */
                object crossbar;
                hr = graphBuilder.FindInterface(null,
                                                null,
                                                m_captureDevice as IBaseFilter,
                                                typeof(IAMCrossbar).GUID,
                                                out crossbar);

                if (UseYuv && !EnableSampleGrabbing)
                {
                    /* Configure the video output pin with our parameters; if that
                     * fails, fall back to the default media subtype */
                    if (!SetVideoCaptureParameters(graphBuilder, m_captureDevice, MediaSubType.YUY2))
                    {
                        SetVideoCaptureParameters(graphBuilder, m_captureDevice, Guid.Empty);
                    }
                }
                else
                {
                    /* Configure the video output pin with our parameters
                     * (the GUID below is MediaType.Video, FourCC 'vids') */
                    SetVideoCaptureParameters(graphBuilder, m_captureDevice, new Guid("73646976-0000-0010-8000-00AA00389B71"));
                }



                var rendererType = VideoRendererType.VideoMixingRenderer9;

                /* Create a video renderer and register the allocator with the base class */
                m_renderer = CreateVideoRenderer(rendererType, m_graph, 1);

                if (rendererType == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = m_renderer as IVMRMixerControl9;

                    if (mixer != null && !EnableSampleGrabbing && UseYuv)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;
                        /* Prefer YUV */
                        mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                if (EnableSampleGrabbing)
                {
                    m_sampleGrabber = (ISampleGrabber) new SampleGrabber();
                    SetupSampleGrabber(m_sampleGrabber);
                    hr = m_graph.AddFilter(m_sampleGrabber as IBaseFilter, "SampleGrabber");
                    DsError.ThrowExceptionForHR(hr);
                }

                /* Attach the capture graph builder to our filter graph */
                hr = graphBuilder.SetFiltergraph(m_graph);
                DsError.ThrowExceptionForHR(hr);


                IBaseFilter     mux  = null;
                IFileSinkFilter sink = null;
                if (!string.IsNullOrEmpty(this.fileName))
                {
                    /* Intentionally empty: with the bridge in place, recording goes
                     * through m_pSourceGraphSinkFilter rather than a mux in this graph */
                }

                /* Render the preview pin of the capture device to the video renderer */
                hr = graphBuilder.RenderStream(PinCategory.Preview,
                                               MediaType.Video,
                                               m_captureDevice,
                                               null,
                                               m_renderer);
                DsError.ThrowExceptionForHR(hr);

                /* Render the capture pin into the bridge sink filter */
                hr = graphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, m_captureDevice, null, m_pSourceGraphSinkFilter);
                DsError.ThrowExceptionForHR(hr);

                /* Find the capture output pin so the stream can be toggled with IAMStreamControl */
                hr = graphBuilder.FindPin(m_captureDevice, PinDirection.Output, PinCategory.Capture, MediaType.Video, false, 0, out m_pCapOutput);
                if (hr >= 0)
                {
                    /* Start with the capture stream disabled to save resources */
                    IAMStreamControlBridge pSC = (IAMStreamControlBridge)m_pCapOutput;
                    pSC.StartAt(NEVER, 0);  // Ignore any error
                }


                /* Register the filter graph
                 * with the base classes */
                SetupFilterGraph(m_graph);

                /* Sets the NaturalVideoWidth/Height */
                SetNativePixelSizes(m_renderer);


                HasVideo = true;

                /* Make sure we Release() this COM reference */
                if (mux != null)
                {
                    Marshal.ReleaseComObject(mux);
                }
                if (sink != null)
                {
                    Marshal.ReleaseComObject(sink);
                }

                Marshal.ReleaseComObject(graphBuilder);
            }
            catch (Exception ex)
            {
                /* Typically the device is missing, in use, or a filter could not be created */
                FreeResources();
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
                return;
            }

            /* Success */
            InvokeMediaOpened();
        }
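
The mux and sink locals above are never populated; with GMFBridge, file writing normally happens in a second graph that is bridged to the capture graph on demand. The sketch below shows roughly what such a recording graph could look like. It is an illustration, not code from the original project: StartRecording, m_renderGraph, m_pRenderGraphSourceFilter and the AVI mux/file writer choice are assumptions, and InsertSourceFilter is taken from the IGMFBridgeController interface that also provides the InsertSinkFilter and BridgeGraphs calls used above.

        private void StartRecording(string fileName)
        {
            /* Build a separate file-writing graph (all names here are illustrative) */
            m_renderGraph = (IGraphBuilder) new FilterGraph();

            /* Insert the bridge source filter that pairs with m_pSourceGraphSinkFilter */
            int hr = m_pBridge.InsertSourceFilter(m_pSourceGraphSinkFilter, m_renderGraph, out m_pRenderGraphSourceFilter);
            DsError.ThrowExceptionForHR(hr);

            var builder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
            hr = builder.SetFiltergraph(m_renderGraph);
            DsError.ThrowExceptionForHR(hr);

            /* Create an AVI mux + file writer and render the bridge source into it */
            IBaseFilter mux;
            IFileSinkFilter sink;
            hr = builder.SetOutputFileName(MediaSubType.Avi, fileName, out mux, out sink);
            DsError.ThrowExceptionForHR(hr);

            hr = builder.RenderStream(null, null, m_pRenderGraphSourceFilter, null, mux);
            DsError.ThrowExceptionForHR(hr);

            /* Run the file-writing graph, then connect the two segments */
            hr = ((IMediaControl)m_renderGraph).Run();
            DsError.ThrowExceptionForHR(hr);

            hr = m_pBridge.BridgeGraphs(m_pSourceGraphSinkFilter, m_pRenderGraphSourceFilter);
            DsError.ThrowExceptionForHR(hr);

            /* Make sure we Release() these COM references */
            Marshal.ReleaseComObject(sink);
            Marshal.ReleaseComObject(mux);
            Marshal.ReleaseComObject(builder);
        }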