Пример #1
0
        /// <summary>
        /// Initializes the clip player: stores the notification window and message
        /// ids, creates the GMF Bridge controller with one video and one audio
        /// stream, and starts with an empty clip list.
        /// </summary>
        /// <param name="hwnd">Window that receives bridge and graph notifications.</param>
        /// <param name="msgSegment">Window message posted on bridge segment events.</param>
        /// <param name="msgEvent">Window message posted on filter-graph events.</param>
        public ClipPlayer(IntPtr hwnd, int msgSegment, int msgEvent)
        {
            m_bLoop = false;            // looping disabled by default
            m_tDuration = 0;            // accumulated duration of all added clips
            m_tStartPosition = 0;
            m_bActive = false;          // not playing yet
            m_hwndApp = hwnd;
            m_msgEvent = msgEvent;
            m_msgSegment = msgSegment;
            m_pPlayNext = -1;           // -1: no queued "play next" clip

            m_Clips = new ArrayList(5);

            m_pController = new GMFBridgeController() as IGMFBridgeController;

            m_pController.SetNotify(hwnd, msgSegment);

            // we use a video and an audio stream,
            // options:
            //don't allow compressed in source graphs,
            //don't discard when not connected
            m_pController.AddStream(true, eFormatType.Uncompressed, false);
            m_pController.AddStream(false, eFormatType.Uncompressed, false);

            // increase buffering at the join, so that audio does not run out
            m_pController.SetBufferMinimum(200);

            // Count is 0 here, so m_itCurrent marks "no current clip"
            m_itCurrent = m_Clips.Count;
        }
Пример #2
0
        /// <summary>
        /// Creates a clip player bound to the given notification window.
        /// Builds a GMF Bridge controller carrying one uncompressed video stream
        /// and one uncompressed audio stream, neither discarding while the sink
        /// is disconnected.
        /// </summary>
        /// <param name="hwnd">Window that receives bridge and graph notifications.</param>
        /// <param name="msgSegment">Window message posted on bridge segment events.</param>
        /// <param name="msgEvent">Window message posted on filter-graph events.</param>
        public ClipPlayer(IntPtr hwnd, int msgSegment, int msgEvent)
        {
            // remember where (and how) to send notifications
            m_hwndApp = hwnd;
            m_msgSegment = msgSegment;
            m_msgEvent = msgEvent;

            // initial playback state: inactive, no looping, nothing queued
            m_bLoop = false;
            m_bActive = false;
            m_tDuration = 0;
            m_tStartPosition = 0;
            m_pPlayNext = -1;

            // empty playlist; m_itCurrent == Count (0) means "no current clip"
            m_Clips = new ArrayList(5);
            m_itCurrent = m_Clips.Count;

            m_pController = new GMFBridgeController() as IGMFBridgeController;
            m_pController.SetNotify(hwnd, msgSegment);

            // one video and one audio stream; compressed formats are rejected in
            // source graphs, and data is kept (not discarded) while disconnected
            m_pController.AddStream(true, eFormatType.Uncompressed, false);
            m_pController.AddStream(false, eFormatType.Uncompressed, false);

            // extra buffering at the join so audio does not run dry
            m_pController.SetBufferMinimum(200);
        }
Пример #3
0
        /// <summary>
        /// Determines what streams are available on the Net Demux.
        /// Creates channels in the GMF Bridge controller accordingly.
        /// Then, creates the GMF Bridge Sink, and connects the streams to their respective pins.
        /// </summary>
        private void RenderNetDemux()
        {
            List<DetailPinInfo> pins = null;

            try
            {
                // fetch all pins on this filter
                pins = netDemux.EnumPinsDetails();

                // collect the demux output pins we care about (video/audio),
                // registering a matching bridge stream for each one
                List<IPin> demuxPins = new List<IPin>();

                foreach (DetailPinInfo i in pins)
                {
                    if (i.Info.dir != PinDirection.Output)
                    {
                        continue;
                    }

                    if (i.Type.majorType == MediaType.Video)
                    {
                        // 1 = video stream, any format, 1 = discard when disconnected
                        controller.AddStream(1, eFormatType.eAny, 1);
                        demuxPins.Add(i.Pin);
                    }
                    else if (i.Type.majorType == MediaType.Audio)
                    {
                        // 0 = audio stream
                        controller.AddStream(0, eFormatType.eAny, 1);
                        demuxPins.Add(i.Pin);
                    }
                }

                // create GMF Sink and connect each selected demux pin to the
                // sink's corresponding "Input N" pin (1-based)
                output = (IBaseFilter)controller.InsertSinkFilter(graph);
                for (int i = 0; i < demuxPins.Count; i++)
                {
                    IPin sinkPin;
                    int  hr = output.FindPin("Input " + (i + 1).ToString(), out sinkPin);
                    if (hr == 0)
                    {
                        FilterGraphTools.ConnectFilters(graph, demuxPins[i], sinkPin, false);
                        Marshal.ReleaseComObject(sinkPin);
                    }
                }
            }
            catch
            {
                // tear down the partially built sink, then rethrow with the
                // original stack trace ("throw ex;" would have reset it)
                Release(output);
                output = null;
                throw;
            }
            finally
            {
                if (pins != null)
                {
                    pins.Release();
                }
            }
        }
Пример #4
0
        /// <summary>
        /// Appends a clip to the playlist. If the very first clip has no audio
        /// stream, the bridge controller is rebuilt as video-only and the add is
        /// retried; the render graph is created when the first clip succeeds.
        /// </summary>
        /// <param name="path">Path of the media file to add.</param>
        /// <param name="pClip">Receives the newly created clip entry.</param>
        /// <returns>HRESULT of the operation (negative on failure).</returns>
        public int AddClip(string path, out ClipEntry pClip)
        {
            int index = m_Clips.Count;

            pClip = new ClipEntry();
            m_Clips.Add(pClip);

            int hr = pClip.Create(m_pController, path);

            // All clips must share the stream layout of the first one. When the
            // first clip turns out to be video-only, rebuild the controller
            // without an audio stream and try once more.
            if ((hr == VFW_E_UNSUPPORTED_AUDIO) && (m_Clips.Count == 1))
            {
                if (m_pController != null)
                {
                    Marshal.ReleaseComObject(m_pController);
                    m_pController = null;
                }

                m_pController = new GMFBridgeController() as IGMFBridgeController;
                m_pController.SetNotify(m_hwndApp, m_msgSegment);
                m_pController.AddStream(true, eFormatType.Uncompressed, false);
                m_pController.SetBufferMinimum(200);

                // try again
                hr = pClip.Create(m_pController, path);
            }

            if (hr < 0)
            {
                // creation failed: roll the playlist entry back
                pClip.Dispose();
                m_Clips.RemoveAt(index);
                return hr;
            }

            // schedule the clip after everything already queued
            pClip.SetStartPosition(m_tDuration);
            m_tDuration += pClip.Duration();

            // the first successful clip triggers creation of the render graph
            if (m_Clips.Count == 1)
            {
                m_pRenderGraph = new FilterGraph() as IGraphBuilder;
                hr = m_pController.CreateRenderGraph(pClip.SinkFilter(), m_pRenderGraph, out m_pRenderGraphSourceFilter);

                if (hr >= 0 && m_hwndApp != IntPtr.Zero)
                {
                    IMediaEventEx pME = m_pRenderGraph as IMediaEventEx;
                    if (pME != null)
                    {
                        pME.SetNotifyWindow(m_hwndApp, m_msgEvent, IntPtr.Zero);
                    }
                }
            }

            return hr;
        }
Пример #5
0
        /// <summary>
        /// Builds the actual DirectShow graph and performs the connection to the URL specified in the constructor.
        /// </summary>
        public void Setup()
        {
            lock (instanceMutex)
            {
                // Build the playback graph; a failed cast throws.
                _graphBuilder        = (IGraphBuilder)new FilterGraph();
                _captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
                _mediaControl        = (IMediaControl)_graphBuilder;

                // Bind the capture graph builder to the filter graph.
                int hr = _captureGraphBuilder.SetFiltergraph(_graphBuilder);
                DsError.ThrowExceptionForHR(hr);

                // Bridge controller with a single video stream.
                // AddStream(1 = video / 0 = audio, format type,
                //           1 = dispose / 0 = buffer data into a disconnected sink)
                _bridgeController = new GMFBridgeControllerClass();
                _bridgeController.AddStream(1, eFormatType.eAny, 1);

                // Network source feeding the demux.
                AddLeadNetSrc();
                AddLeadNetDemux();
                ConnectFilters(_graphBuilder, _netSrc, "Output", _netDmx, "Input 01", true);

                // Infinite Pin Tee lets one stream feed several downstream filters.
                _infPinTee = AddFilterByName(_graphBuilder, FilterCategory.LegacyAmFilterCategory, "Infinite Pin Tee Filter");

                // Bridge sink inside the playback graph.
                _bridgeSink = (IBaseFilter)_bridgeController.InsertSinkFilter(_graphBuilder);

                // Finish the source side so media types can be negotiated
                // across the bridge.
                AddVideoRender();
                RenderNetDemux();

                _telemetryTimer = new System.Threading.Timer(new System.Threading.TimerCallback(TelemetryTimer_Tick));
            }
        }
Пример #6
0
        // Specify a device, and a window to draw the preview in
        public void SelectDevice(DsDevice dev, IntPtr hwnd)
        {
            int hr;
            IBaseFilter pfDevice = null;
            ICaptureGraphBuilder2 pBuilder = null;

            // release any leftovers
            ReleaseSelectMembers();

            try
            {
                // create source graph and add sink filter
                m_pSourceGraph = (IGraphBuilder)new FilterGraph();
                m_rot1 = new DsROTEntry(m_pSourceGraph);

                m_pBridge = (IGMFBridgeController)new GMFBridgeController();

                // init to video-only, in discard mode (ie when source graph
                // is running but not connected, buffers are discarded at the bridge)
                hr = m_pBridge.AddStream(true, eFormatType.MuxInputs, true);
                DsError.ThrowExceptionForHR(hr);

                // Add the requested device
                hr = ((IFilterGraph2)m_pSourceGraph).AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out pfDevice);
                DsError.ThrowExceptionForHR(hr);

                // Add the sink filter to the source graph
                hr = m_pBridge.InsertSinkFilter(m_pSourceGraph, out m_pSourceGraphSinkFilter);
                DsError.ThrowExceptionForHR(hr);

                // use capture graph builder to render preview
                pBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

                // Init the CaptureGraphBuilder2
                hr = pBuilder.SetFiltergraph(m_pSourceGraph);
                DsError.ThrowExceptionForHR(hr);

                // Connect the filters together to allow preview
                hr = pBuilder.RenderStream(PinCategory.Preview, MediaType.Video, pfDevice, null, null);
                DsError.ThrowExceptionForHR(hr);

                // connect capture output to the pseudo-sink filter,
                // where it will be discarded until required
                hr = pBuilder.RenderStream(PinCategory.Capture, MediaType.Video, pfDevice, null, m_pSourceGraphSinkFilter);
                DsError.ThrowExceptionForHR(hr);

                // turn off capture stream if possible except when capturing
                hr = pBuilder.FindPin(pfDevice, PinDirection.Output, PinCategory.Capture, MediaType.Video, false, 0, out m_pCapOutput);
                if (hr >= 0)
                {
                    IAMStreamControl pSC = (IAMStreamControl)m_pCapOutput;
                    pSC.StartAt(NEVER, 0);  // Ignore any error
                }

                ConfigureVideo(hwnd);

                IMediaControl pMC = (IMediaControl)m_pSourceGraph;

                hr = pMC.Run();
                DsError.ThrowExceptionForHR(hr);

                // If we made it here, the device is selected
                m_DeviceSelected = true;
            }
            catch
            {
                ReleaseSelectMembers();
                throw;
            }
            finally
            {
                if (pBuilder != null)
                {
                    Marshal.ReleaseComObject(pBuilder);
                }

                if (pfDevice != null)
                {
                    Marshal.ReleaseComObject(pfDevice);
                }
            }
        }
Пример #7
0
        /// <summary>
        /// Appends a clip to the playlist. If the very first clip has no audio
        /// stream, the bridge controller is rebuilt as video-only and the add is
        /// retried; the render graph is created when the first clip succeeds.
        /// </summary>
        /// <param name="path">Path of the media file to add.</param>
        /// <param name="pClip">Receives the newly created clip entry.</param>
        /// <returns>HRESULT of the operation (negative on failure).</returns>
        public int AddClip(string path, out ClipEntry pClip)
        {
            // remember the insertion index so a failed add can be rolled back
            int it = m_Clips.Count;

            pClip = new ClipEntry();
            m_Clips.Add(pClip);

            int hr = pClip.Create(m_pController, path);

            // if we expect both audio and video, then all clips
            // must have both audio and video.
            // If the first clip is video only, then switch
            // to video-only automatically
            if ((hr == VFW_E_UNSUPPORTED_AUDIO) && (m_Clips.Count == 1))
            {
                // new controller, different options (only one video stream)
                if (m_pController != null)
                {
                    Marshal.ReleaseComObject(m_pController);
                    m_pController = null;
                }
                m_pController = new GMFBridgeController() as IGMFBridgeController;
                m_pController.SetNotify(m_hwndApp, m_msgSegment);
                m_pController.AddStream(true, eFormatType.Uncompressed, false);
                m_pController.SetBufferMinimum(200);

                // try again
                hr = pClip.Create(m_pController, path);
            }

            if (hr >= 0)
            {
                // schedule the clip after everything already queued
                pClip.SetStartPosition(m_tDuration);
                m_tDuration += pClip.Duration();

                // if this is the first clip, create the render graph
                if (m_Clips.Count == 1)
                {
                    m_pRenderGraph = new FilterGraph() as IGraphBuilder;
                    hr = m_pController.CreateRenderGraph(pClip.SinkFilter(), m_pRenderGraph, out m_pRenderGraphSourceFilter);
                    if (hr >= 0 && m_hwndApp != IntPtr.Zero)
                    {
                        // forward graph events to the app window
                        IMediaEventEx pME = m_pRenderGraph as IMediaEventEx;
                        if (pME != null)
                        {
                            pME.SetNotifyWindow(m_hwndApp, m_msgEvent, IntPtr.Zero);
                        }
                    }
                }
            }
            else
            {
                // creation failed: dispose the entry and remove it from the list
                pClip.Dispose();
                m_Clips.RemoveAt(it);
            }

            return hr;
        }
Пример #8
0
        /// <summary>
        /// Configures the DirectShow graph to play the selected video capture
        /// device with the selected parameters.
        /// On failure the resources are freed and MediaFailed is raised;
        /// MediaOpened is raised only when setup succeeds.
        /// </summary>
        private void SetupGraph()
        {
            /* Clean up any messes left behind */
            FreeResources();

            /* Declared outside the try so the COM reference can be released in
             * the finally block even when setup throws (the original leaked it
             * on the exception path) */
            ICaptureGraphBuilder2 graphBuilder = null;

            try
            {
                logger.Info("Graph Setup");

                /* Bridge controller with a single video stream; buffers are
                 * discarded while the sink side is disconnected */
                m_pBridge = (IGMFBridgeController)new GMFBridgeController();

                int hr = m_pBridge.AddStream(true, eFormatType.MuxInputs, true);
                DsError.ThrowExceptionForHR(hr);

                /* Create a new graph */
                m_graph = (IGraphBuilder)new FilterGraphNoThread();

                #if DEBUG
                    m_rotEntry = new DsROTEntry(m_graph);
                #endif

                /* Create a capture graph builder to help
                 * with rendering a capture graph */
                graphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

                hr = m_pBridge.InsertSinkFilter(m_graph, out m_pSourceGraphSinkFilter);
                DsError.ThrowExceptionForHR(hr);

                logger.Info("VideoCaptureSource:" + VideoCaptureSource);
                if (VideoCaptureDevice != null)
                {
                    logger.Info("VideoCaptureDevice.DevicePath:" + VideoCaptureDevice.DevicePath);
                }

                /* Add our capture device source to the graph, by friendly name
                 * or by device path, depending on which selection changed */
                if (m_videoCaptureSourceChanged)
                {
                    m_captureDevice = AddFilterByName(m_graph,
                                                      FilterCategory.VideoInputDevice,
                                                      VideoCaptureSource);

                    m_videoCaptureSourceChanged = false;
                }
                else if (m_videoCaptureDeviceChanged)
                {
                    m_captureDevice = AddFilterByDevicePath(m_graph,
                                                            FilterCategory.VideoInputDevice,
                                                            VideoCaptureDevice.DevicePath);

                    m_videoCaptureDeviceChanged = false;
                }

                /* If we have a null capture device, we have an issue */
                if (m_captureDevice == null)
                    throw new Exception(string.Format("Capture device {0} not found or could not be created", VideoCaptureSource));

                /* NOTE(review): the crossbar lookup result is unused here; the
                 * call is kept in case FindInterface has needed side effects —
                 * confirm before removing */
                object crossbar;
                graphBuilder.FindInterface(null,
                                           null,
                                           m_captureDevice as IBaseFilter,
                                           typeof(IAMCrossbar).GUID,
                                           out crossbar);

                if (UseYuv && !EnableSampleGrabbing)
                {
                    /* Configure the video output pin with our parameters and if it fails
                     * then just use the default media subtype*/
                    if (!SetVideoCaptureParameters(graphBuilder, m_captureDevice, MediaSubType.YUY2))
                        SetVideoCaptureParameters(graphBuilder, m_captureDevice, Guid.Empty);
                }
                else
                    /* Configure the video output pin with our parameters */
                    SetVideoCaptureParameters(graphBuilder, m_captureDevice, new Guid("73646976-0000-0010-8000-00AA00389B71"));

                var rendererType = VideoRendererType.VideoMixingRenderer9;

                /* Creates a video renderer and register the allocator with the base class */
                m_renderer = CreateVideoRenderer(rendererType, m_graph, 1);

                if (rendererType == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = m_renderer as IVMRMixerControl9;

                    if (mixer != null && !EnableSampleGrabbing && UseYuv)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;
                        /* Prefer YUV */
                        mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                if (EnableSampleGrabbing)
                {
                    m_sampleGrabber = (ISampleGrabber)new SampleGrabber();
                    SetupSampleGrabber(m_sampleGrabber);
                    hr = m_graph.AddFilter(m_sampleGrabber as IBaseFilter, "SampleGrabber");
                    DsError.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.SetFiltergraph(m_graph);
                DsError.ThrowExceptionForHR(hr);

                /* Render the preview pin into the video renderer */
                hr = graphBuilder.RenderStream(PinCategory.Preview,
                                               MediaType.Video,
                                               m_captureDevice,
                                               null,
                                               m_renderer);

                DsError.ThrowExceptionForHR(hr);

                /* Route the capture pin into the bridge sink */
                hr = graphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, m_captureDevice, null, m_pSourceGraphSinkFilter);
                DsError.ThrowExceptionForHR(hr);

                /* Turn the capture stream off until it is actually needed */
                hr = graphBuilder.FindPin(m_captureDevice, PinDirection.Output, PinCategory.Capture, MediaType.Video, false, 0, out m_pCapOutput);
                if (hr >= 0)
                {
                    IAMStreamControlBridge pSC = (IAMStreamControlBridge)m_pCapOutput;
                    pSC.StartAt(NEVER, 0);  // Ignore any error
                }

                /* Register the filter graph
                 * with the base classes */
                SetupFilterGraph(m_graph);

                /* Sets the NaturalVideoWidth/Height */
                SetNativePixelSizes(m_renderer);

                HasVideo = true;
            }
            catch (Exception ex)
            {
                /* Setup failed: free everything, raise MediaFailed, and bail.
                 * Previously the code fell through and raised MediaOpened even
                 * after a failure. */
                FreeResources();
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
                return;
            }
            finally
            {
                /* Make sure we Release() this COM reference on every path */
                if (graphBuilder != null)
                {
                    Marshal.ReleaseComObject(graphBuilder);
                }
            }

            /* Success */
            InvokeMediaOpened();
        }
Пример #9
0
        /// <summary>
        /// Configures the DirectShow graph to play the selected video capture
        /// device with the selected parameters.
        /// On failure the resources are freed and MediaFailed is raised;
        /// MediaOpened is raised only when setup succeeds.
        /// </summary>
        private void SetupGraph()
        {
            /* Clean up any messes left behind */
            FreeResources();

            /* Declared outside the try so the COM reference can be released in
             * the finally block even when setup throws (the original leaked it
             * on the exception path) */
            ICaptureGraphBuilder2 graphBuilder = null;

            try
            {
                logger.Info("Graph Setup");

                /* Bridge controller with a single video stream; buffers are
                 * discarded while the sink side is disconnected */
                m_pBridge = (IGMFBridgeController) new GMFBridgeController();

                int hr = m_pBridge.AddStream(true, eFormatType.MuxInputs, true);
                DsError.ThrowExceptionForHR(hr);

                /* Create a new graph */
                m_graph = (IGraphBuilder) new FilterGraphNoThread();

                #if DEBUG
                m_rotEntry = new DsROTEntry(m_graph);
                #endif

                /* Create a capture graph builder to help
                 * with rendering a capture graph */
                graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                hr = m_pBridge.InsertSinkFilter(m_graph, out m_pSourceGraphSinkFilter);
                DsError.ThrowExceptionForHR(hr);

                logger.Info("VideoCaptureSource:" + VideoCaptureSource);
                if (VideoCaptureDevice != null)
                {
                    logger.Info("VideoCaptureDevice.DevicePath:" + VideoCaptureDevice.DevicePath);
                }

                /* Add our capture device source to the graph, by friendly name
                 * or by device path, depending on which selection changed */
                if (m_videoCaptureSourceChanged)
                {
                    m_captureDevice = AddFilterByName(m_graph,
                                                      FilterCategory.VideoInputDevice,
                                                      VideoCaptureSource);

                    m_videoCaptureSourceChanged = false;
                }
                else if (m_videoCaptureDeviceChanged)
                {
                    m_captureDevice = AddFilterByDevicePath(m_graph,
                                                            FilterCategory.VideoInputDevice,
                                                            VideoCaptureDevice.DevicePath);

                    m_videoCaptureDeviceChanged = false;
                }

                /* If we have a null capture device, we have an issue */
                if (m_captureDevice == null)
                {
                    throw new Exception(string.Format("Capture device {0} not found or could not be created", VideoCaptureSource));
                }

                /* NOTE(review): the crossbar lookup result is unused here; the
                 * call is kept in case FindInterface has needed side effects —
                 * confirm before removing */
                object crossbar;
                graphBuilder.FindInterface(null,
                                           null,
                                           m_captureDevice as IBaseFilter,
                                           typeof(IAMCrossbar).GUID,
                                           out crossbar);

                if (UseYuv && !EnableSampleGrabbing)
                {
                    /* Configure the video output pin with our parameters and if it fails
                     * then just use the default media subtype*/
                    if (!SetVideoCaptureParameters(graphBuilder, m_captureDevice, MediaSubType.YUY2))
                    {
                        SetVideoCaptureParameters(graphBuilder, m_captureDevice, Guid.Empty);
                    }
                }
                else
                {
                    /* Configure the video output pin with our parameters */
                    SetVideoCaptureParameters(graphBuilder, m_captureDevice, new Guid("73646976-0000-0010-8000-00AA00389B71"));
                }

                var rendererType = VideoRendererType.VideoMixingRenderer9;

                /* Creates a video renderer and register the allocator with the base class */
                m_renderer = CreateVideoRenderer(rendererType, m_graph, 1);

                if (rendererType == VideoRendererType.VideoMixingRenderer9)
                {
                    var mixer = m_renderer as IVMRMixerControl9;

                    if (mixer != null && !EnableSampleGrabbing && UseYuv)
                    {
                        VMR9MixerPrefs dwPrefs;
                        mixer.GetMixingPrefs(out dwPrefs);
                        dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask;
                        dwPrefs |= VMR9MixerPrefs.RenderTargetYUV;
                        /* Prefer YUV */
                        mixer.SetMixingPrefs(dwPrefs);
                    }
                }

                if (EnableSampleGrabbing)
                {
                    m_sampleGrabber = (ISampleGrabber) new SampleGrabber();
                    SetupSampleGrabber(m_sampleGrabber);
                    hr = m_graph.AddFilter(m_sampleGrabber as IBaseFilter, "SampleGrabber");
                    DsError.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.SetFiltergraph(m_graph);
                DsError.ThrowExceptionForHR(hr);

                /* Render the preview pin into the video renderer */
                hr = graphBuilder.RenderStream(PinCategory.Preview,
                                               MediaType.Video,
                                               m_captureDevice,
                                               null,
                                               m_renderer);

                DsError.ThrowExceptionForHR(hr);

                /* Route the capture pin into the bridge sink */
                hr = graphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, m_captureDevice, null, m_pSourceGraphSinkFilter);
                DsError.ThrowExceptionForHR(hr);

                /* Turn the capture stream off until it is actually needed */
                hr = graphBuilder.FindPin(m_captureDevice, PinDirection.Output, PinCategory.Capture, MediaType.Video, false, 0, out m_pCapOutput);
                if (hr >= 0)
                {
                    IAMStreamControlBridge pSC = (IAMStreamControlBridge)m_pCapOutput;
                    pSC.StartAt(NEVER, 0);  // Ignore any error
                }

                /* Register the filter graph
                 * with the base classes */
                SetupFilterGraph(m_graph);

                /* Sets the NaturalVideoWidth/Height */
                SetNativePixelSizes(m_renderer);

                HasVideo = true;
            }
            catch (Exception ex)
            {
                /* Setup failed: free everything, raise MediaFailed, and bail.
                 * Previously the code fell through and raised MediaOpened even
                 * after a failure. */
                FreeResources();
                InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex));
                return;
            }
            finally
            {
                /* Make sure we Release() this COM reference on every path */
                if (graphBuilder != null)
                {
                    Marshal.ReleaseComObject(graphBuilder);
                }
            }

            /* Success */
            InvokeMediaOpened();
        }
Пример #10
0
        /// <summary>
        /// Specify a device, and a window to draw the preview in.
        /// Builds the source graph: renders the device's preview pin, routes its
        /// capture pin into the bridge sink (where data is discarded until
        /// capture begins), and starts the graph running.
        /// </summary>
        /// <param name="dev">The capture device to select.</param>
        /// <param name="hwnd">Window used to display the preview.</param>
        public void SelectDevice(DsDevice dev, IntPtr hwnd)
        {
            int                   hr;
            IBaseFilter           pfDevice = null;
            ICaptureGraphBuilder2 pBuilder = null;

            // release any leftovers
            ReleaseSelectMembers();

            try
            {
                // create source graph and add sink filter
                m_pSourceGraph = (IGraphBuilder) new FilterGraph();
                m_rot1         = new DsROTEntry(m_pSourceGraph);

                m_pBridge = (IGMFBridgeController) new GMFBridgeController();

                // init to video-only, in discard mode (ie when source graph
                // is running but not connected, buffers are discarded at the bridge)
                hr = m_pBridge.AddStream(true, eFormatType.MuxInputs, true);
                DsError.ThrowExceptionForHR(hr);

                // Add the requested device
                hr = ((IFilterGraph2)m_pSourceGraph).AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out pfDevice);
                DsError.ThrowExceptionForHR(hr);

                // Add the sink filter to the source graph
                hr = m_pBridge.InsertSinkFilter(m_pSourceGraph, out m_pSourceGraphSinkFilter);
                DsError.ThrowExceptionForHR(hr);

                // use capture graph builder to render preview
                pBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                // Init the CaptureGraphBuilder2
                hr = pBuilder.SetFiltergraph(m_pSourceGraph);
                DsError.ThrowExceptionForHR(hr);

                // Connect the filters together to allow preview
                hr = pBuilder.RenderStream(PinCategory.Preview, MediaType.Video, pfDevice, null, null);
                DsError.ThrowExceptionForHR(hr);

                // connect capture output to the pseudo-sink filter,
                // where it will be discarded until required
                hr = pBuilder.RenderStream(PinCategory.Capture, MediaType.Video, pfDevice, null, m_pSourceGraphSinkFilter);
                DsError.ThrowExceptionForHR(hr);

                // turn off capture stream if possible except when capturing
                hr = pBuilder.FindPin(pfDevice, PinDirection.Output, PinCategory.Capture, MediaType.Video, false, 0, out m_pCapOutput);
                if (hr >= 0)
                {
                    IAMStreamControl pSC = (IAMStreamControl)m_pCapOutput;
                    pSC.StartAt(NEVER, 0);  // Ignore any error
                }

                ConfigureVideo(hwnd);

                // start the source graph so the preview runs immediately
                IMediaControl pMC = (IMediaControl)m_pSourceGraph;

                hr = pMC.Run();
                DsError.ThrowExceptionForHR(hr);

                // If we made it here, the device is selected
                m_DeviceSelected = true;
            }
            catch
            {
                // undo any partial setup before propagating the failure
                ReleaseSelectMembers();
                throw;
            }
            finally
            {
                // temporaries are released on both success and failure paths
                if (pBuilder != null)
                {
                    Marshal.ReleaseComObject(pBuilder);
                }

                if (pfDevice != null)
                {
                    Marshal.ReleaseComObject(pfDevice);
                }
            }
        }