Example #1
        /// <summary>
        /// Creates the sample grabber.
        /// </summary>
        /// <returns>
        /// The created sample grabber.
        /// </returns>
        internal virtual ISampleGrabber CreateSampleGrabber()
        {
            ISampleGrabber grabber = (ISampleGrabber)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_SampleGrabber)));

            if (grabber != null)
            {
                // Configure the sample grabber filter's input format.
                // SetMediaType specifies the required media type.
                //   http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
                // Note: not every member of the AM_MEDIA_TYPE structure has to be set.
                // Note: by default, the sample grabber has no preferred media type.
                // Note: to make sure the sample grabber connects to the correct filter, call this method before building the filter graph.
                // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
                // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
                AM_MEDIA_TYPE media_type = new AM_MEDIA_TYPE();
                media_type.majortype  = new Guid(GUID.MEDIATYPE_Video);                 // Video
                media_type.subtype    = new Guid(GUID.MEDIASUBTYPE_RGB24);              // RGB24
                media_type.formattype = new Guid(GUID.FORMAT_VideoInfo);                // VideoInfo
                grabber.SetMediaType(media_type);
                grabber.SetBufferSamples(false);                                        // Disable sample buffering.
                grabber.SetOneShot(false);                                              // Disable one-shot mode.
                //grabber.SetCallback(this.SampleGrabberCB, 0);                         // 0: call the SampleCB method.
                grabber.SetCallback(this.SampleGrabberCB, 1);                           // 1: call the BufferCB method.
                SampleGrabberCB.Notify += SampleGrabberCB_Notify;
            }
            return(grabber);
        }
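The SetCallback call above selects the BufferCB route (the commented-out line shows the SampleCB alternative), but the callback object itself is not part of this example. Below is a minimal sketch of such a class, assuming the standard DirectShow ISampleGrabberCB and IMediaSample declarations; the Notify event is a hypothetical stand-in for whatever the original SampleGrabberCB class exposes.

        // Sketch only: a callback object usable with ISampleGrabber.SetCallback(cb, 1).
        // ISampleGrabberCB and IMediaSample are the standard DirectShow declarations;
        // the Notify event is hypothetical and not taken from the original project.
        internal class SampleGrabberCallback : ISampleGrabberCB
        {
            public event Action<IntPtr, int> Notify;            // (buffer pointer, buffer length)

            // Invoked when SetCallback(..., 0) is requested; unused here.
            public int SampleCB(double sampleTime, IMediaSample sample)
            {
                Marshal.ReleaseComObject(sample);
                return 0;
            }

            // Invoked when SetCallback(..., 1) is requested; pBuffer points to the raw RGB24 frame.
            public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
            {
                if (Notify != null)
                {
                    Notify(pBuffer, bufferLen);
                }
                return 0;
            }
        }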
        private void ApplyVideoInput()
        {
            Dispose();
            Frame         = new byte[(width * height) * PixelSize];
            CapturedFrame = new byte[(width * height) * PixelSize];
            PreviewFrame  = new byte[(width / PreviewDivider * height / PreviewDivider) * PixelSize];
            if (VideoInput == null)
            {
                return;
            }
            GraphBuilder        = (IGraphBuilder) new FilterGraph();
            CaptureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
            MediaControl        = (IMediaControl)GraphBuilder;
            CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
            SampleGrabber = new SampleGrabber() as ISampleGrabber;
            GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
            SetResolution(width, height);
            GraphBuilder.AddFilter(VideoInput, "Camera");
            SampleGrabber.SetBufferSamples(false);
            SampleGrabber.SetOneShot(false);
            SampleGrabber.GetConnectedMediaType(new AMMediaType());
            SampleGrabber.SetCallback(this, 1);
            CaptureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, VideoInput, null,
                                             SampleGrabber as IBaseFilter);

            if (UpdateThread != null)
            {
                UpdateThread.Abort();
            }
            UpdateThread = new Thread(UpdateBuffer);
            UpdateThread.Start();
            MediaControl.Run();
            Marshal.ReleaseComObject(VideoInput);
        }
Example #3
        void TestOneShot()
        {
            int  hr;
            bool bDone;

            m_TestComplete.Reset();

            hr = m_isg.SetOneShot(true);
            DsError.ThrowExceptionForHR(hr);

            hr = m_imc.Run();
            Marshal.ThrowExceptionForHR(hr);

            // Get one sample
            bDone = m_TestComplete.WaitOne(10000, false);
            Thread.Sleep(100);
            m_TestComplete.Reset();

            // Should be able to get at least one
            Debug.Assert(bDone, "SetOneShot1");

            // Try to get another
            bDone = m_TestComplete.WaitOne(1000, false);

            // Should fail since we said "OneShot"
            // You may need to comment out the call to SetMediaType in
            // BuildGraph for this to work correctly.
            Debug.Assert(!bDone, "SetOneShot2");

            hr = m_imc.Stop();
            Marshal.ThrowExceptionForHR(hr);
        }
Example #4
        void SetupVideoGrabber()
        {
            AMMediaType media = new AMMediaType();
            int         hr    = grabberConfig.GetConnectedMediaType(media);

            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            if (((!media.formatType.Equals(FormatType.VideoInfo)) &&
                 (!media.formatType.Equals(FormatType.WaveEx))) ||
                (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
            Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

            hr = grabberConfig.SetBufferSamples(false);
            if (hr == 0)
            {
                hr = grabberConfig.SetOneShot(false);
            }
            if (hr == 0)
            {
                hr = grabberConfig.SetCallback(null, 0);
            }
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            grabberConfig.SetCallback(this, 1);
        }
Example #5
        /// <summary>
        /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice.
        /// </summary>
        /// <param name="FileName">The video file to open</param>
        /// <param name="graphicsDevice">XNA Graphics Device</param>
        public VideoPlayer(string FileName, GraphicsDevice graphicsDevice)
        {
            try
            {
                currentState = VideoState.Stopped;

                filename = FileName;


                InitInterfaces();


                SampleGrabber  sg            = new SampleGrabber();
                ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
                DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));


                AMMediaType mt = new AMMediaType();
                mt.majorType  = MEDIATYPE_Video;    // Video
                mt.subType    = MEDIASUBTYPE_RGB24; // RGB24
                mt.formatType = FORMAT_VideoInfo;   // VideoInfo
                DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));


                DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));


                DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
                DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
                DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));


                IVideoWindow pVideoWindow = (IVideoWindow)gb;
                DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));


                AMMediaType MediaType = new AMMediaType();
                DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
                VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                videoHeight     = pVideoHeader.BmiHeader.Height;
                videoWidth      = pVideoHeader.BmiHeader.Width;
                avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                bitRate         = pVideoHeader.BitRate;
                DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));


                videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
                bgrData         = new byte[(videoHeight * videoWidth) * 3]; // BGR24 format (3 bytes per pixel)


                outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color);
            }
            catch
            {
                throw new Exception("Unable to Load or Play the video file");
            }
        }
        public DSVideoPlayer(string filename, GraphicsDevice graphicsDevice)
        {
            try
            {
                // Open DirectShow Interfaces
                InitInterfaces();

                Info = new DSVideoInfo();
                // Create a SampleGrabber Filter and add it to the FilterGraph
                SampleGrabber  sg            = new SampleGrabber();
                ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
                DsError.ThrowExceptionForHR(FG_GraphBuilder.AddFilter((IBaseFilter)sg, "Grabber"));

                // Setup Media type info for the SampleGrabber
                AMMediaType mt = new AMMediaType();
                mt.majorType  = DSVideoInfo.MEDIATYPE_Video;    // Video
                mt.subType    = DSVideoInfo.MEDIASUBTYPE_RGB24; // RGB24
                mt.formatType = DSVideoInfo.FORMAT_VideoInfo;   // VideoInfo
                DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));

                //// Construct the rest of the FilterGraph
                DsError.ThrowExceptionForHR(FG_GraphBuilder.RenderFile(filename, null));
                Info.FileName = filename;

                //// Set SampleGrabber Properties
                DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
                DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
                DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

                // Hide Default Video Window
                IVideoWindow pVideoWindow = (IVideoWindow)FG_GraphBuilder;
                DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

                //// Create AMMediaType to capture video information
                AMMediaType MediaType = new AMMediaType();
                DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
                VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                // Store video information
                Info.Height          = pVideoHeader.BmiHeader.Height;
                Info.Width           = pVideoHeader.BmiHeader.Width;
                Info.AvgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                Info.BitRate         = pVideoHeader.BitRate;
                DsError.ThrowExceptionForHR(FG_MediaSeeking.GetDuration(out Info.Duration));

                // Create byte arrays to hold video data
                videoFrameBytes = new byte[(Info.Height * Info.Width) * 4]; // RGBA format (4 bytes per pixel)
                bgrData         = new byte[(Info.Height * Info.Width) * 3]; // BGR24 format (3 bytes per pixel)

                // Create Output Frame Texture2D with the height and width of the video
                outputFrame = new Texture2D(graphicsDevice, Info.Width, Info.Height, 1, TextureUsage.None, SurfaceFormat.Color);
            }
            catch (Exception ex)
            {
                throw new Exception("不能加载或播放该视频: " + ex.Message);
            }
        }
Example #7
    protected void Initialize()
    {
        FrameReady            = false;
        frame                 = new Texture2D(GraphicsDevice, Width, Height, false, SurfaceFormat.Color);
        FrameBGR              = new byte[(Width * Height) * 3];
        FrameRGBA             = new byte[(Width * Height) * 4];
        FrameGrayscale        = new byte[(Width * Height)];
        FrameHalfGrayscale    = new byte[(Width / 2 * Height / 2)];
        FrameQuarterGrayscale = new byte[(Width / 4 * Height / 4)];
        GraphBuilder          = (IGraphBuilder) new FilterGraph();
        CaptureGraphBuilder   = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
        MediaControl          = (IMediaControl)GraphBuilder;
        CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
        object         VideoInputObject = null;
        IBaseFilter    VideoInput       = null;
        IEnumMoniker   classEnum;
        ICreateDevEnum devEnum = (ICreateDevEnum) new CreateDevEnum();

        devEnum.CreateClassEnumerator(FilterCategory.VideoInputDevice, out classEnum, 0);
        Marshal.ReleaseComObject(devEnum);
        if (classEnum != null)
        {
            IMoniker[] moniker = new IMoniker[1];
            if (classEnum.Next(moniker.Length, moniker, IntPtr.Zero) == DEVICE_ID)
            {
                Guid iid = typeof(IBaseFilter).GUID;
                moniker[0].BindToObject(null, null, ref iid, out VideoInputObject);
            }
            Marshal.ReleaseComObject(moniker[0]);
            Marshal.ReleaseComObject(classEnum);
            VideoInput = (IBaseFilter)VideoInputObject;
        }
        if (VideoInput != null)
        {
            isRunning     = true;
            SampleGrabber = new SampleGrabber() as ISampleGrabber;
            GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
            AMMediaType Type = new AMMediaType()
            {
                majorType = MediaType.Video, subType = MediaSubType.RGB24, formatType = FormatType.VideoInfo
            };
            SampleGrabber.SetMediaType(Type);
            GraphBuilder.AddFilter(VideoInput, "Camera");
            SampleGrabber.SetBufferSamples(false);
            SampleGrabber.SetOneShot(false);
            SampleGrabber.GetConnectedMediaType(new AMMediaType());
            SampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
            CaptureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, VideoInput, null, SampleGrabber as IBaseFilter);
            UpdateThread = new Thread(UpdateBuffer);
            UpdateThread.Start();
            MediaControl.Run();
            Marshal.ReleaseComObject(VideoInput);
        }
    }
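Example #7 registers itself as the callback (SetCallback((ISampleGrabberCB)this, 1)) and hands frames to an UpdateBuffer thread, but neither method is shown. A plausible sketch of both follows, assuming the DirectShowLib ISampleGrabberCB signatures and the fields declared above (FrameBGR, FrameRGBA, FrameReady, frame, isRunning); note that DirectShow delivers RGB24 frames bottom-up, so a vertical flip may also be required in practice.

    // Sketch only: BufferCB copies each delivered BGR24 frame into FrameBGR.
    public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
    {
        if (bufferLen <= FrameBGR.Length)
        {
            Marshal.Copy(pBuffer, FrameBGR, 0, bufferLen);
            FrameReady = true;
        }
        return 0;
    }

    // Required by ISampleGrabberCB even though only BufferCB is used (callback type 1).
    public int SampleCB(double sampleTime, IMediaSample pSample)
    {
        return 0;
    }

    // Sketch only: converts BGR24 to RGBA and uploads the frame to the Texture2D.
    private void UpdateBuffer()
    {
        while (isRunning)
        {
            if (FrameReady)
            {
                for (int i = 0, j = 0; i < FrameBGR.Length; i += 3, j += 4)
                {
                    FrameRGBA[j]     = FrameBGR[i + 2];         // R
                    FrameRGBA[j + 1] = FrameBGR[i + 1];         // G
                    FrameRGBA[j + 2] = FrameBGR[i];             // B
                    FrameRGBA[j + 3] = 255;                     // A
                }
                frame.SetData(FrameRGBA);
                FrameReady = false;
            }
            Thread.Sleep(15);
        }
    }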
Example #8
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            int hr;

            // Call back on the SampleCB routine
            hr = sampGrabber.SetCallback(this, 0);
            Marshal.ThrowExceptionForHR(hr);

            // Only one call
            hr = sampGrabber.SetOneShot(true);
            Marshal.ThrowExceptionForHR(hr);
        }
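This configuration selects the SampleCB route (callback type 0) together with one-shot mode, so the hosting class must implement ISampleGrabberCB.SampleCB. A sketch of what that callback typically looks like, assuming DirectShowLib's IMediaSample signatures; the m_frame buffer is an illustrative field, not part of the original example.

        // Sketch only: copy the delivered sample into a managed buffer and release it.
        private byte[] m_frame;

        public int SampleCB(double sampleTime, IMediaSample pSample)
        {
            IntPtr pBuffer;
            if (pSample.GetPointer(out pBuffer) == 0)
            {
                int len = pSample.GetActualDataLength();
                if (m_frame == null || m_frame.Length < len)
                {
                    m_frame = new byte[len];
                }
                Marshal.Copy(pBuffer, m_frame, 0, len);
            }
            Marshal.ReleaseComObject(pSample);
            return 0;
        }

        // Not used because SetCallback(..., 0) selects SampleCB.
        public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            return 0;
        }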
Example #9
        private static SampleGrabber CreateSampleGrabber(Guid majorType, Guid subType, Guid formatType,
                                                         SampleGrabberGraph.SampleGrabberCallback.BufferCBEventHandler callback)
        {
            SampleGrabber  sampleGrabber = new SampleGrabber();
            ISampleGrabber grabber       = sampleGrabber as ISampleGrabber;

            grabber.SetMediaType(new AMMediaType {
                majorType = majorType, subType = subType, formatType = formatType
            });
            grabber.SetBufferSamples(false);
            grabber.SetOneShot(false);
            grabber.SetCallback(new SampleGrabberCallback()
            {
                OnBuffer = callback
            }, 1);                                                                       // 0 = Sample, 1 = Buffer
            return(sampleGrabber);
        }
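For reference, a call site for this helper might look like the following sketch; the AddRgb24Grabber wrapper and its graphBuilder parameter are illustrative, and the exact signature of BufferCBEventHandler is defined elsewhere in the project.

        // Sketch only: build an RGB24 grabber with the factory above and add it to a graph.
        private static void AddRgb24Grabber(IGraphBuilder graphBuilder,
                                            SampleGrabberGraph.SampleGrabberCallback.BufferCBEventHandler onVideoBuffer)
        {
            SampleGrabber grabber = CreateSampleGrabber(MediaType.Video,
                                                        MediaSubType.RGB24,
                                                        FormatType.VideoInfo,
                                                        onVideoBuffer);
            int hr = graphBuilder.AddFilter((IBaseFilter)grabber, "Sample Grabber");

            DsError.ThrowExceptionForHR(hr);
        }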
Example #10
        private void InitializeCapture()
        {
            graphBuilder = (IGraphBuilder) new FilterGraph();
            mediaControl = (IMediaControl)graphBuilder;

            captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
            hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
            DsError.ThrowExceptionForHR(hr);

            IBaseFilter videoInput = GetVideoInputObject();

            if (null != videoInput)
            {
                SetConfigurations(videoInput);

                sampleGrabber = new SampleGrabber() as ISampleGrabber;
                hr            = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Render");
                DsError.ThrowExceptionForHR(hr);

                hr = graphBuilder.AddFilter(videoInput, "Camera");
                DsError.ThrowExceptionForHR(hr);

                AMMediaType type = new AMMediaType()
                {
                    majorType = MediaType.Video, subType = MediaSubType.ARGB32, formatType = FormatType.VideoInfo
                };
                hr = sampleGrabber.SetMediaType(type);
                DsError.ThrowExceptionForHR(hr);
                DsUtils.FreeAMMediaType(type);

                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.GetConnectedMediaType(new AMMediaType());

                sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
                hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, videoInput, null, sampleGrabber as IBaseFilter);
                DsError.ThrowExceptionForHR(hr);

                Marshal.ReleaseComObject(videoInput);
            }
        }
Example #11
        protected void Initialize()
        {
            FrameReady          = false;
            GraphBuilder        = (IGraphBuilder) new FilterGraph();
            CaptureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
            MediaControl        = (IMediaControl)GraphBuilder;
            CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
            var videoInput = GetVideoInputObjectForCamera(Camera);

            if (videoInput != null)
            {
                isRunning     = true;
                SampleGrabber = new SampleGrabber() as ISampleGrabber;
                GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
                var Type = new AMMediaType
                {
                    majorType  = MediaType.Video,
                    subType    = MediaSubType.RGB24,
                    formatType = FormatType.VideoInfo
                };
                SampleGrabber.SetMediaType(Type);
                GraphBuilder.AddFilter(videoInput, "Camera");
                SetCaptureFormat(videoInput);
                SampleGrabber.SetBufferSamples(false);
                SampleGrabber.SetOneShot(false);
                SampleGrabber.GetConnectedMediaType(new AMMediaType());
                SampleGrabber.SetCallback(this, 1);
                CaptureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, videoInput, null,
                                                 SampleGrabber as IBaseFilter);
                frame        = new Texture2D(GraphicsDevice, Width, Height, false, SurfaceFormat.Color);
                FrameBGR     = new byte[Width * Height * 3];
                FrameRGBA    = new byte[Width * Height * 4];
                UpdateThread = new Thread(UpdateBuffer);
                UpdateThread.IsBackground = true;
                UpdateThread.Start();
                MediaControl.Run();
                Marshal.ReleaseComObject(videoInput);
            }
        }
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media;
            int         hr;

            // Set the media type to Video/RBG24
            media            = new AMMediaType();
            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            sampGrabber.SetBufferSamples(false);
            sampGrabber.SetOneShot(false);
            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Configure the samplegrabber
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
Example #13
        /// <summary>
        /// Creates the SampleGrabber filter
        /// </summary>
        void CreateSampleGrabber()
        {
            Type comType = Type.GetTypeFromCLSID(new Guid(SAMPLE_GRABBER));

            _sampleGrabber = (ISampleGrabber)Activator.CreateInstance(comType);

            AMMediaType mediaType = new AMMediaType
            {
                majorType  = MediaType.Video,
                subType    = MediaSubType.RGB32,
                formatType = FormatType.VideoInfo
            };

            _sampleGrabber.SetMediaType(mediaType);

            DsUtils.FreeAMMediaType(mediaType);

            int hr = _sampleGrabber.SetOneShot(true);

            DsError.ThrowExceptionForHR(hr);

            hr = _sampleGrabber.SetBufferSamples(true);
            DsError.ThrowExceptionForHR(hr);
        }
Example #14
        private static ISampleGrabber AddSampleGrabber(IGraphBuilder graph, string filterName, Guid majorType, Guid minorType)
        {
            ISampleGrabber isg = (ISampleGrabber) new SampleGrabber();

            int hr = graph.AddFilter((IBaseFilter)isg, filterName);

            DsError.ThrowExceptionForHR(hr);

            AMMediaType mt = new AMMediaType();

            mt.majorType = majorType;
            mt.subType   = minorType;

            hr = isg.SetMediaType(mt);
            DsError.ThrowExceptionForHR(hr);

            hr = isg.SetBufferSamples(true);
            DsError.ThrowExceptionForHR(hr);

            hr = isg.SetOneShot(true);
            DsError.ThrowExceptionForHR(hr);

            return(isg);
        }
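Because this grabber is configured with SetBufferSamples(true) and SetOneShot(true), the captured frame is normally read back after the graph has delivered a sample, using ISampleGrabber.GetCurrentBuffer: the first call with IntPtr.Zero queries the required size and the second call fills the buffer. A sketch under DirectShowLib signatures:

        // Sketch only: read the buffered sample back from a grabber created by AddSampleGrabber.
        private static byte[] ReadCurrentBuffer(ISampleGrabber isg)
        {
            int size = 0;
            int hr   = isg.GetCurrentBuffer(ref size, IntPtr.Zero);     // query size only

            DsError.ThrowExceptionForHR(hr);

            IntPtr buffer = Marshal.AllocCoTaskMem(size);
            try
            {
                hr = isg.GetCurrentBuffer(ref size, buffer);            // copy the sample data
                DsError.ThrowExceptionForHR(hr);

                byte[] data = new byte[size];
                Marshal.Copy(buffer, data, 0, size);
                return data;
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
            }
        }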
Example #15
        private void WorkerThread(bool runGraph)
        {
            ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;
            bool isSapshotSupported            = false;


            Grabber videoGrabber    = new Grabber(this, false);
            Grabber snapshotGrabber = new Grabber(this, true);


            object captureGraphObject    = null;
            object graphObject           = null;
            object videoGrabberObject    = null;
            object snapshotGrabberObject = null;
            object crossbarObject        = null;


            ICaptureGraphBuilder2 captureGraph          = null;
            IFilterGraph2         graph                 = null;
            IBaseFilter           sourceBase            = null;
            IBaseFilter           videoGrabberBase      = null;
            IBaseFilter           snapshotGrabberBase   = null;
            ISampleGrabber        videoSampleGrabber    = null;
            ISampleGrabber        snapshotSampleGrabber = null;
            IMediaControl         mediaControl          = null;
            IAMVideoControl       videoControl          = null;
            IMediaEventEx         mediaEvent            = null;
            IPin        pinStillImage = null;
            IAMCrossbar crossbar      = null;

            try
            {
                Type type = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating capture graph builder");
                }


                captureGraphObject = Activator.CreateInstance(type);
                captureGraph       = (ICaptureGraphBuilder2)captureGraphObject;


                type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }


                graphObject = Activator.CreateInstance(type);
                graph       = (IFilterGraph2)graphObject;


                captureGraph.SetFiltergraph((IGraphBuilder)graph);


                sourceObject = FilterInfo.CreateFilter(deviceMoniker);
                if (sourceObject == null)
                {
                    throw new ApplicationException("Failed creating device object for moniker");
                }


                sourceBase = (IBaseFilter)sourceObject;


                try
                {
                    videoControl = (IAMVideoControl)sourceObject;
                }
                catch
                {
                }


                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }


                videoGrabberObject = Activator.CreateInstance(type);
                videoSampleGrabber = (ISampleGrabber)videoGrabberObject;
                videoGrabberBase   = (IBaseFilter)videoGrabberObject;

                snapshotGrabberObject = Activator.CreateInstance(type);
                snapshotSampleGrabber = (ISampleGrabber)snapshotGrabberObject;
                snapshotGrabberBase   = (IBaseFilter)snapshotGrabberObject;


                graph.AddFilter(sourceBase, "source");
                graph.AddFilter(videoGrabberBase, "grabber_video");
                graph.AddFilter(snapshotGrabberBase, "grabber_snapshot");


                AMMediaType mediaType = new AMMediaType( );
                mediaType.MajorType = MediaType.Video;
                mediaType.SubType   = MediaSubType.RGB24;

                videoSampleGrabber.SetMediaType(mediaType);
                snapshotSampleGrabber.SetMediaType(mediaType);


                captureGraph.FindInterface(FindDirection.UpstreamOnly, Guid.Empty, sourceBase, typeof(IAMCrossbar).GUID, out crossbarObject);
                if (crossbarObject != null)
                {
                    crossbar = (IAMCrossbar)crossbarObject;
                }
                isCrossbarAvailable = (crossbar != null);
                crossbarVideoInputs = ColletCrossbarVideoInputs(crossbar);

                if (videoControl != null)
                {
                    captureGraph.FindPin(sourceObject, PinDirection.Output,
                                         PinCategory.StillImage, MediaType.Video, false, 0, out pinStillImage);

                    if (pinStillImage != null)
                    {
                        VideoControlFlags caps;
                        videoControl.GetCaps(pinStillImage, out caps);
                        isSapshotSupported = ((caps & VideoControlFlags.ExternalTriggerEnable) != 0);
                    }
                }


                videoSampleGrabber.SetBufferSamples(false);
                videoSampleGrabber.SetOneShot(false);
                videoSampleGrabber.SetCallback(videoGrabber, 1);


                snapshotSampleGrabber.SetBufferSamples(true);
                snapshotSampleGrabber.SetOneShot(false);
                snapshotSampleGrabber.SetCallback(snapshotGrabber, 1);


                GetPinCapabilitiesAndConfigureSizeAndRate(captureGraph, sourceBase,
                                                          PinCategory.Capture, videoResolution, ref videoCapabilities);
                if (isSapshotSupported)
                {
                    GetPinCapabilitiesAndConfigureSizeAndRate(captureGraph, sourceBase,
                                                              PinCategory.StillImage, snapshotResolution, ref snapshotCapabilities);
                }
                else
                {
                    snapshotCapabilities = new VideoCapabilities[0];
                }


                lock ( cacheVideoCapabilities )
                {
                    if ((videoCapabilities != null) && (!cacheVideoCapabilities.ContainsKey(deviceMoniker)))
                    {
                        cacheVideoCapabilities.Add(deviceMoniker, videoCapabilities);
                    }
                }
                lock ( cacheSnapshotCapabilities )
                {
                    if ((snapshotCapabilities != null) && (!cacheSnapshotCapabilities.ContainsKey(deviceMoniker)))
                    {
                        cacheSnapshotCapabilities.Add(deviceMoniker, snapshotCapabilities);
                    }
                }

                if (runGraph)
                {
                    captureGraph.RenderStream(PinCategory.Capture, MediaType.Video, sourceBase, null, videoGrabberBase);

                    if (videoSampleGrabber.GetConnectedMediaType(mediaType) == 0)
                    {
                        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                        videoGrabber.Width  = vih.BmiHeader.Width;
                        videoGrabber.Height = vih.BmiHeader.Height;

                        mediaType.Dispose( );
                    }

                    if ((isSapshotSupported) && (provideSnapshots))
                    {
                        captureGraph.RenderStream(PinCategory.StillImage, MediaType.Video, sourceBase, null, snapshotGrabberBase);

                        if (snapshotSampleGrabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                            snapshotGrabber.Width  = vih.BmiHeader.Width;
                            snapshotGrabber.Height = vih.BmiHeader.Height;

                            mediaType.Dispose( );
                        }
                    }


                    mediaControl = (IMediaControl)graphObject;


                    mediaEvent = (IMediaEventEx)graphObject;
                    IntPtr   p1, p2;
                    DsEvCode code;


                    mediaControl.Run( );

                    if ((isSapshotSupported) && (provideSnapshots))
                    {
                        startTime = DateTime.Now;
                        videoControl.SetMode(pinStillImage, VideoControlFlags.ExternalTriggerEnable);
                    }

                    do
                    {
                        if (mediaEvent != null)
                        {
                            if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                            {
                                mediaEvent.FreeEventParams(code, p1, p2);

                                if (code == DsEvCode.DeviceLost)
                                {
                                    reasonToStop = ReasonToFinishPlaying.DeviceLost;
                                    break;
                                }
                            }
                        }

                        if (needToSetVideoInput)
                        {
                            needToSetVideoInput = false;

                            if (isCrossbarAvailable.Value)
                            {
                                SetCurrentCrossbarInput(crossbar, crossbarVideoInput);
                                crossbarVideoInput = GetCurrentCrossbarInput(crossbar);
                            }
                        }

                        if (needToSimulateTrigger)
                        {
                            needToSimulateTrigger = false;

                            if ((isSapshotSupported) && (provideSnapshots))
                            {
                                videoControl.SetMode(pinStillImage, VideoControlFlags.Trigger);
                            }
                        }

                        if (needToDisplayPropertyPage)
                        {
                            needToDisplayPropertyPage = false;
                            DisplayPropertyPage(parentWindowForPropertyPage, sourceObject);

                            if (crossbar != null)
                            {
                                crossbarVideoInput = GetCurrentCrossbarInput(crossbar);
                            }
                        }

                        if (needToDisplayCrossBarPropertyPage)
                        {
                            needToDisplayCrossBarPropertyPage = false;

                            if (crossbar != null)
                            {
                                DisplayPropertyPage(parentWindowForPropertyPage, crossbar);
                                crossbarVideoInput = GetCurrentCrossbarInput(crossbar);
                            }
                        }
                    }while (!stopEvent.WaitOne(100, false));

                    mediaControl.Stop( );
                }
            }
            catch (Exception exception)
            {
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
                }
            }
            finally
            {
                captureGraph  = null;
                graph         = null;
                sourceBase    = null;
                mediaControl  = null;
                videoControl  = null;
                mediaEvent    = null;
                pinStillImage = null;
                crossbar      = null;

                videoGrabberBase      = null;
                snapshotGrabberBase   = null;
                videoSampleGrabber    = null;
                snapshotSampleGrabber = null;

                if (graphObject != null)
                {
                    Marshal.ReleaseComObject(graphObject);
                    graphObject = null;
                }
                if (sourceObject != null)
                {
                    Marshal.ReleaseComObject(sourceObject);
                    sourceObject = null;
                }
                if (videoGrabberObject != null)
                {
                    Marshal.ReleaseComObject(videoGrabberObject);
                    videoGrabberObject = null;
                }
                if (snapshotGrabberObject != null)
                {
                    Marshal.ReleaseComObject(snapshotGrabberObject);
                    snapshotGrabberObject = null;
                }
                if (captureGraphObject != null)
                {
                    Marshal.ReleaseComObject(captureGraphObject);
                    captureGraphObject = null;
                }
                if (crossbarObject != null)
                {
                    Marshal.ReleaseComObject(crossbarObject);
                    crossbarObject = null;
                }
            }

            if (PlayingFinished != null)
            {
                PlayingFinished(this, reasonToStop);
            }
        }
        public void SetUpForTs(ISampleGrabberCB grabber, int methodToCall)
        {
            FilterGraphTools.DisconnectPins(mpeg2Demux);
            //FilterGraphTools.DisconnectPins(demodulator);
            FilterGraphTools.DisconnectPins(audioRenderer);
            FilterGraphTools.DisconnectPins(videoRenderer);
            //graphBuilder.RemoveFilter(audioRenderer);
            //graphBuilder.RemoveFilter(videoRenderer);

            sampleGrabber = (ISampleGrabber)new SampleGrabber();
            AMMediaType media = new AMMediaType();

            media.majorType = MediaType.Stream;
            media.subType = MediaSubType.Mpeg2Transport;
            media.formatType = FormatType.MpegStreams;
            sampleGrabber.SetOneShot(false);
            sampleGrabber.SetBufferSamples(true);
            int hr = sampleGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Sample Grabber");

            nullRenderer = (IBaseFilter)new NullRenderer();
            graphBuilder.AddFilter(nullRenderer, "NULL Renderer");

            IPin pinIn = DsFindPin.ByName((IBaseFilter)sampleGrabber, "Input");
            IPin pinOut = DsFindPin.ByDirection(capture, PinDirection.Output, 0);

            IEnumMediaTypes eMedia;
            pinOut.EnumMediaTypes(out eMedia);

            AMMediaType[] mediaTypes = new AMMediaType[1];
            eMedia.Next(mediaTypes.Length, mediaTypes, IntPtr.Zero);

            hr = sampleGrabber.SetMediaType(mediaTypes[0]);
            DsError.ThrowExceptionForHR(hr);

            pinOut.Disconnect();
            PinInfo info;
            pinOut.QueryPinInfo(out info);

            hr = graphBuilder.ConnectDirect(pinOut, pinIn, mediaTypes[0]);
            //hr = graphBuilder.Connect(pinOut, pinIn);
            DsError.ThrowExceptionForHR(hr);

            // Release the Pin
            Marshal.ReleaseComObject(pinIn);

            pinIn = DsFindPin.ByName(nullRenderer, "In");
            pinOut = DsFindPin.ByName((IBaseFilter)sampleGrabber, "Output");

            hr = graphBuilder.Connect(pinOut, pinIn);
            DsError.ThrowExceptionForHR(hr);

            sampleGrabber.SetCallback(grabber, methodToCall);

            // Release the Pin
            Marshal.ReleaseComObject(pinIn);
            pinIn = null;
        }
Example #17
0
    /// <summary>
    /// Connects to the property changed events of the camera settings.
    /// </summary>
    //private void Initialize()
    //{
    //    //Settings.Instance.Camera.OnCameraControlPropertyChanged += OnCameraControlPropertyChanged;
    //    //Settings.Instance.Camera.OnVideoProcAmpPropertyChanged += OnVideoProcAmpPropertyChanged;
    //    //Settings.Instance.Camera.OnVideoControlFlagsChanged += OnVideoControlFlagsChanged;

    //    //stopwatch = new Stopwatch();
    //}

    /// <summary>
    /// Build the capture graph for grabber. 
    /// </summary>
    /// <param name="dev">The index of the new capture device.</param>
    /// <param name="frameRate">The framerate to use.</param>
    /// <param name="width">The width to use.</param>
    /// <param name="height">The height to use.</param>
    /// <returns>True, if successful, otherwise false.</returns>
    private bool SetupGraph(DsDevice dev, int frameRate, int width, int height)
    {
      int hr;
      fps = frameRate; // Not measured, only to expose FPS externally 
      cameraControl = null;
      capFilter = null;

      // Get the graphbuilder object
      graphBuilder = (IFilterGraph2)new FilterGraph();
      mediaControl = graphBuilder as IMediaControl;

      try
      {
        // Create the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

        // Create the SampleGrabber interface
        sampGrabber = (ISampleGrabber)new SampleGrabber();

        // Start building the graph
        hr = capGraph.SetFiltergraph(graphBuilder);
        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in capGraph.SetFiltergraph. Could not build graph. Message: " +
        //                          DsError.GetErrorText(hr));

#if DEBUG
        this.rotEntry = new DsROTEntry(this.graphBuilder);
#endif

        this.capFilter = CreateFilter(FilterCategory.VideoInputDevice, dev.Name);
        if (this.capFilter != null)
        {
          hr = graphBuilder.AddFilter(this.capFilter, "Video Source");
          DsError.ThrowExceptionForHR(hr);
        }

        //// Add the video device
        //hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
        //if (hr != 0)
        //    ErrorLogger.WriteLine(
        //        "Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " +
        //        DsError.GetErrorText(hr));

        var baseGrabFlt = (IBaseFilter)sampGrabber;

        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");

        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " +
        //                          DsError.GetErrorText(hr));

        // turn on the infrared leds ONLY FOR THE GENIUS WEBCAM
        /*
        if (!defaultMode)
        {
            m_icc = capFilter as IAMCameraControl;
            CameraControlFlags CamFlags = new CameraControlFlags();
            int pMin, pMax, pStep, pDefault;

            hr = m_icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pStep, out pDefault, out CamFlags);
            m_icc.Set(CameraControlProperty.Focus, pMax, CameraControlFlags.None);
        }
        */


        //IBaseFilter smartTee = new SmartTee() as IBaseFilter;

        //// Add the smart tee filter to the graph
        //hr = this.graphBuilder.AddFilter(smartTee, "Smart Tee");
        //Marshal.ThrowExceptionForHR(hr);

        // Connect the video source output to the smart tee
        //hr = capGraph.RenderStream(null, null, capFilter, null, smartTee);

        hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);
        var errorText = DsError.GetErrorText(hr);

        cameraControl = capFilter as IAMCameraControl;

        // Set videoProcAmp
        object obj;
        var iid_IBaseFilter = new Guid("56a86895-0ad4-11ce-b03a-0020af0ba770");
        DirectShowDevices.Instance.Cameras[deviceNumber].DirectshowDevice.Mon.BindToObject(
            null,
            null,
            ref iid_IBaseFilter,
            out obj);

        videoProcAmp = obj as IAMVideoProcAmp;

        // If any of the default config items are set
        if (frameRate + height + width > 0)
          SetConfigParms(capGraph, capFilter, frameRate, width, height);

        // Check for successful rendering, if this failed the class cannot be used, so dispose the resources and return false.
        if (hr < 0)
        {
          Cleanup();
          return false;
        }
        else
        {
          // Otherwise update the SampleGrabber.
          SaveSizeInfo(sampGrabber);
          hr = sampGrabber.SetBufferSamples(false);

          if (hr == 0)
          {
            hr = sampGrabber.SetOneShot(false);
            hr = sampGrabber.SetCallback(this, 1);
          }

          //if (hr < 0)
          //    ErrorLogger.WriteLine("Could not set callback function (SetupGraph) in Camera.Capture()");
        }
      }
      catch (Exception)
      {
        //ErrorLogger.ProcessException(ex, false);

        Cleanup();
        return false;
      }

      return true;
    }
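SetConfigParms is called above but not shown in this example. A common implementation, sketched here under the assumption that the capture pin exposes IAMStreamConfig (as in the DirectShow.NET samples), rewrites the VideoInfoHeader of the pin's current format:

    // Sketch only: set frame rate and frame size on the capture pin via IAMStreamConfig.
    private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter,
                                int frameRate, int width, int height)
    {
      object o;
      int hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter,
                                      typeof(IAMStreamConfig).GUID, out o);
      DsError.ThrowExceptionForHR(hr);

      var videoStreamConfig = (IAMStreamConfig)o;
      AMMediaType media;
      hr = videoStreamConfig.GetFormat(out media);
      DsError.ThrowExceptionForHR(hr);

      var v = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
      if (frameRate > 0)
        v.AvgTimePerFrame = 10000000 / frameRate;   // frame interval in 100 ns units
      if (width > 0 && height > 0)
      {
        v.BmiHeader.Width  = width;
        v.BmiHeader.Height = height;
      }
      Marshal.StructureToPtr(v, media.formatPtr, false);

      hr = videoStreamConfig.SetFormat(media);
      DsUtils.FreeAMMediaType(media);
      DsError.ThrowExceptionForHR(hr);
    }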
Example #18
        // Thread entry point
        public void WorkerThread()
        {
            bool failed = false;

            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObj   = null;
            object sourceObj  = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder     graph       = null;
            IBaseFilter       sourceBase  = null;
            IBaseFilter       grabberBase = null;
            ISampleGrabber    sg          = null;
            IFileSourceFilter fileSource  = null;
            IMediaControl     mc          = null;
            IMediaEventEx     mediaEvent  = null;

            int code, param1, param2;

            while ((!failed) && (!stopEvent.WaitOne(0, true)))
            {
                try
                {
                    // Get type for filter graph
                    Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating filter graph");
                    }

                    // create filter graph
                    graphObj = Activator.CreateInstance(srvType);
                    graph    = (IGraphBuilder)graphObj;

                    // Get type for windows media source filter
                    srvType = Type.GetTypeFromCLSID(Clsid.WindowsMediaSource);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating WM source");
                    }

                    // create windows media source filter
                    sourceObj  = Activator.CreateInstance(srvType);
                    sourceBase = (IBaseFilter)sourceObj;

                    // Get type for sample grabber
                    srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating sample grabber");
                    }

                    // create sample grabber
                    grabberObj  = Activator.CreateInstance(srvType);
                    sg          = (ISampleGrabber)grabberObj;
                    grabberBase = (IBaseFilter)grabberObj;

                    // add source filter to graph
                    graph.AddFilter(sourceBase, "source");
                    graph.AddFilter(grabberBase, "grabber");

                    // set media type
                    AMMediaType mt = new AMMediaType();
                    mt.majorType = MediaType.Video;
                    mt.subType   = MediaSubType.RGB24;
                    sg.SetMediaType(mt);

                    // load file
                    fileSource = (IFileSourceFilter)sourceObj;
                    fileSource.Load(this.source, null);

                    // connect pins
                    if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
                    {
                        throw new ApplicationException("Failed connecting filters");
                    }

                    // get media type
                    if (sg.GetConnectedMediaType(mt) == 0)
                    {
                        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));

                        grabber.Width  = vih.BmiHeader.Width;
                        grabber.Height = vih.BmiHeader.Height;
                        mt.Dispose();
                    }

                    // render
                    graph.Render(DSTools.GetOutPin(grabberBase, 0));

                    //
                    sg.SetBufferSamples(false);
                    sg.SetOneShot(false);
                    sg.SetCallback(grabber, 1);

                    // window
                    IVideoWindow win = (IVideoWindow)graphObj;
                    win.put_AutoShow(false);
                    win = null;

                    // get events interface
                    mediaEvent = (IMediaEventEx)graphObj;

                    // get media control
                    mc = (IMediaControl)graphObj;

                    // run
                    mc.Run();

                    while (!stopEvent.WaitOne(0, true))
                    {
                        Thread.Sleep(100);

                        // get an event
                        if (mediaEvent.GetEvent(out code, out param1, out param2, 0) == 0)
                        {
                            // release params
                            mediaEvent.FreeEventParams(code, param1, param2);

                            //
                            if (code == (int)EventCode.Complete)
                            {
                                break;
                            }
                        }
                    }

                    mc.StopWhenReady();
                }
                // catch any exceptions
                catch (Exception e)
                {
                    System.Diagnostics.Debug.WriteLine("----: " + e.Message);
                    failed = true;
                }
                // finalization block
                finally
                {
                    // release all objects
                    mediaEvent  = null;
                    mc          = null;
                    fileSource  = null;
                    graph       = null;
                    sourceBase  = null;
                    grabberBase = null;
                    sg          = null;

                    if (graphObj != null)
                    {
                        Marshal.ReleaseComObject(graphObj);
                        graphObj = null;
                    }
                    if (sourceObj != null)
                    {
                        Marshal.ReleaseComObject(sourceObj);
                        sourceObj = null;
                    }
                    if (grabberObj != null)
                    {
                        Marshal.ReleaseComObject(grabberObj);
                        grabberObj = null;
                    }
                }
            }
        }
Example #19
        /// <summary>
        /// Starts grabbing images from the capture device
        /// </summary>
        public virtual void Start()
        {
            if (_captureTask != null)
            {
                Stop();
            }

            _captureTask = new Task(() =>
            {
                // Create new grabber
                _capGrabber = new CapGrabber();
                _capGrabber.PropertyChanged += capGrabber_PropertyChanged;
                _capGrabber.NewFrameArrived += capGrabber_NewFrameArrived;
                _stopSignal = new ManualResetEvent(false);

                _graph        = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IFilterGraph2;
                _sourceObject = FilterInfo.CreateFilter(_monikerString);

                var outputPin = _sourceObject.GetPin(PinCategory.Capture, 0);
                SelectWebcamResolution(outputPin);

                _grabber       = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                _grabberObject = _grabber as IBaseFilter;

                if (_graph == null)
                {
                    return;
                }

                _graph.AddFilter(_sourceObject, "source");
                _graph.AddFilter(_grabberObject, "grabber");
                using (var mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType   = MediaSubTypes.RGB32;
                    if (_grabber != null)
                    {
                        _grabber.SetMediaType(mediaType);


                        var inputPin = _grabberObject.GetPin(PinDirection.Input, 0);
                        if (_graph.Connect(outputPin, inputPin) >= 0)
                        {
                            if (_grabber.GetConnectedMediaType(mediaType) == 0)
                            {
                                var header         = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                _capGrabber.Width  = header.BmiHeader.Width;
                                _capGrabber.Height = header.BmiHeader.Height;
                            }
                        }
                        _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
                        _grabber.SetBufferSamples(false);
                        _grabber.SetOneShot(false);
                        _grabber.SetCallback(_capGrabber, 1);
                    }

                    // Get the video window
                    var wnd = (IVideoWindow)_graph;
                    wnd.put_AutoShow(false);

                    // Create the control and run
                    _control = (IMediaControl)_graph;

                    _control.Run();

                    // Wait for the stop signal
                    _stopSignal.WaitOne();
                    Cleanup();
                }
            });
            _captureTask.Start();
        }
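The capture task above runs until _stopSignal is set. The matching Stop method is referenced at the top of Start but not shown; a minimal sketch, assuming only the fields used above (the original class may perform additional cleanup):

        // Sketch only: signal the capture task to exit and wait for it to finish.
        public virtual void Stop()
        {
            if (_captureTask == null)
            {
                return;
            }
            if (_stopSignal != null)
            {
                _stopSignal.Set();      // lets the worker fall through WaitOne() and run Cleanup()
            }
            _captureTask.Wait();
            _captureTask = null;
        }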
Example #20
        // Thread entry point
        public void WorkerThread()
        {
            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObj   = null;
            object sourceObj  = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder  graph       = null;
            IBaseFilter    sourceBase  = null;
            IBaseFilter    grabberBase = null;
            ISampleGrabber sg          = null;
            IMediaControl  mc          = null;

            try
            {
                // Get type for filter graph
                Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (srvType == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObj = Activator.CreateInstance(srvType);
                graph    = (IGraphBuilder)graphObj;

                // ----
                UCOMIBindCtx bindCtx = null;
                UCOMIMoniker moniker = null;
                int          n       = 0;

                // create bind context
                if (Win32.CreateBindCtx(0, out bindCtx) == 0)
                {
                    // convert moniker`s string to a moniker
                    if (Win32.MkParseDisplayName(bindCtx, source, ref n, out moniker) == 0)
                    {
                        // get device base filter
                        Guid filterId = typeof(IBaseFilter).GUID;
                        moniker.BindToObject(null, null, ref filterId, out sourceObj);

                        Marshal.ReleaseComObject(moniker);
                        moniker = null;
                    }
                    Marshal.ReleaseComObject(bindCtx);
                    bindCtx = null;
                }
                // ----

                if (sourceObj == null)
                {
                    throw new ApplicationException("Failed creating device object for moniker");
                }

                sourceBase = (IBaseFilter)sourceObj;

                // Get type for sample grabber
                srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (srvType == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObj  = Activator.CreateInstance(srvType);
                sg          = (ISampleGrabber)grabberObj;
                grabberBase = (IBaseFilter)grabberObj;

                // add source filter to graph
                graph.AddFilter(sourceBase, "source");
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mt = new AMMediaType();
                mt.majorType = MediaType.Video;
                mt.subType   = MediaSubType.RGB24;
                sg.SetMediaType(mt);

                // connect pins
                if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }

                // get media type
                if (sg.GetConnectedMediaType(mt) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));

                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mt.Dispose();
                }

                // render
                graph.Render(DSTools.GetOutPin(grabberBase, 0));

                //
                sg.SetBufferSamples(false);
                sg.SetOneShot(false);
                sg.SetCallback(grabber, 1);

                // window
                IVideoWindow win = (IVideoWindow)graphObj;
                win.put_AutoShow(false);
                win = null;


                // get media control
                mc = (IMediaControl)graphObj;

                // run
                mc.Run();

                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mc.StopWhenReady();
            }
            // catch any exceptions
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("----: " + e.Message);
            }
            // finalization block
            finally
            {
                // release all objects
                mc          = null;
                graph       = null;
                sourceBase  = null;
                grabberBase = null;
                sg          = null;

                if (graphObj != null)
                {
                    Marshal.ReleaseComObject(graphObj);
                    graphObj = null;
                }
                if (sourceObj != null)
                {
                    Marshal.ReleaseComObject(sourceObj);
                    sourceObj = null;
                }
                if (grabberObj != null)
                {
                    Marshal.ReleaseComObject(grabberObj);
                    grabberObj = null;
                }
            }
        }
Example #21
0
        /// <summary>
        /// Open a new video feed (either web-cam or video file).
        /// </summary>
        /// <param name="filter">Specifies the web-cam filter to use, or <i>null</i> when opening a video file.</param>
        /// <param name="pb">Specifies the output window, or <i>null</i> when running headless and only receiving snapshots.</param>
        /// <param name="strFile">Specifies the video file to use, or <i>null</i> when opening a web-cam feed.</param>
        /// <param name="vidCap">Optionally specifies the video capabilities to use, or <i>null</i> to ignore and use the default video capabilities.</param>
        /// <returns>The duration (if any) is returned, or 0.</returns>
        /// <remarks>To get the video capabilities see the GetVideoCapabilities method.</remarks>
        public long Open(Filter filter, PictureBox pb, string strFile, VideoCapability vidCap = null)
        {
            int hr;

            if (filter != null && strFile != null)
            {
                throw new ArgumentException("Both the filter and file are non NULL - only one of these can be used at a time; The filter is used with the web-cam and the file is used with a video file.");
            }

            m_selectedFilter = filter;
            m_graphBuilder   = (IFilterGraph2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

            // When using a web-cam, create the moniker for the filter and add the filter to the graph.
            if (strFile == null)
            {
                IMoniker moniker = m_selectedFilter.CreateMoniker();
                m_graphBuilder.AddSourceFilterForMoniker(moniker, null, m_selectedFilter.Name, out m_camFilter);
                Marshal.ReleaseComObject(moniker);
                m_camControl = m_camFilter as IAMCameraControl;

                // Create the capture builder used to build the web-cam filter graph.
                m_captureGraphBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2, true));
                hr = m_captureGraphBuilder.SetFiltergraph(m_graphBuilder as IGraphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Add the web-cam filter to the graph.
                hr = m_graphBuilder.AddFilter(m_camFilter, m_selectedFilter.Name);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Set the desired video capabilities.
                if (vidCap != null)
                {
                    setVideoCapabilities(m_captureGraphBuilder, m_camFilter, vidCap);
                }
            }
            else
            {
                // Build the graph with the video file.
                hr = m_graphBuilder.RenderFile(strFile, null);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                m_mediaSeek = m_graphBuilder as IMediaSeeking;

                if (pb != null)
                {
                    m_videoFrameStep = m_graphBuilder as IVideoFrameStep;
                }
            }

            // Create the sample grabber used to get snapshots.
            m_sampleGrabber  = (ISampleGrabber)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
            m_baseGrabFilter = m_sampleGrabber as IBaseFilter;
            m_mediaControl   = m_graphBuilder as IMediaControl;

            // When using a target window, get the video window used with the target output window
            if (pb != null)
            {
                m_mediaEventEx = m_graphBuilder as IMediaEventEx;
                m_videoWindow  = m_graphBuilder as IVideoWindow;
            }
            // Otherwise create the null renderer, since no video output is needed (only snapshots).
            else
            {
                m_nullRenderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.NullRenderer, true));
            }

            // Add the sample grabber to the filter graph.
            hr = m_graphBuilder.AddFilter(m_baseGrabFilter, "Ds.Lib Grabber");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Turn off the sample grabber buffers.
            hr = m_sampleGrabber.SetBufferSamples(false);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Turn off the sample grabber one-shot.
            hr = m_sampleGrabber.SetOneShot(false);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Turn ON the sample grabber callback where video data is to be received.
            hr = m_sampleGrabber.SetCallback(this, 1);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Set the media format used by the sample grabber.
            AMMediaType media = new AMMediaType();

            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;

            hr = m_sampleGrabber.SetMediaType(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Connect the WebCam Filters and Frame Grabber.
            if (m_selectedFilter != null)
            {
                Guid cat;
                Guid med;

                cat = PinCategory.Preview;
                med = MediaType.Video;
                hr  = m_captureGraphBuilder.RenderStream(ref cat, ref med, m_camFilter, null, null);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                cat = PinCategory.Capture;
                med = MediaType.Video;
                hr  = m_captureGraphBuilder.RenderStream(ref cat, ref med, m_camFilter, null, m_baseGrabFilter);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }
            // Otherwise splice the Frame Grabber into the video file graph after the video decoder (the Null Renderer, when used, is connected later).
            else
            {
                // Get the video decoder and its pins.
                m_videoFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Video Decoder", false);

                IPin pOutput;
                hr = Utility.GetPin(m_videoFilter, PinDirection.Output, out pOutput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                IPin pInput;
                hr = pOutput.ConnectedTo(out pInput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                PinInfo pinInfo;
                hr = pInput.QueryPinInfo(out pinInfo);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Get the sample grabber pins.
                IPin pGrabInput;
                hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Input, out pGrabInput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                IPin pGrabOutput;
                hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Output, out pGrabOutput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Disconnect the source filter output and the input it is connected to.
                hr = pOutput.Disconnect();
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = pInput.Disconnect();
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Connect the source output to the Grabber input.
                hr = m_graphBuilder.Connect(pOutput, pGrabInput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // When rendering video output, connect the Grabber output to the original downstream input that the source was connected to.
                if (m_nullRenderer == null)
                {
                    hr = m_graphBuilder.Connect(pGrabOutput, pInput);
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }
                }

                Marshal.ReleaseComObject(pOutput);
                Marshal.ReleaseComObject(pInput);
                Marshal.ReleaseComObject(pGrabInput);
                Marshal.ReleaseComObject(pGrabOutput);
            }

            // Remove sound filters.
            IBaseFilter soundFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Audio Decoder", false);

            if (soundFilter != null)
            {
                hr = m_graphBuilder.RemoveFilter(soundFilter);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Marshal.ReleaseComObject(soundFilter);
            }

            soundFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Sound", false);
            if (soundFilter != null)
            {
                hr = m_graphBuilder.RemoveFilter(soundFilter);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Marshal.ReleaseComObject(soundFilter);
            }

            // When using a headless (no video rendering) setup, connect the null renderer to the Sample Grabber.
            if (m_nullRenderer != null)
            {
                // Add the null renderer.
                hr = m_graphBuilder.AddFilter(m_nullRenderer, "Null Renderer");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Get the sample grabber output pin.
                IPin pGrabOutput;
                hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Output, out pGrabOutput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Get the null renderer input pin.
                IPin pInput;
                hr = Utility.GetPin(m_nullRenderer, PinDirection.Input, out pInput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Disconnect the sample grabber pin.
                hr = pGrabOutput.Disconnect();
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Connect the Grabber output to the null renderer.
                hr = m_graphBuilder.Connect(pGrabOutput, pInput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Marshal.ReleaseComObject(pInput);
                Marshal.ReleaseComObject(pGrabOutput);

                // Remove the Video Renderer for it is no longer needed.
                IBaseFilter ivideorender = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Video Renderer");
                if (ivideorender != null)
                {
                    m_graphBuilder.RemoveFilter(ivideorender);
                    Marshal.ReleaseComObject(ivideorender);
                }
            }

            // Get the sample grabber media settings and video header.
            media = new AMMediaType();
            hr    = m_sampleGrabber.GetConnectedMediaType(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            if ((media.formatType != FormatType.VideoInfo &&
                 media.formatType != FormatType.WaveEx &&
                 media.formatType != FormatType.MpegVideo) ||
                media.formatPtr == IntPtr.Zero)
            {
                throw new Exception("Media grabber format is unknown.");
            }

            // Get the video header with frame sizing information.
            m_videoInfoHeader = Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)) as VideoInfoHeader;
            Marshal.FreeCoTaskMem(media.formatPtr);
            media.formatPtr = IntPtr.Zero;

            // If we are rendering video output, setup the video window (which requires a message pump).
            if (m_videoWindow != null)
            {
                // setup the video window
                hr = m_videoWindow.put_Owner(pb.Handle);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = m_videoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }


                // resize the window
                hr = m_videoWindow.SetWindowPosition(0, 0, pb.Width, pb.Height);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = m_videoWindow.put_Visible(DsHlp.OATRUE);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Subscribe to the picturebox size changed event.
                pb.SizeChanged += Pb_SizeChanged;
            }


            // start the capturing
            hr = m_mediaControl.Run();
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // When using a video file, immediately pause at the start.
            if (strFile != null)
            {
                hr = m_mediaControl.Pause();
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }

            // When using a media file, we need to save the video file's duration.
            if (m_mediaSeek != null)
            {
                hr = m_mediaSeek.GetDuration(out m_lDuration);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }

            m_bConnected = true;

            return(m_lDuration);
        }
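        // Usage sketch (hypothetical): the class name VideoCapture and the variables used below are
        // assumptions for illustration; only the Open(filter, pb, strFile, vidCap) signature comes
        // from the method above.
        //
        //     VideoCapture cap = new VideoCapture();
        //
        //     // Web-cam feed rendered into a PictureBox with the default capabilities:
        //     long duration = cap.Open(selectedFilter, pictureBox1, null);
        //
        //     // Headless playback of a video file (snapshots only, no output window):
        //     long fileDuration = cap.Open(null, null, @"C:\videos\sample.avi");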
        private void InitializeCapture()
        {
            graphBuilder = (IGraphBuilder)new FilterGraph();
            mediaControl = (IMediaControl)graphBuilder;

            captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
            hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
            DsError.ThrowExceptionForHR(hr);

            IBaseFilter videoInput = GetVideoInputObject();
            if (null != videoInput)
            {
                SetConfigurations(videoInput);

                sampleGrabber = new SampleGrabber() as ISampleGrabber;
                hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Render");
                DsError.ThrowExceptionForHR(hr);

                hr = graphBuilder.AddFilter(videoInput, "Camera");
                DsError.ThrowExceptionForHR(hr);

                AMMediaType type = new AMMediaType() { majorType = MediaType.Video, subType = MediaSubType.ARGB32, formatType = FormatType.VideoInfo };
                hr = sampleGrabber.SetMediaType(type);
                DsError.ThrowExceptionForHR(hr);
                DsUtils.FreeAMMediaType(type);

                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.GetConnectedMediaType(new AMMediaType());

                sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
                hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, videoInput, null, sampleGrabber as IBaseFilter);
                DsError.ThrowExceptionForHR(hr);

                Marshal.ReleaseComObject(videoInput);
            }
        }
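        // GetVideoInputObject() is not shown in this example. The helper below is a minimal sketch of
        // how such a method might look using DirectShowLib's device enumeration; the method name and
        // the "first device" policy are assumptions, not part of the original code.
        private IBaseFilter GetVideoInputObjectSketch()
        {
            // Enumerate the video capture devices registered on the system.
            DsDevice[] devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
            if (devices.Length == 0)
            {
                return null;
            }

            // Bind the first device's moniker to an IBaseFilter that can be added to the graph.
            object source;
            Guid iid = typeof(IBaseFilter).GUID;
            devices[0].Mon.BindToObject(null, null, ref iid, out source);
            return (IBaseFilter)source;
        }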
Example #23
0
        private void CaptureVideo(IntPtr ctlHandle)
        {
            int hr = 0;
            IBaseFilter sourceFilter = null;
            try
            {
                // Get DirectShow interfaces
                GetInterfaces(ctlHandle);
                // Attach the filter graph to the capture graph
                hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
                //captureGraphBuilder.RenderStream(PinCategory.Preview,MediaType.Video,
                DsError.ThrowExceptionForHR(hr);
                // Use the system device enumerator and class enumerator to find
                // a video capture/preview device, such as a desktop USB video camera.
                sourceFilter = FindCaptureDevice();
                if (sourceFilter == null)
                {
                    log("Couldn't find a video input device.");
                    return;
                }
                // Add Capture filter to our graph.
                hr = this.graphBuilder.AddFilter(sourceFilter, "Video Capture");
                DsError.ThrowExceptionForHR(hr);

                this.samplegrabber = (ISampleGrabber)new SampleGrabber();
                AMMediaType mt = new AMMediaType();
                mt.majorType = MediaType.Video;
                mt.subType = MediaSubType.RGB24;
                mt.formatType = FormatType.VideoInfo;
                samplegrabber.SetMediaType(mt);
                //samplegrabber.

                hr = this.graphBuilder.AddFilter((IBaseFilter)samplegrabber, "samplegrabber");
                DsError.ThrowExceptionForHR(hr);


                IBaseFilter nullRenderer = (IBaseFilter)new NullRenderer();
                hr = graphBuilder.AddFilter(nullRenderer, "Null Renderer");



                // Render the preview pin on the video capture filter
                // Use this instead of this.graphBuilder.RenderFile
                hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, sourceFilter, (IBaseFilter)samplegrabber, nullRenderer);
                //DsError.ThrowExceptionForHR(hr);
                if (hr != 0) log(DsError.GetErrorText(hr));



                // Now that the filter has been added to the graph and we have
                // rendered its stream, we can release this reference to the filter.
                Marshal.ReleaseComObject(sourceFilter);
                // Set video window style and position

                //SetupVideoWindow(ctlHandle);

                // Add our graph to the running object table, which will allow
                // the GraphEdit application to "spy" on our graph
                rot = new DsROTEntry(this.graphBuilder);
                // Start previewing video data
                hr = this.mediaControl.Run();
                DsError.ThrowExceptionForHR(hr);
                // Remember current state
                this.currentState = PlayState.Running;


                samplegrabber.SetBufferSamples(true);
                samplegrabber.SetOneShot(false);

            }
            catch (Exception ex)
            {
                MessageBox.Show("CaptureVideo(ctlHandle) suffered a fatal error: " + ex.Message);
            }
        }
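        // With SetBufferSamples(true) above, the most recent frame can be pulled on demand through
        // ISampleGrabber.GetCurrentBuffer. The helper below is a minimal sketch of that pattern;
        // the method name GrabCurrentFrameBytes is an assumption for illustration.
        private byte[] GrabCurrentFrameBytes()
        {
            // First call with a null buffer asks the grabber how many bytes are needed.
            int bufferSize = 0;
            int hr = this.samplegrabber.GetCurrentBuffer(ref bufferSize, IntPtr.Zero);
            DsError.ThrowExceptionForHR(hr);

            // Allocate unmanaged memory, fetch the sample, then copy it into a managed array.
            IntPtr buffer = Marshal.AllocCoTaskMem(bufferSize);
            try
            {
                hr = this.samplegrabber.GetCurrentBuffer(ref bufferSize, buffer);
                DsError.ThrowExceptionForHR(hr);

                byte[] frame = new byte[bufferSize];
                Marshal.Copy(buffer, frame, 0, bufferSize);
                return frame;
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
            }
        }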
Example #24
0
        void RunWorker()
        {
            try
            {

                graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                sourceObject = FilterInfo.CreateFilter(deviceMoniker);

                grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                grabberObject = grabber as IBaseFilter;

                graph.AddFilter(sourceObject, "source");
                graph.AddFilter(grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    grabber.SetMediaType(mediaType);

                    if (graph.Connect(sourceObject.GetPin(PinDirection.Output, 0), grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            capGrabber.Width = header.BmiHeader.Width;
                            capGrabber.Height = header.BmiHeader.Height;
                        }
                    }
                    graph.Render(grabberObject.GetPin(PinDirection.Output, 0));
                    grabber.SetBufferSamples(false);
                    grabber.SetOneShot(false);
                    grabber.SetCallback(capGrabber, 1);

                    IVideoWindow wnd = (IVideoWindow)graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    control = (IMediaControl)graph;
                    control.Run();

                    while (!stopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    control.StopWhenReady();
                }
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(ex);
            }
            finally
            {
                graph = null;
                sourceObject = null;
                grabberObject = null;
                grabber = null;
                capGrabber = null;
                control = null;

            }
        }
        // --------------------- Private Methods -----------------------
        /// <summary> 
        ///  Create a new filter graph and add filters (devices, compressors, 
        ///  misc), but leave the filters unconnected. Call renderGraph()
        ///  to connect the filters.
        /// </summary>
        /// 
        protected void createGraph()
        {
            Guid cat;
            Guid med;
            int hr;
            Type comType = null;
            object comObj = null;

            // Ensure required properties are set
            if (videoDevice == null && audioDevice == null)
                throw new ArgumentException("The video and/or audio device have not been set. Please set one or both to valid capture devices.\n");

            // Skip if we are already created
            if ((int)graphState < (int)GraphState.Created)
            {
                // Garbage collect, ensure that previous filters are released
                GC.Collect();

                // Make a new filter graph
                graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

                // Get the Capture Graph Builder
                Guid clsid = Clsid.CaptureGraphBuilder2;
                Guid riid = typeof(ICaptureGraphBuilder2).GUID;
                captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);

                // Link the CaptureGraphBuilder to the filter graph
                hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                comType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (comType == null)
                    throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
                comObj = Activator.CreateInstance(comType);
                sampGrabber = (ISampleGrabber)comObj; comObj = null;

                baseGrabFlt = (IBaseFilter)sampGrabber;

                // Add the graph to the Running Object Table so it can be
                // viewed with GraphEdit
            #if DEBUG
                DsROT.AddGraphToRot(graphBuilder, out rotCookie);
            #endif

                AMMediaType media = new AMMediaType();
                // Get the video device and add it to the filter graph
                if (VideoDevice != null)
                {
                    videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);
                    hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                   //  Console.WriteLine("MediaEnineCheck ==> Inside StartVideoCapture.cs before MediaSudType");
                    media.majorType = MediaType.Video;
                    media.subType = MediaSubType.RGB24; //Rajib
                    media.formatType = FormatType.VideoInfo;		// ???
                    hr = sampGrabber.SetMediaType(media);
                    if (hr < 0)
                        Marshal.ThrowExceptionForHR(hr);

                    hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }

                // Get the audio device and add it to the filter graph
                if (AudioDevice != null)
                {
                    audioDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(AudioDevice.MonikerString);
                    hr = graphBuilder.AddFilter(audioDeviceFilter, "Audio Capture Device");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }

                // Get the video compressor and add it to the filter graph
                if (VideoCompressor != null)
                {
                    videoCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(VideoCompressor.MonikerString);
                    hr = graphBuilder.AddFilter(videoCompressorFilter, "Video Compressor");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }

                // Get the audio compressor and add it to the filter graph
                if (AudioCompressor != null)
                {
                    audioCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(AudioCompressor.MonikerString);
                    hr = graphBuilder.AddFilter(audioCompressorFilter, "Audio Compressor");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }

                // Retrieve the stream control interface for the video device
                // FindInterface will also add any required filters
                // (WDM devices in particular may need additional
                // upstream filters to function).

                // Try looking for an interleaved media type
                object o;
                cat = PinCategory.Capture;
                med = MediaType.Interleaved;
                Guid iid = typeof(IAMStreamConfig).GUID;
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, videoDeviceFilter, ref iid, out o);

                if (hr != 0)
                {
                    // If not found, try looking for a video media type
                    med = MediaType.Video;
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, videoDeviceFilter, ref iid, out o);

                    if (hr != 0)
                        o = null;
                }
                videoStreamConfig = o as IAMStreamConfig;

                // Retrieve the stream control interface for the audio device
                o = null;
                cat = PinCategory.Capture;
                med = MediaType.Audio;
                iid = typeof(IAMStreamConfig).GUID;
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, audioDeviceFilter, ref iid, out o);
                if (hr != 0)
                    o = null;
                audioStreamConfig = o as IAMStreamConfig;

                // Retrieve the media control interface (for starting/stopping graph)
                mediaControl = (IMediaControl)graphBuilder;

                // Reload any video crossbars
                if (videoSources != null) videoSources.Dispose(); videoSources = null;

                // Reload any audio crossbars
                if (audioSources != null) audioSources.Dispose(); audioSources = null;

                // Reload any property pages exposed by filters
                if (propertyPages != null) propertyPages.Dispose(); propertyPages = null;

                // Reload capabilities of video device
                videoCaps = null;

                // Reload capabilities of audio device
                audioCaps = null;

                // Retrieve TV Tuner if available
                o = null;
                cat = PinCategory.Capture;
                med = MediaType.Interleaved;
                iid = typeof(IAMTVTuner).GUID;
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, videoDeviceFilter, ref iid, out o);
                if (hr != 0)
                {
                    med = MediaType.Video;
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, videoDeviceFilter, ref iid, out o);
                    if (hr != 0)
                        o = null;
                }
                IAMTVTuner t = o as IAMTVTuner;
                if (t != null)
                    tuner = new Tuner(t);

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;
                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                    hr = sampGrabber.SetOneShot(false);
                if (hr == 0)
                    hr = sampGrabber.SetCallback(new SampleGrabberCallback(), 1);
                if (hr < 0)
                    Marshal.ThrowExceptionForHR(hr);
                // Update the state now that we are done
                graphState = GraphState.Created;

            }
        }
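        // videoStreamConfig is retrieved above but not used in this excerpt. The helper below is a
        // hedged sketch of how IAMStreamConfig is commonly used to request a capture frame size,
        // assuming DirectShowLib-style GetFormat/SetFormat/DsUtils signatures; the method name is an
        // assumption for illustration.
        private void TrySetFrameSize(IAMStreamConfig streamConfig, int width, int height)
        {
            if (streamConfig == null)
                return;

            AMMediaType mt;
            int hr = streamConfig.GetFormat(out mt);
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);

            // Patch the frame size in the VideoInfoHeader and hand the format back to the output pin.
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
            vih.BmiHeader.Width  = width;
            vih.BmiHeader.Height = height;
            Marshal.StructureToPtr(vih, mt.formatPtr, false);

            hr = streamConfig.SetFormat(mt);
            DsUtils.FreeAMMediaType(mt);                 // release the format block (DirectShowLib helper)
            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        }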
Example #26
0
        /*
        protected void InitAudioSampleGrabber()
        {
            // Get the graph builder
            IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
            if (graphBuilder == null)
                return; 
            
            try
            {
                // Build the sample grabber
                sampleGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(Filters.SampleGrabber, true))
                    as ISampleGrabber;

                if (sampleGrabber == null)
                    return;

                // Add it to the filter graph
                int hr = graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "ProTONE_SampleGrabber");
                DsError.ThrowExceptionForHR(hr);

                AMMediaType mtAudio = new AMMediaType();
                mtAudio.majorType = MediaType.Audio;
                mtAudio.subType = MediaSubType.PCM;
                mtAudio.formatPtr = IntPtr.Zero;

                _actualAudioFormat = null;

                hr = sampleGrabber.SetMediaType(mtAudio);
                DsError.ThrowExceptionForHR(hr);

                hr = sampleGrabber.SetBufferSamples(true);
                DsError.ThrowExceptionForHR(hr);

                hr = sampleGrabber.SetOneShot(false);
                DsError.ThrowExceptionForHR(hr);

                hr = sampleGrabber.SetCallback(this, 1);
                DsError.ThrowExceptionForHR(hr);

                sampleAnalyzerMustStop.Reset();
                sampleAnalyzerThread = new Thread(new ThreadStart(SampleAnalyzerLoop));
                sampleAnalyzerThread.Priority = ThreadPriority.Highest;
                sampleAnalyzerThread.Start();
            }
            catch(Exception ex)
            {
                Logger.LogException(ex);
            }

            rotEntry = new DsROTEntry(graphBuilder as IFilterGraph);
        }*/

        protected void InitAudioSampleGrabber_v2()
        {
            // Get the graph builder
            IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
            if (graphBuilder == null)
                return;

            try
            {
                // Build the sample grabber
                sampleGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(Filters.SampleGrabber, true))
                    as ISampleGrabber;

                if (sampleGrabber == null)
                    return;

                // Add it to the filter graph
                int hr = graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "ProTONE_SampleGrabber_v2");
                DsError.ThrowExceptionForHR(hr);

                IBaseFilter ffdAudioDecoder = null;

                IPin ffdAudioDecoderOutput = null;
                IPin soundDeviceInput = null;
                IPin sampleGrabberInput = null;
                IPin sampleGrabberOutput = null;
                IntPtr pSoundDeviceInput = IntPtr.Zero;

                // When using FFDShow, typically we'll find
                // a ffdshow Audio Decoder connected to the sound device filter
                // 
                // i.e. [ffdshow Audio Decoder] --> [DirectSound Device]
                //
                // Our audio sample grabber supports only PCM sample input and output.
                // Its entire processing is based on this assumption.
                // 
                // Thus we need to insert the audio sample grabber between the ffdshow Audio Decoder and the sound device,
                // because this is the only place where we can find PCM samples. The sound device only accepts PCM.
                //
                // So we need to turn this graph:
                //
                // .. -->[ffdshow Audio Decoder]-->[DirectSound Device] 
                //
                // into this:
                //
                // .. -->[ffdshow Audio Decoder]-->[Sample grabber]-->[DirectSound Device] 
                //
                // Actions to do to achieve the graph change:
                //
                // 1. Locate the ffdshow Audio Decoder in the graph
                // 2. Find its output pin and the pin that it's connected to
                // 3. Locate the input and output pins of sample grabber
                // 4. Disconnect the ffdshow Audio Decoder output from the pin it is connected to (the sound device input pin)
                // 5. Connect the ffdshow Audio Decoder to sample grabber input
                // 6. Connect the sample grabber output to sound device input
                // that's all.

                // --------------
                // 1. Locate the ffdshow Audio Decoder in the graph
                hr = graphBuilder.FindFilterByName("ffdshow Audio Decoder", out ffdAudioDecoder);
                DsError.ThrowExceptionForHR(hr);

                // 2. Find its output pin and the pin that it's connected to
                hr = ffdAudioDecoder.FindPin("Out", out ffdAudioDecoderOutput);
                DsError.ThrowExceptionForHR(hr);

                hr = ffdAudioDecoderOutput.ConnectedTo(out pSoundDeviceInput);
                DsError.ThrowExceptionForHR(hr);

                soundDeviceInput = new DSPin(pSoundDeviceInput).Value;

                // 3. Locate the input and output pins of sample grabber
                hr = (sampleGrabber as IBaseFilter).FindPin("In", out sampleGrabberInput);
                DsError.ThrowExceptionForHR(hr);

                hr = (sampleGrabber as IBaseFilter).FindPin("Out", out sampleGrabberOutput);
                DsError.ThrowExceptionForHR(hr);

                // 4. Disconnect the ffdshow Audio Decoder output from the pin it is connected to (the sound device input pin)
                hr = ffdAudioDecoderOutput.Disconnect();
                DsError.ThrowExceptionForHR(hr);

                hr = soundDeviceInput.Disconnect();
                DsError.ThrowExceptionForHR(hr);

                // 5. Connect the ffdshow Audio Decoder to sample grabber input
                hr = graphBuilder.Connect(ffdAudioDecoderOutput, sampleGrabberInput);
                DsError.ThrowExceptionForHR(hr);

                // 6. Connect the sample grabber output to sound device input
                hr = graphBuilder.Connect(sampleGrabberOutput, soundDeviceInput);
                DsError.ThrowExceptionForHR(hr);


                AMMediaType mtAudio = new AMMediaType();
                mtAudio.majorType = MediaType.Audio;
                mtAudio.subType = MediaSubType.PCM;
                mtAudio.formatPtr = IntPtr.Zero;

                _actualAudioFormat = null;

                sampleGrabber.SetMediaType(mtAudio);
                sampleGrabber.SetBufferSamples(true);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(this, 1);

                sampleAnalyzerMustStop.Reset();
                sampleAnalyzerThread = new Thread(new ThreadStart(SampleAnalyzerLoop));
                sampleAnalyzerThread.Priority = ThreadPriority.Highest;
                sampleAnalyzerThread.Start();
            }
            catch (Exception ex)
            {
                Logger.LogException(ex);
            }

            rotEntry = new DsROTEntry(graphBuilder as IFilterGraph);
        }
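        // SetCallback(this, 1) above routes every sample to ISampleGrabberCB.BufferCB on this class.
        // Below is a minimal sketch of that callback pair, assuming DirectShowLib's ISampleGrabberCB
        // signatures; the queue used to hand PCM data to SampleAnalyzerLoop is an assumption.
        int ISampleGrabberCB.SampleCB(double sampleTime, IMediaSample pSample)
        {
            // Not used: SetCallback(..., 1) selects BufferCB rather than SampleCB.
            return 0;
        }

        int ISampleGrabberCB.BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            // Copy the PCM data out of the grabber's buffer as quickly as possible;
            // heavy processing belongs on the analyzer thread, not in this callback.
            byte[] pcm = new byte[bufferLen];
            Marshal.Copy(pBuffer, pcm, 0, bufferLen);
            // pcmQueue.Enqueue(pcm);   // hypothetical hand-off to SampleAnalyzerLoop
            return 0;
        }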
Example #27
0
        /// <summary>
        /// Worker thread.
        /// </summary>
        ///
        private void WorkerThread( )
        {
            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObject   = null;
            object sourceObject  = null;
            object grabberObject = null;

            // interfaces
            IGraphBuilder     graph         = null;
            IBaseFilter       sourceBase    = null;
            IBaseFilter       grabberBase   = null;
            ISampleGrabber    sampleGrabber = null;
            IMediaControl     mediaControl  = null;
            IFileSourceFilter fileSource    = null;

            try
            {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                type = Type.GetTypeFromCLSID(Clsid.AsyncReader);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter async reader");
                }

                sourceObject = Activator.CreateInstance(type);
                sourceBase   = (IBaseFilter)sourceObject;
                fileSource   = (IFileSourceFilter)sourceObject;

                fileSource.Load(fileName, null);

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObject = Activator.CreateInstance(type);
                sampleGrabber = (ISampleGrabber)grabberObject;
                grabberBase   = (IBaseFilter)grabberObject;

                // add source and grabber filters to graph
                graph.AddFilter(sourceBase, "source");
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mediaType = new AMMediaType( );
                mediaType.MajorType = MediaType.Video;
                mediaType.SubType   = MediaSubType.RGB24;
                sampleGrabber.SetMediaType(mediaType);

                // connect pins
                if (graph.Connect(Tools.GetOutPin(sourceBase, 0), Tools.GetInPin(grabberBase, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }

                // get media type
                if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mediaType.Dispose( );
                }

                // let's do rendering, if we don't need to prevent freezing
                if (!preventFreezing)
                {
                    // render pin
                    graph.Render(Tools.GetOutPin(grabberBase, 0));

                    // configure video window
                    IVideoWindow window = (IVideoWindow)graphObject;
                    window.put_AutoShow(false);
                    window = null;
                }

                // configure sample grabber
                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(grabber, 1);

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // run
                mediaControl.Run( );

                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mediaControl.StopWhenReady( );
            }
            catch (Exception exception)
            {
                // provide information to clients
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
                }
            }
            finally
            {
                // release all objects
                graph         = null;
                sourceBase    = null;
                grabberBase   = null;
                sampleGrabber = null;
                mediaControl  = null;
                fileSource    = null;

                if (graphObject != null)
                {
                    Marshal.ReleaseComObject(graphObject);
                    graphObject = null;
                }
                if (sourceObject != null)
                {
                    Marshal.ReleaseComObject(sourceObject);
                    sourceObject = null;
                }
                if (grabberObject != null)
                {
                    Marshal.ReleaseComObject(grabberObject);
                    grabberObject = null;
                }
            }
        }
Example #28
0
        /// <summary> build the capture graph. </summary>
        bool SetupGraph()
        {
            int             hr;
            IBaseFilter     mux  = null;
            IFileSinkFilter sink = null;


            try
            {
                hr = capGraph.SetFiltergraph(graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                AMMediaType media = new AMMediaType();
                media.majorType  = MediaType.Video;
                media.subType    = MediaSubType.RGB24;
                media.formatType = FormatType.VideoInfo;        // ???
                hr = sampGrabber.SetMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }


                Guid cat = PinCategory.Preview;
                Guid med = MediaType.Video;
                hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, null); // preview
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                cat = PinCategory.Capture;
                med = MediaType.Video;
                hr  = capGraph.RenderStream(ref cat, ref med, capFilter, null, baseGrabFlt); // capture
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                media = new AMMediaType();
                hr    = sampGrabber.GetConnectedMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                {
                    hr = sampGrabber.SetOneShot(false);
                }
                if (hr == 0)
                {
                    hr = sampGrabber.SetCallback(null, 0);
                }
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                return(true);
            }
            catch (Exception ee)
            {
                MessageBox.Show("Could not setup graph\r\n" + ee.Message, "DirectShow.NET", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return(false);
            }
            finally
            {
                if (mux != null)
                {
                    Marshal.ReleaseComObject(mux);
                }
                mux = null;
                if (sink != null)
                {
                    Marshal.ReleaseComObject(sink);
                }
                sink = null;
            }
        }
        private void ApplyVideoInput()
        {
            int iRet;
            Dispose();

            /*Frame = new byte[(width * height) * PixelSize];
            CapturedFrame = new byte[(width * height) * PixelSize];
            PreviewFrame = new byte[(width / PreviewDivider * height / PreviewDivider) * PixelSize];*/

            if (VideoInput == null)
            {
                return;
            }

            //Original Code
            GraphBuilder = (IGraphBuilder)new FilterGraph();
            CaptureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
            MediaControl = (IMediaControl)GraphBuilder;
            iRet = CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetFiltergraph");

            SampleGrabber = new SampleGrabber() as ISampleGrabber;
            iRet = GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found AddFilter 1");

            SetResolution(width, height);
            iRet = GraphBuilder.AddFilter(VideoInput, "Camera");

            if (iRet != 0) Console.WriteLine("TheKing--> Error Found AddFilter 2");
            iRet = SampleGrabber.SetBufferSamples(true);
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetBufferSamples");
            iRet = SampleGrabber.SetOneShot(false);
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetOneShot");

            iRet = SampleGrabber.SetCallback(this, 1);

            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetCallback");

            iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, SampleGrabber as IBaseFilter);
            if (iRet < 0)
            {
                Console.WriteLine("TheKing--> Error Found in  CaptureGraphBuilder.RenderStream, iRet = " + iRet+", Initialization TryNumber = " + counter);
                if(counter == 1)
                    ApplyVideoInput();
            }

            //GraphBuilder.Connect()
            //iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, null);
            //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 1");

            //iRet = CaptureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, VideoInput, null, SampleGrabber as IBaseFilter);
            //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 2, iRet = " + iRet);

            if (UpdateThread != null)
            {
                UpdateThread.Abort();
            }

            //UpdateThread = new Thread(UpdateBuffer);
            //UpdateThread.Start();

            MediaControl.Run();

            Marshal.ReleaseComObject(VideoInput);
        }
Example #30
0
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void RunWorker()
        {
            try
            {
                // Create the main graph
                m_igrphbldGraph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                m_sourceObject = FilterInfo.CreateFilter(m_sMonikerString);

                // Create the grabber
                m_isplGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                m_grabberObject = m_isplGrabber as IBaseFilter;

                // Add the source and grabber to the main graph
                m_igrphbldGraph.AddFilter(m_sourceObject, "source");
                m_igrphbldGraph.AddFilter(m_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    m_isplGrabber.SetMediaType(mediaType);

                    if (m_igrphbldGraph.Connect(m_sourceObject.GetPin(PinDirection.Output, 0), m_grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (m_isplGrabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup, this code can be too fast, so try at least 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Tried again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    m_grbrCapGrabber.Width = header.BmiHeader.Width;
                                    m_grbrCapGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch (Exception retryException)
                                {
                                    // Trace
                                    Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    m_igrphbldGraph.Render(m_grabberObject.GetPin(PinDirection.Output, 0));
                    m_isplGrabber.SetBufferSamples(false);
                    m_isplGrabber.SetOneShot(false);
                    m_isplGrabber.SetCallback(m_grbrCapGrabber, 1);

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)m_igrphbldGraph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    m_imedctrlControl = (IMediaControl)m_igrphbldGraph;
                    m_imedctrlControl.Run();

                    // Wait for the stop signal
                    while (!m_rstevStopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    // Stop when ready
                    // _control.StopWhenReady();
                    m_imedctrlControl.Stop();

                    // Wait a bit... It apparently takes some time to stop IMediaControl
                    Thread.Sleep(1000);
                }
            }
            catch (Exception ex)
            {
                // Trace
                Trace.WriteLine(ex);
            }
            finally
            {
                // Clean up
                this.Release();
            }
        }
Example #31
0
        /// <summary>
        ///  Create a new filter graph and add filters (devices, compressors, misc),
        ///  but leave the filters unconnected. Call RenderGraph()
        ///  to connect the filters.
        /// </summary>
        void CreateGraph()
        {
            //Skip if already created
            if ((int)_actualGraphState < (int)GraphState.Created)
            {
                // Make a new filter graph
                _graphBuilder = (IGraphBuilder) new FilterGraph();

                // Get the Capture Graph Builder
                _captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

                // Link the CaptureGraphBuilder to the filter graph
                var hr = _captureGraphBuilder.SetFiltergraph(_graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                var comObj = new SampleGrabber();
                _sampGrabber = (ISampleGrabber)comObj;

                _baseGrabFlt = (IBaseFilter)_sampGrabber;

                var media = new AMMediaType();
                // Get the video device and add it to the filter graph
                if (VideoDevice != null)
                {
                    _videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);

                    hr = _graphBuilder.AddFilter(_videoDeviceFilter, "Video Capture Device");
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }

                    media.majorType           = MediaType.Video;
                    media.subType             = MediaSubType.RGB32; //RGB24;
                    media.formatType          = FormatType.VideoInfo;
                    media.temporalCompression = true;               //New

                    hr = _sampGrabber.SetMediaType(media);

                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }

                    hr = _graphBuilder.AddFilter(_baseGrabFlt, "Grabber");
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }
                }

                // Retrieve the stream control interface for the video device
                // FindInterface will also add any required filters
                // (WDM devices in particular may need additional
                // upstream filters to function).

                // Try looking for an interleaved media type
                var cat = PinCategory.Capture;
                var med = MediaType.Interleaved;
                var iid = typeof(IAMStreamConfig).GUID;
                hr = _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out var o);

                if (hr != 0)
                {
                    // If not found, try looking for a video media type
                    med = MediaType.Video;
                    hr  = _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out o);

                    if (hr != 0)
                    {
                        // ReSharper disable once RedundantAssignment
                        o = null;
                    }
                }

                //VideoStreamConfig = o as IAMStreamConfig;

                // Retrieve the media control interface (for starting/stopping graph)
                _mediaControl = (IMediaControl)_graphBuilder;

                // Reload any video crossbars
                //if (videoSources != null) videoSources.Dispose(); videoSources = null;

                _videoInfoHeader = Marshal.PtrToStructure <VideoInfoHeader>(media.formatPtr);
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

                hr = _sampGrabber.SetBufferSamples(true);

                if (hr == 0)
                {
                    hr = _sampGrabber.SetOneShot(false);
                }

                if (hr == 0)
                {
                    hr = _sampGrabber.SetCallback(null, 0);
                }

                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }

            // Update the state now that we are done
            _actualGraphState = GraphState.Created;
        }
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void RunWorker()
        {
            try
            {
                // Create the main graph
                _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                _sourceObject = FilterInfo.CreateFilter(_monikerString);

                // Create the grabber
                _grabber       = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                _grabberObject = _grabber as IBaseFilter;

                // Add the source and grabber to the main graph
                _graph.AddFilter(_sourceObject, "source");
                _graph.AddFilter(_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType   = MediaSubTypes.RGB32;
                    _grabber.SetMediaType(mediaType);

                    if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (_grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup this can run before the grabber information is available, so retry up to 3 times
                            int  retryCount = 0;
                            bool succeeded  = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Tried again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    _capGrabber.Width  = header.BmiHeader.Width;
                                    _capGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch (Exception retryException)
                                {
                                    // Trace
                                    Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
                    _grabber.SetBufferSamples(false);
                    _grabber.SetOneShot(false);
                    _grabber.SetCallback(_capGrabber, 1);

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)_graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    _control = (IMediaControl)_graph;
                    _control.Run();

                    // Wait for the stop signal
                    while (!_stopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    // Stop when ready
                    _control.StopWhenReady();
                }
            }
            catch (Exception ex)
            {
                // Trace
                Trace.WriteLine(ex);
            }
            finally
            {
                // Clean up
                Release();
            }
        }
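
The _capGrabber object that RunWorker() hands to SetCallback(..., 1) is not included in this excerpt. Below is a minimal sketch of what such a callback sink could look like, assuming a DirectShowLib-style ISampleGrabberCB interface (plus using System and System.Runtime.InteropServices); the class name, the Width/Height properties and the FrameReady event are illustrative and not part of the original source.

        internal class CapGrabberSketch : ISampleGrabberCB
        {
            // RunWorker() stores the connected frame size here.
            public int Width  { get; set; }
            public int Height { get; set; }

            // Raised with a copy of every grabbed frame.
            public event Action<byte[]> FrameReady;

            // Only called when SetCallback(..., 0) is used; unused in this setup.
            public int SampleCB(double sampleTime, IMediaSample sample)
            {
                return 0;
            }

            // Called once per frame because SetCallback(..., 1) was requested.
            public int BufferCB(double sampleTime, IntPtr buffer, int bufferLen)
            {
                var frame = new byte[bufferLen];
                Marshal.Copy(buffer, frame, 0, bufferLen);
                FrameReady?.Invoke(frame);
                return 0;
            }
        }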
Example #33
0
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void Init()
        {
            try
            {
                log.Trace("Start worker thread");
                // Create the main graph
                _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                _sourceObject = FilterInfo.CreateFilter(_monikerString);

                // Create the grabber
                _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                _grabberObject = _grabber as IBaseFilter;

                // Add the source and grabber to the main graph
                _graph.AddFilter(_sourceObject, "source");
                _graph.AddFilter(_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    _grabber.SetMediaType(mediaType);

                    if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (_grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup this can run before the grabber information is available, so retry up to 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Tried again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    _capGrabber.Width = header.BmiHeader.Width;
                                    _capGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch
                                {
                                    // Trace
                                    log.InfoFormat("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
                    _grabber.SetBufferSamples(false);
                    _grabber.SetOneShot(false);
                    _grabber.SetCallback(_capGrabber, 1);
                    log.Trace("_grabber set up");

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)_graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    _control = (IMediaControl)_graph;
                    _control.Run();
                    log.Trace("control runs");

                    // Wait for the stop signal
                    //while (!_stopSignal.WaitOne(0, true))
                    //{
                    //    Thread.Sleep(10);
                    //}
                }
            }
            catch (Exception ex)
            {
                // Trace
                log.Debug(ex);
                Release();
            }
        }
Example #34
0
        /// <summary>
        /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice.
        /// </summary>
        /// <param name="FileName">The video file to open</param>
        /// <param name="graphicsDevice">XNA Graphics Device</param>
        public XNAPlayer(Feel feel, string FileName, GraphicsDevice graphicsDevice, Action callback)
        {
            Utils.RunAsynchronously(() =>
            {
                try
                {
                    // Set video state
                    currentState = VideoState.Stopped;

                    // Store Filename
                    filename = FileName;

                    // Open DirectShow Interfaces
                    InitInterfaces();

                    // Create a SampleGrabber Filter and add it to the FilterGraph
                    SampleGrabber sg             = new SampleGrabber();
                    ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
                    DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));

                    // Setup Media type info for the SampleGrabber
                    AMMediaType mt = new AMMediaType();
                    mt.majorType   = MEDIATYPE_Video;    // Video
                    mt.subType     = MEDIASUBTYPE_RGB24; // RGB24
                    mt.formatType  = FORMAT_VideoInfo;   // VideoInfo
                    DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));

                    // Construct the rest of the FilterGraph
                    DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));

                    // Set SampleGrabber Properties
                    DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
                    DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
                    DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

                    // Hide Default Video Window
                    IVideoWindow pVideoWindow = (IVideoWindow)gb;
                    DsError.ThrowExceptionForHR(pVideoWindow.put_MessageDrain(IntPtr.Zero));
                    DsError.ThrowExceptionForHR(pVideoWindow.put_WindowState(WindowState.Hide));
                    DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

                    // Create AMMediaType to capture video information
                    AMMediaType MediaType = new AMMediaType();
                    DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
                    VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                    Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                    // Store video information
                    videoHeight     = pVideoHeader.BmiHeader.Height;
                    videoWidth      = pVideoHeader.BmiHeader.Width;
                    avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                    bitRate         = pVideoHeader.BitRate;
                    DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));

                    // Create byte arrays to hold video data
                    videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
                    bgrData         = new byte[(videoHeight * videoWidth) * 4]; // BGR24 format (3 bytes per pixel + 1 for safety)

                    // Create Output Frame Texture2D with the height and width of the video
                    outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color);

                    feel.RunOnUIThread(callback);
                }
                catch
                {
                    feel.ShowToast("Unable to Load or Play the video file");
                }
            }, () => { });
        }
Example #35
0
        private void setupDirectShowFilterGraph()
        {
            if (mediaControl != null && running)
            {
                Stop();
            }

            if (device == null)
            {
                filterGraph  = null;
                mediaControl = null;
            }
            else
            {
                filterGraph  = (IFilterGraph2) new FilterGraph();
                mediaControl = (IMediaControl)filterGraph;
                ICaptureGraphBuilder2 captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
                captureGraphBuilder.SetFiltergraph((IGraphBuilder)filterGraph);

                // capture filter
                IBaseFilter captureFilter;
                filterGraph.AddSourceFilterForMoniker(device.Moniker, null, device.Name, out captureFilter);

                // sample grabber
                ISampleGrabber sampleGrabber       = (ISampleGrabber) new SampleGrabber();
                IBaseFilter    sampleGrabberFilter = (IBaseFilter)sampleGrabber;
                AMMediaType    mediaType           = new AMMediaType();
                mediaType.majorType  = new Guid(0x73646976, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);                    // MEDIATYPE_Video
                mediaType.subType    = new Guid(0xe436eb7d, 0x524f, 0x11ce, 0x9f, 0x53, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70);                    // MEDIASUBTYPE_RGB24
                mediaType.formatType = new Guid(0x05589f80, 0xc356, 0x11ce, 0xbf, 0x01, 0x00, 0xaa, 0x00, 0x55, 0x59, 0x5a);                    // FORMAT_VideoInfo
                sampleGrabber.SetMediaType(mediaType);
                mediaType.Free();
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetCallback(this, 1);
                filterGraph.AddFilter(sampleGrabberFilter, "ZunTzu Sample Grabber");

                // configure the video stream to 160x120@15fps
                object interfaceFound;
                captureGraphBuilder.FindInterface(
                    new Guid(0xfb6c4281, 0x0353, 0x11d1, 0x90, 0x5f, 0x00, 0x00, 0xc0, 0xcc, 0x16, 0xba),                       // PIN_CATEGORY_CAPTURE
                    new Guid(0x73646976, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71),                       // MEDIATYPE_Video
                    captureFilter, typeof(IAMStreamConfig).GUID, out interfaceFound);
                IAMStreamConfig videoStreamConfig = (IAMStreamConfig)interfaceFound;
                videoStreamConfig.GetFormat(out mediaType);
                VideoInfoHeader infoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(mediaType.formatPtr, infoHeader);
                infoHeader.AvgTimePerFrame  = 10000000 / 15;
                infoHeader.BmiHeader.Width  = 160;
                infoHeader.BmiHeader.Height = 120;
                Marshal.StructureToPtr(infoHeader, mediaType.formatPtr, false);
                videoStreamConfig.SetFormat(mediaType);
                mediaType.Free();

                // renderer
                IBaseFilter nullRenderer = (IBaseFilter) new NullRenderer();
                filterGraph.AddFilter(nullRenderer, "Null Renderer");

                captureGraphBuilder.RenderStream(
                    new Guid(0xfb6c4281, 0x0353, 0x11d1, 0x90, 0x5f, 0x00, 0x00, 0xc0, 0xcc, 0x16, 0xba),                       // PIN_CATEGORY_CAPTURE
                    new Guid(0x73646976, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71),                       // MEDIATYPE_Video
                    captureFilter, sampleGrabberFilter, nullRenderer);

                // retrieve frame size
                sampleGrabber.GetConnectedMediaType(mediaType);
                Marshal.PtrToStructure(mediaType.formatPtr, infoHeader);
                frameRate = 10000000.0f / infoHeader.AvgTimePerFrame;
                frameSize = new Size(infoHeader.BmiHeader.Width, infoHeader.BmiHeader.Height);
                mediaType.Free();
            }
        }
Example #36
0
        public override int Initialize()
        {
            if (!File.Exists(filename))
            {
                return(-1);
            }

            try
            {
                int hr = 0;
                graphBuilder2   = (IFilterGraph2) new FilterGraph();
                lavSplitter     = new LAVSplitter() as IBaseFilter;
                lavVideoDecoder = new LAVVideoDecoder() as IBaseFilter;
                lavAudioDecoder = new LAVAudioDecoder() as IBaseFilter;
                var lavSplitterSource = lavSplitter as IFileSourceFilter;
                soundDevice   = new DirectSoundDevice() as IBaseFilter;
                videoRenderer = new VideoRenderer() as IBaseFilter;
                lavSplitterSource.Load(filename, null);
                hr = graphBuilder2.AddFilter(lavSplitter, "LAV Splitter");
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.AddFilter(lavVideoDecoder, "LAV Video Decoder");
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.AddFilter(lavAudioDecoder, "LAV Audio Decoder");
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.AddFilter(soundDevice, "Default Direct Sound Device");
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.AddFilter(videoRenderer, "Video Renderer");
                DsError.ThrowExceptionForHR(hr);
                var videoPin              = GetPin(lavSplitter, "Video");
                var audioPin              = GetPin(lavSplitter, "Audio");
                var videoDecoderInputPin  = GetPin(lavVideoDecoder, "Input");
                var videoDecoderOutputPin = GetPin(lavVideoDecoder, "Output");
                var audioDecoderInputPin  = GetPin(lavAudioDecoder, "Input");
                var audioDecoderOutputPin = GetPin(lavAudioDecoder, "Output");
                var soundInputPin         = GetPin(soundDevice, "Audio Input pin (rendered)");
                var videoRendererInputPin = GetPin(videoRenderer, "Input");
                hr = graphBuilder2.Connect(videoPin, videoDecoderInputPin);
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.Connect(audioPin, audioDecoderInputPin);
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.Connect(audioDecoderOutputPin, soundInputPin);
                DsError.ThrowExceptionForHR(hr);
                sampleGrabber = new SampleGrabber() as ISampleGrabber;
                var amMediaType = new AMMediaType
                {
                    majorType  = MediaType.Video,
                    subType    = MediaSubType.RGB32,
                    formatType = FormatType.VideoInfo
                };
                hr = sampleGrabber.SetMediaType(amMediaType);
                DsError.ThrowExceptionForHR(hr);
                DsUtils.FreeAMMediaType(amMediaType);
                hr = graphBuilder2.AddFilter((IBaseFilter)sampleGrabber, "SampleGrabber");
                DsError.ThrowExceptionForHR(hr);
                var sampleGrabberInputPin  = GetPin((IBaseFilter)sampleGrabber, "Input");
                var sampleGrabberOutputPin = GetPin((IBaseFilter)sampleGrabber, "Output");
                hr = graphBuilder2.Connect(videoDecoderOutputPin, sampleGrabberInputPin);
                DsError.ThrowExceptionForHR(hr);
                hr = graphBuilder2.Connect(sampleGrabberOutputPin, videoRendererInputPin);
                DsError.ThrowExceptionForHR(hr);
                base.Initialize();
                sampleGrabber.SetCallback(this, 1);
                sampleGrabber.SetBufferSamples(true);
                sampleGrabber.SetOneShot(false);
                var mediaType = new AMMediaType();
                videoPin.ConnectionMediaType(mediaType);
                var bitmapInfoHeader = (BitmapInfoHeader)mediaType;
                this.width  = bitmapInfoHeader.Width;
                this.height = bitmapInfoHeader.Height;
                this.maxu   = 1;
                this.maxv   = 1;
                textures    = new TextureBase[5];
                for (var i = 0; i < textures.Length; i++)
                {
                    textures[i] = TextureFactoryManager.Factory.Create(device, width, height, 1, false);
                }

                videoWindow = (IVideoWindow)graphBuilder2;

                hr = videoWindow.put_Visible((int)OABool.False);
                DsError.ThrowExceptionForHR(hr);
                hr = videoWindow.put_WindowState((int)WindowState.Hide);
                DsError.ThrowExceptionForHR(hr);
                hr = videoWindow.SetWindowPosition(-1000, -1000, 10, 10);
                DsError.ThrowExceptionForHR(hr);
                hr = videoWindow.put_AutoShow((int)OABool.False);
                DsError.ThrowExceptionForHR(hr);
                hr = videoWindow.put_Owner(MovieUtility.Window);
                DsError.ThrowExceptionForHR(hr);
            }
            catch (Exception e)
            {
                throw new Exception("Fatal Error in Movie Loading", e);
            }
            return(0);
        }
Example #37
0
        /// <summary> Set the options on the sample grabber </summary>
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber, int width, int height)
        {
            int hr;
            AMMediaType media = new AMMediaType();
            //VideoInfoHeader v;

            // copy out the videoinfoheader
            //v = new VideoInfoHeader();
            //Marshal.PtrToStructure(media.formatPtr, v);

            //// Set the size
            //v.BmiHeader.Width = width;
            //v.BmiHeader.Height = height;
            
            // Copy the media structure back
            //Marshal.StructureToPtr(v, media.formatPtr, false);

            // Set the media type to Video/RGB24
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            
            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            hr = sampGrabber.SetBufferSamples(false);
            hr = sampGrabber.SetOneShot(false);


            // Configure the samplegrabber callback
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
Example #38
0
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media;
            int hr;

            // Set the media type to Video/RGB24
            media = new AMMediaType();
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            sampGrabber.SetBufferSamples(false);
            sampGrabber.SetOneShot(false);
            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Configure the samplegrabber
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
Example #39
0
        private void WorkerThread(bool runGraph)
        {
            ReasonToFinishPlaying reason = ReasonToFinishPlaying.StoppedByUser;
            bool    flag         = false;
            Grabber grabber      = new Grabber(this, snapshotMode: false);
            Grabber grabber2     = new Grabber(this, snapshotMode: true);
            object  obj          = null;
            object  obj2         = null;
            object  obj3         = null;
            object  obj4         = null;
            object  retInterface = null;
            ICaptureGraphBuilder2 captureGraphBuilder = null;
            IFilterGraph2         filterGraph         = null;
            IBaseFilter           baseFilter          = null;
            IBaseFilter           baseFilter2         = null;
            IBaseFilter           baseFilter3         = null;
            ISampleGrabber        sampleGrabber       = null;
            ISampleGrabber        sampleGrabber2      = null;
            IMediaControl         mediaControl        = null;
            IAMVideoControl       iAMVideoControl     = null;
            IMediaEventEx         mediaEventEx        = null;
            IPin        pin         = null;
            IAMCrossbar iAMCrossbar = null;

            try
            {
                Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating capture graph builder");
                }
                obj = Activator.CreateInstance(typeFromCLSID);
                captureGraphBuilder = (ICaptureGraphBuilder2)obj;
                typeFromCLSID       = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }
                obj2        = Activator.CreateInstance(typeFromCLSID);
                filterGraph = (IFilterGraph2)obj2;
                captureGraphBuilder.SetFiltergraph((IGraphBuilder)filterGraph);
                sourceObject = FilterInfo.CreateFilter(deviceMoniker);
                if (sourceObject == null)
                {
                    throw new ApplicationException("Failed creating device object for moniker");
                }
                baseFilter = (IBaseFilter)sourceObject;
                try
                {
                    iAMVideoControl = (IAMVideoControl)sourceObject;
                }
                catch
                {
                }
                typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }
                obj3           = Activator.CreateInstance(typeFromCLSID);
                sampleGrabber  = (ISampleGrabber)obj3;
                baseFilter2    = (IBaseFilter)obj3;
                obj4           = Activator.CreateInstance(typeFromCLSID);
                sampleGrabber2 = (ISampleGrabber)obj4;
                baseFilter3    = (IBaseFilter)obj4;
                filterGraph.AddFilter(baseFilter, "source");
                filterGraph.AddFilter(baseFilter2, "grabber_video");
                filterGraph.AddFilter(baseFilter3, "grabber_snapshot");
                AMMediaType aMMediaType = new AMMediaType();
                aMMediaType.MajorType = MediaType.Video;
                aMMediaType.SubType   = MediaSubType.RGB24;
                sampleGrabber.SetMediaType(aMMediaType);
                sampleGrabber2.SetMediaType(aMMediaType);
                captureGraphBuilder.FindInterface(FindDirection.UpstreamOnly, Guid.Empty, baseFilter, typeof(IAMCrossbar).GUID, out retInterface);
                if (retInterface != null)
                {
                    iAMCrossbar = (IAMCrossbar)retInterface;
                }
                isCrossbarAvailable = (iAMCrossbar != null);
                crossbarVideoInputs = ColletCrossbarVideoInputs(iAMCrossbar);
                if (iAMVideoControl != null)
                {
                    captureGraphBuilder.FindPin(sourceObject, PinDirection.Output, PinCategory.StillImage, MediaType.Video, unconnected: false, 0, out pin);
                    if (pin != null)
                    {
                        iAMVideoControl.GetCaps(pin, out VideoControlFlags flags);
                        flag = ((flags & VideoControlFlags.ExternalTriggerEnable) != 0);
                    }
                }
                sampleGrabber.SetBufferSamples(bufferThem: false);
                sampleGrabber.SetOneShot(oneShot: false);
                sampleGrabber.SetCallback(grabber, 1);
                sampleGrabber2.SetBufferSamples(bufferThem: true);
                sampleGrabber2.SetOneShot(oneShot: false);
                sampleGrabber2.SetCallback(grabber2, 1);
                GetPinCapabilitiesAndConfigureSizeAndRate(captureGraphBuilder, baseFilter, PinCategory.Capture, videoResolution, ref videoCapabilities);
                if (flag)
                {
                    GetPinCapabilitiesAndConfigureSizeAndRate(captureGraphBuilder, baseFilter, PinCategory.StillImage, snapshotResolution, ref snapshotCapabilities);
                }
                else
                {
                    snapshotCapabilities = new VideoCapabilities[0];
                }
                lock (cacheVideoCapabilities)
                {
                    if (videoCapabilities != null && !cacheVideoCapabilities.ContainsKey(deviceMoniker))
                    {
                        cacheVideoCapabilities.Add(deviceMoniker, videoCapabilities);
                    }
                }
                lock (cacheSnapshotCapabilities)
                {
                    if (snapshotCapabilities != null && !cacheSnapshotCapabilities.ContainsKey(deviceMoniker))
                    {
                        cacheSnapshotCapabilities.Add(deviceMoniker, snapshotCapabilities);
                    }
                }
                if (runGraph)
                {
                    captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, baseFilter, null, baseFilter2);
                    if (sampleGrabber.GetConnectedMediaType(aMMediaType) == 0)
                    {
                        VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader));
                        grabber.Width  = videoInfoHeader.BmiHeader.Width;
                        grabber.Height = videoInfoHeader.BmiHeader.Height;
                        aMMediaType.Dispose();
                    }
                    if (flag && provideSnapshots)
                    {
                        captureGraphBuilder.RenderStream(PinCategory.StillImage, MediaType.Video, baseFilter, null, baseFilter3);
                        if (sampleGrabber2.GetConnectedMediaType(aMMediaType) == 0)
                        {
                            VideoInfoHeader videoInfoHeader2 = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader));
                            grabber2.Width  = videoInfoHeader2.BmiHeader.Width;
                            grabber2.Height = videoInfoHeader2.BmiHeader.Height;
                            aMMediaType.Dispose();
                        }
                    }
                    mediaControl = (IMediaControl)obj2;
                    mediaEventEx = (IMediaEventEx)obj2;
                    mediaControl.Run();
                    if (flag && provideSnapshots)
                    {
                        startTime = DateTime.Now;
                        iAMVideoControl.SetMode(pin, VideoControlFlags.ExternalTriggerEnable);
                    }
                    do
                    {
                        if (mediaEventEx != null && mediaEventEx.GetEvent(out DsEvCode lEventCode, out IntPtr lParam, out IntPtr lParam2, 0) >= 0)
                        {
                            mediaEventEx.FreeEventParams(lEventCode, lParam, lParam2);
                            if (lEventCode == DsEvCode.DeviceLost)
                            {
                                reason = ReasonToFinishPlaying.DeviceLost;
                                break;
                            }
                        }
                        if (needToSetVideoInput)
                        {
                            needToSetVideoInput = false;
                            if (isCrossbarAvailable.Value)
                            {
                                SetCurrentCrossbarInput(iAMCrossbar, crossbarVideoInput);
                                crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar);
                            }
                        }
                        if (needToSimulateTrigger)
                        {
                            needToSimulateTrigger = false;
                            if (flag && provideSnapshots)
                            {
                                iAMVideoControl.SetMode(pin, VideoControlFlags.Trigger);
                            }
                        }
                        if (needToDisplayPropertyPage)
                        {
                            needToDisplayPropertyPage = false;
                            DisplayPropertyPage(parentWindowForPropertyPage, sourceObject);
                            if (iAMCrossbar != null)
                            {
                                crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar);
                            }
                        }
                        if (needToDisplayCrossBarPropertyPage)
                        {
                            needToDisplayCrossBarPropertyPage = false;
                            if (iAMCrossbar != null)
                            {
                                DisplayPropertyPage(parentWindowForPropertyPage, iAMCrossbar);
                                crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar);
                            }
                        }
                    }while (!stopEvent.WaitOne(100, exitContext: false));
                    mediaControl.Stop();
                }
            }
            catch (Exception ex)
            {
                if (this.VideoSourceError != null)
                {
                    this.VideoSourceError(this, new VideoSourceErrorEventArgs(ex.Message));
                }
            }
            finally
            {
                captureGraphBuilder = null;
                filterGraph         = null;
                baseFilter          = null;
                mediaControl        = null;
                iAMVideoControl     = null;
                mediaEventEx        = null;
                pin            = null;
                iAMCrossbar    = null;
                baseFilter2    = null;
                baseFilter3    = null;
                sampleGrabber  = null;
                sampleGrabber2 = null;
                if (obj2 != null)
                {
                    Marshal.ReleaseComObject(obj2);
                    obj2 = null;
                }
                if (sourceObject != null)
                {
                    Marshal.ReleaseComObject(sourceObject);
                    sourceObject = null;
                }
                if (obj3 != null)
                {
                    Marshal.ReleaseComObject(obj3);
                    obj3 = null;
                }
                if (obj4 != null)
                {
                    Marshal.ReleaseComObject(obj4);
                    obj4 = null;
                }
                if (obj != null)
                {
                    Marshal.ReleaseComObject(obj);
                    obj = null;
                }
                if (retInterface != null)
                {
                    Marshal.ReleaseComObject(retInterface);
                    retInterface = null;
                }
            }
            if (this.PlayingFinished != null)
            {
                this.PlayingFinished(this, reason);
            }
        }
Example #40
0
        /// <summary> build the capture graph for grabber. </summary>
        bool SetupGraph()
        {
            int hr;

            try
            {
                hr = capGraph.SetFiltergraph(graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(capFilter, " Video Capture Device");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                DsUtils.ShowCapPinDialog(capGraph, capFilter, this.Handle);

                AMMediaType media = new AMMediaType();
                media.majorType  = MediaType.Video;
                media.subType    = MediaSubType.RGB24;
                media.formatType = FormatType.VideoInfo;                // ???
                hr = sampGrabber.SetMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(baseGrabFlt, " Grabber");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Guid cat = PinCategory.Preview;
                Guid med = MediaType.Video;
                hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, null); // baseGrabFlt
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                cat = PinCategory.Capture;
                med = MediaType.Video;
                hr  = capGraph.RenderStream(ref cat, ref med, capFilter, null, baseGrabFlt); // baseGrabFlt
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                media = new AMMediaType();
                hr    = sampGrabber.GetConnectedMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                {
                    hr = sampGrabber.SetOneShot(false);
                }
                if (hr == 0)
                {
                    hr = sampGrabber.SetCallback(null, 0);
                }
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                return(true);
            }
            catch (Exception ee)
            {
                MessageBox.Show(this, "Could not setup graph\r\n" + ee.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Stop);

                return(false);
            }
        }
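
Example #40 deliberately leaves the grabber without a streaming callback (SetBufferSamples(false), SetCallback(null, 0)), so frames are expected to be pulled on demand elsewhere. A minimal sketch of that grab-on-demand pattern follows; it assumes the ISampleGrabber.GetCurrentBuffer(ref int, IntPtr) signature used by the common DShowNET/DirectShowLib interops, and the helper method name is illustrative only.

        /// <summary> Hypothetical grab-on-demand helper for the graph built above. </summary>
        byte[] GrabCurrentFrame()
        {
            // Buffering must be enabled before GetCurrentBuffer can return data.
            int hr = sampGrabber.SetBufferSamples(true);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // First call with a null buffer asks for the required size.
            int bufferSize = 0;
            hr = sampGrabber.GetCurrentBuffer(ref bufferSize, IntPtr.Zero);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            IntPtr buffer = Marshal.AllocCoTaskMem(bufferSize);
            try
            {
                // Second call copies the most recent sample into our buffer.
                hr = sampGrabber.GetCurrentBuffer(ref bufferSize, buffer);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                byte[] frame = new byte[bufferSize];
                Marshal.Copy(buffer, frame, 0, bufferSize);
                return frame;
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
            }
        }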
Example #41
0
        /// <summary>
        ///  Create a new filter graph and add filters (devices, compressors, misc),
        ///  but leave the filters unconnected. Call RenderGraph()
        ///  to connect the filters.
        /// </summary>
        void CreateGraph()
        {
            //Skip if already created
            if ((int)_actualGraphState < (int)GraphState.Created)
            {
                // Make a new filter graph
                _graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Uuid.Clsid.FilterGraph, true));

                // Get the Capture Graph Builder
                var clsid = Uuid.Clsid.CaptureGraphBuilder2;
                var riid  = typeof(ICaptureGraphBuilder2).GUID;
                _captureGraphBuilder = (ICaptureGraphBuilder2)Workaround.CreateDsInstance(ref clsid, ref riid);

                // Link the CaptureGraphBuilder to the filter graph
                var hr = _captureGraphBuilder.SetFiltergraph(_graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                var comType = Type.GetTypeFromCLSID(Uuid.Clsid.SampleGrabber);
                if (comType == null)
                {
                    throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
                }
                var comObj = Activator.CreateInstance(comType);
                _sampGrabber = (ISampleGrabber)comObj;

                _baseGrabFlt = (IBaseFilter)_sampGrabber;

                var media = new AMMediaType();
                // Get the video device and add it to the filter graph
                if (VideoDevice != null)
                {
                    _videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);

                    hr = _graphBuilder.AddFilter(_videoDeviceFilter, "Video Capture Device");
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }

                    media.majorType           = Uuid.MediaType.Video;
                    media.subType             = Uuid.MediaSubType.Rgb32; //RGB24;
                    media.formatType          = Uuid.FormatType.VideoInfo;
                    media.temporalCompression = true;                    //New

                    hr = _sampGrabber.SetMediaType(media);

                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }

                    hr = _graphBuilder.AddFilter(_baseGrabFlt, "Grabber");
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }
                }

                // Retrieve the stream control interface for the video device
                // FindInterface will also add any required filters
                // (WDM devices in particular may need additional
                // upstream filters to function).

                // Try looking for an interleaved media type
                var cat = Uuid.PinCategory.Capture;
                var med = Uuid.MediaType.Interleaved;
                var iid = typeof(IAMStreamConfig).GUID;
                hr = _captureGraphBuilder.FindInterface(ref cat, ref med, _videoDeviceFilter, ref iid, out object o);

                if (hr != 0)
                {
                    // If not found, try looking for a video media type
                    med = Uuid.MediaType.Video;
                    hr  = _captureGraphBuilder.FindInterface(ref cat, ref med, _videoDeviceFilter, ref iid, out o);

                    if (hr != 0)
                    {
                        o = null;
                    }
                }

                //VideoStreamConfig = o as IAMStreamConfig;

                // Retrieve the media control interface (for starting/stopping graph)
                _mediaControl = (IMediaControl)_graphBuilder;

                // Reload any video crossbars
                //if (videoSources != null) videoSources.Dispose(); videoSources = null;

                _videoInfoHeader = Marshal.PtrToStructure <VideoInfoHeader>(media.formatPtr);
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

                hr = _sampGrabber.SetBufferSamples(true);

                if (hr == 0)
                {
                    hr = _sampGrabber.SetOneShot(false);
                }

                if (hr == 0)
                {
                    hr = _sampGrabber.SetCallback(null, 0);
                }

                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }

            // Update the state now that we are done
            _actualGraphState = GraphState.Created;
        }
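
The summary above refers to a RenderGraph() method that connects the filters, but that method is not part of this excerpt. The sketch below stands in for it under the same assumptions as CreateGraph(); the RenderStream signature (ref-Guid style, matching the FindInterface call above) and the method body are assumptions, not the library's actual implementation.

        /// <summary>
        ///  Hypothetical counterpart to CreateGraph(): connect the filters that
        ///  CreateGraph() added. Sketch only; not taken from the original source.
        /// </summary>
        void RenderGraphSketch()
        {
            // Route the device's capture pin through the sample grabber. Passing
            // null as the renderer lets the graph builder pick a default renderer.
            var cat = Uuid.PinCategory.Capture;
            var med = Uuid.MediaType.Video;
            var hr  = _captureGraphBuilder.RenderStream(ref cat, ref med, _videoDeviceFilter, null, _baseGrabFlt);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }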
Example #42
0
        /// <summary>
        /// Worker thread.
        /// </summary>
        ///
        private void WorkerThread( )
        {
            ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;

            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObject   = null;
            object grabberObject = null;

            // interfaces
            IGraphBuilder  graph         = null;
            IBaseFilter    sourceBase    = null;
            IBaseFilter    grabberBase   = null;
            ISampleGrabber sampleGrabber = null;
            IMediaControl  mediaControl  = null;

            IMediaEventEx mediaEvent = null;

            try
            {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                graph.AddSourceFilter(fileName, "source", out sourceBase);
                if (sourceBase == null)
                {
                    throw new ApplicationException("Failed creating source filter");
                }

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObject = Activator.CreateInstance(type);
                sampleGrabber = (ISampleGrabber)grabberObject;
                grabberBase   = (IBaseFilter)grabberObject;

                // add grabber filters to graph
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mediaType = new AMMediaType( );
                mediaType.MajorType = MediaType.Video;
                mediaType.SubType   = MediaSubType.RGB24;
                sampleGrabber.SetMediaType(mediaType);

                // connect pins
                int pinToTry = 0;

                IPin inPin  = Tools.GetInPin(grabberBase, 0);
                IPin outPin = null;

                // find output pin acceptable by sample grabber
                while (true)
                {
                    outPin = Tools.GetOutPin(sourceBase, pinToTry);

                    if (outPin == null)
                    {
                        Marshal.ReleaseComObject(inPin);
                        throw new ApplicationException("Did not find acceptable output video pin in the given source");
                    }

                    if (graph.Connect(outPin, inPin) < 0)
                    {
                        Marshal.ReleaseComObject(outPin);
                        outPin = null;
                        pinToTry++;
                    }
                    else
                    {
                        break;
                    }
                }

                Marshal.ReleaseComObject(outPin);
                Marshal.ReleaseComObject(inPin);

                // get media type
                if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mediaType.Dispose( );
                }

                // let's do rendering, if we don't need to prevent freezing
                if (!preventFreezing)
                {
                    // render pin
                    graph.Render(Tools.GetOutPin(grabberBase, 0));

                    // configure video window
                    IVideoWindow window = (IVideoWindow)graphObject;
                    window.put_AutoShow(false);
                    window = null;
                }

                // configure sample grabber
                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(grabber, 1);

                // disable clock, if someone requested it
                if (!referenceClockEnabled)
                {
                    IMediaFilter mediaFilter = (IMediaFilter)graphObject;
                    mediaFilter.SetSyncSource(null);
                }

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // get media events' interface
                mediaEvent = (IMediaEventEx)graphObject;
                IntPtr   p1, p2;
                DsEvCode code;

                // run
                mediaControl.Run( );

                do
                {
                    if (mediaEvent != null)
                    {
                        if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                        {
                            mediaEvent.FreeEventParams(code, p1, p2);

                            if (code == DsEvCode.Complete)
                            {
                                reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                                break;
                            }
                        }
                    }
                }while (!stopEvent.WaitOne(100, false));

                mediaControl.Stop( );
            }
            catch (Exception exception)
            {
                // provide information to clients
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
                }
            }
            finally
            {
                // release all objects
                graph         = null;
                grabberBase   = null;
                sampleGrabber = null;
                mediaControl  = null;
                mediaEvent    = null;

                if (graphObject != null)
                {
                    Marshal.ReleaseComObject(graphObject);
                    graphObject = null;
                }
                if (sourceBase != null)
                {
                    Marshal.ReleaseComObject(sourceBase);
                    sourceBase = null;
                }
                if (grabberObject != null)
                {
                    Marshal.ReleaseComObject(grabberObject);
                    grabberObject = null;
                }
            }

            if (PlayingFinished != null)
            {
                PlayingFinished(this, reasonToStop);
            }
        }
Example #43
0
        public void CreateGraph()
        {
            try
            {
                int result = 0;

                // Create the filter graph manager
                graphBuilder = new FilterGraph() as IFilterGraph2;

                // Create the capture graph builder
                captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;

                // Add captureGraphBuilder (the capture graph builder) to graphBuilder (the filter graph manager).
                result = captureGraphBuilder.SetFiltergraph(graphBuilder);
                DsError.ThrowExceptionForHR(result);

                // Create the source filter
                // and associate the capture device with it
                captureFilter = null;
                result = graphBuilder.AddSourceFilterForMoniker(
                    _capDevice.Mon, null, _capDevice.Name, out captureFilter);
                DsError.ThrowExceptionForHR(result);

                // Create the sample grabber
                sampleGrabber = new SampleGrabber() as ISampleGrabber;

                // Treat the grabber as a base filter
                IBaseFilter grabFilter = sampleGrabber as IBaseFilter;

                // Set the audio format to capture
                AMMediaType amMediaType = new AMMediaType();
                amMediaType.majorType = MediaType.Audio;
                amMediaType.subType = MediaSubType.PCM;
                amMediaType.formatPtr = IntPtr.Zero;
                result = sampleGrabber.SetMediaType(amMediaType);
                DsError.ThrowExceptionForHR(result);
                DsUtils.FreeAMMediaType(amMediaType);

                // Register the callback
                result = sampleGrabber.SetOneShot(false);
                DsError.ThrowExceptionForHR(result);

                result = sampleGrabber.SetBufferSamples(true);
                DsError.ThrowExceptionForHR(result);

                // Get the actual capture format
                object o;
                result = captureGraphBuilder.FindInterface(
                    DsGuid.FromGuid(PinCategory.Capture),
                    DsGuid.FromGuid(MediaType.Audio),
                    captureFilter,
                    typeof(IAMStreamConfig).GUID, out o);
                DsError.ThrowExceptionForHR(result);
                IAMStreamConfig config = o as IAMStreamConfig;
                AMMediaType media;
                result = config.GetFormat(out media);
                DsError.ThrowExceptionForHR(result);

                WaveFormatEx wf = new WaveFormatEx();
                Marshal.PtrToStructure(media.formatPtr, wf);

                CaptureOption opt = new CaptureOption(wf);
                _sampler = new DSAudioSampler(opt);

                DsUtils.FreeAMMediaType(media);
                Marshal.ReleaseComObject(config);

                result = sampleGrabber.SetCallback(_sampler, 1);
                DsError.ThrowExceptionForHR(result);

                // Add grabFilter (the transform filter) to graphBuilder (the filter graph manager).
                result = graphBuilder.AddFilter(grabFilter, "Audio Grab Filter");
                DsError.ThrowExceptionForHR(result);

                // Connect the capture filter to the sample grabber filter
                result = captureGraphBuilder.RenderStream(
                    DsGuid.FromGuid(PinCategory.Capture),
                    DsGuid.FromGuid(MediaType.Audio),
                    captureFilter, null, grabFilter);
                DsError.ThrowExceptionForHR(result);
            }
            catch (Exception ex)
            {
                System.Windows.MessageBox.Show(ex.Message);
            }
        }
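
The DSAudioSampler that receives the BufferCB calls above is not included in this example (in the original it is built from a CaptureOption wrapping the retrieved WaveFormatEx). A minimal sketch of such a sampler follows; it assumes the DirectShowLib ISampleGrabberCB interface and 16-bit PCM, and the class name and SamplesCaptured event are illustrative only.

        class DSAudioSamplerSketch : ISampleGrabberCB
        {
            // Raised with the 16-bit samples of every grabbed buffer.
            public event Action<short[]> SamplesCaptured;

            // Not used: SetCallback(..., 1) routes data to BufferCB instead.
            public int SampleCB(double sampleTime, IMediaSample sample)
            {
                return 0;
            }

            public int BufferCB(double sampleTime, IntPtr buffer, int bufferLen)
            {
                // Copy the raw PCM bytes out of the grabber's buffer...
                var raw = new byte[bufferLen];
                Marshal.Copy(buffer, raw, 0, bufferLen);

                // ...and reinterpret them as 16-bit samples (assumes wBitsPerSample == 16).
                var samples = new short[bufferLen / 2];
                Buffer.BlockCopy(raw, 0, samples, 0, samples.Length * 2);

                SamplesCaptured?.Invoke(samples);
                return 0;
            }
        }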