Code example #1
        void SetupPlaybackGraph(string fname)
        {
            int hr;

            try
            {
                hr = graphBuilder.RenderFile(fname, null);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                AMMediaType media = new AMMediaType();
                media.majorType  = MediaType.Video;
                media.subType    = MediaSubType.RGB24;
                media.formatType = FormatType.VideoInfo;                                // VIDEOINFOHEADER format block
                hr = sampGrabber.SetMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(smartTee, "smartTee");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                IBaseFilter renderer;
                hr = graphBuilder.FindFilterByName("Video Renderer", out renderer);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                IPin inPin;
                IPin srcPin;

                hr = DsUtils.GetPin(renderer, PinDirection.Input, out inPin, 0);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = inPin.ConnectedTo(out srcPin);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = srcPin.Disconnect();
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.RemoveFilter(renderer);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                Marshal.ReleaseComObject(renderer);
                Marshal.ReleaseComObject(inPin);

                hr = DsUtils.GetPin(smartTee, PinDirection.Input, out inPin, 0);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.Connect(srcPin, inPin);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                Marshal.ReleaseComObject(srcPin);
                Marshal.ReleaseComObject(inPin);
                srcPin = inPin = null;

                hr = DsUtils.GetPin(smartTee, PinDirection.Output, out srcPin, 1);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // grabber Input
                hr = DsUtils.GetPin(baseGrabFlt, PinDirection.Input, out inPin, 0);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // smartTee -> grabber
                hr = graphBuilder.Connect(srcPin, inPin);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                Marshal.ReleaseComObject(srcPin);
                Marshal.ReleaseComObject(inPin);
                srcPin = inPin = null;


                if (preview)
                {
                    // smartTee preview output
                    hr = DsUtils.GetPin(smartTee, PinDirection.Output, out srcPin, 0);
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }

                    hr = graphBuilder.Render(srcPin);
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }
                    Marshal.ReleaseComObject(srcPin);
                    srcPin = null;
                }


                media = new AMMediaType();
                hr    = sampGrabber.GetConnectedMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr);
                media.formatPtr = IntPtr.Zero;

                // Configure the sample grabber (following the Platform SDK sample) to deliver frames via the callback
                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                {
                    hr = sampGrabber.SetOneShot(false);
                }
                if (hr == 0)
                {
                    hr = sampGrabber.SetCallback(sampleGrabber, 1);
                }
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }
            catch (Exception ee)
            {
                throw new Exception("Could not setup graph\r\n" + ee.Message, ee);
            }
        }
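
Example #1 registers a callback object with SetCallback(sampleGrabber, 1), so each decoded RGB24 frame is delivered through ISampleGrabberCB.BufferCB rather than buffered inside the filter. Below is a minimal sketch of what such a callback class can look like, assuming the DirectShow.NET ISampleGrabberCB signatures; the class name and the OnFrame delegate are illustrative and not part of the original project.

        // Minimal callback sketch (hypothetical class; ISampleGrabberCB as defined by DirectShow.NET).
        class GrabberCallback : ISampleGrabberCB
        {
            // Invoked with a copy of each frame (illustrative hook for the application).
            public Action<byte[]> OnFrame;

            // Not used when SetCallback is called with WhichMethodToCallback = 1.
            public int SampleCB(double sampleTime, IMediaSample pSample)
            {
                return 0;
            }

            // Called once per frame; the buffer pointer is only valid during this call,
            // so copy the data out before returning.
            public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
            {
                byte[] frame = new byte[bufferLen];
                Marshal.Copy(pBuffer, frame, 0, bufferLen);
                OnFrame?.Invoke(frame);
                return 0;
            }
        }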
Code example #2
        /// <summary>
        /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice.
        /// </summary>
        /// <param name="FileName">The video file to open</param>
        /// <param name="graphicsDevice">XNA Graphics Device</param>
        public VideoPlayer(string FileName, GraphicsDevice graphicsDevice)
        {
            try
            {
                // Set video state
                currentState = VideoState.Stopped;

                // Store Filename
                filename = FileName;

                // Open DirectShow Interfaces
                InitInterfaces();

                // Create a SampleGrabber Filter and add it to the FilterGraph
                SampleGrabber  sg            = new SampleGrabber();
                ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
                DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));

                // Setup Media type info for the SampleGrabber
                AMMediaType mt = new AMMediaType();
                mt.majorType  = MEDIATYPE_Video;    // Video
                mt.subType    = MEDIASUBTYPE_RGB24; // RGB24
                mt.formatType = FORMAT_VideoInfo;   // VideoInfo
                DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));

                // Construct the rest of the FilterGraph
                DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));

                // Set SampleGrabber Properties
                DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
                DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
                DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

                // Hide Default Video Window
                IVideoWindow pVideoWindow = (IVideoWindow)gb;
                DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

                // Create AMMediaType to capture video information
                AMMediaType MediaType = new AMMediaType();
                DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
                VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                // Store video information
                videoHeight     = pVideoHeader.BmiHeader.Height;
                videoWidth      = pVideoHeader.BmiHeader.Width;
                avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                bitRate         = pVideoHeader.BitRate;
                DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));

                // Create byte arrays to hold video data
                videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
                bgrData         = new byte[(videoHeight * videoWidth) * 3]; // BGR24 format (3 bytes per pixel)

                // Create Output Frame Texture2D with the height and width of the video
                outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color);
            }
            catch (Exception ex)
            {
                throw new Exception("Unable to Load or Play the video file", ex);
            }
        }
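
Example #2 allocates both a 3-byte-per-pixel BGR buffer (what the RGB24 sample grabber delivers) and a 4-byte-per-pixel buffer for the Texture2D. A hedged sketch of the conversion that would sit between the two, assuming bottom-up source rows with no padding (the helper name is hypothetical):

        // Expand bottom-up BGR24 frame data into top-down RGBA for the Texture2D.
        // Assumes width * 3 bytes per source row, i.e. no DIB row padding.
        static void BgrToRgba(byte[] bgr, byte[] rgba, int width, int height)
        {
            for (int y = 0; y < height; y++)
            {
                int srcRow = (height - 1 - y) * width * 3;   // flip vertically
                int dstRow = y * width * 4;
                for (int x = 0; x < width; x++)
                {
                    rgba[dstRow + x * 4 + 0] = bgr[srcRow + x * 3 + 2]; // R
                    rgba[dstRow + x * 4 + 1] = bgr[srcRow + x * 3 + 1]; // G
                    rgba[dstRow + x * 4 + 2] = bgr[srcRow + x * 3 + 0]; // B
                    rgba[dstRow + x * 4 + 3] = 255;                     // A
                }
            }
        }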
Code example #3
        // Thread entry point
        public void WorkerThread()
        {
            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObj   = null;
            object sourceObj  = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder  graph       = null;
            IBaseFilter    sourceBase  = null;
            IBaseFilter    grabberBase = null;
            ISampleGrabber sg          = null;
            IMediaControl  mc          = null;

            try
            {
                // Get type for filter graph
                Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (srvType == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObj = Activator.CreateInstance(srvType);
                graph    = (IGraphBuilder)graphObj;
                int n = 0;

                // create bind context
                if (Win32.CreateBindCtx(0, out UCOMIBindCtx bindCtx) == 0)
                {
                    // ----
                    // convert the moniker's display name string to a moniker
                    if (Win32.MkParseDisplayName(bindCtx, source, ref n, out UCOMIMoniker moniker) == 0)
                    {
                        // get device base filter
                        Guid filterId = typeof(IBaseFilter).GUID;
                        moniker.BindToObject(null, null, ref filterId, out sourceObj);

                        Marshal.ReleaseComObject(moniker);
                        moniker = null;
                    }
                    Marshal.ReleaseComObject(bindCtx);
                    bindCtx = null;
                }
                // ----

                if (sourceObj == null)
                {
                    throw new ApplicationException("Failed creating device object for moniker");
                }

                sourceBase = (IBaseFilter)sourceObj;

                // Get type for sample grabber
                srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (srvType == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObj  = Activator.CreateInstance(srvType);
                sg          = (ISampleGrabber)grabberObj;
                grabberBase = (IBaseFilter)grabberObj;

                // add source filter to graph
                graph.AddFilter(sourceBase, "source");
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mt = new AMMediaType
                {
                    majorType = MediaType.Video,
                    subType   = MediaSubType.RGB24
                };

                sg.SetMediaType(mt);

                // connect pins
                if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }

                // get media type
                if (sg.GetConnectedMediaType(mt) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));

                    System.Diagnostics.Debug.WriteLine("width = " + vih.BmiHeader.Width + ", height = " + vih.BmiHeader.Height);
                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mt.Dispose();
                }

                // render
                graph.Render(DSTools.GetOutPin(grabberBase, 0));

                //
                sg.SetBufferSamples(false);
                sg.SetOneShot(false);
                sg.SetCallback(grabber, 1);

                // window
                IVideoWindow win = (IVideoWindow)graphObj;
                win.put_AutoShow(false);
                win = null;


                // get media control
                mc = (IMediaControl)graphObj;

                // run
                mc.Run();

                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mc.StopWhenReady();
            }
            // catch any exceptions
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("----: " + e.Message);
            }
            // finalization block
            finally
            {
                // release all objects
                mc          = null;
                graph       = null;
                sourceBase  = null;
                grabberBase = null;
                sg          = null;

                if (graphObj != null)
                {
                    Marshal.ReleaseComObject(graphObj);
                    graphObj = null;
                }
                if (sourceObj != null)
                {
                    Marshal.ReleaseComObject(sourceObj);
                    sourceObj = null;
                }
                if (grabberObj != null)
                {
                    Marshal.ReleaseComObject(grabberObj);
                    grabberObj = null;
                }
            }
        }
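
The loop at the end of example #3 polls stopEvent until the owner signals it. A sketch of how the owning class might start and stop this worker; stopEvent is assumed to be the same ManualResetEvent field the loop checks, and the local declarations here are purely illustrative:

        // Illustrative start/stop wiring for the worker thread above.
        ManualResetEvent stopEvent = new ManualResetEvent(false);
        Thread worker = new Thread(WorkerThread);
        worker.Start();

        // ... later, when capturing should end:
        stopEvent.Set();    // WaitOne(0, true) in the loop now returns true
        worker.Join();      // wait until the graph is stopped and COM objects are released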
Code example #4
File: DxPlayer.cs  Project: hpavlov/occurec
        // Save the size parameters for use in SnapShot
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();

            hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            try
            {

                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                // Get the struct
                VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(media.formatPtr, videoInfoHeader);

                // Grab the size info
                m_videoWidth = videoInfoHeader.BmiHeader.Width;
                m_videoHeight = videoInfoHeader.BmiHeader.Height;
                m_stride = videoInfoHeader.BmiHeader.ImageSize / m_videoHeight;
                m_ImageSize = videoInfoHeader.BmiHeader.ImageSize;
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
        }
Code example #5
        public void BuildGraph()
        {
            ICaptureGraphBuilder2   captureGraphBuilder2 = (ICaptureGraphBuilder2)null;
            IBaseFilter             ppFilter             = (IBaseFilter)null;
            ISampleGrabber          sampleGrabber        = (ISampleGrabber)null;
            List <DeviceEnumerator> deviceEnumeratorList = (List <DeviceEnumerator>)null;

            try
            {
                Logger.Info("Creating List of devices");
                deviceEnumeratorList = DeviceEnumerator.ListDevices(Guids.VideoInputDeviceCategory);
            }
            catch (Exception ex)
            {
                Logger.Error("Exception in finding Video device. Err : {0}", (object)ex.ToString());
            }
            if (deviceEnumeratorList != null)
            {
                if (deviceEnumeratorList.Count != 0)
                {
                    try
                    {
                        Logger.Info("found {0} Camera, Opening {1}", (object)deviceEnumeratorList.Count, (object)this.m_Unit);
                        DeviceEnumerator deviceEnumerator = this.m_Unit >= deviceEnumeratorList.Count ? deviceEnumeratorList[0] : deviceEnumeratorList[this.m_Unit];
                        this.m_FilterGraph   = (IFilterGraph2) new FilterGraph();
                        this.m_mediaCtrl     = this.m_FilterGraph as IMediaControl;
                        captureGraphBuilder2 = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
                        sampleGrabber        = (ISampleGrabber) new SampleGrabber();
                        ErrorHandler errorHandler1 = (ErrorHandler)captureGraphBuilder2.SetFiltergraph((IGraphBuilder)this.m_FilterGraph);
                        if (errorHandler1.GetError() != 0)
                        {
                            Logger.Error("SetFiltergraph failed with {0:X}..", (object)errorHandler1.GetError());
                        }
                        ErrorHandler errorHandler2 = (ErrorHandler)this.m_FilterGraph.AddSourceFilterForMoniker(deviceEnumerator.Moniker, (IBindCtx)null, "Video input", out ppFilter);
                        if (errorHandler2.GetError() != 0)
                        {
                            Logger.Error("AddSourceFilterForMoniker failed with {0:X}", (object)errorHandler2.GetError());
                        }
                        AMMediaType pmt = new AMMediaType()
                        {
                            majorType = Guids.MediaTypeVideo
                        };
                        if (this.m_color == SupportedColorFormat.YUV2)
                        {
                            pmt.subType = Guids.MediaSubtypeYUY2;
                        }
                        else
                        {
                            if (this.m_color != SupportedColorFormat.RGB24)
                            {
                                throw new Exception("Unsupported color format");
                            }
                            pmt.subType = Guids.MediaSubtypeRGB24;
                        }
                        pmt.formatType = Guids.FormatTypesVideoInfo;
                        ErrorHandler errorHandler3 = (ErrorHandler)sampleGrabber.SetMediaType(pmt);
                        this.FreeAMMedia(pmt);
                        ErrorHandler errorHandler4 = (ErrorHandler)sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
                        if (errorHandler4.GetError() != 0)
                        {
                            Logger.Error("Grabber setcallback failed with {0:X}", (object)errorHandler4.GetError());
                        }
                        IBaseFilter  baseFilter    = (IBaseFilter)sampleGrabber;
                        ErrorHandler errorHandler5 = (ErrorHandler)this.m_FilterGraph.AddFilter(baseFilter, "FrameGrabber");
                        if (errorHandler5.GetError() != 0)
                        {
                            Logger.Error("AddFilter failed with {0:X}", (object)errorHandler5.GetError());
                        }
                        object       ppint;
                        ErrorHandler errorHandler6 = (ErrorHandler)captureGraphBuilder2.FindInterface(Guids.PinCategoryCapture, Guids.MediaTypeVideo, ppFilter, typeof(IAMStreamConfig).GUID, out ppint);
                        if (errorHandler6.GetError() != 0)
                        {
                            Logger.Error("FindInterface failed with {0:X}", (object)errorHandler6.GetError());
                        }
                        if (!(ppint is IAMStreamConfig amStreamConfig))
                        {
                            throw new Exception("Stream config Error");
                        }
                        errorHandler3 = (ErrorHandler)amStreamConfig.GetFormat(out pmt);
                        VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
                        Marshal.PtrToStructure(pmt.pbFormat, (object)videoInfoHeader);
                        videoInfoHeader.AvgTimePerFrame  = (long)(10000000 / this.m_FrameRate);
                        videoInfoHeader.BmiHeader.Width  = this.m_Width;
                        videoInfoHeader.BmiHeader.Height = this.m_Height;
                        Marshal.StructureToPtr((object)videoInfoHeader, pmt.pbFormat, false);
                        ErrorHandler errorHandler7 = (ErrorHandler)amStreamConfig.SetFormat(pmt);
                        if (errorHandler7.GetError() != 0)
                        {
                            Logger.Error("conf.setformat failed with {0:X}", (object)errorHandler7.GetError());
                        }
                        this.FreeAMMedia(pmt);
                        ErrorHandler errorHandler8 = (ErrorHandler)captureGraphBuilder2.RenderStream(Guids.PinCategoryCapture, Guids.MediaTypeVideo, (object)ppFilter, (IBaseFilter)null, baseFilter);
                        if (errorHandler8.GetError() != 0)
                        {
                            Logger.Error("RenderStream failed with {0:X}", (object)errorHandler8.GetError());
                        }
                        AMMediaType amMediaType = new AMMediaType();
                        errorHandler3 = (ErrorHandler)sampleGrabber.GetConnectedMediaType(amMediaType);
                        if (amMediaType.formatType != Guids.FormatTypesVideoInfo)
                        {
                            throw new ColorFormatNotSupported("Not able to connect to Video Media");
                        }
                        if (amMediaType.pbFormat == IntPtr.Zero)
                        {
                            throw new Exception("Format Array is null");
                        }
                        VideoInfoHeader structure = (VideoInfoHeader)Marshal.PtrToStructure(amMediaType.pbFormat, typeof(VideoInfoHeader));
                        this.m_Width  = structure.BmiHeader.Width;
                        this.m_Height = structure.BmiHeader.Height;
                        this.m_Stride = this.m_Width * ((int)structure.BmiHeader.BitCount / 8);
                        if (this.m_Buffer == IntPtr.Zero)
                        {
                            this.m_Buffer = Marshal.AllocCoTaskMem(this.m_Stride * this.m_Height);
                        }
                        this.FreeAMMedia(amMediaType);
                        return;
                    }
                    catch
                    {
                        throw;
                    }
                    finally
                    {
                        if (ppFilter != null)
                        {
                            Marshal.ReleaseComObject((object)ppFilter);
                        }
                        if (sampleGrabber != null)
                        {
                            Marshal.ReleaseComObject((object)sampleGrabber);
                        }
                        if (captureGraphBuilder2 != null)
                        {
                            Marshal.ReleaseComObject((object)captureGraphBuilder2);
                        }
                    }
                }
            }
            Logger.Info("CAMERA: Could not find a camera device!");
        }
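
Example #5 programs the camera's frame rate by writing 10000000 / m_FrameRate into AvgTimePerFrame. That field is measured in 100-nanosecond units (REFERENCE_TIME), so the assignment works out as in this small worked example:

        // AvgTimePerFrame is in 100-ns units, e.g. requesting 30 fps:
        long avgTimePerFrame = 10000000L / 30;   // = 333333, about 33.3 ms per frame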
Code example #6
File: VideoFramePusher.cs  Project: DeSciL/Ogama
    /// <summary> Read and store the properties </summary>
    private void SaveSizeInfo(ISampleGrabber sampGrabber)
    {
      int hr;

      // Get the media type from the SampleGrabber
      AMMediaType media = new AMMediaType();
      hr = sampGrabber.GetConnectedMediaType(media);
      DsError.ThrowExceptionForHR(hr);

      if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
      {
        throw new NotSupportedException("Unknown Grabber Media Format");
      }

      // Grab the size info
      VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
      m_videoWidth = videoInfoHeader.BmiHeader.Width;
      m_videoHeight = videoInfoHeader.BmiHeader.Height;
      m_stride = m_videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);

      //m_bitmapDataArray = new byte[videoInfoHeader.BmiHeader.ImageSize];
      m_handle = Marshal.AllocCoTaskMem(m_stride * m_videoHeight);

      DsUtils.FreeAMMediaType(media);
      media = null;
    }
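
Example #6 computes the stride as width × bytes-per-pixel. That equals the real row size only when each row is already 4-byte aligned (always true for 32-bit formats, and for RGB24 only at certain widths); the general DIB rule is sketched below with a hypothetical helper:

    // Rows of a Windows DIB are padded to a 4-byte boundary.
    static int DibStride(int width, int bitsPerPixel)
    {
        return ((width * bitsPerPixel + 31) / 32) * 4;
    }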
Code example #7
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();
            hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.WaveEx) || (media.formatPtr == IntPtr.Zero)) {
                throw new NotSupportedException("Unknown Grabber Audio Format");
            }

            WaveFormatEx infoHeader = (WaveFormatEx)Marshal.PtrToStructure(media.formatPtr, typeof(WaveFormatEx));
            m_Channels = infoHeader.nChannels;
            m_SampleRate = infoHeader.nSamplesPerSec;
            m_BitsPerSample = infoHeader.wBitsPerSample;

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
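
Example #7 keeps only the channel count, sample rate, and bit depth from the connected WaveFormatEx. The remaining sizes that buffer math usually needs follow from them by the standard PCM relationships, sketched here with the same field names:

        // Standard PCM relationships derived from the captured fields (illustrative).
        int bytesPerSample = m_BitsPerSample / 8;
        int blockAlign     = m_Channels * bytesPerSample;   // WaveFormatEx.nBlockAlign
        int bytesPerSecond = m_SampleRate * blockAlign;     // WaveFormatEx.nAvgBytesPerSec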
Code example #8
        public WaveFormatEx GetSampleGrabberFormat(ISampleGrabber sampleGrabber)
        {
            int hr;

            AMMediaType mediaInfo = new AMMediaType();
            hr = sampleGrabber.GetConnectedMediaType(mediaInfo);
            DsError.ThrowExceptionForHR(hr);

            if ((mediaInfo.formatType != FormatType.WaveEx) || (mediaInfo.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            WaveFormatEx format = (WaveFormatEx)Marshal.PtrToStructure(mediaInfo.formatPtr, typeof(WaveFormatEx));
            Marshal.FreeCoTaskMem(mediaInfo.formatPtr);
            mediaInfo.formatPtr = IntPtr.Zero;

            return format;
        }
Code example #9
        /// <summary>
        /// Creates a new Video Player for the specified file.
        /// </summary>
        /// <param name="FileName">The video file to open</param>
        protected VideoPlayer(string FileName)
        {
            try
            {
                // Set video state
                currentState = VideoState.Stopped;

                // Store Filename
                filename = FileName;

                // Open DirectShow Interfaces
                InitInterfaces();

                // Create a SampleGrabber Filter and add it to the FilterGraph
                //SampleGrabber sg = new SampleGrabber();
                var comtype = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (comtype == null)
                {
                    throw new NotSupportedException("DirectX (8.1 or higher) not installed?");
                }
                m_comObject = Activator.CreateInstance(comtype);

                ISampleGrabber sampleGrabber = (ISampleGrabber)m_comObject;
                m_graphBuilder.AddFilter((IBaseFilter)m_comObject, "Grabber");

                // Setup Media type info for the SampleGrabber
                AMMediaType mt = new AMMediaType();
                mt.majorType  = MEDIATYPE_Video;    // Video
                mt.subType    = MEDIASUBTYPE_RGB32; // RGB32
                mt.formatType = FORMAT_VideoInfo;   // VideoInfo
                sampleGrabber.SetMediaType(mt);

                // Construct the rest of the FilterGraph
                m_graphBuilder.RenderFile(filename, null);

                // Set SampleGrabber Properties
                sampleGrabber.SetBufferSamples(true);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);

                // Hide Default Video Window
                IVideoWindow pVideoWindow = (IVideoWindow)m_graphBuilder;
                //pVideoWindow.put_AutoShow(OABool.False);
                pVideoWindow.put_AutoShow(0);

                // Create AMMediaType to capture video information
                AMMediaType MediaType = new AMMediaType();
                sampleGrabber.GetConnectedMediaType(MediaType);
                VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                // Store video information
                videoHeight     = pVideoHeader.BmiHeader.Height;
                videoWidth      = pVideoHeader.BmiHeader.Width;
                avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                bitRate         = pVideoHeader.BitRate;
                m_mediaSeeking.GetDuration(out videoDuration);

                // Create byte arrays to hold video data
                m_videoDataRgba = new MySwapQueue <byte[]>(() => new byte[(videoHeight * videoWidth) * 4]); // RGBA format (4 bytes per pixel)
            }
            catch (Exception e)
            {
                throw new Exception("Unable to Load or Play the video file", e);
            }
        }
Code example #10
        protected void CompleteAudioSampleGrabberIntialization()
        {
            _actualAudioFormat = null;
            if (sampleGrabber != null)
            {
                AMMediaType mtAudio = new AMMediaType();
                if (HRESULT.SUCCEEDED(sampleGrabber.GetConnectedMediaType(mtAudio)))
                {
                    _actualAudioFormat = (WaveFormatEx)Marshal.PtrToStructure(mtAudio.formatPtr, typeof(WaveFormatEx));

                    const int WAVEFORM_WNDSIZEFACTOR = 128;
                    const int VU_WNDSIZEFACTOR       = 4096;
                    const int FFT_WNDSIZEFACTOR      = 16;

                    int freq =
                        (MediaRenderer.DefaultInstance.ActualAudioFormat == null) ? 44100 :
                        MediaRenderer.DefaultInstance.ActualAudioFormat.nSamplesPerSec;

                    try
                    {
                        int k1 = 0, k2 = 0, k3 = 0;

                        while (freq / (1 << k1) > WAVEFORM_WNDSIZEFACTOR)
                        {
                            k1++;
                        }
                        while (freq / (1 << k2) > FFT_WNDSIZEFACTOR)
                        {
                            k2++;
                        }
                        while (freq / (1 << k3) > VU_WNDSIZEFACTOR)
                        {
                            k3++;
                        }

                        _waveformWindowSize = (1 << k1);
                        _fftWindowSize      = (1 << k2);
                        _vuMeterWindowSize  = (1 << k3);

                        _maxLevel =
                            (MediaRenderer.DefaultInstance.ActualAudioFormat != null) ?
                            (1 << (MediaRenderer.DefaultInstance.ActualAudioFormat.wBitsPerSample - 1)) - 1 :
                            short.MaxValue;
                    }
                    catch
                    {
                        _vuMeterWindowSize  = 64;
                        _waveformWindowSize = 512;
                        _fftWindowSize      = 4096;
                        _maxLevel           = short.MaxValue;
                    }
                    finally
                    {
                        _maxLogLevel = Math.Log(_maxLevel);
                    }

                    sampleGrabberConfigured.Set();
                    return;
                }
            }
        }
Code example #11
        public bool AddVideoFilters(IGraphBuilder graphBuilder, ICaptureGraphBuilder2 captureGraphBuilder)
        {
            bool ret = SetDevice();

            if (!ret)
            {
                return(false);
            }

            // Create the sample grabber (videoSampleGrabber).
            mVideoSampleGrabber = GraphFactory.MakeSampleGrabber();

            // Obtain the grabber's filter interface.
            mVideoGrabFilter = (IBaseFilter)mVideoSampleGrabber;

            // Set the format of the video data to capture.
            AMMediaType amMediaType = new AMMediaType();

            amMediaType.majorType  = MediaType.Video;
            amMediaType.subType    = MediaSubType.RGB24;
            amMediaType.formatType = FormatType.VideoInfo;

            int result;

            result = mVideoSampleGrabber.SetMediaType(amMediaType);
            if (result < 0)
            {
                Marshal.ThrowExceptionForHR(result);
            }

            // Add the video capture filter (source filter) to graphBuilder (the filter graph manager).
            result = graphBuilder.AddFilter(mVideoCaptureFilter, "Video Capture Device");
            if (result < 0)
            {
                Marshal.ThrowExceptionForHR(result);
            }

            // Add videoGrabFilter (the transform filter) to graphBuilder (the filter graph manager).
            result = graphBuilder.AddFilter(mVideoGrabFilter, "Video Grab Filter");
            //result = graphBuilder.AddFilter(videoGrabFilter, "Frame Grab Filter");
            if (result < 0)
            {
                Marshal.ThrowExceptionForHR(result);
            }

            // Connect the video capture filter to the sample grabber filter.
            result = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, mVideoCaptureFilter, null, mVideoGrabFilter);
            if (result < 0)
            {
                Marshal.ThrowExceptionForHR(result);
            }

            // Connect the video capture filter to the default renderer filter (display output) for preview.
            result = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, mVideoCaptureFilter, null, null);
            if (result < 0)
            {
                Marshal.ThrowExceptionForHR(result);
            }

            // Verify that the frame capture configuration has been completed.
            amMediaType = new AMMediaType();
            result      = mVideoSampleGrabber.GetConnectedMediaType(amMediaType);
            if (result < 0)
            {
                Marshal.ThrowExceptionForHR(result);
            }
            if ((amMediaType.formatType != FormatType.VideoInfo) || (amMediaType.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("エラー:キャプチャできない映像メディアフォーマットです.Error: This video media format cannnot be caputered.");
            }

            // Build videoInfoHeader from the format of the video data being captured.
            mVideoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(amMediaType.formatPtr, typeof(VideoInfoHeader));
            Marshal.FreeCoTaskMem(amMediaType.formatPtr);
            amMediaType.formatPtr = IntPtr.Zero;

            // Do not copy samples passing through the filter into a buffer.
            result = mVideoSampleGrabber.SetBufferSamples(false);
            // Do not stop the filter after receiving a single sample (one frame).
            if (result == 0)
            {
                result = mVideoSampleGrabber.SetOneShot(false);
            }
            // Disable use of the callback function.
            if (result == 0)
            {
                result = mVideoSampleGrabber.SetCallback(null, 0);
            }
            if (result < 0)
            {
                Marshal.ThrowExceptionForHR(result);
            }

            return(true);
        }
Code example #12
        private static Bitmap GetBitmap(IGraphBuilder graph, ISampleGrabber sg, long grabPosition, out EventCode ec)
        {
            IntPtr pBuffer = IntPtr.Zero;
            int pBufferSize = 0;
            Bitmap b = null;
            int hr = 0;

            try
            {
                IMediaSeeking ims = graph as IMediaSeeking;

                bool canDuration = false;
                bool canPos = false;
                bool canSeek = false;
                long pDuration = 0;
                long pCurrent = 0;

                if (ims != null)
                {
                    AMSeekingSeekingCapabilities caps;

                    hr = ims.GetCapabilities(out caps);
                    if ((caps & AMSeekingSeekingCapabilities.CanGetDuration) == AMSeekingSeekingCapabilities.CanGetDuration)
                        canDuration = true;
                    if ((caps & AMSeekingSeekingCapabilities.CanGetCurrentPos) == AMSeekingSeekingCapabilities.CanGetCurrentPos)
                        canPos = true;
                    if ((caps & AMSeekingSeekingCapabilities.CanSeekAbsolute) == AMSeekingSeekingCapabilities.CanSeekAbsolute)
                        canSeek = true;

                    if (canDuration)
                        hr = ims.GetDuration(out pDuration);

                    if (grabPosition > pDuration)
                        grabPosition = pDuration - 1;

                    if (canSeek)
                    {
                        hr = ims.SetPositions(new DsLong(grabPosition), AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning);
                        DsError.ThrowExceptionForHR(hr);
                    }

                    if (canPos)
                        hr = ims.GetCurrentPosition(out pCurrent);
                }

                if (canPos)
                    hr = ims.GetCurrentPosition(out pCurrent);

                IMediaControl mControl = graph as IMediaControl;
                IMediaEvent mEvent = graph as IMediaEvent;

                //ec = EventCode.SystemBase;

                hr = mControl.Pause();
                DsError.ThrowExceptionForHR(hr);

                hr = mControl.Run();
                DsError.ThrowExceptionForHR(hr);

                hr = mEvent.WaitForCompletion(int.MaxValue, out ec);
                DsError.ThrowExceptionForHR(hr);

                hr = mControl.Pause();
                DsError.ThrowExceptionForHR(hr);

                hr = mControl.Stop();
                DsError.ThrowExceptionForHR(hr);

                if (ec != EventCode.Complete)
                    return null;

                hr = sg.GetCurrentBuffer(ref pBufferSize, pBuffer);
                DsError.ThrowExceptionForHR(hr);

                pBuffer = Marshal.AllocCoTaskMem(pBufferSize);

                hr = sg.GetCurrentBuffer(ref pBufferSize, pBuffer);
                DsError.ThrowExceptionForHR(hr);

                if (pBuffer != IntPtr.Zero)
                {
                    AMMediaType sgMt = new AMMediaType();
                    int videoWidth = 0;
                    int videoHeight = 0;
                    int stride = 0;

                    try
                    {
                        hr = sg.GetConnectedMediaType(sgMt);
                        DsError.ThrowExceptionForHR(hr);

                        if (sgMt.formatPtr != IntPtr.Zero)
                        {
                            if (sgMt.formatType == FormatType.VideoInfo)
                            {
                                VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(sgMt.formatPtr, typeof(VideoInfoHeader));
                                videoWidth = vih.BmiHeader.Width;
                                videoHeight = vih.BmiHeader.Height;
                                stride = videoWidth * (vih.BmiHeader.BitCount / 8);
                            }
                            else
                                throw new ApplicationException("Unsupported Sample");

                            b = new Bitmap(videoWidth, videoHeight, stride, System.Drawing.Imaging.PixelFormat.Format32bppRgb, pBuffer);
                            b.RotateFlip(RotateFlipType.RotateNoneFlipY);
                        }
                    }
                    finally
                    {
                        DsUtils.FreeAMMediaType(sgMt);
                    }
                }

                return b;
            }
            finally
            {
                if (pBuffer != IntPtr.Zero)
                    Marshal.FreeCoTaskMem(pBuffer);
            }
        }
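
A sketch of how the GetBitmap helper in example #12 might be called; graphBuilder and sampleGrabber stand in for whatever graph and grabber the caller has already built, and seek positions are REFERENCE_TIME values in 100-ns units:

        // Illustrative call site: grab the frame five seconds into the file.
        EventCode ec;
        long position = 5L * 10000000L;   // 5 seconds in 100-ns units
        Bitmap frame = GetBitmap(graphBuilder, sampleGrabber, position, out ec);
        if (frame != null)
        {
            frame.Save("frame.png");
            frame.Dispose();
        }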
Code example #13
        private void InitializeCapture()
        {
            graphBuilder = (IGraphBuilder)new FilterGraph();
            mediaControl = (IMediaControl)graphBuilder;

            captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
            hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
            DsError.ThrowExceptionForHR(hr);

            IBaseFilter videoInput = GetVideoInputObject();
            if (null != videoInput)
            {
                SetConfigurations(videoInput);

                sampleGrabber = new SampleGrabber() as ISampleGrabber;
                hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Render");
                DsError.ThrowExceptionForHR(hr);

                hr = graphBuilder.AddFilter(videoInput, "Camera");
                DsError.ThrowExceptionForHR(hr);

                AMMediaType type = new AMMediaType() { majorType = MediaType.Video, subType = MediaSubType.ARGB32, formatType = FormatType.VideoInfo };
                hr = sampleGrabber.SetMediaType(type);
                DsError.ThrowExceptionForHR(hr);
                DsUtils.FreeAMMediaType(type);

                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.GetConnectedMediaType(new AMMediaType());

                sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
                hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, videoInput, null, sampleGrabber as IBaseFilter);
                DsError.ThrowExceptionForHR(hr);

                Marshal.ReleaseComObject(videoInput);
            }
        }
Code example #14
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void Init()
        {
            try
            {
                log.Trace("Start worker thread");
                // Create the main graph
                _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                _sourceObject = FilterInfo.CreateFilter(_monikerString);

                // Create the grabber
                _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                _grabberObject = _grabber as IBaseFilter;

                // Add the source and grabber to the main graph
                _graph.AddFilter(_sourceObject, "source");
                _graph.AddFilter(_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    _grabber.SetMediaType(mediaType);

                    if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (_grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup, this code can be too fast, so try at least 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Tried again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    _capGrabber.Width = header.BmiHeader.Width;
                                    _capGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch
                                {
                                    // Trace
                                    log.InfoFormat("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
                    _grabber.SetBufferSamples(false);
                    _grabber.SetOneShot(false);
                    _grabber.SetCallback(_capGrabber, 1);
                    log.Trace("_grabber set up");

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)_graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    _control = (IMediaControl)_graph;
                    _control.Run();
                    log.Trace("control runs");

                    // Wait for the stop signal
                    //while (!_stopSignal.WaitOne(0, true))
                    //{
                    //    Thread.Sleep(10);
                    //}
                }
            }
            catch (Exception ex)
            {
                // Trace
                log.Debug(ex);
                Release();
            }
        }
Code example #15
        private void WorkerThread(bool runGraph)
        {
            ReasonToFinishPlaying reason = ReasonToFinishPlaying.StoppedByUser;
            bool    flag         = false;
            Grabber grabber      = new Grabber(this, snapshotMode: false);
            Grabber grabber2     = new Grabber(this, snapshotMode: true);
            object  obj          = null;
            object  obj2         = null;
            object  obj3         = null;
            object  obj4         = null;
            object  retInterface = null;
            ICaptureGraphBuilder2 captureGraphBuilder = null;
            IFilterGraph2         filterGraph         = null;
            IBaseFilter           baseFilter          = null;
            IBaseFilter           baseFilter2         = null;
            IBaseFilter           baseFilter3         = null;
            ISampleGrabber        sampleGrabber       = null;
            ISampleGrabber        sampleGrabber2      = null;
            IMediaControl         mediaControl        = null;
            IAMVideoControl       iAMVideoControl     = null;
            IMediaEventEx         mediaEventEx        = null;
            IPin        pin         = null;
            IAMCrossbar iAMCrossbar = null;

            try
            {
                Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating capture graph builder");
                }
                obj = Activator.CreateInstance(typeFromCLSID);
                captureGraphBuilder = (ICaptureGraphBuilder2)obj;
                typeFromCLSID       = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }
                obj2        = Activator.CreateInstance(typeFromCLSID);
                filterGraph = (IFilterGraph2)obj2;
                captureGraphBuilder.SetFiltergraph((IGraphBuilder)filterGraph);
                sourceObject = FilterInfo.CreateFilter(deviceMoniker);
                if (sourceObject == null)
                {
                    throw new ApplicationException("Failed creating device object for moniker");
                }
                baseFilter = (IBaseFilter)sourceObject;
                try
                {
                    iAMVideoControl = (IAMVideoControl)sourceObject;
                }
                catch
                {
                }
                typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }
                obj3           = Activator.CreateInstance(typeFromCLSID);
                sampleGrabber  = (ISampleGrabber)obj3;
                baseFilter2    = (IBaseFilter)obj3;
                obj4           = Activator.CreateInstance(typeFromCLSID);
                sampleGrabber2 = (ISampleGrabber)obj4;
                baseFilter3    = (IBaseFilter)obj4;
                filterGraph.AddFilter(baseFilter, "source");
                filterGraph.AddFilter(baseFilter2, "grabber_video");
                filterGraph.AddFilter(baseFilter3, "grabber_snapshot");
                AMMediaType aMMediaType = new AMMediaType();
                aMMediaType.MajorType = MediaType.Video;
                aMMediaType.SubType   = MediaSubType.RGB24;
                sampleGrabber.SetMediaType(aMMediaType);
                sampleGrabber2.SetMediaType(aMMediaType);
                captureGraphBuilder.FindInterface(FindDirection.UpstreamOnly, Guid.Empty, baseFilter, typeof(IAMCrossbar).GUID, out retInterface);
                if (retInterface != null)
                {
                    iAMCrossbar = (IAMCrossbar)retInterface;
                }
                isCrossbarAvailable = (iAMCrossbar != null);
                crossbarVideoInputs = ColletCrossbarVideoInputs(iAMCrossbar);
                if (iAMVideoControl != null)
                {
                    captureGraphBuilder.FindPin(sourceObject, PinDirection.Output, PinCategory.StillImage, MediaType.Video, unconnected: false, 0, out pin);
                    if (pin != null)
                    {
                        iAMVideoControl.GetCaps(pin, out VideoControlFlags flags);
                        flag = ((flags & VideoControlFlags.ExternalTriggerEnable) != 0);
                    }
                }
                sampleGrabber.SetBufferSamples(bufferThem: false);
                sampleGrabber.SetOneShot(oneShot: false);
                sampleGrabber.SetCallback(grabber, 1);
                sampleGrabber2.SetBufferSamples(bufferThem: true);
                sampleGrabber2.SetOneShot(oneShot: false);
                sampleGrabber2.SetCallback(grabber2, 1);
                GetPinCapabilitiesAndConfigureSizeAndRate(captureGraphBuilder, baseFilter, PinCategory.Capture, videoResolution, ref videoCapabilities);
                if (flag)
                {
                    GetPinCapabilitiesAndConfigureSizeAndRate(captureGraphBuilder, baseFilter, PinCategory.StillImage, snapshotResolution, ref snapshotCapabilities);
                }
                else
                {
                    snapshotCapabilities = new VideoCapabilities[0];
                }
                lock (cacheVideoCapabilities)
                {
                    if (videoCapabilities != null && !cacheVideoCapabilities.ContainsKey(deviceMoniker))
                    {
                        cacheVideoCapabilities.Add(deviceMoniker, videoCapabilities);
                    }
                }
                lock (cacheSnapshotCapabilities)
                {
                    if (snapshotCapabilities != null && !cacheSnapshotCapabilities.ContainsKey(deviceMoniker))
                    {
                        cacheSnapshotCapabilities.Add(deviceMoniker, snapshotCapabilities);
                    }
                }
                if (runGraph)
                {
                    captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, baseFilter, null, baseFilter2);
                    if (sampleGrabber.GetConnectedMediaType(aMMediaType) == 0)
                    {
                        VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader));
                        grabber.Width  = videoInfoHeader.BmiHeader.Width;
                        grabber.Height = videoInfoHeader.BmiHeader.Height;
                        aMMediaType.Dispose();
                    }
                    if (flag && provideSnapshots)
                    {
                        captureGraphBuilder.RenderStream(PinCategory.StillImage, MediaType.Video, baseFilter, null, baseFilter3);
                        if (sampleGrabber2.GetConnectedMediaType(aMMediaType) == 0)
                        {
                            VideoInfoHeader videoInfoHeader2 = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader));
                            grabber2.Width  = videoInfoHeader2.BmiHeader.Width;
                            grabber2.Height = videoInfoHeader2.BmiHeader.Height;
                            aMMediaType.Dispose();
                        }
                    }
                    mediaControl = (IMediaControl)obj2;
                    mediaEventEx = (IMediaEventEx)obj2;
                    mediaControl.Run();
                    if (flag && provideSnapshots)
                    {
                        startTime = DateTime.Now;
                        iAMVideoControl.SetMode(pin, VideoControlFlags.ExternalTriggerEnable);
                    }
                    do
                    {
                        if (mediaEventEx != null && mediaEventEx.GetEvent(out DsEvCode lEventCode, out IntPtr lParam, out IntPtr lParam2, 0) >= 0)
                        {
                            mediaEventEx.FreeEventParams(lEventCode, lParam, lParam2);
                            if (lEventCode == DsEvCode.DeviceLost)
                            {
                                reason = ReasonToFinishPlaying.DeviceLost;
                                break;
                            }
                        }
                        if (needToSetVideoInput)
                        {
                            needToSetVideoInput = false;
                            if (isCrossbarAvailable.Value)
                            {
                                SetCurrentCrossbarInput(iAMCrossbar, crossbarVideoInput);
                                crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar);
                            }
                        }
                        if (needToSimulateTrigger)
                        {
                            needToSimulateTrigger = false;
                            if (flag && provideSnapshots)
                            {
                                iAMVideoControl.SetMode(pin, VideoControlFlags.Trigger);
                            }
                        }
                        if (needToDisplayPropertyPage)
                        {
                            needToDisplayPropertyPage = false;
                            DisplayPropertyPage(parentWindowForPropertyPage, sourceObject);
                            if (iAMCrossbar != null)
                            {
                                crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar);
                            }
                        }
                        if (needToDisplayCrossBarPropertyPage)
                        {
                            needToDisplayCrossBarPropertyPage = false;
                            if (iAMCrossbar != null)
                            {
                                DisplayPropertyPage(parentWindowForPropertyPage, iAMCrossbar);
                                crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar);
                            }
                        }
                    }while (!stopEvent.WaitOne(100, exitContext: false));
                    mediaControl.Stop();
                }
            }
            catch (Exception ex)
            {
                if (this.VideoSourceError != null)
                {
                    this.VideoSourceError(this, new VideoSourceErrorEventArgs(ex.Message));
                }
            }
            finally
            {
                captureGraphBuilder = null;
                filterGraph         = null;
                baseFilter          = null;
                mediaControl        = null;
                iAMVideoControl     = null;
                mediaEventEx        = null;
                pin            = null;
                iAMCrossbar    = null;
                baseFilter2    = null;
                baseFilter3    = null;
                sampleGrabber  = null;
                sampleGrabber2 = null;
                if (obj2 != null)
                {
                    Marshal.ReleaseComObject(obj2);
                    obj2 = null;
                }
                if (sourceObject != null)
                {
                    Marshal.ReleaseComObject(sourceObject);
                    sourceObject = null;
                }
                if (obj3 != null)
                {
                    Marshal.ReleaseComObject(obj3);
                    obj3 = null;
                }
                if (obj4 != null)
                {
                    Marshal.ReleaseComObject(obj4);
                    obj4 = null;
                }
                if (obj != null)
                {
                    Marshal.ReleaseComObject(obj);
                    obj = null;
                }
                if (retInterface != null)
                {
                    Marshal.ReleaseComObject(retInterface);
                    retInterface = null;
                }
            }
            if (this.PlayingFinished != null)
            {
                this.PlayingFinished(this, reason);
            }
        }
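The DeviceLost branch in the loop above only records the reason; it is the PlayingFinished event raised at the very end that tells the caller why capture stopped. A minimal consumer sketch (hypothetical wiring; `videoSource` stands for an instance of the class above, and the event-args shape is assumed to follow the AForge-style types used in the code):

        // Hypothetical consumer -- not part of the original class.
        videoSource.PlayingFinished += (sender, reasonToStop) =>
        {
            if (reasonToStop == ReasonToFinishPlaying.DeviceLost)
            {
                Console.WriteLine("Capture device was disconnected.");
            }
        };
        videoSource.VideoSourceError += (sender, args) =>
        {
            // Description is assumed to carry the exception message raised above
            Console.WriteLine("Video source error: " + args.Description);
        };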
Code example #16
0
        private static Bitmap GetBitmap(IGraphBuilder graph, ISampleGrabber sg, long grabPosition, out EventCode ec)
        {
            IntPtr pBuffer     = IntPtr.Zero;
            int    pBufferSize = 0;
            Bitmap b           = null;
            int    hr          = 0;

            try
            {
                IMediaSeeking ims = graph as IMediaSeeking;

                bool canDuration = false;
                bool canPos      = false;
                bool canSeek     = false;
                long pDuration   = 0;
                long pCurrent    = 0;

                if (ims != null)
                {
                    AMSeekingSeekingCapabilities caps;

                    hr = ims.GetCapabilities(out caps);
                    if ((caps & AMSeekingSeekingCapabilities.CanGetDuration) == AMSeekingSeekingCapabilities.CanGetDuration)
                    {
                        canDuration = true;
                    }
                    if ((caps & AMSeekingSeekingCapabilities.CanGetCurrentPos) == AMSeekingSeekingCapabilities.CanGetCurrentPos)
                    {
                        canPos = true;
                    }
                    if ((caps & AMSeekingSeekingCapabilities.CanSeekAbsolute) == AMSeekingSeekingCapabilities.CanSeekAbsolute)
                    {
                        canSeek = true;
                    }

                    if (canDuration)
                    {
                        hr = ims.GetDuration(out pDuration);
                    }

                    // clamp the requested position to the clip duration, when the duration is known
                    if (canDuration && grabPosition > pDuration)
                    {
                        grabPosition = pDuration - 1;
                    }

                    if (canSeek)
                    {
                        hr = ims.SetPositions(new DsLong(grabPosition), AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning);
                        DsError.ThrowExceptionForHR(hr);
                    }

                    if (canPos)
                    {
                        hr = ims.GetCurrentPosition(out pCurrent);
                    }
                }

                IMediaControl mControl = graph as IMediaControl;
                IMediaEvent   mEvent   = graph as IMediaEvent;

                //ec = EventCode.SystemBase;

                hr = mControl.Pause();
                DsError.ThrowExceptionForHR(hr);

                hr = mControl.Run();
                DsError.ThrowExceptionForHR(hr);

                hr = mEvent.WaitForCompletion(int.MaxValue, out ec);
                DsError.ThrowExceptionForHR(hr);

                hr = mControl.Pause();
                DsError.ThrowExceptionForHR(hr);

                hr = mControl.Stop();
                DsError.ThrowExceptionForHR(hr);

                if (ec != EventCode.Complete)
                {
                    return(null);
                }

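                // query the required buffer size first (pBuffer is still IntPtr.Zero here),
                // then allocate that much memory and call again to copy the frame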
                hr = sg.GetCurrentBuffer(ref pBufferSize, pBuffer);
                DsError.ThrowExceptionForHR(hr);

                pBuffer = Marshal.AllocCoTaskMem(pBufferSize);

                hr = sg.GetCurrentBuffer(ref pBufferSize, pBuffer);
                DsError.ThrowExceptionForHR(hr);

                if (pBuffer != IntPtr.Zero)
                {
                    AMMediaType sgMt        = new AMMediaType();
                    int         videoWidth  = 0;
                    int         videoHeight = 0;
                    int         stride      = 0;

                    try
                    {
                        hr = sg.GetConnectedMediaType(sgMt);
                        DsError.ThrowExceptionForHR(hr);

                        if (sgMt.formatPtr != IntPtr.Zero)
                        {
                            if (sgMt.formatType == FormatType.VideoInfo)
                            {
                                VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(sgMt.formatPtr, typeof(VideoInfoHeader));
                                videoWidth  = vih.BmiHeader.Width;
                                videoHeight = vih.BmiHeader.Height;
                                stride      = videoWidth * (vih.BmiHeader.BitCount / 8);
                            }
                            else
                            {
                                throw new ApplicationException("Unsupported Sample");
                            }

                            b = new Bitmap(videoWidth, videoHeight, stride, System.Drawing.Imaging.PixelFormat.Format32bppRgb, pBuffer);
                            b.RotateFlip(RotateFlipType.RotateNoneFlipY);
                        }
                    }
                    finally
                    {
                        DsUtils.FreeAMMediaType(sgMt);
                    }
                }

                return(b);
            }
            finally
            {
                if (pBuffer != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(pBuffer);
                }
            }
        }
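A hedged usage sketch for GetBitmap above: it assumes a graph and sample grabber were built and connected elsewhere, with SetBufferSamples(true) enabled so GetCurrentBuffer returns data; the variable names below are illustrative only.

        // Hypothetical caller -- assumes "graph" (IGraphBuilder) and "sg" (ISampleGrabber)
        // are already wired into the playback path with buffering enabled.
        EventCode ec;
        long grabAt = 5 * 10000000L;   // IMediaSeeking positions are in 100 ns units, i.e. 5 seconds
        Bitmap frame = GetBitmap(graph, sg, grabAt, out ec);
        if (frame != null && ec == EventCode.Complete)
        {
            frame.Save("frame.png", System.Drawing.Imaging.ImageFormat.Png);
            frame.Dispose();
        }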
Code example #17
0
File: VideoStream.cs Project: nagyistoce/openvss
        // Thread entry point
        public void WorkerThread()
        {
            bool failed = false;

            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObj   = null;
            object sourceObj  = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder     graph       = null;
            IBaseFilter       sourceBase  = null;
            IBaseFilter       grabberBase = null;
            ISampleGrabber    sg          = null;
            IFileSourceFilter fileSource  = null;
            IMediaControl     mc          = null;
            IMediaEventEx     mediaEvent  = null;

            int code, param1, param2;

            while ((!failed) && (!stopEvent.WaitOne(0, true)))
            {
                try
                {
                    // Get type for filter graph
                    Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating filter graph");
                    }

                    // create filter graph
                    graphObj = Activator.CreateInstance(srvType);
                    graph    = (IGraphBuilder)graphObj;

                    // Get type for windows media source filter
                    srvType = Type.GetTypeFromCLSID(Clsid.WindowsMediaSource);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating WM source");
                    }

                    // create windows media source filter
                    sourceObj  = Activator.CreateInstance(srvType);
                    sourceBase = (IBaseFilter)sourceObj;

                    // Get type for sample grabber
                    srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                    if (srvType == null)
                    {
                        throw new ApplicationException("Failed creating sample grabber");
                    }

                    // create sample grabber
                    grabberObj  = Activator.CreateInstance(srvType);
                    sg          = (ISampleGrabber)grabberObj;
                    grabberBase = (IBaseFilter)grabberObj;

                    // add source filter to graph
                    graph.AddFilter(sourceBase, "source");
                    graph.AddFilter(grabberBase, "grabber");

                    // set media type
                    AMMediaType mt = new AMMediaType();
                    mt.majorType = MediaType.Video;
                    mt.subType   = MediaSubType.RGB24;
                    sg.SetMediaType(mt);

                    // load file
                    fileSource = (IFileSourceFilter)sourceObj;
                    fileSource.Load(this.source, null);

                    // connect pins
                    if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
                    {
                        throw new ApplicationException("Failed connecting filters");
                    }

                    // get media type
                    if (sg.GetConnectedMediaType(mt) == 0)
                    {
                        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));

                        grabber.Width  = vih.BmiHeader.Width;
                        grabber.Height = vih.BmiHeader.Height;
                        mt.Dispose();
                    }

                    // render
                    graph.Render(DSTools.GetOutPin(grabberBase, 0));

                    //
                    sg.SetBufferSamples(false);
                    sg.SetOneShot(false);
                    sg.SetCallback(grabber, 1);

                    // window
                    IVideoWindow win = (IVideoWindow)graphObj;
                    win.put_AutoShow(false);
                    win = null;

                    // get events interface
                    mediaEvent = (IMediaEventEx)graphObj;

                    // get media control
                    mc = (IMediaControl)graphObj;

                    // run
                    mc.Run();

                    while (!stopEvent.WaitOne(0, true))
                    {
                        Thread.Sleep(100);

                        // get an event
                        if (mediaEvent.GetEvent(out code, out param1, out param2, 0) == 0)
                        {
                            // release params
                            mediaEvent.FreeEventParams(code, param1, param2);

                            //
                            if (code == (int)EventCode.Complete)
                            {
                                break;
                            }
                        }
                    }

                    mc.StopWhenReady();
                }
                // catch any exceptions
                catch (Exception e)
                {
                    System.Diagnostics.Debug.WriteLine("----: " + e.Message);
                    failed = true;
                }
                // finalization block
                finally
                {
                    // release all objects
                    mediaEvent  = null;
                    mc          = null;
                    fileSource  = null;
                    graph       = null;
                    sourceBase  = null;
                    grabberBase = null;
                    sg          = null;

                    if (graphObj != null)
                    {
                        Marshal.ReleaseComObject(graphObj);
                        graphObj = null;
                    }
                    if (sourceObj != null)
                    {
                        Marshal.ReleaseComObject(sourceObj);
                        sourceObj = null;
                    }
                    if (grabberObj != null)
                    {
                        Marshal.ReleaseComObject(grabberObj);
                        grabberObj = null;
                    }
                }
            }
        }
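The sg.SetCallback(grabber, 1) call above relies on the grabber implementing the sample grabber callback interface (ISampleGrabberCB); the second argument 1 selects BufferCB, so each decoded frame arrives as a raw pointer plus length on the streaming thread. A simplified sketch of such a callback class (the project's real Grabber is more involved and is not shown here):

        // Simplified sketch only -- the actual Grabber class in this project differs.
        class Grabber : ISampleGrabberCB
        {
            public int Width  { get; set; }
            public int Height { get; set; }

            // Used when SetCallback(..., 0) is chosen; unused here.
            public int SampleCB(double sampleTime, IMediaSample sample)
            {
                return 0;
            }

            // Used when SetCallback(..., 1) is chosen: copy the frame out before returning,
            // because the buffer belongs to DirectShow and is reused for the next sample.
            public int BufferCB(double sampleTime, IntPtr buffer, int bufferLen)
            {
                byte[] frame = new byte[bufferLen];
                Marshal.Copy(buffer, frame, 0, bufferLen);
                // ... hand the Width x Height, bottom-up RGB24 frame to the consumer ...
                return 0;
            }
        }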
Code example #18
0
        private void WorkerThread()
        {
            ReasonToFinishPlaying reason = ReasonToFinishPlaying.StoppedByUser;
            Grabber        grabber       = new Grabber(this);
            object         obj           = null;
            object         obj2          = null;
            IGraphBuilder  graphBuilder  = null;
            IBaseFilter    filter        = null;
            IBaseFilter    baseFilter    = null;
            ISampleGrabber sampleGrabber = null;
            IMediaControl  mediaControl  = null;
            IMediaEventEx  mediaEventEx  = null;

            try
            {
                Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }
                obj          = Activator.CreateInstance(typeFromCLSID);
                graphBuilder = (IGraphBuilder)obj;
                graphBuilder.AddSourceFilter(fileName, "source", out filter);
                if (filter == null)
                {
                    throw new ApplicationException("Failed creating source filter");
                }
                typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }
                obj2          = Activator.CreateInstance(typeFromCLSID);
                sampleGrabber = (ISampleGrabber)obj2;
                baseFilter    = (IBaseFilter)obj2;
                graphBuilder.AddFilter(baseFilter, "grabber");
                AMMediaType aMMediaType = new AMMediaType();
                aMMediaType.MajorType = MediaType.Video;
                aMMediaType.SubType   = MediaSubType.RGB24;
                sampleGrabber.SetMediaType(aMMediaType);
                int  num   = 0;
                IPin inPin = Tools.GetInPin(baseFilter, 0);
                IPin pin   = null;
                while (true)
                {
                    pin = Tools.GetOutPin(filter, num);
                    if (pin == null)
                    {
                        Marshal.ReleaseComObject(inPin);
                        throw new ApplicationException("Did not find acceptable output video pin in the given source");
                    }
                    if (graphBuilder.Connect(pin, inPin) >= 0)
                    {
                        break;
                    }
                    Marshal.ReleaseComObject(pin);
                    pin = null;
                    num++;
                }
                Marshal.ReleaseComObject(pin);
                Marshal.ReleaseComObject(inPin);
                if (sampleGrabber.GetConnectedMediaType(aMMediaType) == 0)
                {
                    VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader));
                    grabber.Width  = videoInfoHeader.BmiHeader.Width;
                    grabber.Height = videoInfoHeader.BmiHeader.Height;
                    aMMediaType.Dispose();
                }
                if (!preventFreezing)
                {
                    graphBuilder.Render(Tools.GetOutPin(baseFilter, 0));
                    IVideoWindow videoWindow = (IVideoWindow)obj;
                    videoWindow.put_AutoShow(autoShow: false);
                    videoWindow = null;
                }
                sampleGrabber.SetBufferSamples(bufferThem: false);
                sampleGrabber.SetOneShot(oneShot: false);
                sampleGrabber.SetCallback(grabber, 1);
                if (!referenceClockEnabled)
                {
                    IMediaFilter mediaFilter = (IMediaFilter)obj;
                    mediaFilter.SetSyncSource(null);
                }
                mediaControl = (IMediaControl)obj;
                mediaEventEx = (IMediaEventEx)obj;
                mediaControl.Run();
                do
                {
                    if (mediaEventEx != null && mediaEventEx.GetEvent(out DsEvCode lEventCode, out IntPtr lParam, out IntPtr lParam2, 0) >= 0)
                    {
                        mediaEventEx.FreeEventParams(lEventCode, lParam, lParam2);
                        if (lEventCode == DsEvCode.Complete)
                        {
                            reason = ReasonToFinishPlaying.EndOfStreamReached;
                            break;
                        }
                    }
                }while (!stopEvent.WaitOne(100, exitContext: false));
                mediaControl.Stop();
            }
            catch (Exception ex)
            {
                if (this.VideoSourceError != null)
                {
                    this.VideoSourceError(this, new VideoSourceErrorEventArgs(ex.Message));
                }
            }
            finally
            {
                graphBuilder  = null;
                baseFilter    = null;
                sampleGrabber = null;
                mediaControl  = null;
                mediaEventEx  = null;
                if (obj != null)
                {
                    Marshal.ReleaseComObject(obj);
                    obj = null;
                }
                if (filter != null)
                {
                    Marshal.ReleaseComObject(filter);
                    filter = null;
                }
                if (obj2 != null)
                {
                    Marshal.ReleaseComObject(obj2);
                    obj2 = null;
                }
            }
            if (this.PlayingFinished != null)
            {
                this.PlayingFinished(this, reason);
            }
        }
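The decompiled worker above matches an AForge-style FileVideoSource. A hedged sketch of how client code typically drives such a source, assuming the usual Start/SignalToStop/NewFrame surface (member names are assumptions if this class deviates from AForge):

        // Hedged usage sketch, assuming the standard AForge-style members.
        FileVideoSource videoSource = new FileVideoSource("movie.avi");
        videoSource.NewFrame += (sender, eventArgs) =>
        {
            Bitmap frame = eventArgs.Frame;
            // the source reuses/disposes this bitmap after the handler returns,
            // so clone it if it must outlive the event
        };
        videoSource.Start();        // spawns the WorkerThread shown above
        // ...
        videoSource.SignalToStop(); // sets stopEvent; the polling loop exits within ~100 ms
        videoSource.WaitForStop();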
Code example #19
0
        private void WorkerThread(bool runGraph)
        {
            ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;
            bool isSnapshotSupported           = false;


            Grabber videoGrabber    = new Grabber(this, false);
            Grabber snapshotGrabber = new Grabber(this, true);


            object captureGraphObject    = null;
            object graphObject           = null;
            object videoGrabberObject    = null;
            object snapshotGrabberObject = null;
            object crossbarObject        = null;


            ICaptureGraphBuilder2 captureGraph          = null;
            IFilterGraph2         graph                 = null;
            IBaseFilter           sourceBase            = null;
            IBaseFilter           videoGrabberBase      = null;
            IBaseFilter           snapshotGrabberBase   = null;
            ISampleGrabber        videoSampleGrabber    = null;
            ISampleGrabber        snapshotSampleGrabber = null;
            IMediaControl         mediaControl          = null;
            IAMVideoControl       videoControl          = null;
            IMediaEventEx         mediaEvent            = null;
            IPin        pinStillImage = null;
            IAMCrossbar crossbar      = null;

            try
            {
                Type type = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating capture graph builder");
                }


                captureGraphObject = Activator.CreateInstance(type);
                captureGraph       = (ICaptureGraphBuilder2)captureGraphObject;


                type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }


                graphObject = Activator.CreateInstance(type);
                graph       = (IFilterGraph2)graphObject;


                captureGraph.SetFiltergraph((IGraphBuilder)graph);


                sourceObject = FilterInfo.CreateFilter(deviceMoniker);
                if (sourceObject == null)
                {
                    throw new ApplicationException("Failed creating device object for moniker");
                }


                sourceBase = (IBaseFilter)sourceObject;


                try
                {
                    videoControl = (IAMVideoControl)sourceObject;
                }
                catch
                {
                }


                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }


                videoGrabberObject = Activator.CreateInstance(type);
                videoSampleGrabber = (ISampleGrabber)videoGrabberObject;
                videoGrabberBase   = (IBaseFilter)videoGrabberObject;

                snapshotGrabberObject = Activator.CreateInstance(type);
                snapshotSampleGrabber = (ISampleGrabber)snapshotGrabberObject;
                snapshotGrabberBase   = (IBaseFilter)snapshotGrabberObject;


                graph.AddFilter(sourceBase, "source");
                graph.AddFilter(videoGrabberBase, "grabber_video");
                graph.AddFilter(snapshotGrabberBase, "grabber_snapshot");


                AMMediaType mediaType = new AMMediaType( );
                mediaType.MajorType = MediaType.Video;
                mediaType.SubType   = MediaSubType.RGB24;

                videoSampleGrabber.SetMediaType(mediaType);
                snapshotSampleGrabber.SetMediaType(mediaType);


                captureGraph.FindInterface(FindDirection.UpstreamOnly, Guid.Empty, sourceBase, typeof(IAMCrossbar).GUID, out crossbarObject);
                if (crossbarObject != null)
                {
                    crossbar = (IAMCrossbar)crossbarObject;
                }
                isCrossbarAvailable = (crossbar != null);
                crossbarVideoInputs = ColletCrossbarVideoInputs(crossbar);

                if (videoControl != null)
                {
                    captureGraph.FindPin(sourceObject, PinDirection.Output,
                                         PinCategory.StillImage, MediaType.Video, false, 0, out pinStillImage);

                    if (pinStillImage != null)
                    {
                        VideoControlFlags caps;
                        videoControl.GetCaps(pinStillImage, out caps);
                        isSnapshotSupported = ((caps & VideoControlFlags.ExternalTriggerEnable) != 0);
                    }
                }


                videoSampleGrabber.SetBufferSamples(false);
                videoSampleGrabber.SetOneShot(false);
                videoSampleGrabber.SetCallback(videoGrabber, 1);


                snapshotSampleGrabber.SetBufferSamples(true);
                snapshotSampleGrabber.SetOneShot(false);
                snapshotSampleGrabber.SetCallback(snapshotGrabber, 1);


                GetPinCapabilitiesAndConfigureSizeAndRate(captureGraph, sourceBase,
                                                          PinCategory.Capture, videoResolution, ref videoCapabilities);
                if (isSnapshotSupported)
                {
                    GetPinCapabilitiesAndConfigureSizeAndRate(captureGraph, sourceBase,
                                                              PinCategory.StillImage, snapshotResolution, ref snapshotCapabilities);
                }
                else
                {
                    snapshotCapabilities = new VideoCapabilities[0];
                }


                lock ( cacheVideoCapabilities )
                {
                    if ((videoCapabilities != null) && (!cacheVideoCapabilities.ContainsKey(deviceMoniker)))
                    {
                        cacheVideoCapabilities.Add(deviceMoniker, videoCapabilities);
                    }
                }
                lock ( cacheSnapshotCapabilities )
                {
                    if ((snapshotCapabilities != null) && (!cacheSnapshotCapabilities.ContainsKey(deviceMoniker)))
                    {
                        cacheSnapshotCapabilities.Add(deviceMoniker, snapshotCapabilities);
                    }
                }

                if (runGraph)
                {
                    captureGraph.RenderStream(PinCategory.Capture, MediaType.Video, sourceBase, null, videoGrabberBase);

                    if (videoSampleGrabber.GetConnectedMediaType(mediaType) == 0)
                    {
                        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                        videoGrabber.Width  = vih.BmiHeader.Width;
                        videoGrabber.Height = vih.BmiHeader.Height;

                        mediaType.Dispose( );
                    }

                    if ((isSnapshotSupported) && (provideSnapshots))
                    {
                        captureGraph.RenderStream(PinCategory.StillImage, MediaType.Video, sourceBase, null, snapshotGrabberBase);

                        if (snapshotSampleGrabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                            snapshotGrabber.Width  = vih.BmiHeader.Width;
                            snapshotGrabber.Height = vih.BmiHeader.Height;

                            mediaType.Dispose( );
                        }
                    }


                    mediaControl = (IMediaControl)graphObject;


                    mediaEvent = (IMediaEventEx)graphObject;
                    IntPtr   p1, p2;
                    DsEvCode code;


                    mediaControl.Run( );

                    if ((isSnapshotSupported) && (provideSnapshots))
                    {
                        startTime = DateTime.Now;
                        videoControl.SetMode(pinStillImage, VideoControlFlags.ExternalTriggerEnable);
                    }

                    do
                    {
                        if (mediaEvent != null)
                        {
                            if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                            {
                                mediaEvent.FreeEventParams(code, p1, p2);

                                if (code == DsEvCode.DeviceLost)
                                {
                                    reasonToStop = ReasonToFinishPlaying.DeviceLost;
                                    break;
                                }
                            }
                        }

                        if (needToSetVideoInput)
                        {
                            needToSetVideoInput = false;

                            if (isCrossbarAvailable.Value)
                            {
                                SetCurrentCrossbarInput(crossbar, crossbarVideoInput);
                                crossbarVideoInput = GetCurrentCrossbarInput(crossbar);
                            }
                        }

                        if (needToSimulateTrigger)
                        {
                            needToSimulateTrigger = false;

                            if ((isSnapshotSupported) && (provideSnapshots))
                            {
                                videoControl.SetMode(pinStillImage, VideoControlFlags.Trigger);
                            }
                        }

                        if (needToDisplayPropertyPage)
                        {
                            needToDisplayPropertyPage = false;
                            DisplayPropertyPage(parentWindowForPropertyPage, sourceObject);

                            if (crossbar != null)
                            {
                                crossbarVideoInput = GetCurrentCrossbarInput(crossbar);
                            }
                        }

                        if (needToDisplayCrossBarPropertyPage)
                        {
                            needToDisplayCrossBarPropertyPage = false;

                            if (crossbar != null)
                            {
                                DisplayPropertyPage(parentWindowForPropertyPage, crossbar);
                                crossbarVideoInput = GetCurrentCrossbarInput(crossbar);
                            }
                        }
                    }while (!stopEvent.WaitOne(100, false));

                    mediaControl.Stop( );
                }
            }
            catch (Exception exception)
            {
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
                }
            }
            finally
            {
                captureGraph  = null;
                graph         = null;
                sourceBase    = null;
                mediaControl  = null;
                videoControl  = null;
                mediaEvent    = null;
                pinStillImage = null;
                crossbar      = null;

                videoGrabberBase      = null;
                snapshotGrabberBase   = null;
                videoSampleGrabber    = null;
                snapshotSampleGrabber = null;

                if (graphObject != null)
                {
                    Marshal.ReleaseComObject(graphObject);
                    graphObject = null;
                }
                if (sourceObject != null)
                {
                    Marshal.ReleaseComObject(sourceObject);
                    sourceObject = null;
                }
                if (videoGrabberObject != null)
                {
                    Marshal.ReleaseComObject(videoGrabberObject);
                    videoGrabberObject = null;
                }
                if (snapshotGrabberObject != null)
                {
                    Marshal.ReleaseComObject(snapshotGrabberObject);
                    snapshotGrabberObject = null;
                }
                if (captureGraphObject != null)
                {
                    Marshal.ReleaseComObject(captureGraphObject);
                    captureGraphObject = null;
                }
                if (crossbarObject != null)
                {
                    Marshal.ReleaseComObject(crossbarObject);
                    crossbarObject = null;
                }
            }

            if (PlayingFinished != null)
            {
                PlayingFinished(this, reasonToStop);
            }
        }
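The do/while loop above polls a set of boolean flags every 100 ms, so control requests from other threads reduce to setting a flag and letting the worker perform the COM call on its own thread. A hedged sketch of what the corresponding public methods might look like (method names are illustrative; the project's real API may differ):

        // Hedged sketch -- illustrative only.
        public void SimulateTrigger()
        {
            needToSimulateTrigger = true;       // picked up by the polling loop above
        }

        public void DisplayPropertyPage(IntPtr parentWindow)
        {
            parentWindowForPropertyPage = parentWindow;
            needToDisplayPropertyPage   = true; // the worker shows the page, then re-reads the crossbar input
        }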
Code example #20
0
File: CapDevice.cs Project: evilmachina/theMachine
        void RunWorker()
        {
            try
            {

                graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                sourceObject = FilterInfo.CreateFilter(deviceMoniker);

                grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                grabberObject = grabber as IBaseFilter;

                graph.AddFilter(sourceObject, "source");
                graph.AddFilter(grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    grabber.SetMediaType(mediaType);

                    if (graph.Connect(sourceObject.GetPin(PinDirection.Output, 0), grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            capGrabber.Width = header.BmiHeader.Width;
                            capGrabber.Height = header.BmiHeader.Height;
                        }
                    }
                    graph.Render(grabberObject.GetPin(PinDirection.Output, 0));
                    grabber.SetBufferSamples(false);
                    grabber.SetOneShot(false);
                    grabber.SetCallback(capGrabber, 1);

                    IVideoWindow wnd = (IVideoWindow)graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    control = (IMediaControl)graph;
                    control.Run();

                    while (!stopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    control.StopWhenReady();
                }
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(ex);
            }
            finally
            {
                graph = null;
                sourceObject = null;
                grabberObject = null;
                grabber = null;
                capGrabber = null;
                control = null;

            }
        }
Code example #21
0
File: DirectShowCapture.cs Project: hpavlov/occurec
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            AMMediaType media = new AMMediaType();
            int hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
            videoWidth = videoInfoHeader.BmiHeader.Width;
            videoHeight = videoInfoHeader.BmiHeader.Height;
            stride = videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);

            DsUtils.FreeAMMediaType(media);
        }
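A hedged companion sketch: once SaveSizeInfo has filled videoWidth, videoHeight and stride, a buffer obtained from the sample grabber can be wrapped in a Bitmap. The RGB24 pixel format is an assumption consistent with the per-pixel stride computation above; DirectShow delivers RGB frames bottom-up, hence the flip.

        // Hypothetical helper -- not part of the original file.
        private Bitmap BufferToBitmap(IntPtr pBuffer)
        {
            // the Bitmap does not copy the data, so pBuffer must stay valid while bmp is in use
            Bitmap bmp = new Bitmap(videoWidth, videoHeight, stride,
                                    System.Drawing.Imaging.PixelFormat.Format24bppRgb, pBuffer);
            bmp.RotateFlip(RotateFlipType.RotateNoneFlipY);
            return bmp;
        }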
Code example #22
0
File: FileVideoSource.cs Project: morphx666/Prism
        private void CreateFilters()
        {
            isValid = true;

            // grabber
            grabberVideo = new GrabberVideo(this);
            grabberAudio = new GrabberAudio(this);

            // objects
            graphObject        = null;
            grabberObjectVideo = null;
            grabberObjectAudio = null;

            int sourceBaseVideoPinIndex = 0;

            try {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                if (fileName.ToLower().EndsWith(".wmv"))
                {
                    type = Type.GetTypeFromCLSID(Clsid.WMASFReader);
                    if (type == null)
                    {
                        throw new ApplicationException("Failed creating ASF Reader filter");
                    }
                    sourceBase = (IBaseFilter)Activator.CreateInstance(type);
                    IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase;
                    sourceFile.Load(fileName, null);
                    graph.AddFilter(sourceBase, "source");
                    sourceBaseVideoPinIndex = 1;
                }
                else
                {
                    graph.AddSourceFilter(fileName, "source", out sourceBase);
                    if (sourceBase == null)
                    {
                        try {
                            type = Type.GetTypeFromCLSID(Clsid.AsyncReader);
                            if (type == null)
                            {
                                throw new ApplicationException("Failed creating Async Reader filter");
                            }
                            sourceBase = (IBaseFilter)Activator.CreateInstance(type);
                            IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase;
                            sourceFile.Load(fileName, null);
                            graph.AddFilter(sourceBase, "source");
                        } catch {
                            throw new ApplicationException("Failed creating source filter");
                        }
                    }
                    sourceBaseVideoPinIndex = 0;
                }

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObjectVideo = Activator.CreateInstance(type);
                sampleGrabberVideo = (ISampleGrabber)grabberObjectVideo;
                grabberBaseVideo   = (IBaseFilter)grabberObjectVideo;

                // add grabber filters to graph
                graph.AddFilter(grabberBaseVideo, "grabberVideo");

                // set media type
                AMMediaType mediaType = new AMMediaType {
                    MajorType = MediaType.Video,
                    SubType   = MediaSubType.ARGB32 /* MediaSubType.RGB24 */
                };
                sampleGrabberVideo.SetMediaType(mediaType);

                // connect pins
                IPin outPin = Tools.GetOutPin(sourceBase, sourceBaseVideoPinIndex);
                IPin inPin  = Tools.GetInPin(grabberBaseVideo, 0);
                if (graph.Connect(outPin, inPin) < 0)
                {
                    throw new ApplicationException("Failed connecting sourceBase to grabberBaseVideo");
                }
                Marshal.ReleaseComObject(outPin);
                Marshal.ReleaseComObject(inPin);

                // get media type
                if (sampleGrabberVideo.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    grabberVideo.Width  = vih.BmiHeader.Width;
                    grabberVideo.Height = vih.BmiHeader.Height;
                    mediaType.Dispose();
                }

                if (useAudioGrabber)
                {
                    // *****************************************************************
                    // ******** Add the audio grabber to monitor audio peaks ***********
                    bool audioGrabberIsConnected  = false;
                    Tools.FilterInfo2 filterInfo2 = Tools.GetNextFilter(sourceBase, PinDirection.Output, 0);
                    foreach (Tools.PinInfo2 pinInfo2 in filterInfo2.Pins)
                    {
                        if (pinInfo2.PinInfo.Direction == PinDirection.Output)
                        {
                            if (!Tools.IsPinConnected(pinInfo2.Pin))
                            {
                                try {
                                    graph.Render(pinInfo2.Pin);

                                    AMMediaType mt = new AMMediaType();
                                    pinInfo2.Pin.ConnectionMediaType(mt);
                                    if (mt.MajorType == MediaType.Audio)
                                    {
                                        // Obtain a reference to the filter connected to the audio output of the video splitter (usually, this is the audio decoder)
                                        Tools.FilterInfo2 decoderFilterInfo2 = Tools.GetNextFilter(pinInfo2.PinInfo.Filter, PinDirection.Output, 0);

                                        // Remove all the filters connected to the audio decoder filter
                                        System.Collections.Generic.List <Tools.FilterInfo2> filtersInfo2 = new System.Collections.Generic.List <Tools.FilterInfo2>();
                                        Tools.FilterInfo2 testFilterInfo2 = Tools.GetNextFilter(decoderFilterInfo2.Filter, PinDirection.Output, 0);
                                        while (true)
                                        {
                                            filtersInfo2.Add(testFilterInfo2);
                                            testFilterInfo2 = Tools.GetNextFilter(testFilterInfo2.Filter, PinDirection.Output, 0);
                                            if (testFilterInfo2.Filter == null)
                                            {
                                                break;
                                            }
                                        }
                                        foreach (Tools.FilterInfo2 fi2 in filtersInfo2)
                                        {
                                            graph.RemoveFilter(fi2.Filter);
                                            fi2.Release();
                                        }

                                        // get type for sample grabber
                                        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                                        if (type == null)
                                        {
                                            throw new ApplicationException("Failed creating audio sample grabber");
                                        }

                                        // create sample grabber
                                        grabberObjectAudio = Activator.CreateInstance(type);
                                        sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                                        grabberBaseAudio   = (IBaseFilter)grabberObjectAudio;

                                        // add grabber filters to graph
                                        graph.AddFilter(grabberBaseAudio, "grabberAudio");

                                        // set media type
                                        AMMediaType mediaTypeAudio = new AMMediaType {
                                            MajorType  = MediaType.Audio,
                                            SubType    = MediaSubType.PCM,
                                            FormatType = FormatType.WaveEx
                                        };
                                        sampleGrabberAudio.SetMediaType(mediaTypeAudio);

                                        outPin = Tools.GetOutPin(decoderFilterInfo2.Filter, 0);
                                        inPin  = Tools.GetInPin(grabberBaseAudio, 0);
                                        if (graph.Connect(outPin, inPin) < 0)
                                        {
                                            throw new ApplicationException("Failed connecting filter to grabberBaseAudio");
                                        }
                                        Marshal.ReleaseComObject(outPin);
                                        Marshal.ReleaseComObject(inPin);

                                        // Finally, connect the grabber to the audio renderer
                                        outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                                        graph.Render(outPin);

                                        mt = new AMMediaType();
                                        outPin.ConnectionMediaType(mt);
                                        if (!Tools.IsPinConnected(outPin))
                                        {
                                            throw new ApplicationException("Failed obtaining media audio information");
                                        }
                                        wavFormat = new WaveFormatEx();
                                        Marshal.PtrToStructure(mt.FormatPtr, wavFormat);
                                        Marshal.ReleaseComObject(outPin);

                                        // configure sample grabber
                                        sampleGrabberAudio.SetBufferSamples(false);
                                        sampleGrabberAudio.SetOneShot(false);
                                        sampleGrabberAudio.SetCallback(grabberAudio, 1);

                                        audioGrabberIsConnected = true;
                                        break;
                                    }
                                } catch {
                                }
                            }
                        }
                    }
                    filterInfo2.Release();
                    if (!audioGrabberIsConnected)
                    {
                        foreach (Tools.PinInfo2 pinInfo2 in Tools.GetPins(sourceBase))
                        {
                            if (!Tools.IsPinConnected(pinInfo2.Pin))
                            {
                                foreach (AMMediaType mt in Tools.GetMediaTypes(pinInfo2.Pin))
                                {
                                    if (mt.MajorType == MediaType.Audio)
                                    {
                                        // create sample grabber
                                        grabberObjectAudio = Activator.CreateInstance(type);
                                        sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                                        grabberBaseAudio   = (IBaseFilter)grabberObjectAudio;

                                        // add grabber filters to graph
                                        graph.AddFilter(grabberBaseAudio, "grabberAudio");

                                        // set media type
                                        AMMediaType mediaTypeAudio = new AMMediaType {
                                            MajorType  = MediaType.Audio,
                                            SubType    = MediaSubType.PCM,
                                            FormatType = FormatType.WaveEx
                                        };
                                        sampleGrabberAudio.SetMediaType(mediaTypeAudio);

                                        inPin = Tools.GetInPin(grabberBaseAudio, 0);
                                        if (graph.Connect(pinInfo2.Pin, inPin) < 0)
                                        {
                                            throw new ApplicationException("Failed connecting sourceBase to grabberBaseAudio");
                                        }
                                        Marshal.ReleaseComObject(inPin);

                                        // Finally, connect the grabber to the audio renderer
                                        outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                                        graph.Render(outPin);

                                        AMMediaType amt = new AMMediaType();
                                        outPin.ConnectionMediaType(amt);
                                        if (!Tools.IsPinConnected(outPin))
                                        {
                                            throw new ApplicationException("Failed obtaining media audio information");
                                        }
                                        wavFormat = new WaveFormatEx();
                                        Marshal.PtrToStructure(amt.FormatPtr, wavFormat);
                                        Marshal.ReleaseComObject(outPin);

                                        // configure sample grabber
                                        sampleGrabberAudio.SetBufferSamples(false);
                                        sampleGrabberAudio.SetOneShot(false);
                                        sampleGrabberAudio.SetCallback(grabberAudio, 1);

                                        audioGrabberIsConnected = true;

                                        break;
                                    }
                                }
                            }
                        }
                    }
                    // *****************************************************************
                }

                // let's do the rendering, if we don't need to prevent freezing
                if (!preventFreezing)
                {
                    // render pin
                    graph.Render(Tools.GetOutPin(grabberBaseVideo, 0));

                    // configure video window
                    IVideoWindow window = (IVideoWindow)graphObject;
                    window.put_AutoShow(false);
                    window = null;
                }

                // configure sample grabber
                sampleGrabberVideo.SetBufferSamples(false);
                sampleGrabberVideo.SetOneShot(false);
                sampleGrabberVideo.SetCallback(grabberVideo, 1);

                // disable clock, if someone requested it
                if (!referenceClockEnabled)
                {
                    IMediaFilter mediaFilter = (IMediaFilter)graphObject;
                    mediaFilter.SetSyncSource(null);
                }

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // get media seek control
                mediaSeekControl = (IMediaSeeking)graphObject;

                // get media events' interface
                mediaEvent = (IMediaEventEx)graphObject;

                // get media audio control
                basicAudio = (IBasicAudio)graphObject;
            } catch (Exception exception) {
                DestroyFilters();

                // provide information to clients
                VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(exception.Message));
            }
        }
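The audio branch above only negotiates a PCM connection to GrabberAudio; the callback itself is not shown. A hedged sketch of how such a callback could compute a peak level per buffer, assuming 16-bit PCM as implied by the WaveEx media type above (the method body is illustrative, not the project's code):

        // Hypothetical BufferCB body for the audio grabber -- illustrative only.
        public int BufferCB(double sampleTime, IntPtr buffer, int bufferLen)
        {
            int count = bufferLen / 2;                   // 16-bit PCM samples
            short[] samples = new short[count];
            Marshal.Copy(buffer, samples, 0, count);

            int peak = 0;
            for (int i = 0; i < count; i++)
            {
                int v = Math.Abs((int)samples[i]);       // widen first so short.MinValue is safe
                if (v > peak) peak = v;
            }
            // report peak / 32768.0 (range 0..1) to whatever monitors the audio level
            return 0;
        }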
Code example #23
0
        public void WorkerThread()
        {
            bool              flag      = false;
            Grabber           pCallback = new Grabber(this);
            object            o         = null;
            object            obj3      = null;
            object            obj4      = null;
            IGraphBuilder     builder   = null;
            IBaseFilter       pFilter   = null;
            IBaseFilter       filter2   = null;
            ISampleGrabber    grabber2  = null;
            IFileSourceFilter filter3   = null;
            IMediaControl     control   = null;
            IMediaEventEx     ex        = null;

            while (!flag && !this.stopEvent.WaitOne(0, true))
            {
                try
                {
                    try
                    {
                        Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                        if (typeFromCLSID == null)
                        {
                            throw new ApplicationException("Failed creating filter graph");
                        }
                        o             = Activator.CreateInstance(typeFromCLSID);
                        builder       = (IGraphBuilder)o;
                        typeFromCLSID = Type.GetTypeFromCLSID(Clsid.WindowsMediaSource);
                        if (typeFromCLSID == null)
                        {
                            throw new ApplicationException("Failed creating WM source");
                        }
                        obj3          = Activator.CreateInstance(typeFromCLSID);
                        pFilter       = (IBaseFilter)obj3;
                        typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                        if (typeFromCLSID == null)
                        {
                            throw new ApplicationException("Failed creating sample grabber");
                        }
                        obj4     = Activator.CreateInstance(typeFromCLSID);
                        grabber2 = (ISampleGrabber)obj4;
                        filter2  = (IBaseFilter)obj4;
                        builder.AddFilter(pFilter, "source");
                        builder.AddFilter(filter2, "grabber");
                        AMMediaType pmt = new AMMediaType {
                            majorType = MediaType.Video,
                            subType   = MediaSubType.RGB24
                        };
                        grabber2.SetMediaType(pmt);
                        filter3 = (IFileSourceFilter)obj3;
                        filter3.Load(this.source, null);
                        if (builder.Connect(DSTools.GetOutPin(pFilter, 0), DSTools.GetInPin(filter2, 0)) < 0)
                        {
                            throw new ApplicationException("Failed connecting filters");
                        }
                        if (grabber2.GetConnectedMediaType(pmt) == 0)
                        {
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(pmt.formatPtr, typeof(VideoInfoHeader));
                            pCallback.Width  = header.BmiHeader.Width;
                            pCallback.Height = header.BmiHeader.Height;
                            pmt.Dispose();
                        }
                        builder.Render(DSTools.GetOutPin(filter2, 0));
                        grabber2.SetBufferSamples(false);
                        grabber2.SetOneShot(false);
                        grabber2.SetCallback(pCallback, 1);
                        IVideoWindow window = (IVideoWindow)o;
                        window.put_AutoShow(false);
                        window  = null;
                        ex      = (IMediaEventEx)o;
                        control = (IMediaControl)o;
                        control.Run();
                        while (!this.stopEvent.WaitOne(0, true))
                        {
                            int num;
                            int num2;
                            int num3;
                            Thread.Sleep(100);
                            if (ex.GetEvent(out num, out num2, out num3, 0) == 0)
                            {
                                ex.FreeEventParams(num, num2, num3);
                                if (num == 1)
                                {
                                    break;
                                }
                            }
                        }
                        control.StopWhenReady();
                    }
                    catch (Exception)
                    {
                        flag = true;
                    }
                    continue;
                }
                finally
                {
                    ex       = null;
                    control  = null;
                    filter3  = null;
                    builder  = null;
                    pFilter  = null;
                    filter2  = null;
                    grabber2 = null;
                    if (o != null)
                    {
                        Marshal.ReleaseComObject(o);
                        o = null;
                    }
                    if (obj3 != null)
                    {
                        Marshal.ReleaseComObject(obj3);
                        obj3 = null;
                    }
                    if (obj4 != null)
                    {
                        Marshal.ReleaseComObject(obj4);
                        obj4 = null;
                    }
                }
            }
        }
Code example #24
0
        void RunWorker()
        {
            try
            {
                graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                sourceObject = FilterInfo.CreateFilter(deviceMoniker);

                grabber       = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                grabberObject = grabber as IBaseFilter;

                graph.AddFilter(sourceObject, "source");
                graph.AddFilter(grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType   = MediaSubTypes.RGB32;
                    grabber.SetMediaType(mediaType);

                    if (graph.Connect(sourceObject.GetPin(PinDirection.Output, 0), grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            capGrabber.Width  = header.BmiHeader.Width;
                            capGrabber.Height = header.BmiHeader.Height;
                        }
                    }
                    graph.Render(grabberObject.GetPin(PinDirection.Output, 0));
                    grabber.SetBufferSamples(false);
                    grabber.SetOneShot(false);
                    grabber.SetCallback(capGrabber, 1);

                    IVideoWindow wnd = (IVideoWindow)graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    control = (IMediaControl)graph;
                    control.Run();

                    while (!stopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    control.StopWhenReady();
                }
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(ex);
            }
            finally
            {
                graph         = null;
                sourceObject  = null;
                grabberObject = null;
                grabber       = null;
                capGrabber    = null;
                control       = null;
            }
        }
Code Example #25
File: CapDevice.cs Project: rverhag/SimpleGrblgui
        /// <summary>
        /// Starts grabbing images from the capture device
        /// </summary>
        public virtual void Start()
        {
            if (_captureTask != null)
            {
                Stop();
            }

            _captureTask = new Task(() =>
            {
                // Create new grabber
                _capGrabber = new CapGrabber();
                _capGrabber.PropertyChanged += capGrabber_PropertyChanged;
                _capGrabber.NewFrameArrived += capGrabber_NewFrameArrived;
                _stopSignal = new ManualResetEvent(false);

                _graph        = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IFilterGraph2;
                _sourceObject = FilterInfo.CreateFilter(_monikerString);

                var outputPin = _sourceObject.GetPin(PinCategory.Capture, 0);
                SelectWebcamResolution(outputPin);

                _grabber       = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                _grabberObject = _grabber as IBaseFilter;

                if (_graph == null)
                {
                    return;
                }

                _graph.AddFilter(_sourceObject, "source");
                _graph.AddFilter(_grabberObject, "grabber");
                using (var mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType   = MediaSubTypes.RGB32;
                    if (_grabber != null)
                    {
                        _grabber.SetMediaType(mediaType);


                        var inputPin = _grabberObject.GetPin(PinDirection.Input, 0);
                        if (_graph.Connect(outputPin, inputPin) >= 0)
                        {
                            if (_grabber.GetConnectedMediaType(mediaType) == 0)
                            {
                                var header         = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                _capGrabber.Width  = header.BmiHeader.Width;
                                _capGrabber.Height = header.BmiHeader.Height;
                            }
                        }
                        _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
                        _grabber.SetBufferSamples(false);
                        _grabber.SetOneShot(false);
                        _grabber.SetCallback(_capGrabber, 1);
                    }

                    // Get the video window
                    var wnd = (IVideoWindow)_graph;
                    wnd.put_AutoShow(false);

                    // Create the control and run
                    _control = (IMediaControl)_graph;

                    _control.Run();

                    // Wait for the stop signal
                    _stopSignal.WaitOne();
                    Cleanup();
                }
            });
            _captureTask.Start();
        }
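Start() above relies on a Stop() method and on Cleanup() running inside the capture task once _stopSignal fires; neither is shown in this snippet. A minimal sketch of what such a Stop() presumably looks like, assuming only the fields used above:

        // Hypothetical companion Stop() (assumption, not from the source): signal the
        // worker loop, then wait for the capture task (which calls Cleanup()) to finish.
        public virtual void Stop()
        {
            if (_captureTask == null)
            {
                return;
            }

            _stopSignal?.Set();   // releases the _stopSignal.WaitOne() inside the task
            _captureTask.Wait();  // Cleanup() runs inside the task after the signal fires
            _captureTask = null;
        }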
Code Example #26
        /// <summary> build the capture graph for grabber. </summary>
        private bool SetupGraph()
        {
            const int WS_CHILD        = 0x40000000;
            const int WS_CLIPCHILDREN = 0x02000000;
            const int WS_CLIPSIBLINGS = 0x04000000;

            int hr;

            hr = _capGraphBuilder2.SetFiltergraph(_graphBuilder);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            hr = _graphBuilder.AddFilter(_capFilter, "Ds.NET Video Capture Device");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }


            // will bring up a dialog for user input on capture quality
            //DsUtils.ShowCapPinDialog(_capGraphBuilder2, _capFilter, IntPtr.Zero);

            AMMediaType media = new AMMediaType();

            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;            //
            hr = _sampGrabber.SetMediaType(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            hr = _graphBuilder.AddFilter(_baseGrabFilter, "Ds.NET Grabber");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            Guid cat;
            Guid med;

            cat = PinCategory.Capture;
            med = MediaType.Video;
            hr  = _capGraphBuilder2.RenderStream(ref cat, ref med, _capFilter, null, _baseGrabFilter);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            media = new AMMediaType();
            hr    = _sampGrabber.GetConnectedMediaType(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            _videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
            Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

            hr = _sampGrabber.SetBufferSamples(false);
            if (hr == 0)
            {
                hr = _sampGrabber.SetOneShot(false);
            }
            if (hr == 0)
            {
                hr = _sampGrabber.SetCallback(null, 0);
            }
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }



            // Render preview (video -> renderer)


            cat = PinCategory.Preview;
            hr  = _capGraphBuilder2.RenderStream(ref cat, ref med, _capFilter, null, null);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }


            // Get the IVideoWindow interface
            _videoWindow = (IVideoWindow)_graphBuilder;
            // Set the video window to be a child of the main window
            hr = _videoWindow.put_Owner(this._viewControl.Handle);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Set video window style
            hr = _videoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            // Position video window in client rect of owner window
            _viewControl.Resize += new EventHandler(onPreviewWindowResize);
            onPreviewWindowResize(this, null);

            //Make the video window visible, now that it is properly positioned
            hr = _videoWindow.put_Visible(DsHlp.OATRUE);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            hr = _mediaCtrl.Run();
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            return(true);
        }
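Every call in SetupGraph() above is followed by the same three-line HRESULT check. A small helper (a sketch, not part of the original class) collapses that pattern:

        // Hypothetical helper wrapping the repeated HRESULT check used throughout SetupGraph().
        private static void CheckHr(int hr)
        {
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }

        // usage: CheckHr(_graphBuilder.AddFilter(_capFilter, "Ds.NET Video Capture Device"));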
Code Example #27
File: CapDevice.cs Project: Shujee/WebCam2
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void RunWorker()
        {
            try
            {
                // Create the main graph
                _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                _sourceObject = FilterInfo.CreateFilter(_monikerString);

                // Create the grabber
                _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                _grabberObject = _grabber as IBaseFilter;

                // Add the source and grabber to the main graph
                _graph.AddFilter(_sourceObject, "source");
                _graph.AddFilter(_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    _grabber.SetMediaType(mediaType);

                    if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (_grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup the format block may not be populated yet, so retry up to 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Tried again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    _capGrabber.Width = header.BmiHeader.Width;
                                    _capGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch (Exception retryException)
                                {
                                    // Trace
                                    Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
                    _grabber.SetBufferSamples(false);
                    _grabber.SetOneShot(false);
                    _grabber.SetCallback(_capGrabber, 1);

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)_graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    _control = (IMediaControl)_graph;
                    _control.Run();

                    // Wait for the stop signal
                    while (!_stopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    // Stop when ready
                    _control.StopWhenReady();
                }
            }
            catch (Exception ex)
            {
                // Trace
                Trace.WriteLine(ex);
            }
            finally
            {
                // Clean up
                Release();
            }
        }
Code Example #28
File: VideoSource.cs Project: Wiladams/NewTOAPIA
        /// <summary> Read and store the properties </summary>
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();
            hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            // Grab the size info
            VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, videoInfoHeader);

            //VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
            videoInfoHeader.BmiHeader.Width = 320;
            videoInfoHeader.BmiHeader.Height = 240;
            videoWidth = videoInfoHeader.BmiHeader.Width;
            videoHeight = videoInfoHeader.BmiHeader.Height;
            fStride = videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);
            fPixelArray = new byte[fStride * videoHeight];

            // Copy the media structure back
            Marshal.StructureToPtr(videoInfoHeader, media.formatPtr, false);
            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
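SaveSizeInfo() above sizes fPixelArray from the stride and height; that buffer is then typically filled from the sample grabber callback. A minimal sketch of such a callback, assuming the surrounding class implements ISampleGrabberCB and was registered with SetCallback(this, 1):

        // Hypothetical ISampleGrabberCB.BufferCB implementation (assumption, not shown in
        // the original): copy each incoming frame into the array sized in SaveSizeInfo().
        public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            int count = Math.Min(bufferLen, fPixelArray.Length);
            Marshal.Copy(pBuffer, fPixelArray, 0, count);
            return 0;
        }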
Code Example #29
        /// <summary>
        /// build the capture graph for grabber.
        /// </summary>
        private void SetupGraph()
        {
            int  hr;
            Guid cat;
            Guid med;

            try
            {
                hr = capGraph.SetFiltergraph(graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                AMMediaType media = new AMMediaType();
                media.majorType  = MediaType.Video;
                media.subType    = MediaSubType.RGB32;
                media.formatType = FormatType.VideoInfo;                // ???

                hr = sampGrabber.SetMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                object o;
                cat = PinCategory.Capture;
                med = MediaType.Video;
                Guid iid = typeof(IAMStreamConfig).GUID;
                hr = capGraph.FindInterface(
                    ref cat, ref med, capFilter, ref iid, out o);

                videoStreamConfig = o as IAMStreamConfig;

                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                {
                    hr = sampGrabber.SetOneShot(false);
                }
                if (hr == 0)
                {
                    hr = sampGrabber.SetCallback(null, 0);
                }
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                BitmapInfoHeader bmiHeader;
                bmiHeader        = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
                bmiHeader.Width  = cameraWidth;
                bmiHeader.Height = cameraHeight;
                setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);

                bmiHeader = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
                if (bmiHeader.Width != cameraWidth)
                {
                    throw new GoblinException("Could not change the resolution to " + cameraWidth + "x" +
                                              cameraHeight + ". The resolution has to be " + bmiHeader.Width + "x" +
                                              bmiHeader.Height);
                }

                cat = PinCategory.Preview;
                med = MediaType.Video;
                hr  = capGraph.RenderStream(ref cat, ref med, capFilter, null, baseGrabFlt); // baseGrabFlt
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                media = new AMMediaType();
                hr    = sampGrabber.GetConnectedMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;
            }
            catch (Exception ee)
            {
                throw new GoblinException("Could not setup graph\r\n" + ee.Message);
            }
        }
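The getStreamConfigSetting/setStreamConfigSetting helpers used above are not shown in this snippet. A rough sketch, under the assumption that they read and write fields of the VideoInfoHeader exposed via IAMStreamConfig.GetFormat/SetFormat, of what reading the BmiHeader boils down to:

        // Rough sketch (assumption) of what getStreamConfigSetting(config, "BmiHeader") amounts to.
        private static BitmapInfoHeader GetBmiHeader(IAMStreamConfig config)
        {
            AMMediaType fmt;
            config.GetFormat(out fmt);                         // current stream format
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(fmt.formatPtr, typeof(VideoInfoHeader));
            BitmapInfoHeader header = vih.BmiHeader;           // the BITMAPINFOHEADER portion
            Marshal.FreeCoTaskMem(fmt.formatPtr); fmt.formatPtr = IntPtr.Zero;
            return header;
        }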
Code Example #30
File: CaptureDevices.cs Project: Tob1112/405sentry
        // Thread entry point
        public void WorkerThread()
        {
            int hr;
            Guid cat;
            Guid med;

            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObj = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder graphBuilder = null;
            ICaptureGraphBuilder2 captureGraphBuilder = null;
            IBaseFilter videoDeviceFilter = null;
            IBaseFilter grabberFilter = null;
            ISampleGrabber sg = null;
            IMediaControl mc = null;

            try
            {
                // Make a new filter graph
                graphObj = Activator.CreateInstance(
                    Type.GetTypeFromCLSID(Clsid.FilterGraph, true));
                graphBuilder = (IGraphBuilder)graphObj;

                // Get the Capture Graph Builder
                Guid clsid = Clsid.CaptureGraphBuilder2;
                Guid riid = typeof(ICaptureGraphBuilder2).GUID;
                captureGraphBuilder = (ICaptureGraphBuilder2)
                    TempFix.CreateDsInstance(ref clsid, ref riid);

                // Link the CaptureGraphBuilder to the filter graph
                hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                // Get the video device and add it to the filter graph
                if (source != null)
                {
                    videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(source);
                    hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }

                // create sample grabber, object and filter
                grabberObj = Activator.CreateInstance(
                    Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
                grabberFilter = (IBaseFilter)grabberObj;
                sg = (ISampleGrabber)grabberObj;

                // add sample grabber filter to filter graph
                hr = graphBuilder.AddFilter(grabberFilter, "grabber");
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                // Try looking for a video device interleaved media type
                IBaseFilter testFilter = videoDeviceFilter;
                // grabberFilter (not supported)
                object o;
                cat = PinCategory.Capture;
                med = MediaType.Interleaved;
                Guid iid = typeof(IAMStreamConfig).GUID;
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, testFilter, ref iid, out o);

                if (hr != 0)
                {
                    // If not found, try looking for a video media type
                    med = MediaType.Video;
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, testFilter, ref iid, out o);

                    if (hr != 0)
                        o = null;
                }
                // Set the video stream configuration to data member
                videoStreamConfig = o as IAMStreamConfig;
                o = null;

                // Experimental testing: Try to set the Frame Size & Rate
                // Results: When enabled, the grabber video breaks up into
                // several duplicate frames (6 frames)
                bool bdebug = true;
                if (bdebug)
                {
                    BitmapInfoHeader bmiHeader;
                    bmiHeader = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
                    bmiHeader.Width = framesize.Width;
                    bmiHeader.Height = framesize.Height;
                    setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);

                    long avgTimePerFrame = (long)(10000000 / framerate);
                    setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", avgTimePerFrame);
                }

                // connect pins (Turns on the video device)
                if (graphBuilder.Connect(DSTools.GetOutPin(videoDeviceFilter, 0),
                                         DSTools.GetInPin(grabberFilter, 0)) < 0)
                    throw new ApplicationException("Failed connecting filters");

                // Set the sample grabber media type settings
                AMMediaType mt = new AMMediaType();
                mt.majorType = MediaType.Video;
                mt.subType = MediaSubType.RGB24;
                sg.SetMediaType(mt);

                // get media type
                if (sg.GetConnectedMediaType(mt) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
                    System.Diagnostics.Debug.WriteLine("width = " + vih.BmiHeader.Width + ", height = " + vih.BmiHeader.Height);
                    grabber.Width = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mt.Dispose();
                }

                // render
                graphBuilder.Render(DSTools.GetOutPin(grabberFilter, 0));

                // Set various sample grabber properties
                sg.SetBufferSamples(false);
                sg.SetOneShot(false);
                sg.SetCallback(grabber, 1);

                // Do not show active (source) window
                IVideoWindow win = (IVideoWindow)graphObj;
                win.put_AutoShow(false);
                win = null;

                // get media control
                mc = (IMediaControl)graphObj;

                // run
                mc.Run();

                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mc.StopWhenReady();
            }
            // catch any exceptions
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("----: " + e.Message);
            }
            // finalization block
            finally
            {
                // release all objects
                mc = null;
                graphBuilder = null;
                captureGraphBuilder = null;
                videoDeviceFilter = null;
                grabberFilter = null;
                sg = null;

                if (graphObj != null)
                {
                    Marshal.ReleaseComObject(graphObj);
                    graphObj = null;
                }
                if (grabberObj != null)
                {
                    Marshal.ReleaseComObject(grabberObj);
                    grabberObj = null;
                }
            }
        }
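The 10000000 factor in the AvgTimePerFrame calculation above comes from DirectShow reference times being expressed in 100-nanosecond units. Illustrative conversion helpers (not part of the original class):

        // Frame rate <-> AvgTimePerFrame conversions; reference times are in 100 ns units.
        private static long FpsToAvgTimePerFrame(double fps)
        {
            return (long)(10000000 / fps);          // e.g. 15 fps -> 666666 (~66.7 ms per frame)
        }

        private static double AvgTimePerFrameToFps(long avgTimePerFrame)
        {
            return 10000000.0 / avgTimePerFrame;
        }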
Code Example #31
File: WebCam.cs Project: mrLunatic/WebCam
        /// <summary>
        /// Open a new video feed (either web-cam or video file).
        /// </summary>
        /// <param name="filter">Specifies the web-cam filter to use, or <i>null</i> when opening a video file.</param>
        /// <param name="pb">Specifies the output window, or <i>null</i> when running headless and only receiving snapshots.</param>
        /// <param name="strFile">Specifies the video file to use, or <i>null</i> when opening a web-cam feed.</param>
        /// <returns>The duration of the opened video file, or 0 when opening a web-cam feed.</returns>
        public long Open(Filter filter, PictureBox pb, string strFile)
        {
            int hr;

            if (filter != null && strFile != null)
            {
                throw new ArgumentException("Both the filter and file are non NULL - only one of these can be used at a time; The filter is used with the web-cam and the file is used with a video file.");
            }

            m_selectedFilter = filter;
            m_graphBuilder   = (IFilterGraph2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

            // When using a web-cam, create the moniker for the filter and add the filter to the graph.
            if (strFile == null)
            {
                IMoniker moniker = m_selectedFilter.CreateMoniker();
                m_graphBuilder.AddSourceFilterForMoniker(moniker, null, m_selectedFilter.Name, out m_camFilter);
                Marshal.ReleaseComObject(moniker);
                m_camControl = m_camFilter as IAMCameraControl;

                // Create the capture builder used to build the web-cam filter graph.
                m_captureGraphBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2, true));
                hr = m_captureGraphBuilder.SetFiltergraph(m_graphBuilder as IGraphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Add the web-cam filter to the graph.
                hr = m_graphBuilder.AddFilter(m_camFilter, m_selectedFilter.Name);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }
            else
            {
                // Build the graph with the video file.
                hr = m_graphBuilder.RenderFile(strFile, null);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                m_mediaSeek = m_graphBuilder as IMediaSeeking;

                if (pb != null)
                {
                    m_videoFrameStep = m_graphBuilder as IVideoFrameStep;
                }
            }

            // Create the sample grabber used to get snapshots.
            m_sampleGrabber  = (ISampleGrabber)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
            m_baseGrabFilter = m_sampleGrabber as IBaseFilter;
            m_mediaControl   = m_graphBuilder as IMediaControl;

            // When using a target window, get the video window used with the target output window
            if (pb != null)
            {
                m_mediaEventEx = m_graphBuilder as IMediaEventEx;
                m_videoWindow  = m_graphBuilder as IVideoWindow;
            }
            // Otherwise create the null renderer, as no video output is needed (only snapshots).
            else
            {
                m_nullRenderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.NullRenderer, true));
            }

            // Add the sample grabber to the filter graph.
            hr = m_graphBuilder.AddFilter(m_baseGrabFilter, "Ds.Lib Grabber");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Turn off the sample grabber buffers.
            hr = m_sampleGrabber.SetBufferSamples(false);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Turn off the sample grabber one-shot.
            hr = m_sampleGrabber.SetOneShot(false);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Turn ON the sample grabber callback where video data is to be received.
            hr = m_sampleGrabber.SetCallback(this, 1);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Set the media format used by the sample grabber.
            AMMediaType media = new AMMediaType();

            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            hr = m_sampleGrabber.SetMediaType(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Connect the WebCam Filters and Frame Grabber.
            if (m_selectedFilter != null)
            {
                Guid cat;
                Guid med;

                cat = PinCategory.Preview;
                med = MediaType.Video;
                hr  = m_captureGraphBuilder.RenderStream(ref cat, ref med, m_camFilter, null, null);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                cat = PinCategory.Capture;
                med = MediaType.Video;
                hr  = m_captureGraphBuilder.RenderStream(ref cat, ref med, m_camFilter, null, m_baseGrabFilter);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }
            // Connect the Frame Grabber (and optionally the Null Renderer)
            else
            {
                // Get the video decoder and its pins.
                m_videoFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Video Decoder", false);

                IPin pOutput;
                hr = Utility.GetPin(m_videoFilter, PinDirection.Output, out pOutput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                IPin pInput;
                hr = pOutput.ConnectedTo(out pInput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                PinInfo pinInfo;
                hr = pInput.QueryPinInfo(out pinInfo);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Get the sample grabber pins.
                IPin pGrabInput;
                hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Input, out pGrabInput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                IPin pGrabOutput;
                hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Output, out pGrabOutput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Disconnect the source filter output and the input it is connected to.
                hr = pOutput.Disconnect();
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = pInput.Disconnect();
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Connect the source output to the Grabber input.
                hr = m_graphBuilder.Connect(pOutput, pGrabInput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // When rendering video output, connect the Grabber output to the original downstream input that the source was connected to.
                if (m_nullRenderer == null)
                {
                    hr = m_graphBuilder.Connect(pGrabOutput, pInput);
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }
                }

                Marshal.ReleaseComObject(pOutput);
                Marshal.ReleaseComObject(pInput);
                Marshal.ReleaseComObject(pGrabInput);
                Marshal.ReleaseComObject(pGrabOutput);
            }

            // Remove sound filters.
            IBaseFilter soundFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Audio Decoder", false);

            if (soundFilter != null)
            {
                hr = m_graphBuilder.RemoveFilter(soundFilter);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Marshal.ReleaseComObject(soundFilter);
            }

            soundFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Sound", false);
            if (soundFilter != null)
            {
                hr = m_graphBuilder.RemoveFilter(soundFilter);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Marshal.ReleaseComObject(soundFilter);
            }

            // When using a headless (no video rendering) setup, connect the null renderer to the Sample Grabber.
            if (m_nullRenderer != null)
            {
                // Add the null renderer.
                hr = m_graphBuilder.AddFilter(m_nullRenderer, "Null Renderer");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Get the sample grabber output pin.
                IPin pGrabOutput;
                hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Output, out pGrabOutput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Get the null renderer input pin.
                IPin pInput;
                hr = Utility.GetPin(m_nullRenderer, PinDirection.Input, out pInput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Disconnect the sample grabber pin.
                hr = pGrabOutput.Disconnect();
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Connect the Grabber output to the null renderer.
                hr = m_graphBuilder.Connect(pGrabOutput, pInput);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Marshal.ReleaseComObject(pInput);
                Marshal.ReleaseComObject(pGrabOutput);

                // Remove the Video Renderer, as it is no longer needed.
                IBaseFilter ivideorender = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Video Renderer");
                if (ivideorender != null)
                {
                    m_graphBuilder.RemoveFilter(ivideorender);
                    Marshal.ReleaseComObject(ivideorender);
                }
            }

            // Get the sample grabber media settings and video header.
            media = new AMMediaType();
            hr    = m_sampleGrabber.GetConnectedMediaType(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            if ((media.formatType != FormatType.VideoInfo &&
                 media.formatType != FormatType.WaveEx &&
                 media.formatType != FormatType.MpegVideo) ||
                media.formatPtr == IntPtr.Zero)
            {
                throw new Exception("Media grabber format is unknown.");
            }

            // Get the video header with frame sizing information.
            m_videoInfoHeader = Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)) as VideoInfoHeader;
            Marshal.FreeCoTaskMem(media.formatPtr);
            media.formatPtr = IntPtr.Zero;

            // If we are rendering video output, setup the video window (which requires a message pump).
            if (m_videoWindow != null)
            {
                // setup the video window
                hr = m_videoWindow.put_Owner(pb.Handle);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = m_videoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }


                // resize the window
                hr = m_videoWindow.SetWindowPosition(0, 0, pb.Width, pb.Height);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = m_videoWindow.put_Visible(DsHlp.OATRUE);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }


            // start the capturing
            hr = m_mediaControl.Run();
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // When using a video file, immediately stop at the start.
            if (strFile != null)
            {
                hr = m_mediaControl.Pause();
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }

            // When using a media file, we need to save the video file's duration.
            if (m_mediaSeek != null)
            {
                hr = m_mediaSeek.GetDuration(out m_lDuration);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }

            m_bConnected = true;

            return(m_lDuration);
        }
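A hedged usage sketch for Open() above; the class and variable names here are assumptions, not taken from the source. Exactly one of the filter / file arguments is non-null:

            // Hypothetical caller (names are illustrative).
            var cam = new WebCam();
            long duration = cam.Open(null, previewPictureBox, @"C:\video\sample.avi"); // play a video file
            // ...or, for a live device:
            // cam.Open(selectedWebCamFilter, previewPictureBox, null);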
Code Example #32
File: DirectShowCamera.cs Project: DeSciL/Ogama
    /// <summary>
    /// Saves the video properties of the SampleGrabber into member fields
    /// and creates a file mapping for the captured frames.
    /// </summary>
    /// <param name="sampGrabber">The <see cref="ISampleGrabber"/>
    /// from which to retrieve the sample information.</param>
    private void SaveSizeInfo(ISampleGrabber sampGrabber)
    {
      int hr;

      // Get the media type from the SampleGrabber
      var media = new AMMediaType();
      hr = sampGrabber.GetConnectedMediaType(media);

      //if (hr != 0)
      //{
      //    ErrorLogger.WriteLine("Could not SaveSizeInfo in Camera.Capture. Message: " + DsError.GetErrorText(hr));
      //}

      //if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
      //{
      //    ErrorLogger.WriteLine("Error in Camera.Capture. Unknown Grabber Media Format");
      //}

      // Grab the size info
      var videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
      width = videoInfoHeader.BmiHeader.Width;
      height = videoInfoHeader.BmiHeader.Height;
      stride = width * (videoInfoHeader.BmiHeader.BitCount / 8);
      this.fps = (int)(10000000 / videoInfoHeader.AvgTimePerFrame);

      bufferLength = width * height * 3; // RGB24 = 3 bytes

      // create memory section and map for the OpenCV Image.
      // 0x04 = PAGE_READWRITE, 0xF001F = FILE_MAP_ALL_ACCESS
      section = CreateFileMapping(new IntPtr(-1), IntPtr.Zero, 0x04, 0, (uint)bufferLength, null);
      map = MapViewOfFile(section, 0xF001F, 0, 0, (uint)bufferLength);
      videoImage = new Image<Bgr, byte>(width, height, stride, map);

      DsUtils.FreeAMMediaType(media);
      media = null;
    }
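As a concrete check of the buffer arithmetic above (illustrative numbers, not from the source): a 640×480 RGB24 frame gives stride = 640 × (24 / 8) = 1,920 bytes per row and bufferLength = 640 × 480 × 3 = 921,600 bytes.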
Code Example #33
        /// <summary>
        ///  Connects the filters of a previously created graph
        ///  (created by CreateGraph()). Once rendered the graph
        ///  is ready to be used. This method may also destroy
        ///  streams if we have streams we no longer want.
        /// </summary>
        void RenderGraph()
        {
            var didSomething = false;

            // Stop the graph
            _mediaControl?.Stop();

            // Create the graph if needed (group should already be created)
            CreateGraph();

            // Derender the graph if we have a capture or preview stream
            // that we no longer want. We can't derender the capture and
            // preview streams separately.
            // Notice the second case will leave a capture stream intact
            // even if we no longer want it. This allows a user who is
            // not using the preview to Stop() and Start() without
            // rerendering the graph.
            if (!_wantPreviewRendered && _isPreviewRendered)
            {
                DerenderGraph();
            }

            // Render preview stream (only if necessary)
            if (_wantPreviewRendered && !_isPreviewRendered)
            {
                // Render preview (video -> renderer)
                var cat = PinCategory.Preview;
                var med = MediaType.Video;
                var hr  = _captureGraphBuilder.RenderStream(cat, med, _videoDeviceFilter, _baseGrabFlt, null);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Get the IVideoWindow interface
                _videoWindow = (IVideoWindow)_graphBuilder;

                // Set the video window to be a child of the main window
                hr = _videoWindow.put_Owner(PreviewWindow);

                _videoWindow.put_MessageDrain(_form.Handle);
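                // The put_MessageDrain call above forwards mouse/keyboard messages posted
                // to the video window on to the form, so clicks over the preview still
                // reach the application.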

                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Set video window style
                hr = _videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings);

                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Make the video window visible, now that it is properly positioned
                hr = _videoWindow.put_Visible(OABool.True);

                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                _isPreviewRendered = true;
                didSomething       = true;

                var media = new AMMediaType();
                hr = _sampGrabber.GetConnectedMediaType(media);

                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                if (media.formatType != FormatType.VideoInfo || media.formatPtr == IntPtr.Zero)
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                _videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));

                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;
            }

            if (didSomething)
            {
                _actualGraphState = GraphState.Rendered;
            }
        }
Code Example #34
        /// <summary> build the capture graph. </summary>
        bool SetupGraph()
        {
            int             hr;
            IBaseFilter     mux  = null;
            IFileSinkFilter sink = null;


            try
            {
                hr = capGraph.SetFiltergraph(graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                AMMediaType media = new AMMediaType();
                media.majorType  = MediaType.Video;
                media.subType    = MediaSubType.RGB24;
                media.formatType = FormatType.VideoInfo;        // ???
                hr = sampGrabber.SetMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }


                Guid cat = PinCategory.Preview;
                Guid med = MediaType.Video;
                hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, null); // preview
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                cat = PinCategory.Capture;
                med = MediaType.Video;
                hr  = capGraph.RenderStream(ref cat, ref med, capFilter, null, baseGrabFlt); // capture
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                media = new AMMediaType();
                hr    = sampGrabber.GetConnectedMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                {
                    hr = sampGrabber.SetOneShot(false);
                }
                if (hr == 0)
                {
                    hr = sampGrabber.SetCallback(null, 0);
                }
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                return(true);
            }
            catch (Exception ee)
            {
                MessageBox.Show(this, "Could not setup graph\r\n" + ee.Message, "DirectShow.NET", MessageBoxButtons.OK, MessageBoxIcon.Stop);
                return(false);
            }
            finally
            {
                if (mux != null)
                {
                    Marshal.ReleaseComObject(mux);
                }
                mux = null;
                if (sink != null)
                {
                    Marshal.ReleaseComObject(sink);
                }
                sink = null;
            }
        }
Code Example #35
File: VideoCaptureManager.cs Project: jimu/ZunTzu
        private void setupDirectShowFilterGraph()
        {
            if (mediaControl != null && running)
            {
                Stop();
            }

            if (device == null)
            {
                filterGraph  = null;
                mediaControl = null;
            }
            else
            {
                filterGraph  = (IFilterGraph2) new FilterGraph();
                mediaControl = (IMediaControl)filterGraph;
                ICaptureGraphBuilder2 captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
                captureGraphBuilder.SetFiltergraph((IGraphBuilder)filterGraph);

                // capture filter
                IBaseFilter captureFilter;
                filterGraph.AddSourceFilterForMoniker(device.Moniker, null, device.Name, out captureFilter);

                // sample grabber
                ISampleGrabber sampleGrabber       = (ISampleGrabber) new SampleGrabber();
                IBaseFilter    sampleGrabberFilter = (IBaseFilter)sampleGrabber;
                AMMediaType    mediaType           = new AMMediaType();
                mediaType.majorType  = new Guid(0x73646976, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);                    // MEDIATYPE_Video
                mediaType.subType    = new Guid(0xe436eb7d, 0x524f, 0x11ce, 0x9f, 0x53, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70);                    // MEDIASUBTYPE_RGB24
                mediaType.formatType = new Guid(0x05589f80, 0xc356, 0x11ce, 0xbf, 0x01, 0x00, 0xaa, 0x00, 0x55, 0x59, 0x5a);                    // FORMAT_VideoInfo
                sampleGrabber.SetMediaType(mediaType);
                mediaType.Free();
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetCallback(this, 1);
                filterGraph.AddFilter(sampleGrabberFilter, "ZunTzu Sample Grabber");

                // configure the video stream to 160x120@15fps
                object interfaceFound;
                captureGraphBuilder.FindInterface(
                    new Guid(0xfb6c4281, 0x0353, 0x11d1, 0x90, 0x5f, 0x00, 0x00, 0xc0, 0xcc, 0x16, 0xba),                       // PIN_CATEGORY_CAPTURE
                    new Guid(0x73646976, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71),                       // MEDIATYPE_Video
                    captureFilter, typeof(IAMStreamConfig).GUID, out interfaceFound);
                IAMStreamConfig videoStreamConfig = (IAMStreamConfig)interfaceFound;
                videoStreamConfig.GetFormat(out mediaType);
                VideoInfoHeader infoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(mediaType.formatPtr, infoHeader);
                infoHeader.AvgTimePerFrame  = 10000000 / 15;
                infoHeader.BmiHeader.Width  = 160;
                infoHeader.BmiHeader.Height = 120;
                Marshal.StructureToPtr(infoHeader, mediaType.formatPtr, false);
                videoStreamConfig.SetFormat(mediaType);
                mediaType.Free();

                // renderer
                IBaseFilter nullRenderer = (IBaseFilter) new NullRenderer();
                filterGraph.AddFilter(nullRenderer, "Null Renderer");

                captureGraphBuilder.RenderStream(
                    new Guid(0xfb6c4281, 0x0353, 0x11d1, 0x90, 0x5f, 0x00, 0x00, 0xc0, 0xcc, 0x16, 0xba),                       // PIN_CATEGORY_CAPTURE
                    new Guid(0x73646976, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71),                       // MEDIATYPE_Video
                    captureFilter, sampleGrabberFilter, nullRenderer);

                // retrieve frame size
                sampleGrabber.GetConnectedMediaType(mediaType);
                Marshal.PtrToStructure(mediaType.formatPtr, infoHeader);
                frameRate = 10000000.0f / infoHeader.AvgTimePerFrame;
                frameSize = new Size(infoHeader.BmiHeader.Width, infoHeader.BmiHeader.Height);
                mediaType.Free();
            }
        }
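The inline GUIDs above are the standard DirectShow constants; declaring them once (a readability sketch, values copied from the calls above) avoids repeating the byte lists:

        // Standard DirectShow GUIDs used inline above, declared once for readability.
        static readonly Guid MEDIATYPE_Video      = new Guid(0x73646976, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
        static readonly Guid MEDIASUBTYPE_RGB24   = new Guid(0xe436eb7d, 0x524f, 0x11ce, 0x9f, 0x53, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70);
        static readonly Guid FORMAT_VideoInfo     = new Guid(0x05589f80, 0xc356, 0x11ce, 0xbf, 0x01, 0x00, 0xaa, 0x00, 0x55, 0x59, 0x5a);
        static readonly Guid PIN_CATEGORY_CAPTURE = new Guid(0xfb6c4281, 0x0353, 0x11d1, 0x90, 0x5f, 0x00, 0x00, 0xc0, 0xcc, 0x16, 0xba);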
Code Example #36
        public void WorkerThread()
        {
            Grabber        pCallback = new Grabber(this);
            object         o         = null;
            object         ppvResult = null;
            object         obj4      = null;
            IGraphBuilder  builder   = null;
            IBaseFilter    pFilter   = null;
            IBaseFilter    filter2   = null;
            ISampleGrabber grabber2  = null;
            IMediaControl  control   = null;

            try
            {
                Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }
                o       = Activator.CreateInstance(typeFromCLSID);
                builder = (IGraphBuilder)o;
                UCOMIBindCtx ppbc     = null;
                UCOMIMoniker ppmk     = null;
                int          pchEaten = 0;
                if (Win32.CreateBindCtx(0, out ppbc) == 0)
                {
                    if (Win32.MkParseDisplayName(ppbc, this.source, ref pchEaten, out ppmk) == 0)
                    {
                        Guid gUID = typeof(IBaseFilter).GUID;
                        ppmk.BindToObject(null, null, ref gUID, out ppvResult);
                        Marshal.ReleaseComObject(ppmk);
                        ppmk = null;
                    }
                    Marshal.ReleaseComObject(ppbc);
                    ppbc = null;
                }
                if (ppvResult == null)
                {
                    throw new ApplicationException("Failed creating device object for moniker");
                }
                pFilter       = (IBaseFilter)ppvResult;
                typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (typeFromCLSID == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }
                obj4     = Activator.CreateInstance(typeFromCLSID);
                grabber2 = (ISampleGrabber)obj4;
                filter2  = (IBaseFilter)obj4;
                builder.AddFilter(pFilter, "source");
                builder.AddFilter(filter2, "grabber");
                AMMediaType pmt = new AMMediaType {
                    majorType = MediaType.Video,
                    subType   = MediaSubType.RGB24
                };
                grabber2.SetMediaType(pmt);
                if (builder.Connect(DSTools.GetOutPin(pFilter, 0), DSTools.GetInPin(filter2, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }
                if (grabber2.GetConnectedMediaType(pmt) == 0)
                {
                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(pmt.formatPtr, typeof(VideoInfoHeader));
                    pCallback.Width  = header.BmiHeader.Width;
                    pCallback.Height = header.BmiHeader.Height;
                    pmt.Dispose();
                }
                builder.Render(DSTools.GetOutPin(filter2, 0));
                grabber2.SetBufferSamples(false);
                grabber2.SetOneShot(false);
                grabber2.SetCallback(pCallback, 1);
                ((IVideoWindow)o).put_AutoShow(false);
                control = (IMediaControl)o;
                control.Run();
                while (!this.stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                control.StopWhenReady();
            }
            catch (Exception)
            {
            }
            finally
            {
                control  = null;
                builder  = null;
                pFilter  = null;
                filter2  = null;
                grabber2 = null;
                if (o != null)
                {
                    Marshal.ReleaseComObject(o);
                    o = null;
                }
                if (ppvResult != null)
                {
                    Marshal.ReleaseComObject(ppvResult);
                    ppvResult = null;
                }
                if (obj4 != null)
                {
                    Marshal.ReleaseComObject(obj4);
                    obj4 = null;
                }
            }
        }
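The CreateBindCtx/MkParseDisplayName/BindToObject sequence above is the manual way of resolving a device moniker's display name. A minimal sketch of a shorter equivalent, assuming the same this.source display name and the builder created earlier (this is an illustration, not the snippet author's code):

                // Marshal.BindToMoniker resolves a moniker display name straight to the
                // device's IBaseFilter, replacing the explicit bind-context handling above.
                // Release it with Marshal.ReleaseComObject when the graph is torn down.
                IBaseFilter sourceFilter = (IBaseFilter)Marshal.BindToMoniker(this.source);
                builder.AddFilter(sourceFilter, "source");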
Code Example #37
        /// <summary>
        /// Worker thread.
        /// </summary>
        ///
        private void WorkerThread( )
        {
            ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;

            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObject   = null;
            object grabberObject = null;

            // interfaces
            IGraphBuilder  graph         = null;
            IBaseFilter    sourceBase    = null;
            IBaseFilter    grabberBase   = null;
            ISampleGrabber sampleGrabber = null;
            IMediaControl  mediaControl  = null;

            IMediaEventEx mediaEvent = null;

            try
            {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                graph.AddSourceFilter(fileName, "source", out sourceBase);
                if (sourceBase == null)
                {
                    throw new ApplicationException("Failed creating source filter");
                }

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObject = Activator.CreateInstance(type);
                sampleGrabber = (ISampleGrabber)grabberObject;
                grabberBase   = (IBaseFilter)grabberObject;

                // add grabber filters to graph
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mediaType = new AMMediaType( );
                mediaType.MajorType = MediaType.Video;
                mediaType.SubType   = MediaSubType.RGB24;
                sampleGrabber.SetMediaType(mediaType);

                // connect pins
                int pinToTry = 0;

                IPin inPin  = Tools.GetInPin(grabberBase, 0);
                IPin outPin = null;

                // find output pin acceptable by sample grabber
                while (true)
                {
                    outPin = Tools.GetOutPin(sourceBase, pinToTry);

                    if (outPin == null)
                    {
                        Marshal.ReleaseComObject(inPin);
                        throw new ApplicationException("Did not find acceptable output video pin in the given source");
                    }

                    if (graph.Connect(outPin, inPin) < 0)
                    {
                        Marshal.ReleaseComObject(outPin);
                        outPin = null;
                        pinToTry++;
                    }
                    else
                    {
                        break;
                    }
                }

                Marshal.ReleaseComObject(outPin);
                Marshal.ReleaseComObject(inPin);

                // get media type
                if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mediaType.Dispose( );
                }

                // let's do rendering, if we don't need to prevent freezing
                if (!preventFreezing)
                {
                    // render pin
                    graph.Render(Tools.GetOutPin(grabberBase, 0));

                    // configure video window
                    IVideoWindow window = (IVideoWindow)graphObject;
                    window.put_AutoShow(false);
                    window = null;
                }

                // configure sample grabber
                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(grabber, 1);

                // disable clock, if someone requested it
                if (!referenceClockEnabled)
                {
                    IMediaFilter mediaFilter = (IMediaFilter)graphObject;
                    mediaFilter.SetSyncSource(null);
                }

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // get media events' interface
                mediaEvent = (IMediaEventEx)graphObject;
                IntPtr   p1, p2;
                DsEvCode code;

                // run
                mediaControl.Run( );

                do
                {
                    if (mediaEvent != null)
                    {
                        if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                        {
                            mediaEvent.FreeEventParams(code, p1, p2);

                            if (code == DsEvCode.Complete)
                            {
                                reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                                break;
                            }
                        }
                    }
                } while (!stopEvent.WaitOne(100, false));

                mediaControl.Stop( );
            }
            catch (Exception exception)
            {
                // provide information to clients
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
                }
            }
            finally
            {
                // release all objects
                graph         = null;
                grabberBase   = null;
                sampleGrabber = null;
                mediaControl  = null;
                mediaEvent    = null;

                if (graphObject != null)
                {
                    Marshal.ReleaseComObject(graphObject);
                    graphObject = null;
                }
                if (sourceBase != null)
                {
                    Marshal.ReleaseComObject(sourceBase);
                    sourceBase = null;
                }
                if (grabberObject != null)
                {
                    Marshal.ReleaseComObject(grabberObject);
                    grabberObject = null;
                }
            }

            if (PlayingFinished != null)
            {
                PlayingFinished(this, reasonToStop);
            }
        }
Code Example #38
File: VideoCapture.cs  Project: culiniac/quavs
        /// <summary>
        /// Read and store the properties
        /// </summary>
        /// <param name="sampGrabber">The sample grabber to read the media type from.</param>
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();
            hr = sampGrabber.GetConnectedMediaType(media);
            checkHR(hr,"SaveSizeInfo");

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            // Grab the size info
            VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
            _SGvideoWidth = videoInfoHeader.BmiHeader.Width;
            _SGvideoHeight = videoInfoHeader.BmiHeader.Height;
            _SGstride = _SGvideoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);

            _hud.VideoHeight = _SGvideoHeight;
            _hud.VideoWidth = _SGvideoWidth;

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
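The width, height, and stride stored above are what a BufferCB callback needs to wrap the grabber's raw RGB24 buffer. A minimal sketch, assuming the _SG* fields from the method above; the helper name is hypothetical and not part of the original project:

        // Hypothetical helper: wraps a sample buffer using the size info saved above.
        // The Bitmap aliases the unmanaged buffer (copy it if it must outlive the callback),
        // GDI+ expects the stride to be a multiple of 4, and DirectShow RGB buffers are
        // bottom-up, hence the vertical flip.
        private Bitmap WrapSampleBuffer(IntPtr pBuffer)
        {
            Bitmap frame = new Bitmap(_SGvideoWidth, _SGvideoHeight, _SGstride,
                                      System.Drawing.Imaging.PixelFormat.Format24bppRgb, pBuffer);
            frame.RotateFlip(RotateFlipType.RotateNoneFlipY);
            return frame;
        }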
Code Example #39
File: FileVideoSource.cs  Project: Tob1112/405sentry
        /// <summary>
        /// Worker thread.
        /// </summary>
        ///
        private void WorkerThread( )
        {
            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObject   = null;
            object sourceObject  = null;
            object grabberObject = null;

            // interfaces
            IGraphBuilder     graph         = null;
            IBaseFilter       sourceBase    = null;
            IBaseFilter       grabberBase   = null;
            ISampleGrabber    sampleGrabber = null;
            IMediaControl     mediaControl  = null;
            IFileSourceFilter fileSource    = null;

            try
            {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                type = Type.GetTypeFromCLSID(Clsid.AsyncReader);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter async reader");
                }

                sourceObject = Activator.CreateInstance(type);
                sourceBase   = (IBaseFilter)sourceObject;
                fileSource   = (IFileSourceFilter)sourceObject;

                fileSource.Load(fileName, null);

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObject = Activator.CreateInstance(type);
                sampleGrabber = (ISampleGrabber)grabberObject;
                grabberBase   = (IBaseFilter)grabberObject;

                // add source and grabber filters to graph
                graph.AddFilter(sourceBase, "source");
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mediaType = new AMMediaType( );
                mediaType.MajorType = MediaType.Video;
                mediaType.SubType   = MediaSubType.RGB24;
                sampleGrabber.SetMediaType(mediaType);

                // connect pins
                if (graph.Connect(Tools.GetOutPin(sourceBase, 0), Tools.GetInPin(grabberBase, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }

                // get media type
                if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mediaType.Dispose( );
                }

                // let's do rendering, if we don't need to prevent freezing
                if (!preventFreezing)
                {
                    // render pin
                    graph.Render(Tools.GetOutPin(grabberBase, 0));

                    // configure video window
                    IVideoWindow window = (IVideoWindow)graphObject;
                    window.put_AutoShow(false);
                    window = null;
                }

                // configure sample grabber
                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(grabber, 1);

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // run
                mediaControl.Run( );

                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mediaControl.StopWhenReady( );
            }
            catch (Exception exception)
            {
                // provide information to clients
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
                }
            }
            finally
            {
                // release all objects
                graph         = null;
                sourceBase    = null;
                grabberBase   = null;
                sampleGrabber = null;
                mediaControl  = null;
                fileSource    = null;

                if (graphObject != null)
                {
                    Marshal.ReleaseComObject(graphObject);
                    graphObject = null;
                }
                if (sourceObject != null)
                {
                    Marshal.ReleaseComObject(sourceObject);
                    sourceObject = null;
                }
                if (grabberObject != null)
                {
                    Marshal.ReleaseComObject(grabberObject);
                    grabberObject = null;
                }
            }
        }
Code Example #40
        private bool SetupGraph()
        {
            int hr;

            try {
                hr = capGraph.SetFiltergraph(graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                //DsUtils.ShowCapPinDialog(capGraph, capFilter, this.Handle);

                AMMediaType media = new AMMediaType();
                media.majorType  = MediaType.Video;
                media.subType    = MediaSubType.RGB24;
                media.formatType = FormatType.VideoInfo;                // ???
                hr = sampGrabber.SetMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                Guid cat = PinCategory.Preview;
                Guid med = MediaType.Video;
                hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, null); // baseGrabFlt
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                cat = PinCategory.Capture;
                med = MediaType.Video;
                hr  = capGraph.RenderStream(ref cat, ref med, capFilter, null, baseGrabFlt); // baseGrabFlt
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                media = new AMMediaType();
                hr    = sampGrabber.GetConnectedMediaType(media);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                {
                    hr = sampGrabber.SetOneShot(false);
                }
                if (hr == 0)
                {
                    hr = sampGrabber.SetCallback(null, 0);
                }
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                return(true);
            }
            catch (Exception ee) {
                return(false);
            }
        }
Code Example #41
        public int BufferCB(double SampleTime, IntPtr pBuffer, int BufferLen)
        {
            DateTime now = DateTime.UtcNow;

            if ((now - _last).TotalMilliseconds > 150) // just to avoid issues with bad cameras
            {
                _last = now;
                try
                {
                    if (_width == 0)
                    {
                        var mediaType = new AMMediaType();
                        _grabber.GetConnectedMediaType(mediaType);
                        LocalVideoSourceManager.GetMediaTypeInfo(mediaType, out _height, out _width, out var _, out var _, out var _);

                        if (_width == 0 || _height == 0)
                        {
                            throw new InvalidOperationException($"Unable to GetMediaTypeInfo");
                        }
                    }

                    if (_getFrames)
                    {
                        BitmapSource image = BitmapSource.Create(
                            _width,
                            _height,
                            96,
                            96,
                            PixelFormats.Bgr24,
                            null,
                            pBuffer,
                            BufferLen,
                            _width * 3);

                        JpegBitmapEncoder encoder = new JpegBitmapEncoder {
                            QualityLevel = _width > 500 ? 30 : 50
                        };

                        encoder.Frames.Add(BitmapFrame.Create(image));

                        _stream.Position = 0;
                        encoder.Save(_stream);

                        var buffer = new byte[_stream.Position];
                        Array.Copy(_stream.GetBuffer(), buffer, _stream.Position);
                        _callback(new VideoInputPreview
                        {
                            Data = buffer,
                            W    = _width,
                            H    = _height
                        });
                    }
                    else
                    {
                        _callback(null);
                    }
                }
                catch (Exception e)
                {
                    Log.Warning(e, $"Grabbing of '{_name}' failed");
                }
            }
            return(0);
        }
Code Example #42
        // Thread entry point
        public void WorkerThread()
        {
            int  hr;
            Guid cat;
            Guid med;

            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObj   = null;
            object grabberObj = null;

            // interfaces
            IGraphBuilder graphBuilder = null;

            DShowNET.ICaptureGraphBuilder2 captureGraphBuilder = null;
            IBaseFilter    videoDeviceFilter = null;
            IBaseFilter    grabberFilter     = null;
            ISampleGrabber sg = null;
            IMediaControl  mc = null;

            try
            {
                // Make a new filter graph
                graphObj     = Activator.CreateInstance(Type.GetTypeFromCLSID(DShowNET.Clsid.FilterGraph, true));
                graphBuilder = (IGraphBuilder)graphObj;

                // Get the Capture Graph Builder
                Guid clsid = DShowNET.Clsid.CaptureGraphBuilder2;
                Guid riid  = typeof(DShowNET.ICaptureGraphBuilder2).GUID;
                captureGraphBuilder = (DShowNET.ICaptureGraphBuilder2)DShowNET.DsBugWO.CreateDsInstance(ref clsid, ref riid);

                // Link the CaptureGraphBuilder to the filter graph
                hr = captureGraphBuilder.SetFiltergraph((DShowNET.IGraphBuilder)graphBuilder);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                int rotCookie = 0;
                DShowNET.DsROT.AddGraphToRot(graphBuilder, out rotCookie);

                // Get the video device and add it to the filter graph
                if (deviceMoniker != null)
                {
                    videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(deviceMoniker);
                    hr = graphBuilder.AddFilter(videoDeviceFilter,
                                                "Video Capture Device");
                    if (hr < 0)
                    {
                        Marshal.ThrowExceptionForHR(hr);
                    }
                }

                // create sample grabber, object and filter
                grabberObj    = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
                grabberFilter = (IBaseFilter)grabberObj;
                sg            = (ISampleGrabber)grabberObj;

                // add sample grabber filter to filter graph
                hr = graphBuilder.AddFilter(grabberFilter, "grabber");
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Try looking for an video device interleaved media type
                IBaseFilter testFilter = videoDeviceFilter;
                // grabberFilter (not supported)
                object o;
                cat = DShowNET.PinCategory.Capture;
                med = DShowNET.MediaType.Interleaved;
                Guid iid = typeof(DShowNET.IAMStreamConfig).GUID;
                hr = captureGraphBuilder.FindInterface(ref cat, ref med, (DShowNET.IBaseFilter)testFilter, ref iid, out o);

                if (hr != 0)
                {
                    // If not found, try looking for a video media type
                    med = MediaType.Video;
                    hr  = captureGraphBuilder.FindInterface(
                        ref cat, ref med, (DShowNET.IBaseFilter)testFilter, ref iid, out o);

                    if (hr != 0)
                    {
                        o = null;
                    }
                }

                // Set the video stream configuration to data member
                videoStreamConfig = o as DShowNET.IAMStreamConfig;
                o = null;

                //modifies the stream size and frame rate
                if (modifyStream)
                {
                    //set size of frame
                    BitmapInfoHeader bmiHeader;
                    bmiHeader        = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
                    bmiHeader.Width  = streamSize.Width;
                    bmiHeader.Height = streamSize.Height;
                    setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);

                    //set frame rate (not supported on the cameras we have)

                    /*
                     * long avgTimePerFrame = (long)(10000000 / framerate);
                     * setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", avgTimePerFrame);
                     */
                }

                // connect pins (Turns on the video device)
                if (graphBuilder.Connect((IPin)AForge.Video.DirectShow.Internals.Tools.GetOutPin((AForge.Video.DirectShow.Internals.IBaseFilter)videoDeviceFilter, 0), (IPin)AForge.Video.DirectShow.Internals.Tools.GetInPin((AForge.Video.DirectShow.Internals.IBaseFilter)grabberFilter, 0)) < 0)
                {
                    throw new ApplicationException("Failed connecting filters");
                }

                // Set the sample grabber media type settings
                AMMediaType mt = new AMMediaType();
                mt.majorType = MediaType.Video;
                mt.subType   = MediaSubType.RGB24;
                sg.SetMediaType(mt);

                // get media type and set sample grabber parameters
                if (sg.GetConnectedMediaType(mt) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
                    if (vih.BmiHeader.Compression != 0)
                    {
                        YUYV = true;
                        grabber.setYUYV(YUYV);
                    }
                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    //mt.Dispose();
                }

                // Set various sample grabber properties
                sg.SetBufferSamples(false);
                sg.SetOneShot(false);
                sg.SetCallback(grabber, 1);

                if (!preventFreezing)
                {
                    // render
                    graphBuilder.Render((IPin)AForge.Video.DirectShow.Internals.Tools.GetOutPin((AForge.Video.DirectShow.Internals.IBaseFilter)grabberFilter, 0));

                    // Do not show active (source) window
                    IVideoWindow win = (IVideoWindow)graphObj;
                    win.put_AutoShow(0);
                    win = null;
                }

                // get media control
                mc = (IMediaControl)graphBuilder;

                // run
                mc.Run();

                while (!stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(100);
                }
                mc.StopWhenReady();
            }
            // catch any exceptions
            catch (Exception e)
            {
                // provide information to clients
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(e.Message));
                }
            }
            // finalization block
            finally
            {
                // release all objects
                mc                  = null;
                graphBuilder        = null;
                captureGraphBuilder = null;
                videoDeviceFilter   = null;
                grabberFilter       = null;
                sg                  = null;

                if (graphObj != null)
                {
                    Marshal.ReleaseComObject(graphObj);
                    graphObj = null;
                }
                if (grabberObj != null)
                {
                    Marshal.ReleaseComObject(grabberObj);
                    grabberObj = null;
                }
            }
        }
Code Example #43
File: CCDDevice.cs  Project: KrisJanssen/SIS
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void RunWorker()
        {
            try
            {
                // Create the main graph
                m_igrphbldGraph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                m_sourceObject = FilterInfo.CreateFilter(m_sMonikerString);

                // Create the grabber
                m_isplGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                m_grabberObject = m_isplGrabber as IBaseFilter;

                // Add the source and grabber to the main graph
                m_igrphbldGraph.AddFilter(m_sourceObject, "source");
                m_igrphbldGraph.AddFilter(m_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    m_isplGrabber.SetMediaType(mediaType);

                    if (m_igrphbldGraph.Connect(m_sourceObject.GetPin(PinDirection.Output, 0), m_grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (m_isplGrabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup, this code can be too fast, so try at least 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Tried again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    m_grbrCapGrabber.Width = header.BmiHeader.Width;
                                    m_grbrCapGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch (Exception retryException)
                                {
                                    // Trace
                                    Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    m_igrphbldGraph.Render(m_grabberObject.GetPin(PinDirection.Output, 0));
                    m_isplGrabber.SetBufferSamples(false);
                    m_isplGrabber.SetOneShot(false);
                    m_isplGrabber.SetCallback(m_grbrCapGrabber, 1);

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)m_igrphbldGraph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    m_imedctrlControl = (IMediaControl)m_igrphbldGraph;
                    m_imedctrlControl.Run();

                    // Wait for the stop signal
                    while (!m_rstevStopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    // Stop when ready
                    // _control.StopWhenReady();
                    m_imedctrlControl.Stop();

                    // Wait a bit... It apparently takes some time to stop IMediaControl
                    Thread.Sleep(1000);
                }
            }
            catch (Exception ex)
            {
                // Trace
                Trace.WriteLine(ex);
            }
            finally
            {
                // Clean up
                this.Release();
            }
        }
Code Example #44
        private void upateVideoInfo(ISampleGrabber sampGrabber)
        {
            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();

            int hr;
            hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            // Grab the size info
            this.m_video_info = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
            // Some fields, including pUnk, are invalidated by FreeAMMediaType below.
            DsUtils.FreeAMMediaType(media);
            this._capture_mediatype = media;
        }
Code Example #45
File: OSDVideo.cs  Project: duyisu/MissionPlanner
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();
            hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            // Grab the size info
            VideoInfoHeader videoInfoHeader =
                (VideoInfoHeader) Marshal.PtrToStructure(media.formatPtr, typeof (VideoInfoHeader));
            m_videoWidth = videoInfoHeader.BmiHeader.Width;
            m_videoHeight = videoInfoHeader.BmiHeader.Height;
            m_stride = m_videoWidth*(videoInfoHeader.BmiHeader.BitCount/8);
            m_avgtimeperframe = videoInfoHeader.AvgTimePerFrame;

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
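AvgTimePerFrame in the VideoInfoHeader is expressed in 100-nanosecond units, so the capture frame rate follows directly from the value saved above. A minimal sketch, assuming the m_avgtimeperframe field:

            // 10,000,000 ticks of 100 ns per second; guard against sources that report 0.
            double fps = (m_avgtimeperframe > 0) ? 10000000.0 / m_avgtimeperframe : 0.0;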
Code Example #46
File: WebCamClasses.cs  Project: fredrikdev/CamTimer
        private void GetSizeInfo(ISampleGrabber sampleGrabber)
        {
            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();
            try {
                DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(media));
                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero)) {
                    throw new NotSupportedException(); //"Unknown Grabber Media Format");
                }

                VideoInfoHeader v = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                m_configuration.Size = new Size(v.BmiHeader.Width, v.BmiHeader.Height);
                m_configuration.BPP = v.BmiHeader.BitCount;
                //m_configuration.MediaSubtype = media.subType;
            } finally {
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
        }