Example 1
        /// <summary>
        /// Captures a single frame from the running graph and returns it as a Bitmap.
        /// </summary>
        /// <param name="fileName">File name stored in <c>captureFile</c> for later use.</param>
        /// <returns>The captured frame, or null if the grabber is not ready.</returns>
        public Bitmap CaptureImage(string fileName)
        {
            captureFile = fileName;
            int hr;

            if (sampGrabber == null)
            {
                return(null);
            }

            if (savedArray == null)
            {
                int size = videoInfoHeader.BmiHeader.ImageSize;
                if ((size < 1000) || (size > 16000000))
                {
                    return(null);
                }
                savedArray = new byte[size + 64000];
            }

            captured = false;
            processingCaptureFlag = true;
            hr = sampGrabber.SetCallback(this, 1);
            while (processingCaptureFlag)
            {
                Thread.Sleep(100);
            }
            hr = sampGrabber.SetCallback(null, 0);
            return(new Bitmap(captureImage));
        }
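The polling loop in CaptureImage only exits once the grabber callback clears processingCaptureFlag. That callback is not part of the snippet; the following is a minimal, hypothetical sketch of how the class's ISampleGrabberCB implementation might complete the handshake (the BytesToBitmap helper and the exact field handling are assumptions, not the original author's code):

        // Hypothetical sketch: assumes this class implements ISampleGrabberCB and that
        // SetCallback(this, 1) routes frames to BufferCB.
        int ISampleGrabberCB.SampleCB(double sampleTime, IMediaSample pSample)
        {
            return 0; // unused; the second SetCallback argument (1) selects BufferCB
        }

        int ISampleGrabberCB.BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            if (!captured && savedArray != null && bufferLen <= savedArray.Length)
            {
                // Copy the raw frame out of the graph's buffer.
                Marshal.Copy(pBuffer, savedArray, 0, bufferLen);
                captured = true;

                captureImage = BytesToBitmap(savedArray);   // hypothetical helper
                processingCaptureFlag = false;              // releases the waiting loop
            }
            return 0;
        }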
Example 2
        private void CaptureBmpData()
        {
            if (_sampGrabber == null)
            {
                return;
            }

            if (_bitmapData == null)
            {
                int size = _videoInfoHeader.BmiHeader.ImageSize;
                // sanity check
                if ((size < 1000) || (size > 16000000))
                {
                    return;
                }
                _bitmapData = new byte[size + 64000];
            }

            int hr = _sampGrabber.SetCallback(this, 1);

            _reset.Reset();
            if (!_reset.WaitOne(10000, false))
            {
                throw new Exception("Timeout waiting to get picture");
            }
        }
Example 3
        private void StartupVideo(UCOMIMoniker mon)
        {
            int hr;

            try
            {
                GetInterfaces();

                CreateCaptureDevice(mon);

                SetupGraph();

                hr = mediaCtrl.Run();
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                sampGrabber.SetCallback(this, 1);
            }
            catch (Exception ee)
            {
                throw new GoblinException("Could not start video stream\r\n" + ee.Message);
            }
        }
Example 4
        void SetupVideoGrabber()
        {
            AMMediaType media = new AMMediaType();
            int         hr    = grabberConfig.GetConnectedMediaType(media);

            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            if (((!media.formatType.Equals(FormatType.VideoInfo)) &&
                 (!media.formatType.Equals(FormatType.WaveEx))) ||
                (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
            Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;

            hr = grabberConfig.SetBufferSamples(false);
            if (hr == 0)
            {
                hr = grabberConfig.SetOneShot(false);
            }
            if (hr == 0)
            {
                hr = grabberConfig.SetCallback(null, 0);
            }
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            grabberConfig.SetCallback(this, 1);
        }
Example 5
        /// <summary> handler for toolbar button clicks. </summary>
        private void toolBar_ButtonClick(object sender, System.Windows.Forms.ToolBarButtonClickEventArgs e)
        {
            Trace.WriteLine("!!BTN: toolBar_ButtonClick");

            int hr;

            if (sampGrabber == null)
            {
                return;
            }

            if (e.Button == toolBarBtnGrab)
            {
                Trace.WriteLine("!!BTN: toolBarBtnGrab");

                if (savedArray == null)
                {
                    int size = videoInfoHeader.BmiHeader.ImageSize;
                    if ((size < 1000) || (size > 16000000))
                    {
                        return;
                    }
                    savedArray = new byte[size + 64000];
                }

                toolBarBtnSave.Enabled = false;
                Image old = pictureBox.Image;
                pictureBox.Image = null;
                if (old != null)
                {
                    old.Dispose();
                }

                toolBarBtnGrab.Enabled = false;
                captured = false;
                hr       = sampGrabber.SetCallback(this, 1);
            }
            else if (e.Button == toolBarBtnSave)
            {
                Trace.WriteLine("!!BTN: toolBarBtnSave");

                SaveFileDialog sd = new SaveFileDialog();

                string filename = "grabbedimage.BMP";
                string path     = Application.StartupPath + "\\" + filename;
                //sd.FileName = @"grabbedimage.Bmp";
                //sd.Title = "Save Image as...";
                //sd.Filter = "Bitmap file (*.tiff)|*.tiff";
                //sd.FilterIndex = 1;
                //if (sd.ShowDialog() != DialogResult.OK)
                //    return;

                pictureBox.Image.Save(path, ImageFormat.Bmp);
                Application.Exit();
            }
        }
        private void menuItem1_Click(object sender, System.EventArgs e)
        {
            int hr;
            int size = videoInfoHeader.BmiHeader.ImageSize;

            savedArray        = new byte[size + 64000];
            captured          = false;
            hr                = sampGrabber.SetCallback(this, 1);
            timer1.Enabled    = true;
            menuItem1.Enabled = false;
            menuItem2.Enabled = true;
            textBox1.ReadOnly = true;
            label2.Text       = "Sending ...";
        }
Example 7
        void TestSetCallback()
        {
            int  hr;
            bool bDone;

            m_imc = m_graphBuilder as IMediaControl;

            // Bit flags recording which callbacks were called
            m_Called = 0;

            for (int y = 0; y < 2; y++)
            {
                m_TestComplete.Reset();

                // Call back on the specified routine
                hr = m_isg.SetCallback(this, y);
                Marshal.ThrowExceptionForHR(hr);

                // Start running the graph
                hr = m_imc.Run();
                Marshal.ThrowExceptionForHR(hr);

                // When the event is signaled, the test is done
                bDone = m_TestComplete.WaitOne(10000, false);

                // If we timed out, we failed
                Debug.Assert(bDone, "Failed to get even one sample");

                // Stop running the graph
                hr = m_imc.Stop();
                Marshal.ThrowExceptionForHR(hr);
            }

            Debug.Assert(m_Called == 3, "Called both");
        }
Example 8
        /// <summary>
        /// Configures the mode (media type, format type, etc.).
        /// </summary>
        public void ConfigureMode()
        {
            AMMediaType media = new AMMediaType();

            // Set the media type to Video/RGB24
            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            int hr = m_SampleGrabber.SetMediaType(media);

            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Configure the samplegrabber

            // To save current frame via SnapshotNextFrame
            //ISampleGrabber::SetCallback method
            // Note  [Deprecated. This API may be removed from future releases of Windows.]
            // http://msdn.microsoft.com/en-us/library/windows/desktop/dd376992%28v=vs.85%29.aspx
            hr = m_SampleGrabber.SetCallback(this, 1);     // 1 == WhichMethodToCallback, call the ISampleGrabberCB::BufferCB method
            DsError.ThrowExceptionForHR(hr);

            // To save current frame via SnapshotCurrentFrame
            if (m_bBufferSamplesOfCurrentFrame)
            {
                //ISampleGrabber::SetBufferSamples method
                // Note  [Deprecated. This API may be removed from future releases of Windows.]
                // http://msdn.microsoft.com/en-us/windows/dd376991
                hr = m_SampleGrabber.SetBufferSamples(true);
                DsError.ThrowExceptionForHR(hr);
            }
            return;
        }
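Example 8 enables both capture paths: BufferCB via SetCallback(this, 1), and internal buffering via SetBufferSamples(true) for the "SnapshotCurrentFrame" case mentioned in the comments. The buffered path is read back with ISampleGrabber::GetCurrentBuffer; here is a hedged sketch of that read-back (the method name and error handling are assumptions, only the two GetCurrentBuffer calls follow the documented size-then-copy pattern):

        // Hypothetical helper: the first call asks the grabber for the required size,
        // the second call copies the most recent sample. Assumes SetBufferSamples(true).
        private byte[] CopyCurrentBuffer()
        {
            int bufferSize = 0;
            int hr = m_SampleGrabber.GetCurrentBuffer(ref bufferSize, IntPtr.Zero);
            DsError.ThrowExceptionForHR(hr);

            IntPtr buffer = Marshal.AllocCoTaskMem(bufferSize);
            try
            {
                hr = m_SampleGrabber.GetCurrentBuffer(ref bufferSize, buffer);
                DsError.ThrowExceptionForHR(hr);

                byte[] frame = new byte[bufferSize];
                Marshal.Copy(buffer, frame, 0, bufferSize);
                return frame;
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
            }
        }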
Example 9
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            int         hr;
            AMMediaType media = new AMMediaType();

            // Set the media type
            media.majorType = MediaType.Video;
            if (bytes_per_pixel == 1)
            {
                media.subType = MediaSubType.RGB8;
            }
            else
            {
                media.subType = MediaSubType.RGB24;
            }
            media.formatType = FormatType.VideoInfo;
            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Configure the samplegrabber
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
Example 10
        /// <summary>
        /// Creates the sample grabber.
        /// </summary>
        /// <returns>
        ///		Returns the created sample grabber.
        ///	</returns>
        internal virtual ISampleGrabber CreateSampleGrabber()
        {
            ISampleGrabber grabber = (ISampleGrabber)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_SampleGrabber)));

            if (grabber != null)
            {
                // Set the input format of the sample grabber filter.
                // SetMediaType specifies the required media type.
                //   http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
                // * Not every member of the AM_MEDIA_TYPE structure has to be filled in.
                // * By default the sample grabber has no preferred media type.
                // * To make sure the sample grabber connects to the right filter, call this method before building the filter graph.
                // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
                // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
                AM_MEDIA_TYPE media_type = new AM_MEDIA_TYPE();
                media_type.majortype  = new Guid(GUID.MEDIATYPE_Video);                 // Video
                media_type.subtype    = new Guid(GUID.MEDIASUBTYPE_RGB24);              // RGB24
                media_type.formattype = new Guid(GUID.FORMAT_VideoInfo);                // VideoInfo
                grabber.SetMediaType(media_type);
                grabber.SetBufferSamples(false);                                        // Disable sample buffering.
                grabber.SetOneShot(false);                                              // Disable one-shot mode.
                //grabber.SetCallback(this.SampleGrabberCB, 0);		// 0: tells it to call the SampleCB method.
                grabber.SetCallback(this.SampleGrabberCB, 1);                           // 1: tells it to call the BufferCB method.
                SampleGrabberCB.Notify += SampleGrabberCB_Notify;
            }
            return(grabber);
        }
Example 11
        /// <summary>
        /// Configure the sample grabber.
        /// </summary>
        /// <param name="sampGrabber">The sample grabber.</param>
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media;
            int         hr;

            // Set the media type to Video/RGB24
            media            = new AMMediaType();
            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            hr = sampGrabber.SetMediaType(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Configure the samplegrabber
            hr = sampGrabber.SetCallback(this, 1);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }
        private void ApplyVideoInput()
        {
            Dispose();
            Frame         = new byte[(width * height) * PixelSize];
            CapturedFrame = new byte[(width * height) * PixelSize];
            PreviewFrame  = new byte[(width / PreviewDivider * height / PreviewDivider) * PixelSize];
            if (VideoInput == null)
            {
                return;
            }
            GraphBuilder        = (IGraphBuilder) new FilterGraph();
            CaptureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
            MediaControl        = (IMediaControl)GraphBuilder;
            CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
            SampleGrabber = new SampleGrabber() as ISampleGrabber;
            GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
            SetResolution(width, height);
            GraphBuilder.AddFilter(VideoInput, "Camera");
            SampleGrabber.SetBufferSamples(false);
            SampleGrabber.SetOneShot(false);
            SampleGrabber.GetConnectedMediaType(new AMMediaType());
            SampleGrabber.SetCallback(this, 1);
            CaptureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, VideoInput, null,
                                             SampleGrabber as IBaseFilter);

            if (UpdateThread != null)
            {
                UpdateThread.Abort();
            }
            UpdateThread = new Thread(UpdateBuffer);
            UpdateThread.Start();
            MediaControl.Run();
            Marshal.ReleaseComObject(VideoInput);
        }
Example 13
 private void SampleGrabberForm_Load(object sender, EventArgs e)
 {
     try
     {
         int hr = sampleGrabber.SetCallback(cb, 0);
         DsError.ThrowExceptionForHR(hr);
     }
     catch (COMException ex)
     {
         Graph.ShowCOMException(ex, "Can't set callback");
     }
     catch (Exception ex)
     {
         MessageBox.Show(ex.Message, "Exception caught while setting callback for samplegrabber");
     }
 }
Example 14
        /// <summary> Set the options on the sample grabber </summary>
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media;
            int         hr;

            // Set the media type to Video/RGB24
            media            = new AMMediaType();
            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Choose to call BufferCB instead of SampleCB
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);

            //hr = this.sampleGrabber.SetBufferSamples(true);
            //DsError.ThrowExceptionForHR(hr);

            //hr = this.sampleGrabber.SetOneShot(true);
            //DsError.ThrowExceptionForHR(hr);
        }
Example 15
        /// <summary>
        /// Configures the sample grabber with the requested video subtype and callback.
        /// </summary>
        /// <param name="sampleGrabber">The sample grabber to configure.</param>
        /// <param name="subType">The requested video subtype, or MediaSubType.Null to fall back to RGB24.</param>
        /// <param name="grabberCallback">The callback that receives the grabbed buffers.</param>
        private void ConfigureSampleGrabber(ISampleGrabber sampleGrabber, Guid subType, ISampleGrabberCB grabberCallback)
        {
            AMMediaType media;
            int         hr;

            // Set the media type to Video and format as subtype guid
            media           = new AMMediaType();
            media.majorType = MediaType.Video;

            // WORKAROUND: for a typical USB web camera, RGB24 should be used when the subtype parameter is Null.
            if (subType != MediaSubType.Null)
            {
                media.subType = subType;
            }
            else
            {
                media.subType = MediaSubType.RGB24;
            }
            media.formatType = FormatType.VideoInfo;

            hr = sampleGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // +EDDY_TEST
            ///sampleGrabber.SetBufferSamples( true );
            ///sampleGrabber.SetOneShot( true );
            // -EDDY_TEST

            hr = sampleGrabber.SetCallback(grabberCallback, 1);   // BufferCallback
            //hr = sampleGrabber.SetCallback( grabberCallback, 0 );   // SampleCallback
            DsError.ThrowExceptionForHR(hr);
        }
Example 16
        /// <summary>
        /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice.
        /// </summary>
        /// <param name="FileName">The video file to open</param>
        /// <param name="graphicsDevice">XNA Graphics Device</param>
        public VideoPlayer(string FileName, GraphicsDevice graphicsDevice)
        {
            try
            {
                currentState = VideoState.Stopped;

                filename = FileName;


                InitInterfaces();


                SampleGrabber  sg            = new SampleGrabber();
                ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
                DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));


                AMMediaType mt = new AMMediaType();
                mt.majorType  = MEDIATYPE_Video;    // Video
                mt.subType    = MEDIASUBTYPE_RGB24; // RGB24
                mt.formatType = FORMAT_VideoInfo;   // VideoInfo
                DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));


                DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));


                DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
                DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
                DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));


                IVideoWindow pVideoWindow = (IVideoWindow)gb;
                DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));


                AMMediaType MediaType = new AMMediaType();
                DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
                VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                videoHeight     = pVideoHeader.BmiHeader.Height;
                videoWidth      = pVideoHeader.BmiHeader.Width;
                avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                bitRate         = pVideoHeader.BitRate;
                DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));


                videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
                bgrData         = new byte[(videoHeight * videoWidth) * 3]; // BGR24 format (3 bytes per pixel)


                outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color);
            }
            catch
            {
                throw new Exception("Unable to Load or Play the video file");
            }
        }
Example 17
        public DSVideoPlayer(string filename, GraphicsDevice graphicsDevice)
        {
            try
            {
                // Open DirectShow Interfaces
                InitInterfaces();

                Info = new DSVideoInfo();
                // Create a SampleGrabber Filter and add it to the FilterGraph
                SampleGrabber  sg            = new SampleGrabber();
                ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
                DsError.ThrowExceptionForHR(FG_GraphBuilder.AddFilter((IBaseFilter)sg, "Grabber"));

                // Setup Media type info for the SampleGrabber
                AMMediaType mt = new AMMediaType();
                mt.majorType  = DSVideoInfo.MEDIATYPE_Video;    // Video
                mt.subType    = DSVideoInfo.MEDIASUBTYPE_RGB24; // RGB24
                mt.formatType = DSVideoInfo.FORMAT_VideoInfo;   // VideoInfo
                DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));

                //// Construct the rest of the FilterGraph
                DsError.ThrowExceptionForHR(FG_GraphBuilder.RenderFile(filename, null));
                Info.FileName = filename;

                //// Set SampleGrabber Properties
                DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
                DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
                DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

                // Hide Default Video Window
                IVideoWindow pVideoWindow = (IVideoWindow)FG_GraphBuilder;
                DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

                //// Create AMMediaType to capture video information
                AMMediaType MediaType = new AMMediaType();
                DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
                VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                // Store video information
                Info.Height          = pVideoHeader.BmiHeader.Height;
                Info.Width           = pVideoHeader.BmiHeader.Width;
                Info.AvgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                Info.BitRate         = pVideoHeader.BitRate;
                DsError.ThrowExceptionForHR(FG_MediaSeeking.GetDuration(out Info.Duration));

                // Create byte arrays to hold video data
                videoFrameBytes = new byte[(Info.Height * Info.Width) * 4]; // RGBA format (4 bytes per pixel)
                bgrData         = new byte[(Info.Height * Info.Width) * 3]; // BGR24 format (3 bytes per pixel)

                // Create Output Frame Texture2D with the height and width of the video
                outputFrame = new Texture2D(graphicsDevice, Info.Width, Info.Height, 1, TextureUsage.None, SurfaceFormat.Color);
            }
            catch (Exception ex)
            {
                throw new Exception("不能加载或播放该视频: " + ex.Message);
            }
        }
Example 18
        private void InitAudioGrabber(IBaseFilter sourceF)
        {
            audioGrabberFilter = new SampleGrabber() as IBaseFilter;
            if (audioGrabberFilter == null)
            {
                throw new COMException("Cannot create SampleGrabber");
            }

            int hr = graph.AddFilter(audioGrabberFilter, "Audio Sample Grabber");

            DsError.ThrowExceptionForHR(hr);

            audioGrabber = audioGrabberFilter as ISampleGrabber;
            if (audioGrabber == null)
            {
                throw new COMException("Cannot obtain ISampleGrabber");
            }

            {
                AMMediaType mt = new AMMediaType();
                mt.majorType = DirectShowLib.MediaType.Audio;
                mt.subType   = DirectShowLib.MediaSubType.PCM;

                hr = audioGrabber.SetMediaType(mt);
                DsError.ThrowExceptionForHR(hr);

                DsUtils.FreeAMMediaType(mt);
            }

            hr = ConnectSampleGrabber(graph, sourceF, audioGrabberFilter);

            if (0 != hr)
            {
                // Cannot connect the audio grabber. Remove the filter from the graph.
                hr = graph.RemoveFilter(audioGrabberFilter);
                DsError.ThrowExceptionForHR(hr);

                Util.ReleaseComObject(ref audioGrabberFilter);
                audioGrabber = null;
                return;
            }

            audioNullFilter = new NullRenderer() as IBaseFilter;
            if (audioNullFilter == null)
            {
                throw new COMException("Cannot create NullRenderer");
            }

            hr = graph.AddFilter(audioNullFilter, "Null Filter");
            DsError.ThrowExceptionForHR(hr);

            hr = Util.ConnectFilters(graph, audioGrabberFilter, audioNullFilter);
            DsError.ThrowExceptionForHR(hr);

            audioGrabberCB = new SampleGrabberCB();
            hr             = audioGrabber.SetCallback(audioGrabberCB, (int)CBMethod.Sample);
            DsError.ThrowExceptionForHR(hr);
        }
Example 19
        /// <summary>
        /// Creates the filter.
        /// </summary>
        /// <returns>New filter instance.</returns>
        public IBaseFilter Build()
        {
            sampleGrabber = (ISampleGrabber) new SampleGrabber();
            ConfigSampleGrabber(sampleGrabber);

            sampleGrabber.SetCallback(filter, SampleGrabberCallbackMethod);

            return((IBaseFilter)sampleGrabber);
        }
        /// <summary>
        /// Creates the filter.
        /// </summary>
        /// <returns>New filter instance.</returns>
        public IBaseFilter Build()
        {
            sampleGrabber = (ISampleGrabber) new SampleGrabber();
            ConfigSampleGrabber(sampleGrabber);

            sampleGrabber.SetCallback(filter, SampleGrabberCallbackMethod);

            return (IBaseFilter) sampleGrabber;
        }
Example 21
        public int update()
        {
            //Console.WriteLine("nAvgBytesPerSec:" + mAudioInfoHeader.nAvgBytesPerSec);
            //Console.WriteLine("nBlockAlign:" + mAudioInfoHeader.nBlockAlign);
            //Console.WriteLine("nChannels:" + mAudioInfoHeader.nChannels);
            //Console.WriteLine("nSamplesPerSec:" + mAudioInfoHeader.nSamplesPerSec);
            //Console.WriteLine("wBitsPerSample:" + mAudioInfoHeader.wBitsPerSample);
            //Console.WriteLine("wFormatTag:" + mAudioInfoHeader.wFormatTag);

            /*
             *  nAvgBytesPerSec is the required average data-transfer rate, in bytes per second.
             *      For PCM this is normally the sample rate * nBlockAlign.
             *
             *      nBlockAlign is the size of the smallest unit of data for the given format,
             *      known as the block alignment.
             *      For PCM this member is nChannels * wBitsPerSample / 8.
             *
             *      nAvgBytesPerSec:192000
             *      nBlockAlign:4
             *      nChannels:2 (stereo or mono)
             *      nSamplesPerSec:48000 (sample rate: how many samples are taken per second)
             *      wBitsPerSample:16 (bits per sample)
             *      wFormatTag:1
             *
             *      Formulas:
             *      nBlockAlign = nChannels * wBitsPerSample / 8
             *      nAvgBytesPerSec = nSamplesPerSec * nBlockAlign
             */

            try
            {
                //Specify the callback method used for sampling the audio data.
                //First argument:  pointer to the ISampleGrabberCB interface;
                //                 pass null to remove the callback.
                //Second argument: 0 -> use the ISampleGrabberCB.SampleCB method
                //                 1 -> use the ISampleGrabberCB.BufferCB method
                result = mAudioSampleGrabber.SetCallback(this, 1);
            }
            catch (Exception e)
            {
                SMMMessageBox.Show("エラー:音声データのサンプリングコールバックの実行に失敗しました。Error: Failed to run sampling callback of audio data." + e.ToString(), SMMMessageBoxIcon.Error);
            }
            return(result);
        }
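The block comment above quotes the standard PCM relationships between the WAVEFORMATEX fields; as a quick illustrative check (not part of the original source), the sample values it lists are consistent with those formulas:

            // 48 kHz, stereo, 16-bit PCM
            int nChannels       = 2;
            int wBitsPerSample  = 16;
            int nSamplesPerSec  = 48000;
            int nBlockAlign     = nChannels * wBitsPerSample / 8;   // 2 * 16 / 8 = 4
            int nAvgBytesPerSec = nSamplesPerSec * nBlockAlign;     // 48000 * 4  = 192000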
Example 22
    protected void Initialize()
    {
        FrameReady            = false;
        frame                 = new Texture2D(GraphicsDevice, Width, Height, false, SurfaceFormat.Color);
        FrameBGR              = new byte[(Width * Height) * 3];
        FrameRGBA             = new byte[(Width * Height) * 4];
        FrameGrayscale        = new byte[(Width * Height)];
        FrameHalfGrayscale    = new byte[(Width / 2 * Height / 2)];
        FrameQuarterGrayscale = new byte[(Width / 4 * Height / 4)];
        GraphBuilder          = (IGraphBuilder) new FilterGraph();
        CaptureGraphBuilder   = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
        MediaControl          = (IMediaControl)GraphBuilder;
        CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
        object         VideoInputObject = null;
        IBaseFilter    VideoInput       = null;
        IEnumMoniker   classEnum;
        ICreateDevEnum devEnum = (ICreateDevEnum) new CreateDevEnum();

        devEnum.CreateClassEnumerator(FilterCategory.VideoInputDevice, out classEnum, 0);
        Marshal.ReleaseComObject(devEnum);
        if (classEnum != null)
        {
            IMoniker[] moniker = new IMoniker[1];
            if (classEnum.Next(moniker.Length, moniker, IntPtr.Zero) == DEVICE_ID)
            {
                Guid iid = typeof(IBaseFilter).GUID;
                moniker[0].BindToObject(null, null, ref iid, out VideoInputObject);
            }
            Marshal.ReleaseComObject(moniker[0]);
            Marshal.ReleaseComObject(classEnum);
            VideoInput = (IBaseFilter)VideoInputObject;
        }
        if (VideoInput != null)
        {
            isRunning     = true;
            SampleGrabber = new SampleGrabber() as ISampleGrabber;
            GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
            AMMediaType Type = new AMMediaType()
            {
                majorType = MediaType.Video, subType = MediaSubType.RGB24, formatType = FormatType.VideoInfo
            };
            SampleGrabber.SetMediaType(Type);
            GraphBuilder.AddFilter(VideoInput, "Camera");
            SampleGrabber.SetBufferSamples(false);
            SampleGrabber.SetOneShot(false);
            SampleGrabber.GetConnectedMediaType(new AMMediaType());
            SampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
            CaptureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, VideoInput, null, SampleGrabber as IBaseFilter);
            UpdateThread = new Thread(UpdateBuffer);
            UpdateThread.Start();
            MediaControl.Run();
            Marshal.ReleaseComObject(VideoInput);
        }
    }
Example 23
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            int hr;

            // Call back on the SampleCB routine
            hr = sampGrabber.SetCallback(this, 0);
            Marshal.ThrowExceptionForHR(hr);

            // Only one call
            hr = sampGrabber.SetOneShot(true);
            Marshal.ThrowExceptionForHR(hr);
        }
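Unlike most of the other examples, Example 23 passes 0 as the second SetCallback argument, so the graph calls ISampleGrabberCB.SampleCB with the IMediaSample itself, and SetOneShot(true) stops the graph after one sample. A hedged sketch of what the matching callback might look like (the "hand off" step and surrounding fields are assumptions, not the original code):

        int ISampleGrabberCB.SampleCB(double sampleTime, IMediaSample pSample)
        {
            IntPtr pBuffer;
            if (pSample.GetPointer(out pBuffer) == 0)
            {
                int length = pSample.GetActualDataLength();
                byte[] data = new byte[length];
                Marshal.Copy(pBuffer, data, 0, length);
                // ... hand "data" off to the rest of the application here ...
            }

            // The callback holds a reference to the sample and must release it.
            Marshal.ReleaseComObject(pSample);
            return 0;
        }

        int ISampleGrabberCB.BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            return 0; // unused when the second SetCallback argument is 0
        }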
Example 24
        private void OnCapture()
        {
            toolStripButtonChupHinh.Enabled = false;
            if (sampGrabber == null)
            {
                return;
            }

            if (savedArray == null)
            {
                int size = videoInfoHeader.BmiHeader.ImageSize;
                if ((size < 1000) || (size > 16000000))
                {
                    return;
                }
                savedArray = new byte[size + 64000];
            }

            captured = false;
            int hr = sampGrabber.SetCallback(this, 1);
        }
Example 25
        /// <summary>
        /// Common routine used by RenderTo*
        /// </summary>
        /// <param name="icgb">ICaptureGraphBuilder2 to use</param>
        /// <param name="pCallback">Callback to use (or null)</param>
        /// <param name="sType">string to use in creating filter graph object descriptions</param>
        /// <param name="pPin">Pin to connect from</param>
        /// <param name="ibfCompressor">Compressor to use, or null for none</param>
        /// <param name="pOutput">Endpoint (renderer or file writer) to connect to</param>
        protected void RenderHelper(ICaptureGraphBuilder2 icgb, CallbackHandler pCallback, string sType, IPin pPin,
                                    IBaseFilter ibfCompressor, IBaseFilter pOutput)
        {
            int         hr;
            IBaseFilter ibfSampleGrabber = null;

            try
            {
                // If no callback was provided, don't create a samplegrabber
                if (pCallback != null)
                {
                    ISampleGrabber isg = (ISampleGrabber) new SampleGrabber();
                    ibfSampleGrabber = (IBaseFilter)isg;
                    _dc.Add(ibfSampleGrabber);

                    hr = isg.SetCallback(pCallback, 1);
                    DESError.ThrowExceptionForHR(hr);

                    hr = _graph.AddFilter(ibfSampleGrabber, sType + " sample grabber");
                    DESError.ThrowExceptionForHR(hr);
                }

                // If a compressor was provided, add it to the graph and connect it up
                if (ibfCompressor != null)
                {
                    // Connect the pin.
                    hr = _graph.AddFilter(ibfCompressor, sType + " Compressor");
                    DESError.ThrowExceptionForHR(hr);

                    FilterGraphTools.ConnectFilters(_graph, pPin, ibfSampleGrabber, true);

                    FilterGraphTools.ConnectFilters(_graph, ibfSampleGrabber, ibfCompressor, true);

                    FilterGraphTools.ConnectFilters(_graph, ibfCompressor, pOutput, true);
                }
                else
                {
                    // Just connect the SampleGrabber (if any)
                    hr = icgb.RenderStream(null, null, pPin, ibfSampleGrabber, pOutput);
                    DESError.ThrowExceptionForHR(hr);
                }
            }
            finally
            {
                if (ibfSampleGrabber != null)
                {
                    Marshal.ReleaseComObject(ibfSampleGrabber);
                }
            }
        }
        public int update()
        {
            if (mVideoSampleGrabber == null)
            {
                return(-1);
            }

            int result = -1;

            try
            {
                //Specify the callback method used for sampling the video data.
                //First argument:  pointer to the ISampleGrabberCB interface;
                //                 pass null to remove the callback.
                //Second argument: 0 -> use the ISampleGrabberCB.SampleCB method
                //                 1 -> use the ISampleGrabberCB.BufferCB method
                result = mVideoSampleGrabber.SetCallback(this, 1);
            }
            catch (Exception e)
            {
                SMMMessageBox.Show("エラー:映像データのサンプリングコールバックの実行に失敗しました。Error: Failed to run sampling callback of video data." + e.ToString(), SMMMessageBoxIcon.Error);
            }
            return(result);
        }
Example 27
        /// <summary>
        /// Configure the SampleGrabber
        /// </summary>
        /// <param name="sampGrabber"></param>
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media = new AMMediaType();

            media.majorType  = MediaType.Video;
            media.subType    = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            int hr = sampGrabber.SetMediaType(media);

            DsError.ThrowExceptionForHR(hr);
            DsUtils.FreeAMMediaType(media);
            media = null;
            hr    = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
Example 28
        //
        // configure the SampleGrabber filter of the graph
        //
        void ConfigSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media;

            // set the media type. works with "stream" somehow...
            media           = new AMMediaType();
            media.majorType = MediaType.Stream;
            //media.subType = MediaSubType.WAVE;
            //media.formatType = FormatType.WaveEx;
            // that's the call to the ISampleGrabber interface
            sg.SetMediaType(media);
            DsUtils.FreeAMMediaType(media);
            media = null;
            // set BufferCB as the desired Callback function
            sg.SetCallback(this, 1);
        }
Example 29
        private void SetupSampleGrabber(ISampleGrabber sampleGrabber)
        {
            var mediaType = new DirectShowLib.AMMediaType
            {
                majorType  = MediaType.Video,
                subType    = MediaSubType.RGB24,
                formatType = FormatType.VideoInfo
            };

            int hr = sampleGrabber.SetMediaType(mediaType);

            DsUtils.FreeAMMediaType(mediaType);
            DsError.ThrowExceptionForHR(hr);

            hr = sampleGrabber.SetCallback(this, 0);
            DsError.ThrowExceptionForHR(hr);
        }
Example 30
        public CaptureForm()
        {
            InitializeComponent();

            graph_builder = (IGraphBuilder)new FilterGraph();
            media_control = (IMediaControl)graph_builder;
            events = (IMediaEventEx)graph_builder;
            grabber = (ISampleGrabber)new SampleGrabber();

            AMMediaType media_type = new AMMediaType();
            media_type.majorType = MediaType.Video;
            media_type.subType = MediaSubType.RGB24;
            grabber.SetMediaType( media_type );
            grabber.SetCallback( this, 1 );

            cbDevices.Items.AddRange( GetDevices( FilterCategory.VideoInputDevice ) );
        }
Example 31
        private static SampleGrabber CreateSampleGrabber(Guid majorType, Guid subType, Guid formatType,
                                                         SampleGrabberGraph.SampleGrabberCallback.BufferCBEventHandler callback)
        {
            SampleGrabber  sampleGrabber = new SampleGrabber();
            ISampleGrabber grabber       = sampleGrabber as ISampleGrabber;

            grabber.SetMediaType(new AMMediaType {
                majorType = majorType, subType = subType, formatType = formatType
            });
            grabber.SetBufferSamples(false);
            grabber.SetOneShot(false);
            grabber.SetCallback(new SampleGrabberCallback()
            {
                OnBuffer = callback
            }, 1);                                                                       // 0 = Sample, 1 = Buffer
            return(sampleGrabber);
        }
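The factory above relies on a SampleGrabberCallback adapter that exposes the BufferCB notification as a plain delegate. The original class is not shown; the following is a hypothetical, self-contained sketch of such an adapter (the delegate signature is an assumption inferred from how BufferCB is declared):

        // Hypothetical adapter: implements ISampleGrabberCB and forwards BufferCB to a delegate.
        internal class SampleGrabberCallback : ISampleGrabberCB
        {
            public delegate void BufferCBEventHandler(double sampleTime, IntPtr pBuffer, int bufferLen);

            public BufferCBEventHandler OnBuffer;

            public int SampleCB(double sampleTime, IMediaSample pSample)
            {
                return 0; // unused: SetCallback(..., 1) selects BufferCB
            }

            public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
            {
                if (OnBuffer != null)
                {
                    OnBuffer(sampleTime, pBuffer, bufferLen);
                }
                return 0;
            }
        }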
Example 32
        public CaptureForm()
        {
            InitializeComponent();

            graph_builder = (IGraphBuilder) new FilterGraph();
            media_control = (IMediaControl)graph_builder;
            events        = (IMediaEventEx)graph_builder;
            grabber       = (ISampleGrabber) new SampleGrabber();

            AMMediaType media_type = new AMMediaType();

            media_type.majorType = MediaType.Video;
            media_type.subType   = MediaSubType.RGB24;
            grabber.SetMediaType(media_type);
            grabber.SetCallback(this, 1);

            cbDevices.Items.AddRange(GetDevices(FilterCategory.VideoInputDevice));
        }
        private void SetupSampleGrabber(ISampleGrabber sampleGrabber)
        {
            var mediaType = new DirectShowLib.AMMediaType
            {
                majorType = MediaType.Video,
                subType = MediaSubType.RGB24,
                formatType = FormatType.VideoInfo
            };

            int hr = sampleGrabber.SetMediaType(mediaType);

            DsUtils.FreeAMMediaType(mediaType);
            DsError.ThrowExceptionForHR(hr);

            hr = sampleGrabber.SetCallback(this, 0);
            DsError.ThrowExceptionForHR(hr);
        }
        public void SetUpForTs(ISampleGrabberCB grabber, int methodToCall)
        {
            FilterGraphTools.DisconnectPins(mpeg2Demux);
            //FilterGraphTools.DisconnectPins(demodulator);
            FilterGraphTools.DisconnectPins(audioRenderer);
            FilterGraphTools.DisconnectPins(videoRenderer);
            //graphBuilder.RemoveFilter(audioRenderer);
            //graphBuilder.RemoveFilter(videoRenderer);

            sampleGrabber = (ISampleGrabber)new SampleGrabber();
            AMMediaType media = new AMMediaType();

            media.majorType = MediaType.Stream;
            media.subType = MediaSubType.Mpeg2Transport;
            media.formatType = FormatType.MpegStreams;
            sampleGrabber.SetOneShot(false);
            sampleGrabber.SetBufferSamples(true);
            int hr = sampleGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Sample Grabber");

            nullRenderer = (IBaseFilter)new NullRenderer();
            graphBuilder.AddFilter(nullRenderer, "NULL Renderer");

            IPin pinIn = DsFindPin.ByName((IBaseFilter)sampleGrabber, "Input");
            IPin pinOut = DsFindPin.ByDirection(capture, PinDirection.Output, 0);

            IEnumMediaTypes eMedia;
            pinOut.EnumMediaTypes(out eMedia);

            AMMediaType[] mediaTypes = new AMMediaType[1];
            eMedia.Next(mediaTypes.Length, mediaTypes, IntPtr.Zero);

            hr = sampleGrabber.SetMediaType(mediaTypes[0]);
            DsError.ThrowExceptionForHR(hr);

            pinOut.Disconnect();
            PinInfo info;
            pinOut.QueryPinInfo(out info);

            hr = graphBuilder.ConnectDirect(pinOut, pinIn, mediaTypes[0]);
            //hr = graphBuilder.Connect(pinOut, pinIn);
            DsError.ThrowExceptionForHR(hr);

            // Release the Pin
            Marshal.ReleaseComObject(pinIn);

            pinIn = DsFindPin.ByName(nullRenderer, "In");
            pinOut = DsFindPin.ByName((IBaseFilter)sampleGrabber, "Output");

            hr = graphBuilder.Connect(pinOut, pinIn);
            DsError.ThrowExceptionForHR(hr);

            sampleGrabber.SetCallback(grabber, methodToCall);

            // Release the Pin
            Marshal.ReleaseComObject(pinIn);
            pinIn = null;
        }
Example 35
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void Init()
        {
            try
            {
                log.Trace("Start worker thread");
                // Create the main graph
                _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                _sourceObject = FilterInfo.CreateFilter(_monikerString);

                // Create the grabber
                _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                _grabberObject = _grabber as IBaseFilter;

                // Add the source and grabber to the main graph
                _graph.AddFilter(_sourceObject, "source");
                _graph.AddFilter(_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    _grabber.SetMediaType(mediaType);

                    if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (_grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup, this code can be too fast, so try at least 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Tried again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    _capGrabber.Width = header.BmiHeader.Width;
                                    _capGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch
                                {
                                    // Trace
                                    log.InfoFormat("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
                    _grabber.SetBufferSamples(false);
                    _grabber.SetOneShot(false);
                    _grabber.SetCallback(_capGrabber, 1);
                    log.Trace("_grabber set up");

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)_graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    _control = (IMediaControl)_graph;
                    _control.Run();
                    log.Trace("control runs");

                    // Wait for the stop signal
                    //while (!_stopSignal.WaitOne(0, true))
                    //{
                    //    Thread.Sleep(10);
                    //}
                }
            }
            catch (Exception ex)
            {
                // Trace
                log.Debug(ex);
                Release();
            }
        }
Example 36
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            var media = new AMMediaType
                            {
                                majorType = MediaType.Video,
                                subType = MediaSubType.RGB24,
                                formatType = FormatType.VideoInfo
                            };
            var hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);

            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
Example 37
        public void CreateGraph()
        {
            try
            {
                int result = 0;

                // Create the filter graph manager
                graphBuilder = new FilterGraph() as IFilterGraph2;

                // Create the capture graph builder
                captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;

                //Attach captureGraphBuilder (the capture graph builder) to graphBuilder (the filter graph manager).
                result = captureGraphBuilder.SetFiltergraph(graphBuilder);
                DsError.ThrowExceptionForHR(result);

                // Create the source filter
                // Associate the capture device with the source filter
                captureFilter = null;
                result = graphBuilder.AddSourceFilterForMoniker(
                    _capDevice.Mon, null, _capDevice.Name, out captureFilter);
                DsError.ThrowExceptionForHR(result);

                // Create the sample grabber
                sampleGrabber = new SampleGrabber() as ISampleGrabber;

                // Obtain the grabber as a base filter
                IBaseFilter grabFilter = sampleGrabber as IBaseFilter;

                // Set the audio format to capture
                AMMediaType amMediaType = new AMMediaType();
                amMediaType.majorType = MediaType.Audio;
                amMediaType.subType = MediaSubType.PCM;
                amMediaType.formatPtr = IntPtr.Zero;
                result = sampleGrabber.SetMediaType(amMediaType);
                DsError.ThrowExceptionForHR(result);
                DsUtils.FreeAMMediaType(amMediaType);

                // Register the callback
                result = sampleGrabber.SetOneShot(false);
                DsError.ThrowExceptionForHR(result);

                result = sampleGrabber.SetBufferSamples(true);
                DsError.ThrowExceptionForHR(result);

                // Get the format that will be captured
                object o;
                result = captureGraphBuilder.FindInterface(
                    DsGuid.FromGuid(PinCategory.Capture),
                    DsGuid.FromGuid(MediaType.Audio),
                    captureFilter,
                    typeof(IAMStreamConfig).GUID, out o);
                DsError.ThrowExceptionForHR(result);
                IAMStreamConfig config = o as IAMStreamConfig;
                AMMediaType media;
                result = config.GetFormat(out media);
                DsError.ThrowExceptionForHR(result);

                WaveFormatEx wf = new WaveFormatEx();
                Marshal.PtrToStructure(media.formatPtr, wf);

                CaptureOption opt = new CaptureOption(wf);
                _sampler = new DSAudioSampler(opt);

                DsUtils.FreeAMMediaType(media);
                Marshal.ReleaseComObject(config);

                result = sampleGrabber.SetCallback(_sampler, 1);
                DsError.ThrowExceptionForHR(result);

                //Add grabFilter (the transform filter) to graphBuilder (the filter graph manager).
                result = graphBuilder.AddFilter(grabFilter, "Audio Grab Filter");
                DsError.ThrowExceptionForHR(result);

                //Connect the capture filter to the sample grabber filter
                result = captureGraphBuilder.RenderStream(
                    DsGuid.FromGuid(PinCategory.Capture),
                    DsGuid.FromGuid(MediaType.Audio),
                    captureFilter, null, grabFilter);
                DsError.ThrowExceptionForHR(result);
            }
            catch (Exception ex)
            {
                System.Windows.MessageBox.Show(ex.Message);
            }
        }
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            int hr;
            AMMediaType media = new AMMediaType();

            // Set the media type
            media.majorType = MediaType.Video;
            if (bytes_per_pixel == 1)
                media.subType = MediaSubType.RGB8;
            else
                media.subType = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Configure the samplegrabber
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
Example 39
        /// <summary>
        /// Worker thread that captures the images
        /// </summary>
        private void RunWorker()
        {
            try
            {
                // Create the main graph
                m_igrphbldGraph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                // Create the webcam source
                m_sourceObject = FilterInfo.CreateFilter(m_sMonikerString);

                // Create the grabber
                m_isplGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                m_grabberObject = m_isplGrabber as IBaseFilter;

                // Add the source and grabber to the main graph
                m_igrphbldGraph.AddFilter(m_sourceObject, "source");
                m_igrphbldGraph.AddFilter(m_grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    m_isplGrabber.SetMediaType(mediaType);

                    if (m_igrphbldGraph.Connect(m_sourceObject.GetPin(PinDirection.Output, 0), m_grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (m_isplGrabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            // During startup, this code can be too fast, so try at least 3 times
                            int retryCount = 0;
                            bool succeeded = false;
                            while ((retryCount < 3) && !succeeded)
                            {
                                // Tried again
                                retryCount++;

                                try
                                {
                                    // Retrieve the grabber information
                                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                                    m_grbrCapGrabber.Width = header.BmiHeader.Width;
                                    m_grbrCapGrabber.Height = header.BmiHeader.Height;

                                    // Succeeded
                                    succeeded = true;
                                }
                                catch (Exception retryException)
                                {
                                    // Trace
                                    Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                                    // Sleep
                                    Thread.Sleep(50);
                                }
                            }
                        }
                    }
                    m_igrphbldGraph.Render(m_grabberObject.GetPin(PinDirection.Output, 0));
                    m_isplGrabber.SetBufferSamples(false);
                    m_isplGrabber.SetOneShot(false);
                    m_isplGrabber.SetCallback(m_grbrCapGrabber, 1);

                    // Get the video window
                    IVideoWindow wnd = (IVideoWindow)m_igrphbldGraph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    // Create the control and run
                    m_imedctrlControl = (IMediaControl)m_igrphbldGraph;
                    m_imedctrlControl.Run();

                    // Wait for the stop signal
                    while (!m_rstevStopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    // Stop when ready
                    // _control.StopWhenReady();
                    m_imedctrlControl.Stop();

                    // Wait a bit... It apparently takes some time to stop IMediaControl
                    Thread.Sleep(1000);
                }
            }
            catch (Exception ex)
            {
                // Trace
                Trace.WriteLine(ex);
            }
            finally
            {
                // Clean up
                this.Release();
            }
        }
        private void ApplyVideoInput()
        {
            int iRet;
            Dispose();

            /*Frame = new byte[(width * height) * PixelSize];
            CapturedFrame = new byte[(width * height) * PixelSize];
            PreviewFrame = new byte[(width / PreviewDivider * height / PreviewDivider) * PixelSize];*/

            if (VideoInput == null)
            {
                return;
            }

            //Original Code
            GraphBuilder = (IGraphBuilder)new FilterGraph();
            CaptureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
            MediaControl = (IMediaControl)GraphBuilder;
            iRet = CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetFiltergraph");

            SampleGrabber = new SampleGrabber() as ISampleGrabber;
            iRet = GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found AddFilter 1");

            SetResolution(width, height);
            iRet = GraphBuilder.AddFilter(VideoInput, "Camera");

            if (iRet != 0) Console.WriteLine("TheKing--> Error Found AddFilter 2");
            iRet = SampleGrabber.SetBufferSamples(true);
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetBufferSamples");
            iRet = SampleGrabber.SetOneShot(false);
            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetOneShot");

            iRet = SampleGrabber.SetCallback(this, 1);

            if (iRet != 0) Console.WriteLine("TheKing--> Error Found SetCallback");

            iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, SampleGrabber as IBaseFilter);
            if (iRet < 0)
            {
                Console.WriteLine("TheKing--> Error Found in CaptureGraphBuilder.RenderStream, iRet = " + iRet + ", Initialization TryNumber = " + counter);
                if(counter == 1)
                    ApplyVideoInput();
            }

            //GraphBuilder.Connect()
            //iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, null);
            //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 1");

            //iRet = CaptureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, VideoInput, null, SampleGrabber as IBaseFilter);
            //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 2, iRet = " + iRet);

            if (UpdateThread != null)
            {
                UpdateThread.Abort();
            }

            //UpdateThread = new Thread(UpdateBuffer);
            //UpdateThread.Start();

            MediaControl.Run();

            Marshal.ReleaseComObject(VideoInput);
        }
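        ApplyVideoInput enables SetBufferSamples(true), but the UpdateBuffer method referenced in the commented-out thread code is not included. A hypothetical reconstruction of such a polling loop, using DirectShowLib's ISampleGrabber.GetCurrentBuffer (stopRequested and Frame are assumed members):

        // Hypothetical reconstruction of the commented-out UpdateBuffer loop; relies on
        // SetBufferSamples(true) above. Requires System.Runtime.InteropServices.
        private volatile bool stopRequested;

        private void UpdateBuffer()
        {
            while (!stopRequested)
            {
                int bufferSize = 0;
                // First call with IntPtr.Zero only asks for the required size.
                if (SampleGrabber.GetCurrentBuffer(ref bufferSize, IntPtr.Zero) == 0 && bufferSize > 0)
                {
                    IntPtr buffer = Marshal.AllocCoTaskMem(bufferSize);
                    try
                    {
                        if (SampleGrabber.GetCurrentBuffer(ref bufferSize, buffer) == 0)
                        {
                            if (Frame == null || Frame.Length != bufferSize)
                                Frame = new byte[bufferSize];
                            Marshal.Copy(buffer, Frame, 0, bufferSize);  // latest frame as raw pixel bytes
                        }
                    }
                    finally
                    {
                        Marshal.FreeCoTaskMem(buffer);
                    }
                }
                Thread.Sleep(33);   // roughly 30 polls per second
            }
        }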
 private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
 {
     AMMediaType media;
     int hr;
     // Set the media type to Video/YUY2
     media = new AMMediaType();
     media.majorType = MediaType.Video;
     media.subType = MediaSubType.YUY2;
     media.formatType = FormatType.VideoInfo;
     hr = sampGrabber.SetMediaType(media);
     DsError.ThrowExceptionForHR(hr);
     DsUtils.FreeAMMediaType(media);
     media = null;
     hr = sampGrabber.SetCallback(this, 1);
     DsError.ThrowExceptionForHR(hr);
 }
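        The SetCallback(this, 1) calls above assume the hosting class implements ISampleGrabberCB and that the second argument selects the buffer callback. A minimal sketch of such an implementation (DirectShowLib-style signatures; frameBuffer is an assumed field):

        // Minimal ISampleGrabberCB implementation assumed by SetCallback(this, 1).
        // With the second argument set to 1 only BufferCB is invoked; SampleCB stays unused.
        public int SampleCB(double sampleTime, IMediaSample pSample)
        {
            return 0;   // not used in buffer-callback mode
        }

        public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            if (frameBuffer == null || frameBuffer.Length != bufferLen)
                frameBuffer = new byte[bufferLen];

            Marshal.Copy(pBuffer, frameBuffer, 0, bufferLen);
            return 0;   // S_OK; keep this fast, it runs on the streaming thread
        }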
Esempio n. 42
0
        /// <summary>
        /// Configures the sample grabber
        /// </summary>
        /// <param name="sampGrabber"></param>
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media;
            int status;

            // Set the media type to Video/RGB24
            media = new AMMediaType();
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            status = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(status);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Configure the samplegrabber
            status = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(status);
        }
        // --------------------- Private Methods -----------------------
        /// <summary> 
        ///  Create a new filter graph and add filters (devices, compressors, 
        ///  misc), but leave the filters unconnected. Call renderGraph()
        ///  to connect the filters.
        /// </summary>
        /// 
        protected void createGraph()
        {
            Guid cat;
            Guid med;
            int hr;
            Type comType = null;
            object comObj = null;

            // Ensure required properties are set
            if (videoDevice == null && audioDevice == null)
                throw new ArgumentException("Neither a video nor an audio device has been set. Please set at least one to a valid capture device.");

            // Skip if we are already created
            if ((int)graphState < (int)GraphState.Created)
            {
                // Garbage collect, ensure that previous filters are released
                GC.Collect();

                // Make a new filter graph
                graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

                // Get the Capture Graph Builder
                Guid clsid = Clsid.CaptureGraphBuilder2;
                Guid riid = typeof(ICaptureGraphBuilder2).GUID;
                captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);

                // Link the CaptureGraphBuilder to the filter graph
                hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                comType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (comType == null)
                    throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
                comObj = Activator.CreateInstance(comType);
                sampGrabber = (ISampleGrabber)comObj; comObj = null;

                baseGrabFlt = (IBaseFilter)sampGrabber;

                // Add the graph to the Running Object Table so it can be
                // viewed with GraphEdit
            #if DEBUG
                DsROT.AddGraphToRot(graphBuilder, out rotCookie);
            #endif

                AMMediaType media = new AMMediaType();
                // Get the video device and add it to the filter graph
                if (VideoDevice != null)
                {
                    videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);
                    hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                   //  Console.WriteLine("MediaEnineCheck ==> Inside StartVideoCapture.cs before MediaSudType");
                    media.majorType = MediaType.Video;
                    media.subType = MediaSubType.RGB24; //Rajib
                    media.formatType = FormatType.VideoInfo;		// ???
                    hr = sampGrabber.SetMediaType(media);
                    if (hr < 0)
                        Marshal.ThrowExceptionForHR(hr);

                    hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }

                // Get the audio device and add it to the filter graph
                if (AudioDevice != null)
                {
                    audioDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(AudioDevice.MonikerString);
                    hr = graphBuilder.AddFilter(audioDeviceFilter, "Audio Capture Device");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }

                // Get the video compressor and add it to the filter graph
                if (VideoCompressor != null)
                {
                    videoCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(VideoCompressor.MonikerString);
                    hr = graphBuilder.AddFilter(videoCompressorFilter, "Video Compressor");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }

                // Get the audio compressor and add it to the filter graph
                if (AudioCompressor != null)
                {
                    audioCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(AudioCompressor.MonikerString);
                    hr = graphBuilder.AddFilter(audioCompressorFilter, "Audio Compressor");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }

                // Retrieve the stream control interface for the video device
                // FindInterface will also add any required filters
                // (WDM devices in particular may need additional
                // upstream filters to function).

                // Try looking for an interleaved media type
                object o;
                cat = PinCategory.Capture;
                med = MediaType.Interleaved;
                Guid iid = typeof(IAMStreamConfig).GUID;
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, videoDeviceFilter, ref iid, out o);

                if (hr != 0)
                {
                    // If not found, try looking for a video media type
                    med = MediaType.Video;
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, videoDeviceFilter, ref iid, out o);

                    if (hr != 0)
                        o = null;
                }
                videoStreamConfig = o as IAMStreamConfig;

                // Retrieve the stream control interface for the audio device
                o = null;
                cat = PinCategory.Capture;
                med = MediaType.Audio;
                iid = typeof(IAMStreamConfig).GUID;
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, audioDeviceFilter, ref iid, out o);
                if (hr != 0)
                    o = null;
                audioStreamConfig = o as IAMStreamConfig;

                // Retrieve the media control interface (for starting/stopping the graph)
                mediaControl = (IMediaControl)graphBuilder;

                // Reload any video crossbars
                if (videoSources != null) videoSources.Dispose(); videoSources = null;

                // Reload any audio crossbars
                if (audioSources != null) audioSources.Dispose(); audioSources = null;

                // Reload any property pages exposed by filters
                if (propertyPages != null) propertyPages.Dispose(); propertyPages = null;

                // Reload capabilities of video device
                videoCaps = null;

                // Reload capabilities of audio device
                audioCaps = null;

                // Retrieve TV Tuner if available
                o = null;
                cat = PinCategory.Capture;
                med = MediaType.Interleaved;
                iid = typeof(IAMTVTuner).GUID;
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, videoDeviceFilter, ref iid, out o);
                if (hr != 0)
                {
                    med = MediaType.Video;
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, videoDeviceFilter, ref iid, out o);
                    if (hr != 0)
                        o = null;
                }
                IAMTVTuner t = o as IAMTVTuner;
                if (t != null)
                    tuner = new Tuner(t);

                // NOTE: media.formatPtr is only valid once the grabber pin is connected and
                // GetConnectedMediaType(media) has filled it in; reading it from the locally
                // built AMMediaType at this point will fail.
                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;
                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                    hr = sampGrabber.SetOneShot(false);
                if (hr == 0)
                    hr = sampGrabber.SetCallback(new SampleGrabberCallback(), 1);
                if (hr < 0)
                    Marshal.ThrowExceptionForHR(hr);
                // Update the state now that we are done
                graphState = GraphState.Created;

            }
        }
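        The videoInfoHeader captured at the end of createGraph() is typically what turns the raw RGB24 buffer handed to the callback into a Bitmap. A sketch of that conversion (BufferToBitmap is a stand-in name; assumes a bottom-up DIB with positive BmiHeader.Height; requires System.Drawing):

        // Sketch: convert the RGB24 buffer delivered to the grabber callback into a Bitmap,
        // using the videoInfoHeader saved in createGraph().
        private Bitmap BufferToBitmap(IntPtr pBuffer)
        {
            int width  = videoInfoHeader.BmiHeader.Width;
            int height = videoInfoHeader.BmiHeader.Height;
            int stride = (width * 3 + 3) & ~3;           // RGB24 rows are padded to 4-byte boundaries

            Bitmap copy;
            using (Bitmap wrapper = new Bitmap(width, height, stride, PixelFormat.Format24bppRgb, pBuffer))
            {
                copy = new Bitmap(wrapper);              // copy the pixels out of the unmanaged buffer
            }
            copy.RotateFlip(RotateFlipType.RotateNoneFlipY);  // DIB rows are stored bottom-up
            return copy;
        }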
Esempio n. 44
0
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media;
            int hr;

            media = new AMMediaType();

            media.majorType = MediaType.Audio;
            /*media.subType = MediaSubType.WAVE;
            media.formatType = FormatType.WaveEx;*/

            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Configure the samplegrabber
            hr = sampGrabber.SetCallback(this, 1); // 1 = buffer callback (BufferCB), 0 = sample callback (SampleCB)
            DsError.ThrowExceptionForHR(hr);
        }
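        The audio variant above pins only the major type, so the callback receives whatever PCM layout the upstream filter negotiates. A sketch of a matching BufferCB that assumes 16-bit signed PCM and computes a peak level (lastPeakLevel is an assumed field; a real implementation should read the WaveFormatEx from GetConnectedMediaType instead of assuming the format):

        // Sketch of a matching BufferCB for the audio grabber above; assumes 16-bit signed PCM.
        public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            int sampleCount = bufferLen / 2;             // 2 bytes per 16-bit sample
            short[] samples = new short[sampleCount];
            Marshal.Copy(pBuffer, samples, 0, sampleCount);

            int peak = 0;
            for (int i = 0; i < sampleCount; i++)
            {
                int s = samples[i];
                if (s == short.MinValue) s = short.MaxValue;   // avoid overflow on Abs
                s = Math.Abs(s);
                if (s > peak) peak = s;
            }

            lastPeakLevel = peak / (double)short.MaxValue;     // 0.0 .. 1.0
            return 0;
        }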
Esempio n. 45
0
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media;
            int hr;

            // Set the media type to Video/RGB24
            media = new AMMediaType();
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            sampGrabber.SetBufferSamples(false);
            sampGrabber.SetOneShot(false);
            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Configure the samplegrabber
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
Esempio n. 46
0
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media = new AMMediaType
                                    {
                                        majorType = MediaType.Video,
                                        subType = MediaSubType.RGB24,
                                        formatType = FormatType.VideoInfo
                                    };

            // Set the media type to Video/RGB24
            int hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);

            // Configure the samplegrabber
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
Esempio n. 47
0
        /// <summary>
        /// Set the options on the sample grabber
        /// </summary>
        /// <param name="sampGrabber">The sample grabber.</param>
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            int hr;
            AMMediaType media = new AMMediaType();

            // Set the media type to Video/ARGB32
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.ARGB32;
            //obsolete:
            //media.subType = MediaSubType.RGB24;

            media.formatType = FormatType.VideoInfo;
            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Configure the samplegrabber callback
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
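        With ARGB32 the grabbed buffer maps directly onto 32-bit-per-pixel bitmaps, which makes it convenient for WPF. A sketch of a BufferCB that pushes the frame into a WriteableBitmap (writeableBitmap, frameWidth and frameHeight are assumed fields, e.g. filled in from the connected VideoInfoHeader; requires WPF/PresentationCore; the frame may still arrive bottom-up and need flipping):

        // Sketch: push an ARGB32 frame from BufferCB into a WPF WriteableBitmap.
        public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
        {
            // WPF objects must be touched on the dispatcher thread. Invoke blocks the streaming
            // thread until the copy is done, which keeps pBuffer valid for the duration.
            Application.Current.Dispatcher.Invoke((Action)(() =>
            {
                writeableBitmap.WritePixels(
                    new Int32Rect(0, 0, frameWidth, frameHeight),
                    pBuffer,
                    bufferLen,
                    frameWidth * 4);    // 4 bytes per ARGB32 pixel
            }));
            return 0;
        }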
        /// <summary> Set the options on the sample grabber </summary>
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
        {
            AMMediaType media;
            int hr;

            // Set the media type to Video/RGB24
            media = new AMMediaType();
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            // Choose to call BufferCB instead of SampleCB
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
        private void InitializeCapture()
        {
            graphBuilder = (IGraphBuilder)new FilterGraph();
            mediaControl = (IMediaControl)graphBuilder;

            captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
            hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
            DsError.ThrowExceptionForHR(hr);

            IBaseFilter videoInput = GetVideoInputObject();
            if (null != videoInput)
            {
                SetConfigurations(videoInput);

                sampleGrabber = new SampleGrabber() as ISampleGrabber;
                hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Render");
                DsError.ThrowExceptionForHR(hr);

                hr = graphBuilder.AddFilter(videoInput, "Camera");
                DsError.ThrowExceptionForHR(hr);

                AMMediaType type = new AMMediaType() { majorType = MediaType.Video, subType = MediaSubType.ARGB32, formatType = FormatType.VideoInfo };
                hr = sampleGrabber.SetMediaType(type);
                DsError.ThrowExceptionForHR(hr);
                DsUtils.FreeAMMediaType(type);

                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                // GetConnectedMediaType is only useful after RenderStream has connected the
                // grabber pin, and its result must be stored and freed (see the SaveSizeInfo-style
                // helpers in the other examples).

                sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
                hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, videoInput, null, sampleGrabber as IBaseFilter);
                DsError.ThrowExceptionForHR(hr);

                Marshal.ReleaseComObject(videoInput);
            }
        }
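        InitializeCapture above does not keep the negotiated media type. A sketch of a SaveSizeInfo-style helper, as used by the other examples on this page, that reads the actual frame size once RenderStream has connected the grabber (field names are assumptions):

        // Sketch of a SaveSizeInfo-style helper: call it after RenderStream() has connected
        // the grabber to read the negotiated frame size.
        private void SaveSizeInfo(ISampleGrabber grabber)
        {
            AMMediaType media = new AMMediaType();
            int hr = grabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                if (media.formatType != FormatType.VideoInfo || media.formatPtr == IntPtr.Zero)
                    throw new NotSupportedException("Unknown grabber media format");

                VideoInfoHeader header =
                    (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                frameWidth  = header.BmiHeader.Width;
                frameHeight = header.BmiHeader.Height;
                frameStride = frameWidth * (header.BmiHeader.BitCount / 8);
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
            }
        }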
Esempio n. 50
0
        /// <summary> Set the options on the sample grabber </summary>
        private void ConfigureSampleGrabber(ISampleGrabber sampGrabber, int width, int height)
        {
            int hr;
            AMMediaType media = new AMMediaType();
            //VideoInfoHeader v;

            // copy out the videoinfoheader
            //v = new VideoInfoHeader();
            //Marshal.PtrToStructure(media.formatPtr, v);

            //// Set the size
            //v.BmiHeader.Width = width;
            //v.BmiHeader.Height = height;
            
            // Copy the media structure back
            //Marshal.StructureToPtr(v, media.formatPtr, false);

            // Set the media type to Video/RGB24
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB24;
            media.formatType = FormatType.VideoInfo;
            
            hr = sampGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;

            hr = sampGrabber.SetBufferSamples(false);
            hr = sampGrabber.SetOneShot(false);


            // Configure the samplegrabber callback
            hr = sampGrabber.SetCallback(this, 1);
            DsError.ThrowExceptionForHR(hr);
        }
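        The commented-out block above tried to force the frame size by editing the VideoInfoHeader on the grabber's media type; the more common route, and what helpers like SetResolution/SetConfigParms in the other examples do, is IAMStreamConfig.SetFormat on the device's capture pin. A sketch with DirectShowLib-style calls (SetCaptureResolution is a stand-in name; capGraph/capFilter correspond to the builder and source filter used elsewhere in these examples):

        // Sketch: set the capture resolution through IAMStreamConfig on the capture pin.
        private void SetCaptureResolution(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter,
                                          int width, int height)
        {
            object o;
            int hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video,
                                            capFilter, typeof(IAMStreamConfig).GUID, out o);
            DsError.ThrowExceptionForHR(hr);
            IAMStreamConfig streamConfig = (IAMStreamConfig)o;

            AMMediaType media;
            hr = streamConfig.GetFormat(out media);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                VideoInfoHeader v =
                    (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                v.BmiHeader.Width  = width;
                v.BmiHeader.Height = height;
                Marshal.StructureToPtr(v, media.formatPtr, false);

                hr = streamConfig.SetFormat(media);   // do this before the capture pin is connected
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
            }
        }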
Esempio n. 51
0
        void RunWorker()
        {
            try
            {

                graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

                sourceObject = FilterInfo.CreateFilter(deviceMoniker);

                grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
                grabberObject = grabber as IBaseFilter;

                graph.AddFilter(sourceObject, "source");
                graph.AddFilter(grabberObject, "grabber");

                using (AMMediaType mediaType = new AMMediaType())
                {
                    mediaType.MajorType = MediaTypes.Video;
                    mediaType.SubType = MediaSubTypes.RGB32;
                    grabber.SetMediaType(mediaType);

                    if (graph.Connect(sourceObject.GetPin(PinDirection.Output, 0), grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
                    {
                        if (grabber.GetConnectedMediaType(mediaType) == 0)
                        {
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            capGrabber.Width = header.BmiHeader.Width;
                            capGrabber.Height = header.BmiHeader.Height;
                        }
                    }
                    graph.Render(grabberObject.GetPin(PinDirection.Output, 0));
                    grabber.SetBufferSamples(false);
                    grabber.SetOneShot(false);
                    grabber.SetCallback(capGrabber, 1);

                    IVideoWindow wnd = (IVideoWindow)graph;
                    wnd.put_AutoShow(false);
                    wnd = null;

                    control = (IMediaControl)graph;
                    control.Run();

                    while (!stopSignal.WaitOne(0, true))
                    {
                        Thread.Sleep(10);
                    }

                    control.StopWhenReady();
                }
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(ex);
            }
            finally
            {
                graph = null;
                sourceObject = null;
                grabberObject = null;
                grabber = null;
                capGrabber = null;
                control = null;

            }
        }
Esempio n. 52
0
        /*
        protected void InitAudioSampleGrabber()
        {
            // Get the graph builder
            IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
            if (graphBuilder == null)
                return; 
            
            try
            {
                // Build the sample grabber
                sampleGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(Filters.SampleGrabber, true))
                    as ISampleGrabber;

                if (sampleGrabber == null)
                    return;

                // Add it to the filter graph
                int hr = graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "ProTONE_SampleGrabber");
                DsError.ThrowExceptionForHR(hr);

                AMMediaType mtAudio = new AMMediaType();
                mtAudio.majorType = MediaType.Audio;
                mtAudio.subType = MediaSubType.PCM;
                mtAudio.formatPtr = IntPtr.Zero;

                _actualAudioFormat = null;

                hr = sampleGrabber.SetMediaType(mtAudio);
                DsError.ThrowExceptionForHR(hr);

                hr = sampleGrabber.SetBufferSamples(true);
                DsError.ThrowExceptionForHR(hr);

                hr = sampleGrabber.SetOneShot(false);
                DsError.ThrowExceptionForHR(hr);

                hr = sampleGrabber.SetCallback(this, 1);
                DsError.ThrowExceptionForHR(hr);

                sampleAnalyzerMustStop.Reset();
                sampleAnalyzerThread = new Thread(new ThreadStart(SampleAnalyzerLoop));
                sampleAnalyzerThread.Priority = ThreadPriority.Highest;
                sampleAnalyzerThread.Start();
            }
            catch(Exception ex)
            {
                Logger.LogException(ex);
            }

            rotEntry = new DsROTEntry(graphBuilder as IFilterGraph);
        }*/

        protected void InitAudioSampleGrabber_v2()
        {
            // Get the graph builder
            IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
            if (graphBuilder == null)
                return;

            try
            {
                // Build the sample grabber
                sampleGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(Filters.SampleGrabber, true))
                    as ISampleGrabber;

                if (sampleGrabber == null)
                    return;

                // Add it to the filter graph
                int hr = graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "ProTONE_SampleGrabber_v2");
                DsError.ThrowExceptionForHR(hr);

                IBaseFilter ffdAudioDecoder = null;

                IPin ffdAudioDecoderOutput = null;
                IPin soundDeviceInput = null;
                IPin sampleGrabberInput = null;
                IPin sampleGrabberOutput = null;
                IntPtr pSoundDeviceInput = IntPtr.Zero;

                // When using FFDShow, typically we'll find
                // a ffdshow Audio Decoder connected to the sound device filter
                // 
                // i.e. [ffdshow Audio Decoder] --> [DirectSound Device]
                //
                // Our audio sample grabber supports only PCM sample input and output.
                // Its entire processing is based on this assumption.
                // 
                // Thus we need to insert the audio sample grabber between the ffdshow Audio Decoder and the sound device,
                // because this is the only place where we can find PCM samples. The sound device only accepts PCM.
                //
                // So we need to turn this graph:
                //
                // .. -->[ffdshow Audio Decoder]-->[DirectSound Device] 
                //
                // into this:
                //
                // .. -->[ffdshow Audio Decoder]-->[Sample grabber]-->[DirectSound Device] 
                //
                // Actions to do to achieve the graph change:
                //
                // 1. Locate the ffdshow Audio Decoder in the graph
                // 2. Find its output pin and the pin that it's connected to
                // 3. Locate the input and output pins of sample grabber
                // 4. Disconnect the ffdshow Audio Decoder output from the sound device input pin
                // 5. Connect the ffdshow Audio Decoder to sample grabber input
                // 6. Connect the sample grabber output to sound device input
                // that's all.

                // --------------
                // 1. Locate the ffdshow Audio Decoder in the graph
                hr = graphBuilder.FindFilterByName("ffdshow Audio Decoder", out ffdAudioDecoder);
                DsError.ThrowExceptionForHR(hr);

                // 2. Find its output pin and the pin that it's connected to
                hr = ffdAudioDecoder.FindPin("Out", out ffdAudioDecoderOutput);
                DsError.ThrowExceptionForHR(hr);

                hr = ffdAudioDecoderOutput.ConnectedTo(out pSoundDeviceInput);
                DsError.ThrowExceptionForHR(hr);

                soundDeviceInput = new DSPin(pSoundDeviceInput).Value;

                // 3. Locate the input and output pins of sample grabber
                hr = (sampleGrabber as IBaseFilter).FindPin("In", out sampleGrabberInput);
                DsError.ThrowExceptionForHR(hr);

                hr = (sampleGrabber as IBaseFilter).FindPin("Out", out sampleGrabberOutput);
                DsError.ThrowExceptionForHR(hr);

                // 4. Disconnect the ffdshow Audio Decoder output from the sound device input pin
                hr = ffdAudioDecoderOutput.Disconnect();
                DsError.ThrowExceptionForHR(hr);

                hr = soundDeviceInput.Disconnect();
                DsError.ThrowExceptionForHR(hr);

                // 5. Connect the ffdshow Audio Decoder to sample grabber input
                hr = graphBuilder.Connect(ffdAudioDecoderOutput, sampleGrabberInput);
                DsError.ThrowExceptionForHR(hr);

                // 6. Connect the sample grabber output to sound device input
                hr = graphBuilder.Connect(sampleGrabberOutput, soundDeviceInput);
                DsError.ThrowExceptionForHR(hr);


                AMMediaType mtAudio = new AMMediaType();
                mtAudio.majorType = MediaType.Audio;
                mtAudio.subType = MediaSubType.PCM;
                mtAudio.formatPtr = IntPtr.Zero;

                _actualAudioFormat = null;

                sampleGrabber.SetMediaType(mtAudio);
                sampleGrabber.SetBufferSamples(true);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(this, 1);

                sampleAnalyzerMustStop.Reset();
                sampleAnalyzerThread = new Thread(new ThreadStart(SampleAnalyzerLoop));
                sampleAnalyzerThread.Priority = ThreadPriority.Highest;
                sampleAnalyzerThread.Start();
            }
            catch (Exception ex)
            {
                Logger.LogException(ex);
            }

            rotEntry = new DsROTEntry(graphBuilder as IFilterGraph);
        }
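        SampleAnalyzerLoop is started here but is not part of the snippet. Since the grabber is configured with SetBufferSamples(true), a plausible sketch is a loop that polls GetCurrentBuffer until sampleAnalyzerMustStop is signalled (AnalyzeSamples is an assumed helper):

        // Hypothetical sketch of SampleAnalyzerLoop (not part of the original snippet).
        protected void SampleAnalyzerLoop()
        {
            while (!sampleAnalyzerMustStop.WaitOne(40, false))   // roughly 25 polls per second
            {
                int size = 0;
                if (sampleGrabber.GetCurrentBuffer(ref size, IntPtr.Zero) != 0 || size <= 0)
                    continue;

                IntPtr buffer = Marshal.AllocCoTaskMem(size);
                try
                {
                    if (sampleGrabber.GetCurrentBuffer(ref size, buffer) == 0)
                    {
                        byte[] pcm = new byte[size];
                        Marshal.Copy(buffer, pcm, 0, size);
                        // AnalyzeSamples(pcm);   // VU metering, FFT, etc. (assumed helper)
                    }
                }
                finally
                {
                    Marshal.FreeCoTaskMem(buffer);
                }
            }
        }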
Esempio n. 53
0
    /// <summary>
    /// Connects to the property changed events of the camera settings.
    /// </summary>
    //private void Initialize()
    //{
    //    //Settings.Instance.Camera.OnCameraControlPropertyChanged += OnCameraControlPropertyChanged;
    //    //Settings.Instance.Camera.OnVideoProcAmpPropertyChanged += OnVideoProcAmpPropertyChanged;
    //    //Settings.Instance.Camera.OnVideoControlFlagsChanged += OnVideoControlFlagsChanged;

    //    //stopwatch = new Stopwatch();
    //}

    /// <summary>
    /// Build the capture graph for the grabber.
    /// </summary>
    /// <param name="dev">The index of the new capture device.</param>
    /// <param name="frameRate">The framerate to use.</param>
    /// <param name="width">The width to use.</param>
    /// <param name="height">The height to use.</param>
    /// <returns>True, if successful, otherwise false.</returns>
    private bool SetupGraph(DsDevice dev, int frameRate, int width, int height)
    {
      int hr;
      fps = frameRate; // Not measured, only to expose FPS externally 
      cameraControl = null;
      capFilter = null;

      // Get the graphbuilder object
      graphBuilder = (IFilterGraph2)new FilterGraph();
      mediaControl = graphBuilder as IMediaControl;

      try
      {
        // Create the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

        // Create the SampleGrabber interface
        sampGrabber = (ISampleGrabber)new SampleGrabber();

        // Start building the graph
        hr = capGraph.SetFiltergraph(graphBuilder);
        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in capGraph.SetFiltergraph. Could not build graph. Message: " +
        //                          DsError.GetErrorText(hr));

#if DEBUG
        this.rotEntry = new DsROTEntry(this.graphBuilder);
#endif

        this.capFilter = CreateFilter(
            FilterCategory.VideoInputDevice,
            dev.Name);
        if (this.capFilter != null)
        {
          hr = graphBuilder.AddFilter(this.capFilter, "Video Source");
          DsError.ThrowExceptionForHR(hr);
        }

        //// Add the video device
        //hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
        //if (hr != 0)
        //    ErrorLogger.WriteLine(
        //        "Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " +
        //        DsError.GetErrorText(hr));

        var baseGrabFlt = (IBaseFilter)sampGrabber;

        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");

        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " +
        //                          DsError.GetErrorText(hr));

        // turn on the infrared leds ONLY FOR THE GENIUS WEBCAM
        /*
        if (!defaultMode)
        {
            m_icc = capFilter as IAMCameraControl;
            CameraControlFlags CamFlags = new CameraControlFlags();
            int pMin, pMax, pStep, pDefault;

            hr = m_icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pStep, out pDefault, out CamFlags);
            m_icc.Set(CameraControlProperty.Focus, pMax, CameraControlFlags.None);
        }
        */


        //IBaseFilter smartTee = new SmartTee() as IBaseFilter;

        //// Add the smart tee filter to the graph
        //hr = this.graphBuilder.AddFilter(smartTee, "Smart Tee");
        //Marshal.ThrowExceptionForHR(hr);

        // Connect the video source output to the smart tee
        //hr = capGraph.RenderStream(null, null, capFilter, null, smartTee);

        hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);
        var errorText = DsError.GetErrorText(hr);

        cameraControl = capFilter as IAMCameraControl;

        // Set videoProcAmp
        object obj;
        var iid_IBaseFilter = new Guid("56a86895-0ad4-11ce-b03a-0020af0ba770");
        DirectShowDevices.Instance.Cameras[deviceNumber].DirectshowDevice.Mon.BindToObject(
            null,
            null,
            ref iid_IBaseFilter,
            out obj);

        videoProcAmp = obj as IAMVideoProcAmp;

        // If any of the default config items are set
        if (frameRate + height + width > 0)
          SetConfigParms(capGraph, capFilter, frameRate, width, height);

          // Check for successful rendering; if it failed the class cannot be used, so dispose the resources and return false.
        if (hr < 0)
        {
          Cleanup();
          return false;
        }
        else
        {
          // Otherwise update the SampleGrabber.
          SaveSizeInfo(sampGrabber);
          hr = sampGrabber.SetBufferSamples(false);

          if (hr == 0)
          {
            hr = sampGrabber.SetOneShot(false);
            hr = sampGrabber.SetCallback(this, 1);
          }

          //if (hr < 0)
          //    ErrorLogger.WriteLine("Could not set callback function (SetupGraph) in Camera.Capture()");
        }
      }
      catch (Exception)
      {
        //ErrorLogger.ProcessException(ex, false);

        Cleanup();
        return false;
      }

      return true;
    }
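    Cleanup() and SetConfigParms() are referenced by SetupGraph but not included. SetConfigParms typically follows the IAMStreamConfig pattern sketched after Esempio n. 50; below is a hedged sketch of Cleanup, releasing what SetupGraph created (the exact field set is an assumption):

    // Sketch of the Cleanup() referenced above: stop the graph and release the COM objects
    // created by SetupGraph().
    private void Cleanup()
    {
      try
      {
        if (mediaControl != null)
          mediaControl.Stop();
      }
      catch (Exception)
      {
        // the graph may never have been run
      }

#if DEBUG
      if (rotEntry != null)
      {
        rotEntry.Dispose();
        rotEntry = null;
      }
#endif

      if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; }
      if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; }
      if (capGraph != null) { Marshal.ReleaseComObject(capGraph); capGraph = null; }
      if (graphBuilder != null) { Marshal.ReleaseComObject(graphBuilder); graphBuilder = null; }

      cameraControl = null;
      videoProcAmp = null;
    }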