Code example #1
0
        // ------------- Constructors/Destructors --------------
        /// <summary>
        ///  Create a new Capture object.
        ///  videoDevice must be a valid device; audioDevice is accepted for
        ///  interface compatibility but is not used by this implementation
        ///  (the audio path is disabled). Use the <see cref="Filters"/>
        ///  class to list available devices.
        /// </summary>
        /// <exception cref="ArgumentException">videoDevice is null.</exception>
        public Capture2(Filter videoDevice, Filter audioDevice)
        {
            // Only the video device is required; the audio device is never
            // stored or wired into the graph in this implementation.
            if (videoDevice == null)
                throw new ArgumentException("The videoDevice parameter must be set to a valid Filter.\n");
            this.videoDevice = videoDevice;

            createGraph();

            // Configure the JPEG encoder used by BufferCB to compress grabbed
            // frames; 30L is the value passed for the encoder parameter.
            EncoderParameter myEncoderParameter = new EncoderParameter(myEncoder, 30L);
            myEncoderParameters.Param[0] = myEncoderParameter;
            jpgEncoder = GetEncoder(ImageFormat.Jpeg);

            // Timestamp used by BufferCB to measure the incoming frame rate.
            time = DateTime.Now;
        }

        /// <summary> Finalizer. Dispose of resources. </summary>
        ~Capture2()
        {
            // A finalizer must never throw. Dispose() touches managed objects
            // (videoSources/audioSources) that may already have been
            // finalized, so any exception here is swallowed.
            try { Dispose(); }
            catch { }
        }

        #endregion Constructors

        #region Enumerations

        // ------------------ Private Enumerations --------------------
        /// <summary> Possible states of the internal filter graph. </summary>
        protected enum GraphState
        {
            Null,			// No filter graph at all
            Created,		// Filter graph created with device filters added
            Rendered,		// Filter graph completely built, ready to run (possibly previewing)
            Capturing		// Filter graph is capturing
        }

        #endregion Enumerations

        #region Delegates

        /// <summary> Callback signature for delivering a grabbed video frame. </summary>
        public delegate void HeFrame(System.Drawing.Bitmap BM);

        #endregion Delegates

        #region Events

        /// <summary> Raised with a grabbed frame when <see cref="SetBitmap"/> is assigned. </summary>
        public event HeFrame FrameEvent2;

        #endregion Events

        #region Properties

        /// <summary>
        ///  The capabilities of the audio device.
        /// </summary>
        /// <remarks>
        ///  It may be required to cue the capture (see <see cref="Cue"/>)
        ///  before all capabilities are correctly reported.
        ///
        /// <para>
        ///  The value is computed on first access and cached; later reads
        ///  return the cached object. Because of this, <b>the result can
        ///  differ depending on when the property is first read</b>. If you
        ///  see inconsistent values, read it immediately after constructing
        ///  the Capture class or immediately after setting the video and
        ///  audio compressors. </para>
        /// </remarks>
        public AudioCapabilities AudioCaps
        {
            get
            {
                // Already built? Return the cached instance.
                if (audioCaps != null)
                    return (audioCaps);

                // Lazily build the capabilities from the audio stream config,
                // if one is available. Failures are logged and null returned.
                if (audioStreamConfig != null)
                {
                    try
                    {
                        audioCaps = new AudioCapabilities(audioStreamConfig);
                    }
                    catch (Exception ex)
                    {
                        Debug.WriteLine("AudioCaps: unable to create audioCaps." + ex.ToString());
                    }
                }
                return (audioCaps);
            }
        }

        /// <summary>
        ///  Get or set the number of channels in the waveform-audio data.
        /// </summary>
        /// <remarks>
        ///  Monaural data uses one channel, stereo data uses two.
        ///
        /// <para>
        ///  Not every device supports reading or writing this setting; if it
        ///  is unsupported, accessing it throws an exception. </para>
        ///
        /// <para>
        ///  This property cannot be changed while capturing. Changing it
        ///  while preview is enabled causes some flickering while the
        ///  internal filter graph is partially rebuilt; changing it while
        ///  cued cancels the cue (call Cue() again to re-cue). </para>
        /// </remarks>
        public short AudioChannels
        {
            get { return ((short)getStreamConfigSetting(audioStreamConfig, "nChannels")); }
            set { setStreamConfigSetting(audioStreamConfig, "nChannels", value); }
        }

        /// <summary> 
        ///  The audio compression filter. 
        /// </summary>
        /// <remarks>
        ///  When this property is changed 
        ///  the internal filter graph is rebuilt. This means that some properties
        ///  will be reset. Set this property as early as possible to avoid losing 
        ///  changes. This property cannot be changed while capturing.
        /// </remarks>
        public Filter AudioCompressor
        {
            get { return (audioCompressor); }
            set
            {
                assertStopped();
                // Rebuild the graph around the new compressor: tear it down,
                // store the new filter, re-render, and resume preview if one
                // was running.
                destroyGraph();
                audioCompressor = value;
                renderGraph();
                startPreviewIfNeeded();
            }
        }

        /// <summary> 
        ///  The audio capture device filter. Read-only. To use a different 
        ///  device, dispose of the current Capture instance and create a new 
        ///  instance with the desired device. 
        /// </summary>
        public Filter AudioDevice
        {
            get { return (audioDevice); }
        }

        /// <summary>
        ///  Get or set the number of bits recorded per sample.
        /// </summary>
        /// <remarks>
        ///  Common sample sizes are 8 bit and 16 bit; not all sample sizes
        ///  are supported.
        ///
        /// <para>
        ///  Not every device supports reading or writing this setting; if it
        ///  is unsupported, accessing it throws an exception. </para>
        ///
        /// <para>
        ///  This property cannot be changed while capturing. Changing it
        ///  while preview is enabled causes some flickering while the
        ///  internal filter graph is partially rebuilt; changing it while
        ///  cued cancels the cue (call Cue() again to re-cue). </para>
        /// </remarks>
        public short AudioSampleSize
        {
            get { return ((short)getStreamConfigSetting(audioStreamConfig, "wBitsPerSample")); }
            set { setStreamConfigSetting(audioStreamConfig, "wBitsPerSample", value); }
        }

        /// <summary>
        ///  Get or set the number of audio samples taken per second.
        /// </summary>
        /// <remarks>
        ///  Common sampling rates are 8.0 kHz, 11.025 kHz, 22.05 kHz, and
        ///  44.1 kHz; not all sampling rates are supported.
        ///
        /// <para>
        ///  Not every device supports reading or writing this setting; if it
        ///  is unsupported, accessing it throws an exception. </para>
        ///
        /// <para>
        ///  This property cannot be changed while capturing. Changing it
        ///  while preview is enabled causes some flickering while the
        ///  internal filter graph is partially rebuilt; changing it while
        ///  cued cancels the cue (call Cue() again to re-cue). </para>
        /// </remarks>
        public int AudioSamplingRate
        {
            get { return ((int)getStreamConfigSetting(audioStreamConfig, "nSamplesPerSec")); }
            set { setStreamConfigSetting(audioStreamConfig, "nSamplesPerSec", value); }
        }

        /// <summary> 
        ///  The current audio source. Use Capture.AudioSources to 
        ///  list available sources. Set to null to disable all 
        ///  sources (mute).
        /// </summary>
        public Source AudioSource
        {
            get { return (AudioSources.CurrentSource); }
            set { AudioSources.CurrentSource = value; }
        }

        /// <summary>
        ///  Collection of available audio sources/physical connectors
        ///  on the current audio device.
        /// </summary>
        /// <remarks>
        ///  In most cases, if the device has only one source, this collection
        ///  will be empty. For audio there are two different methods for
        ///  enumerating sources: an audio crossbar (usually TV tuners?) or an
        ///  audio mixer (usually sound cards?). This class first looks for an
        ///  audio crossbar; if no sources or only one source is available on
        ///  the crossbar, it then looks for an audio mixer. It does not
        ///  support both methods at once.
        ///
        /// <para>
        ///  The collection is built on first access and cached; later reads
        ///  return the cached object. Because of this, <b>the result can
        ///  differ depending on when the property is first read</b>. If you
        ///  see inconsistent values, read it immediately after constructing
        ///  the Capture class or immediately after setting the video and
        ///  audio compressors. </para>
        /// </remarks>
        public SourceCollection AudioSources
        {
            get
            {
                // Already built? Return the cached collection.
                if (audioSources != null)
                    return (audioSources);

                try
                {
                    // No audio device means an empty collection.
                    audioSources = (audioDevice != null)
                        ? new SourceCollection(captureGraphBuilder, audioDeviceFilter, false)
                        : new SourceCollection();
                }
                catch (Exception ex)
                {
                    Debug.WriteLine("AudioSources: unable to create AudioSources." + ex.ToString());
                }
                return (audioSources);
            }
        }

        // ------------------ Public Properties --------------------
        /// <summary> Is the class currently capturing (graph state is Capturing). Read-only. </summary>
        public bool Capturing
        {
            get { return (graphState == GraphState.Capturing); }
        }

        /// <summary>
        ///  Has the class been cued to begin capturing: the capture stream is
        ///  rendered and the graph is in the Rendered state. Read-only.
        /// </summary>
        public bool Cued
        {
            get { return (isCaptureRendered && graphState == GraphState.Rendered); }
        }

        /// <summary>
        ///  Gets and sets the frame rate used to capture video.
        /// </summary>
        /// <remarks>
        ///  Common frame rates: 24 fps for film, 25 for PAL, 29.997
        ///  for NTSC. Not all NTSC capture cards can capture at
        ///  exactly 29.997 fps. Not all frame rates are supported.
        ///  When changing the frame rate, the closest supported
        ///  frame rate will be used.
        ///
        /// <para>
        ///  Not all devices support getting/setting this property.
        ///  If this property is not supported, accessing it will
        ///  throw an exception. </para>
        ///
        /// <para>
        ///  This property cannot be changed while capturing. Changing
        ///  this property while preview is enabled will cause some
        ///  flickering while the internal filter graph is partially
        ///  rebuilt. Changing this property while cued will cancel the
        ///  cue. Call Cue() again to re-cue the capture. </para>
        /// </remarks>
        /// <exception cref="ArgumentOutOfRangeException">Set value is not positive.</exception>
        public double FrameRate
        {
            get
            {
                // AvgTimePerFrame is expressed in 100ns units; convert to fps.
                long avgTimePerFrame = (long)getStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame");
                return ((double)10000000 / avgTimePerFrame);
            }
            set
            {
                // Guard against zero/negative rates: the original code would
                // compute Infinity (value == 0) or a negative frame time and
                // overflow the cast to long.
                if (value <= 0)
                    throw new ArgumentOutOfRangeException("value", "The frame rate must be greater than zero.");
                long avgTimePerFrame = (long)(10000000 / value);
                setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", avgTimePerFrame);
            }
        }

        /// <summary>
        ///  Gets and sets the frame size used to capture video.
        /// </summary>
        /// <remarks>
        ///  To change the frame size, assign a new Size object 
        ///  to this property <code>capture.Size = new Size( w, h );</code>
        ///  rather than modifying the size in place 
        ///  (capture.Size.Width = w;). Not all frame
        ///  sizes are supported.
        ///  
        /// <para>
        ///  Not all devices support getting/setting this property.
        ///  If this property is not supported, accessing it will
        ///  throw an exception. </para>
        /// 
        /// <para> 
        ///  This property cannot be changed while capturing. Changing 
        ///  this property while preview is enabled will cause some 
        ///  flickering while the internal filter graph is partially
        ///  rebuilt. Changing this property while cued will cancel the
        ///  cue. Call Cue() again to re-cue the capture. </para>
        /// </remarks>
        public Size FrameSize
        {
            get
            {
                // Read the current bitmap header from the stream config and
                // report its dimensions.
                BitmapInfoHeader bmiHeader;
                bmiHeader = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
                Size size = new Size(bmiHeader.Width, bmiHeader.Height);
                return (size);
            }
            set
            {
                // Read-modify-write: fetch the current header, patch only the
                // dimensions, and push it back to the stream config.
                BitmapInfoHeader bmiHeader;
                bmiHeader = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
                bmiHeader.Width = value.Width;
                bmiHeader.Height = value.Height;
                setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);
            }
        }

        /// <summary> 
        ///  Name of file to capture to. Initially set to
        ///  a valid temporary file.
        /// </summary>		
        /// <remarks>
        ///  If the file does not exist, it will be created. If it does 
        ///  exist, it will be overwritten. An overwritten file will 
        ///  not be shortened if the captured data is smaller than the 
        ///  original file. The file will be valid, it will just contain 
        ///  extra, unused, data after the audio/video data. 
        /// 
        /// <para>
        ///  A future version of this class will provide a method to copy 
        ///  only the valid audio/video data to a new file. </para>
        /// 
        /// <para>
        ///  This property cannot be changed while capturing or cued. </para>
        /// </remarks> 
        //public string Filename
        //{
        //    get { return (filename); }
        //    set
        //    {
        //        assertStopped();
        //        if (Cued)
        //            throw new InvalidOperationException("The Filename cannot be changed once cued. Use Stop() before changing the filename.");
        //        filename = value;
        //        if (fileWriterFilter != null)
        //        {
        //            string s;
        //            AMMediaType mt = new AMMediaType();
        //            int hr = fileWriterFilter.GetCurFile(out s, mt);
        //            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        //            if (mt.formatSize > 0)
        //                Marshal.FreeCoTaskMem(mt.formatPtr);
        //            hr = fileWriterFilter.SetFileName(filename, mt);
        //            if (hr < 0) Marshal.ThrowExceptionForHR(hr);
        //        }
        //    }
        //}
        /// <summary>
        ///  The control that will host the preview window. 
        /// </summary>
        /// <remarks>
        ///  Setting this property will begin video preview
        ///  immediately. Set this property after setting all
        ///  other properties to avoid unnecessary changes
        ///  to the internal filter graph (some properties like
        ///  FrameSize require the internal filter graph to be 
        ///  stopped and disconnected before the property
        ///  can be retrieved or set).
        ///  
        /// <para>
        ///  To stop video preview, set this property to null. </para>
        /// </remarks>
        public Control PreviewWindow
        {
            get { return (previewWindow); }
            set
            {
                assertStopped();
                // Disconnect the graph, swap the preview target, then
                // re-render. Preview is only wanted when both a window and a
                // video device exist.
                derenderGraph();
                previewWindow = value;
                wantPreviewRendered = ((previewWindow != null) && (videoDevice != null));
                renderGraph();
                startPreviewIfNeeded();
            }
        }

        /// <summary>
        ///  Available property pages.
        /// </summary>
        /// <remarks>
        ///  These are property pages exposed by the DirectShow filters.
        ///  They allow users to modify settings on the filters directly.
        ///
        /// <para>
        ///  The collection is built on first access and cached; later reads
        ///  return the cached object. Because of this, <b>the result can
        ///  differ depending on when the property is first read</b>. If you
        ///  see inconsistent values, read it immediately after constructing
        ///  the Capture class or immediately after setting the video and
        ///  audio compressors. </para>
        /// </remarks>
        public PropertyPageCollection PropertyPages
        {
            get
            {
                // Already built? Return the cached collection.
                if (propertyPages != null)
                    return (propertyPages);

                try
                {
                    // Aggregate pages from every filter and source we know about.
                    propertyPages = new PropertyPageCollection(
                        captureGraphBuilder,
                        videoDeviceFilter, audioDeviceFilter,
                        videoCompressorFilter, audioCompressorFilter,
                        VideoSources, AudioSources);
                }
                catch (Exception ex)
                {
                    Debug.WriteLine("PropertyPages: unable to get property pages." + ex.ToString());
                }
                return (propertyPages);
            }
        }

        /// <summary>
        ///  Write-only: pushes a grabbed frame to subscribers of
        ///  <see cref="FrameEvent2"/>.
        /// </summary>
        public System.Drawing.Bitmap SetBitmap
        {
            set
            {
                // Copy the delegate first and null-check it: the original
                // code threw NullReferenceException when no handler was
                // subscribed to FrameEvent2.
                HeFrame handler = FrameEvent2;
                if (handler != null)
                    handler(value);
            }
        }

        /// <summary>
        ///  Is the class currently stopped (any graph state other than
        ///  Capturing — i.e. Null, Created or Rendered). Read-only.
        /// </summary>
        public bool Stopped
        {
            get { return (graphState != GraphState.Capturing); }
        }

        /// <summary>
        ///  The TV Tuner or null if the current video device 
        ///  does not have a TV Tuner.
        /// </summary>
        public Tuner Tuner
        {
            get { return (tuner); }
        }

        /// <summary>
        ///  The capabilities of the video device.
        /// </summary>
        /// <remarks>
        ///  It may be required to cue the capture (see <see cref="Cue"/>)
        ///  before all capabilities are correctly reported.
        ///
        /// <para>
        ///  The value is computed on first access and cached; later reads
        ///  return the cached object. Because of this, <b>the result can
        ///  differ depending on when the property is first read</b>. If you
        ///  see inconsistent values, read it immediately after constructing
        ///  the Capture class or immediately after setting the video and
        ///  audio compressors. </para>
        /// </remarks>
        public VideoCapabilities VideoCaps
        {
            get
            {
                // Already built? Return the cached instance.
                if (videoCaps != null)
                    return (videoCaps);

                // Lazily build the capabilities from the video stream config,
                // if one is available. Failures are logged and null returned.
                if (videoStreamConfig != null)
                {
                    try
                    {
                        videoCaps = new VideoCapabilities(videoStreamConfig);
                    }
                    catch (Exception ex)
                    {
                        Debug.WriteLine("VideoCaps: unable to create videoCaps." + ex.ToString());
                    }
                }
                return (videoCaps);
            }
        }

        /// <summary> 
        ///  The video compression filter. When this property is changed 
        ///  the internal filter graph is rebuilt. This means that some properties
        ///  will be reset. Set this property as early as possible to avoid losing 
        ///  changes. This property cannot be changed while capturing.
        /// </summary>
        public Filter VideoCompressor
        {
            get { return (videoCompressor); }
            set
            {
                assertStopped();
                // Rebuild the graph around the new compressor: tear it down,
                // store the new filter, re-render, and resume preview if one
                // was running.
                destroyGraph();
                videoCompressor = value;
                renderGraph();
                startPreviewIfNeeded();
            }
        }

        /// <summary> 
        ///  The video capture device filter. Read-only. To use a different 
        ///  device, dispose of the current Capture instance and create a new 
        ///  instance with the desired device. 
        /// </summary>
        public Filter VideoDevice
        {
            get { return (videoDevice); }
        }

        /// <summary> 
        ///  The current video source. Use Capture.VideoSources to 
        ///  list available sources. Set to null to disable all 
        ///  sources (mute).
        /// </summary>
        public Source VideoSource
        {
            get { return (VideoSources.CurrentSource); }
            set { VideoSources.CurrentSource = value; }
        }

        /// <summary>
        ///  Collection of available video sources/physical connectors
        ///  on the current video device.
        /// </summary>
        /// <remarks>
        ///  In most cases, if the device has only one source, this collection
        ///  will be empty.
        ///
        /// <para>
        ///  The collection is built on first access and cached; later reads
        ///  return the cached object. Because of this, <b>the result can
        ///  differ depending on when the property is first read</b>. If you
        ///  see inconsistent values, read it immediately after constructing
        ///  the Capture class or immediately after setting the video and
        ///  audio compressors. </para>
        /// </remarks>
        public SourceCollection VideoSources
        {
            get
            {
                // Already built? Return the cached collection.
                if (videoSources != null)
                    return (videoSources);

                try
                {
                    // No video device means an empty collection.
                    videoSources = (videoDevice != null)
                        ? new SourceCollection(captureGraphBuilder, videoDeviceFilter, true)
                        : new SourceCollection();
                }
                catch (Exception ex)
                {
                    Debug.WriteLine("VideoSources: unable to create VideoSources." + ex.ToString());
                }
                return (videoSources);
            }
        }

        #endregion Properties

        #region Methods

        /// <summary>
        ///  Prepare for capturing. Use this method when capturing 
        ///  must begin as quickly as possible. 
        /// </summary>
        /// <remarks>
        ///  This will create/overwrite a zero byte file with 
        ///  the name set in the Filename property. 
        ///  
        /// <para>
        ///  This will disable preview. Preview will resume
        ///  once capture begins. This problem can be fixed
        ///  if someone is willing to make the change. </para>
        ///  
        /// <para>
        ///  This method is optional. If Cue() is not called, 
        ///  Start() will call it before capturing. This method
        ///  cannot be called while capturing. </para>
        /// </remarks>
        public void Cue()
        {
            assertStopped();

            // We want the capture stream rendered
            wantCaptureRendered = true;

            // Re-render the graph (if necessary)
            renderGraph();

            // Pause the graph so that Start() only has to call Run().
            // Any DirectShow failure HRESULT is surfaced as an exception.
            int hr = mediaControl.Pause();
            if (hr != 0) Marshal.ThrowExceptionForHR(hr);
        }

        /// <summary> 
        ///  Calls Stop, releases all references. If a capture is in progress
        ///  it will be stopped, but the CaptureComplete event will NOT fire.
        /// </summary>
        public void Dispose()
        {
            wantPreviewRendered = false;
            wantCaptureRendered = false;

            // Best-effort teardown of the filter graph.
            try { destroyGraph(); }
            catch { }

            // The original code relied on misleading one-line indentation
            // here: only the Dispose() calls were guarded by the null checks,
            // while the "= null" assignments always ran. Braces make that
            // explicit.
            if (videoSources != null)
            {
                videoSources.Dispose();
            }
            videoSources = null;

            if (audioSources != null)
            {
                audioSources.Dispose();
            }
            audioSources = null;

            // The finalizer only calls Dispose(); once disposed there is no
            // reason for the GC to run it again.
            GC.SuppressFinalize(this);
        }

        /// <summary>
        ///  Begin grabbing frames: allocates the reusable frame buffer (sized
        ///  from the current video format plus 64KB of slack) and registers
        ///  this object as the sample grabber callback.
        ///  (Name "GrapImg" is a typo kept for interface compatibility.)
        /// </summary>
        public void GrapImg()
        {
            Trace.Write("IMG");
            if (savedArray == null)
            {
                int size = videoInfoHeader.BmiHeader.ImageSize;
                // Sanity bounds: silently ignore absurdly small or large
                // frame sizes (below ~1KB or above ~16MB).
                if ((size < 1000) || (size > 16000000))
                    return;
                savedArray = new byte[size + 64000];
            }
            // Mode 1 selects the BufferCB callback (per ISampleGrabber::SetCallback).
            sampGrabber.SetCallback(this, 1);
        }

        /// <summary>
        ///  ISampleGrabberCB callback invoked with each captured frame.
        ///  Copies the sample into savedArray, wraps it in a bottom-up 24bpp
        ///  Bitmap, tracks the incoming frame rate, and JPEG-encodes every
        ///  second frame.
        /// </summary>
        int ISampleGrabberCB.BufferCB(double SampleTime, IntPtr pBuffer, int BufferLen)
        {
            bufferedSize = BufferLen;
            int w = videoInfoHeader.BmiHeader.Width;
            int h = videoInfoHeader.BmiHeader.Height;
            int stride = w * 3; // 24bpp RGB; assumes no row padding — TODO confirm

            Marshal.Copy(pBuffer, savedArray, 0, BufferLen);

            // Pin savedArray and point the Bitmap at its last row with a
            // negative stride (DIB data is stored bottom-up). The pin must
            // stay alive for the Bitmap's whole lifetime: the original code
            // freed the handle before using the Bitmap, leaving it pointing
            // at memory the GC was free to move.
            GCHandle handle = GCHandle.Alloc(savedArray, GCHandleType.Pinned);
            try
            {
                // Use 64-bit pointer arithmetic: the original (int) cast of
                // the pinned address truncates in 64-bit processes.
                long scan0 = handle.AddrOfPinnedObject().ToInt64();
                scan0 += (long)(h - 1) * stride;
                Bitmap b = new Bitmap(w, h, -stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, new IntPtr(scan0));
                try
                {
                    numberofFrame++;

                    // Once per second, adapt modFactor to the measured frame count.
                    TimeSpan timeDiff = DateTime.Now - time;
                    if (timeDiff.TotalMilliseconds > 1000)
                    {
                        log.Debug("numberofFrame==>" + numberofFrame + "  modFactor==" + modFactor);
                        if (numberofFrame <= 5)
                        { modFactor = 1; }
                        else if (numberofFrame <= 10)
                        { modFactor = 3; }
                        else if (numberofFrame <= 15)
                        { modFactor = 4; }
                        else if (numberofFrame <= 20)
                        { modFactor = 5; }
                        else
                        { modFactor = 6; }
                        numberofFrame = 0;
                        time = DateTime.Now;
                    }

                    // NOTE(review): skips odd frames with a fixed % 2 even
                    // though modFactor was just computed above — possibly
                    // intended to be (numberofFrame % modFactor). Behavior
                    // preserved as-is; confirm intent.
                    if (numberofFrame % 2 == 0)
                    {
                        try
                        {
                            ms = new MemoryStream();
                            b.Save(ms, jpgEncoder, myEncoderParameters);
                            byte[] bmpBytes = ms.ToArray();
                            ms.Close();
                            bmpBytes = null;
                        }
                        catch (Exception ex)
                        {
                            log.Error("Vedio image==>" + ex.StackTrace);
                        }
                    }
                }
                finally
                {
                    b.Dispose();
                }
            }
            finally
            {
                handle.Free();
            }
            return 0;
        }

        /// <summary>
        ///  ISampleGrabberCB callback for whole media samples. Unused by this
        ///  class (GrapImg registers BufferCB mode); only traces and returns S_OK.
        /// </summary>
        int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
        {
            Trace.Write("Sample");
            return 0;
        }

        /// <summary> Begin capturing. Runs the filter graph and moves the state to Capturing. </summary>
        public void Start()
        {
            assertStopped();

            // We want the capture stream rendered
            wantCaptureRendered = true;

            // Re-render the graph (if necessary)
            renderGraph();

            // Start the filter graph: begin capturing.
            // Any DirectShow failure HRESULT is surfaced as an exception.
            int hr = mediaControl.Run();
            if (hr != 0) Marshal.ThrowExceptionForHR(hr);

            // Update the state
            graphState = GraphState.Capturing;
        }

        /// <summary> 
        ///  Stop the current capture. If there is no
        ///  current capture, this method will succeed.
        /// </summary>
        public void Stop()
        {
            wantCaptureRendered = false;

            // Stop the graph if it is running.
            // If we have a preview running we should only stop the
            // capture stream. However, if we have a preview stream
            // we need to re-render the graph anyways because we
            // need to get rid of the capture stream. To re-render
            // we need to stop the entire graph.
            if (mediaControl != null)
            {
                mediaControl.Stop();
            }

            // Update the state (only if we were actually capturing).
            if (graphState == GraphState.Capturing)
            {
                graphState = GraphState.Rendered;
                //if ( CaptureComplete != null )
                //	CaptureComplete( this, null );
            }

            // So we destroy the capture stream IF
            // we need a preview stream. If we don't
            // this will leave the graph as it is.
            // Both calls are best-effort: failures here are swallowed.
            try { renderGraph(); }
            catch { }
            try { startPreviewIfNeeded(); }
            catch { }
        }

        /// <summary>
        ///  Assert that the class is in a Stopped state.
        /// </summary>
        /// <remarks>
        ///  NOTE(review): the actual check is commented out below, so this is
        ///  currently a no-op — callers are NOT protected against changing
        ///  settings mid-capture. Confirm whether this was disabled on purpose
        ///  before re-enabling.
        /// </remarks>
        protected void assertStopped()
        {
            //if ( !Stopped )
            //throw new InvalidOperationException( "This operation not allowed while Capturing. Please Stop the current capture." );
        }

        // --------------------- Private Methods -----------------------
        /// <summary> 
        ///  Create a new filter graph and add filters (devices, compressors, 
        ///  misc), but leave the filters unconnected. Call renderGraph()
        ///  to connect the filters.
        /// </summary>
        protected void createGraph()
        {
            Guid cat;
            Guid med;
            int hr;
            Type comType = null;
            object comObj = null;

            // Ensure required properties are set
            if (videoDevice == null)
                throw new ArgumentException("The video device have not been set. Please set one or both to valid capture devices.\n");

            // Skip if the graph was already created (state machine: Null -> Created -> Rendered -> Capturing)
            if ((int)graphState < (int)GraphState.Created)
            {
                // Garbage collect, ensure that previous filters are released
                GC.Collect();

                // Make a new filter graph
                graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

                // Get the Capture Graph Builder
                Guid clsid = Clsid.CaptureGraphBuilder2;
                Guid riid = typeof(ICaptureGraphBuilder2).GUID;
                captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);

                // Link the CaptureGraphBuilder to the filter graph
                hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
                if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                // Create the SampleGrabber filter used to pull RGB24 frames out of the stream
                comType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (comType == null)
                    throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
                comObj = Activator.CreateInstance(comType);
                sampGrabber = (ISampleGrabber)comObj; comObj = null;

                baseGrabFlt = (IBaseFilter)sampGrabber;

                // Add the graph to the Running Object Table so it can be
                // viewed with GraphEdit
            #if DEBUG
                DsROT.AddGraphToRot(graphBuilder, out rotCookie);
            #endif

                // NOTE(review): this AMMediaType instance is also read at the
                // bottom of this method (media.formatPtr) — see the note there.
                AMMediaType media = new AMMediaType();
                // Get the video device and add it to the filter graph
                if (VideoDevice != null)
                {
                    videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);
                    hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);

                    // Ask the grabber for uncompressed RGB24 video frames
                    media.majorType = MediaType.Video;
                    media.subType = MediaSubType.RGB24; //Rajib
                    media.formatType = FormatType.VideoInfo;		// ??? — format block itself is not filled in here
                    hr = sampGrabber.SetMediaType(media);
                    if (hr < 0)
                        Marshal.ThrowExceptionForHR(hr);

                    hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                }

                // Get the audio device and add it to the filter graph
                //if (AudioDevice != null)
                //{
                //    audioDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(AudioDevice.MonikerString);
                //    hr = graphBuilder.AddFilter(audioDeviceFilter, "Audio Capture Device");
                //    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                //}

                //// Get the video compressor and add it to the filter graph
                //if (VideoCompressor != null)
                //{
                //    videoCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(VideoCompressor.MonikerString);
                //    hr = graphBuilder.AddFilter(videoCompressorFilter, "Video Compressor");
                //    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                //}

                //// Get the audio compressor and add it to the filter graph
                //if (AudioCompressor != null)
                //{
                //    audioCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(AudioCompressor.MonikerString);
                //    hr = graphBuilder.AddFilter(audioCompressorFilter, "Audio Compressor");
                //    if (hr < 0) Marshal.ThrowExceptionForHR(hr);
                //}

                // Retrieve the stream control interface for the video device
                // FindInterface will also add any required filters
                // (WDM devices in particular may need additional
                // upstream filters to function).

                // Try looking for an interleaved media type first,
                // then fall back to a plain video media type.
                object o;
                cat = PinCategory.Capture;
                med = MediaType.Interleaved;
                Guid iid = typeof(IAMStreamConfig).GUID;
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, videoDeviceFilter, ref iid, out o);

                if (hr != 0)
                {
                    // If not found, try looking for a video media type
                    med = MediaType.Video;
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, videoDeviceFilter, ref iid, out o);

                    if (hr != 0)
                        o = null;
                }
                videoStreamConfig = o as IAMStreamConfig;

                // Retrieve the stream control interface for the audio device.
                // NOTE(review): the audio-device block above is commented out, so
                // audioDeviceFilter is presumably null here and this lookup will
                // fail, leaving audioStreamConfig null — confirm intended.
                o = null;
                cat = PinCategory.Capture;
                med = MediaType.Audio;
                iid = typeof(IAMStreamConfig).GUID;
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, audioDeviceFilter, ref iid, out o);
                if (hr != 0)
                    o = null;
                audioStreamConfig = o as IAMStreamConfig;

                // Retrieve the media control interface (for starting/stopping graph)
                mediaControl = (IMediaControl)graphBuilder;

                // Reload any video crossbars
                // (note: the "= null" after the semicolon runs unconditionally — it is
                // a separate statement, not part of the if)
                if (videoSources != null) videoSources.Dispose(); videoSources = null;

                // Reload any audio crossbars
                if (audioSources != null) audioSources.Dispose(); audioSources = null;

                // Reload any property pages exposed by filters
                if (propertyPages != null) propertyPages.Dispose(); propertyPages = null;

                // Reload capabilities of video device
                videoCaps = null;

                // Reload capabilities of audio device
                audioCaps = null;

                // Retrieve TV Tuner if available (interleaved first, then video)
                o = null;
                cat = PinCategory.Capture;
                med = MediaType.Interleaved;
                iid = typeof(IAMTVTuner).GUID;
                hr = captureGraphBuilder.FindInterface(
                    ref cat, ref med, videoDeviceFilter, ref iid, out o);
                if (hr != 0)
                {
                    med = MediaType.Video;
                    hr = captureGraphBuilder.FindInterface(
                        ref cat, ref med, videoDeviceFilter, ref iid, out o);
                    if (hr != 0)
                        o = null;
                }
                IAMTVTuner t = o as IAMTVTuner;
                if (t != null)
                    tuner = new Tuner(t);

                // NOTE(review): media.formatPtr was never populated in this method
                // (only majorType/subType/formatType were assigned above) — unless
                // SetMediaType filled it in, this PtrToStructure reads from a null
                // pointer. Confirm where formatPtr is expected to come from.
                videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
                Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero;
                //     StartSampleGrabberToSend();
                // Configure the grabber: no buffering, continuous (not one-shot), no callback
                hr = sampGrabber.SetBufferSamples(false);
                if (hr == 0)
                    hr = sampGrabber.SetOneShot(false);
                if (hr == 0)
                    hr = sampGrabber.SetCallback(null, 0);
                if (hr < 0)
                    Marshal.ThrowExceptionForHR(hr);
                /*
                            // ----------- VMR 9 -------------------
                            //## check out samples\inc\vmrutil.h :: RenderFileToVMR9

                            IBaseFilter vmr = null;
                            if ( ( VideoDevice != null ) && ( previewWindow != null ) )
                            {
                                vmr = (IBaseFilter) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.VideoMixingRenderer9, true ) );
                                hr = graphBuilder.AddFilter( vmr, "VMR" );
                                if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

                                IVMRFilterConfig9 vmrFilterConfig = (IVMRFilterConfig9) vmr;
                                hr = vmrFilterConfig.SetRenderingMode( VMRMode9.Windowless );
                                if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

                                IVMRWindowlessControl9 vmrWindowsless = (IVMRWindowlessControl9) vmr;
                                hr = vmrWindowsless.SetVideoClippingWindow( previewWindow.Handle );
                                if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
                            }
                            //-------------------------------------------

                            // ---------- SmartTee ---------------------

                            IBaseFilter smartTeeFilter = (IBaseFilter) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.SmartTee, true ) );
                            hr = graphBuilder.AddFilter( smartTeeFilter, "Video Smart Tee" );
                            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

                            // Video -> SmartTee
                            cat = PinCategory.Capture;
                            med = MediaType.Video;
                            hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, null, smartTeeFilter );
                            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

                            // smarttee -> mux
                            cat = PinCategory.Capture;
                            med = MediaType.Video;
                            hr = captureGraphBuilder.RenderStream( ref cat, ref med, smartTeeFilter, null, muxFilter );
                            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

                            // smarttee -> vmr
                            cat = PinCategory.Preview;
                            med = MediaType.Video;
                            hr = captureGraphBuilder.RenderStream( ref cat, ref med, smartTeeFilter, null, vmr );
                            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

                            // -------------------------------------
                */
                // Update the state now that we are done
                graphState = GraphState.Created;

            }
Code example #2
0
        // ------------- Constructors/Destructors --------------
        /// <summary> 
        ///  Create a new capture object. Either videoDevice or audioDevice
        ///  may be null when that stream is not wanted, but at least one of
        ///  them must be a valid device. Use the <see cref="Filters"/>
        ///  class to enumerate the available devices.
        ///  </summary>
        public StartVideoCapture(Filter videoDevice, Filter audioDevice)
        {
            // At least one capture device is mandatory.
            bool noDeviceSupplied = (videoDevice == null) && (audioDevice == null);
            if (noDeviceSupplied)
                throw new ArgumentException("The videoDevice and/or the audioDevice parameter must be set to a valid Filter.\n");

            this.videoDevice = videoDevice;
            this.audioDevice = audioDevice;
            this.Filename = getTempFilename();

            createGraph();
        }
Code example #3
0
        /// <summary> Fill the InnerList with the filters found in the given DirectShow category. </summary>
        protected void GetFilters(Guid category)
        {
            object sysDevEnum = null;
            ICreateDevEnum devEnum = null;
            UCOMIEnumMoniker monikerEnum = null;
            UCOMIMoniker[] moniker = new UCOMIMoniker[1];

            try
            {
                // Instantiate the system device enumerator.
                Type srvType = Type.GetTypeFromCLSID(Clsid.SystemDeviceEnum);
                if (srvType == null)
                {
                    throw new NotImplementedException("System Device Enumerator");
                }
                sysDevEnum = Activator.CreateInstance(srvType);
                devEnum = (ICreateDevEnum)sysDevEnum;

                // Ask for an enumerator over the requested category.
                int hr = devEnum.CreateClassEnumerator(ref category, out monikerEnum, 0);
                if (hr != 0)
                {
                    throw new NotSupportedException("No devices of the category");
                }

                // Walk every moniker in the category and wrap it in a Filter.
                while (true)
                {
                    int fetched;
                    hr = monikerEnum.Next(1, moniker, out fetched);
                    if ((hr != 0) || (moniker[0] == null))
                    {
                        break;
                    }

                    base.InnerList.Add(new Filter(moniker[0]));

                    // Release the moniker before fetching the next one.
                    Marshal.ReleaseComObject(moniker[0]);
                    moniker[0] = null;
                }

                // Present the filters in sorted order.
                base.InnerList.Sort();
            }
            finally
            {
                // Release any COM references still outstanding.
                devEnum = null;
                if (moniker[0] != null)
                {
                    Marshal.ReleaseComObject(moniker[0]);
                }
                moniker[0] = null;
                if (monikerEnum != null)
                {
                    Marshal.ReleaseComObject(monikerEnum);
                }
                monikerEnum = null;
                if (sysDevEnum != null)
                {
                    Marshal.ReleaseComObject(sysDevEnum);
                }
                sysDevEnum = null;
            }
        }