Example #1
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();

            hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            // Grab the size info
            VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));

            m_videoWidth  = videoInfoHeader.BmiHeader.Width;
            m_videoHeight = videoInfoHeader.BmiHeader.Height;
            m_stride      = m_videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
Example #2
        public static AMMediaType GetVideoMediaType(short bitCount, int width, int height)
        {
            Guid mediaSubType = GetMediaSubTypeForBitCount(bitCount);
            var VideoGroupType = new AMMediaType();

            VideoGroupType.majorType = MediaType.Video;
            VideoGroupType.subType = mediaSubType;
            VideoGroupType.formatType = FormatType.VideoInfo;
            VideoGroupType.fixedSizeSamples = true;

            VideoGroupType.formatSize = Marshal.SizeOf(typeof (VideoInfoHeader));
            var vif = new VideoInfoHeader();
            vif.BmiHeader = new BitmapInfoHeader();

            // The HEADER macro returns the BITMAPINFO within the VIDEOINFOHEADER
            vif.BmiHeader.Size = Marshal.SizeOf(typeof (BitmapInfoHeader));
            vif.BmiHeader.Compression = 0;
            vif.BmiHeader.BitCount = bitCount;
            vif.BmiHeader.Width = width;
            vif.BmiHeader.Height = height;
            vif.BmiHeader.Planes = 1;

            int iSampleSize = vif.BmiHeader.Width*vif.BmiHeader.Height*(vif.BmiHeader.BitCount/8);
            vif.BmiHeader.ImageSize = iSampleSize;
            VideoGroupType.sampleSize = iSampleSize;
            VideoGroupType.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(vif));

            Marshal.StructureToPtr(vif, VideoGroupType.formatPtr, false);

            return VideoGroupType;
        }
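A minimal usage sketch, not from the example source: it assumes an ISampleGrabber named sampleGrabber is already in the filter graph and that GetVideoMediaType is the helper shown above.

        // Hypothetical caller: build a 24-bit 640x480 RGB media type and hand it to the sample grabber.
        AMMediaType mt = GetVideoMediaType(24, 640, 480);
        try
        {
            int hr = sampleGrabber.SetMediaType(mt);
            DsError.ThrowExceptionForHR(hr);
        }
        finally
        {
            // FreeAMMediaType also releases the CoTaskMem block allocated for formatPtr.
            DsUtils.FreeAMMediaType(mt);
        }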
Example #3
    // Save the size parameters for use in SnapShot
    private void SaveSizeInfo(DirectShowLib.ISampleGrabber sampGrabber)
    {
        int hr;

        // Get the media type from the SampleGrabber
        DirectShowLib.AMMediaType media = new DirectShowLib.AMMediaType();

        hr = sampGrabber.GetConnectedMediaType(media);
        DsError.ThrowExceptionForHR(hr);

        hr = sampGrabber.SetMediaType(media);
        DsError.ThrowExceptionForHR(hr);

        try
        {
            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            // Get the struct
            DirectShowLib.VideoInfoHeader videoInfoHeader = new DirectShowLib.VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, videoInfoHeader);

            // Grab the size info
            m_videoWidth  = videoInfoHeader.BmiHeader.Width;
            m_videoHeight = videoInfoHeader.BmiHeader.Height;
            m_stride      = m_videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);
            m_ImageSize   = m_videoWidth * m_videoHeight * 3;
        }
        finally
        {
            DirectShowLib.DsUtils.FreeAMMediaType(media);
            media = null;
        }
    }
Example #4
 public bool Equals(VideoInfoHeader videoInfo)
 {
     if (videoInfo.BmiHeader.Width == this.Width &&
         videoInfo.BmiHeader.Height == this.Height &&
         videoInfo.BitRate == this.BitRate)
         return true;
     return false;
 }
 private ResolutionInfo(AMMediaType media)
 {
     var videoInfo = new VideoInfoHeader();
     Marshal.PtrToStructure(media.formatPtr, videoInfo);
     Width = videoInfo.BmiHeader.Width;
     Height = videoInfo.BmiHeader.Height;
     Bpp = videoInfo.BmiHeader.BitCount;
 }
 public VideoOutPinConfiguration( IBaseFilter filter, IPin pin, int format_id, VideoInfoHeader header )
 {
     this.filter = filter;
     this.pin = pin;
     this.width = header.BmiHeader.Width;
     this.height = header.BmiHeader.Height;
     this.fps = 10000000 / header.AvgTimePerFrame;
     this.format_id = format_id;
 }
Example #7
    // Set the Framerate, and video size
    private void SetConfigParms(DirectShowLib.ICaptureGraphBuilder2 capGraph, DirectShowLib.IBaseFilter capFilter, int iFrameRate, int iWidth, int iHeight)
    {
        int    hr;
        object o;

        DirectShowLib.AMMediaType media;

        // Find the stream config interface
        hr = capGraph.FindInterface(
            DirectShowLib.PinCategory.Capture, DirectShowLib.MediaType.Video, capFilter, typeof(DirectShowLib.IAMStreamConfig).GUID, out o);

        DirectShowLib.IAMStreamConfig videoStreamConfig = o as DirectShowLib.IAMStreamConfig;
        if (videoStreamConfig == null)
        {
            throw new Exception("Failed to get IAMStreamConfig");
        }

        // Get the existing format block
        hr = videoStreamConfig.GetFormat(out media);
        DsError.ThrowExceptionForHR(hr);

        // copy out the videoinfoheader
        DirectShowLib.VideoInfoHeader v = new DirectShowLib.VideoInfoHeader();
        Marshal.PtrToStructure(media.formatPtr, v);

        // if overriding the framerate, set the frame rate
        if (iFrameRate > 0)
        {
            v.AvgTimePerFrame = 10000000 / iFrameRate;
        }

        // if overriding the width, set the width
        if (iWidth > 0)
        {
            v.BmiHeader.Width = iWidth;
        }

        // if overriding the Height, set the Height
        if (iHeight > 0)
        {
            v.BmiHeader.Height = iHeight;
        }

        // Copy the media structure back
        Marshal.StructureToPtr(v, media.formatPtr, false);

        // Set the new format
        hr = videoStreamConfig.SetFormat(media);
        DsError.ThrowExceptionForHR(hr);

        DirectShowLib.DsUtils.FreeAMMediaType(media);
        media = null;
    }
Example #8
        // Set the Framerate, and video size
        private void SetConfigParms(IPin pStill, int iWidth, int iHeight, short iBPP)
        {
            int             hr;
            AMMediaType     media;
            VideoInfoHeader v;

            IAMStreamConfig videoStreamConfig = pStill as IAMStreamConfig;

            // Get the existing format block
            hr = videoStreamConfig.GetFormat(out media);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                // copy out the videoinfoheader
                v = new VideoInfoHeader();
                Marshal.PtrToStructure(media.formatPtr, v);

                // if overriding the width, set the width
                if (iWidth > 0)
                {
                    v.BmiHeader.Width = iWidth;
                }

                // if overriding the Height, set the Height
                if (iHeight > 0)
                {
                    v.BmiHeader.Height = iHeight;
                }

                // if overriding the bits per pixel
                if (iBPP > 0)
                {
                    v.BmiHeader.BitCount = iBPP;
                }

                // Copy the media structure back
                Marshal.StructureToPtr(v, media.formatPtr, false);

                // Set the new format
                hr = videoStreamConfig.SetFormat(media);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
        }
Example #9
        /// <summary>
        /// Gets the video resolution of a pin on a renderer.
        /// </summary>
        /// <param name="renderer">The renderer to inspect</param>
        /// <param name="direction">The direction the pin is</param>
        /// <param name="pinIndex">The zero based index of the pin to inspect</param>
        /// <returns>If successful a video resolution is returned.  If not, a 0x0 size is returned</returns>
        protected static Size GetVideoSize(IBaseFilter renderer, PinDirection direction, int pinIndex)
        {
            var size = new Size();

            var mediaType = new AMMediaType();
            IPin pin = DsFindPin.ByDirection(renderer, direction, pinIndex);

            if (pin == null)
                goto done;

            int hr = pin.ConnectionMediaType(mediaType);

            if (hr != 0)
                goto done;

            /* Check to see if its a video media type */
            if (mediaType.formatType != FormatType.VideoInfo2 &&
                mediaType.formatType != FormatType.VideoInfo)
            {
                goto done;
            }

            var videoInfo = new VideoInfoHeader();

            /* Read the video info header struct from the native pointer */
            Marshal.PtrToStructure(mediaType.formatPtr, videoInfo);

            Rectangle rect = videoInfo.SrcRect.ToRectangle();
            size = new Size(rect.Width, rect.Height);

        done:
            DsUtils.FreeAMMediaType(mediaType);

            if (pin != null)
                Marshal.ReleaseComObject(pin);
            return size;
        }
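A call sketch (hypothetical; "vmr9" stands in for whichever renderer filter the graph actually uses). The 0x0 size is the only failure signal, so callers should check it.

            // Hypothetical usage against a renderer filter that is already connected.
            Size nativeSize = GetVideoSize(vmr9, PinDirection.Input, 0);
            if (nativeSize.Width == 0 || nativeSize.Height == 0)
            {
                // Pin missing, not connected, or not carrying a VideoInfo/VideoInfo2 format.
            }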
Example #10
        /// <summary>
        /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice.
        /// </summary>
        /// <param name="FileName">The video file to open</param>
        /// <param name="graphicsDevice">XNA Graphics Device</param>
        public VideoPlayer(string FileName, GraphicsDevice graphicsDevice)
        {
            try
            {
                // Set video state
                currentState = VideoState.Stopped;

                // Store Filename
                filename = FileName;

                // Open DirectShow Interfaces
                InitInterfaces();

                // Create a SampleGrabber Filter and add it to the FilterGraph
                SampleGrabber sg = new SampleGrabber();
                ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
                DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));

                // Setup Media type info for the SampleGrabber
                AMMediaType mt = new AMMediaType();

                //mt.majorType = MediaType.Video;     // Video
                mt.majorType = MEDIATYPE_Video;
                mt.subType = MediaSubType.RGB24;    // RGB24
                mt.formatType = FormatType.VideoInfo;   // VideoInfo

                DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));

                // Construct the rest of the FilterGraph
                DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));

                // Set SampleGrabber Properties
                DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
                DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
                DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

                // Hide Default Video Window
                IVideoWindow pVideoWindow = (IVideoWindow)gb;
                DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

                // Create AMMediaType to capture video information
                AMMediaType MediaType = new AMMediaType();
                DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
                VideoInfoHeader pVideoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

                // Store video information
                videoHeight = pVideoHeader.BmiHeader.Height;
                videoWidth = pVideoHeader.BmiHeader.Width;
                avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
                bitRate = pVideoHeader.BitRate;
                DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));

                // Create byte arrays to hold video data
                videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
                bgrData = new byte[(videoHeight * videoWidth) * 3];         // BGR24 format (3 bytes per pixel)

                // Create Output Frame Texture2D with the height and width of the video
                outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, false, SurfaceFormat.Color);
            }
            catch
            {
                throw new Exception("Unable to Load or Play the video file");
            }
        }
Example #11
        public Bitmap getCap()
        {
            try
            {
                if (samplegrabber == null) return null;
                MemoryStream ms = new MemoryStream();
                VideoInfoHeader videoheader = new VideoInfoHeader();
                AMMediaType grab = new AMMediaType();

                samplegrabber.GetConnectedMediaType(grab);
                videoheader = (VideoInfoHeader)Marshal.PtrToStructure(grab.formatPtr, typeof(VideoInfoHeader));
                int width = videoheader.BmiHeader.Width;
                int height = videoheader.BmiHeader.Height;
                Bitmap b = new Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
                uint bytesPerPixel = (uint)(24 >> 3);
                uint extraBytes = ((uint)width * bytesPerPixel) % 4;
                uint adjustedLineSize = bytesPerPixel * ((uint)width + extraBytes);
                uint sizeOfImageData = (uint)(height) * adjustedLineSize;
                BitmapData bd1 = b.LockBits(new System.Drawing.Rectangle(0, 0, width, height), ImageLockMode.ReadWrite, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
                int bufsize = (int)sizeOfImageData;
                int n = samplegrabber.GetCurrentBuffer(ref bufsize, bd1.Scan0);
                b.UnlockBits(bd1);
                b.RotateFlip(RotateFlipType.RotateNoneFlipY);
                return b;
            }
            catch (Exception ex)
            {
                log(ex.ToString());
                return null;
            }
        }
Example #12
        // Set the Framerate, and video size
        private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iFrameRate, int iWidth, int iHeight)
        {
            int hr;
            object o;
            AMMediaType media;
            IAMStreamConfig videoStreamConfig;
            IAMVideoControl videoControl = capFilter as IAMVideoControl;

            // Find the stream config interface
            hr = capGraph.FindInterface(
                PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o );

            videoStreamConfig = o as IAMStreamConfig;
            try
            {
                if (videoStreamConfig == null)
                {
                    throw new Exception("Failed to get IAMStreamConfig");
                }

                hr = videoStreamConfig.GetFormat(out media);
                DsError.ThrowExceptionForHR( hr );

                // copy out the videoinfoheader
                VideoInfoHeader v = new VideoInfoHeader();
                Marshal.PtrToStructure( media.formatPtr, v );

                // if overriding the framerate, set the frame rate
                if (iFrameRate > 0)
                {
                    v.AvgTimePerFrame = 10000000 / iFrameRate;
                }

                // if overriding the width, set the width
                if (iWidth > 0)
                {
                    v.BmiHeader.Width = iWidth;
                }

                // if overriding the Height, set the Height
                if (iHeight > 0)
                {
                    v.BmiHeader.Height = iHeight;
                }

                // Copy the media structure back
                Marshal.StructureToPtr( v, media.formatPtr, false );

                // Set the new format
                hr = videoStreamConfig.SetFormat( media );
                DsError.ThrowExceptionForHR( hr );

                DsUtils.FreeAMMediaType(media);
                media = null;

                // Fix upsidedown video
                if (videoControl != null)
                {
                    VideoControlFlags pCapsFlags;

                    IPin pPin = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
                    hr = videoControl.GetCaps(pPin, out pCapsFlags);
                    DsError.ThrowExceptionForHR( hr );

                    if ((pCapsFlags & VideoControlFlags.FlipVertical) > 0)
                    {
                        hr = videoControl.GetMode(pPin, out pCapsFlags);
                        DsError.ThrowExceptionForHR( hr );

                        hr = videoControl.SetMode(pPin, 0);
                    }
                }
            }
            finally
            {
                Marshal.ReleaseComObject(videoStreamConfig);
            }
        }
Example #13
        private void SetupGraph(Control hWin, string filename)
        {
            pGraphBuilder   = (DirectShowLib.IGraphBuilder) new FilterGraph();
            pMediaControl   = (DirectShowLib.IMediaControl)pGraphBuilder;
            pVideoWindow    = (DirectShowLib.IVideoWindow)pGraphBuilder;
            pVideoFrameStep = (DirectShowLib.IVideoFrameStep)pGraphBuilder; // video frame...
            pMediaPosition  = (DirectShowLib.IMediaPosition)pGraphBuilder;
            //DirectShowLib.IBaseFilter pBaseFilter = (DirectShowLib.IBaseFilter)pGraphBuilder;

            //pMediaSeeking = (DirectShowLib.IMediaSeeking)pGraphBuilder;
            //pMediaSeeking.SetPositions(5000, AMSeekingSeekingFlags.AbsolutePositioning, 6000, AMSeekingSeekingFlags.AbsolutePositioning);

            //test
            DirectShowLib.ICaptureGraphBuilder2  pCaptureGraphBuilder2;
            DirectShowLib.IBaseFilter            pRenderer;
            DirectShowLib.IVMRFilterConfig9      pIVMRFilterConfig9;
            DirectShowLib.IVMRWindowlessControl9 pVMRWC9;

            pCaptureGraphBuilder2 = (DirectShowLib.ICaptureGraphBuilder2) new CaptureGraphBuilder2();
            pCaptureGraphBuilder2.SetFiltergraph(pGraphBuilder);     // Attach the capture graph to the GraphBuilder.

            //pGraphBuilder.AddFilter(pMediaControl "SDZ 375 Source");  // Add the video capture device filter to the GraphBuilder.
            pRenderer          = (DirectShowLib.IBaseFilter) new DirectShowLib.VideoMixingRenderer9(); // Create the mixing renderer filter.
            pIVMRFilterConfig9 = (DirectShowLib.IVMRFilterConfig9)pRenderer;                           // Configure the mixing renderer filter.
            pIVMRFilterConfig9.SetRenderingMode(VMR9Mode.Windowless);
            pIVMRFilterConfig9.SetNumberOfStreams(2);

            pVMRWC9 = (DirectShowLib.IVMRWindowlessControl9)pRenderer;              // Configure the overlay surface.
            pVMRWC9.SetVideoClippingWindow(hWin.Handle);
            pVMRWC9.SetBorderColor(0);
            pVMRWC9.SetVideoPosition(null, hWin.ClientRectangle);
            pGraphBuilder.AddFilter(pRenderer, "Video Mixing Renderer");                               // Add the mixing renderer filter to the GraphBuilder.
            pCaptureGraphBuilder2.RenderStream(null, MediaType.Video, pGraphBuilder, null, pRenderer); // Set up the filters used to display the video.
            ///test

            //sampleGrabber
            AMMediaType am_media_type = new AMMediaType();

            pSampleGrabber           = (DirectShowLib.ISampleGrabber) new SampleGrabber();
            pSampleGrabberFilter     = (DirectShowLib.IBaseFilter)pSampleGrabber;
            am_media_type.majorType  = MediaType.Video;
            am_media_type.subType    = MediaSubType.RGB24;
            am_media_type.formatType = FormatType.VideoInfo;
            pSampleGrabber.SetMediaType(am_media_type);
            //Add the SampleGrabber filter to the graph
            pGraphBuilder.AddFilter(pSampleGrabberFilter, "Sample Grabber");

            pMediaControl.RenderFile(filename);

            pVideoWindow.put_Owner(hWin.Handle);
            pVideoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings);
            Rectangle rect = hWin.ClientRectangle;

            pVideoWindow.SetWindowPosition(0, 0, rect.Right, rect.Bottom);

            //sampleGrabber2
            pSampleGrabber.GetConnectedMediaType(am_media_type);
            DirectShowLib.VideoInfoHeader pVideoInfoHeader = (DirectShowLib.VideoInfoHeader)Marshal.PtrToStructure(am_media_type.formatPtr, typeof(VideoInfoHeader));
            String str = string.Format("size = {0} x {1}", pVideoInfoHeader.BmiHeader.Width, pVideoInfoHeader.BmiHeader.Height);

            Video_Width   = pVideoInfoHeader.BmiHeader.Width;
            Video_Height  = pVideoInfoHeader.BmiHeader.Height;
            str          += string.Format("sample size = {0}", am_media_type.sampleSize);
            textBox1.Text = str;
            DsUtils.FreeAMMediaType(am_media_type);
            //SetBufferSamples must be called, otherwise no data can be read from the buffer.
            //Pass false to avoid unnecessary overhead, or true if you want to retrieve the frame data.
            pSampleGrabber.SetBufferSamples(true);

            //play time
            pMediaPosition = (DirectShowLib.IMediaPosition)pGraphBuilder;
            double Length;

            pMediaPosition.get_Duration(out Length);
            String str2 = string.Format("play time: {0}", Length);

            textBox1.Text = str2;
            pMediaPosition.put_CurrentPosition(5.0); //set current Position



            //2017.05.08
            DirectShowLib.IVMRWindowlessControl9 windowlessCtrl = (DirectShowLib.IVMRWindowlessControl9)pRenderer;
            windowlessCtrl.SetVideoClippingWindow(hWin.Handle);
            IntPtr lpDib;

            windowlessCtrl.GetCurrentImage(out lpDib);
            BitmapInfoHeader head;

            head = (BitmapInfoHeader)Marshal.PtrToStructure(lpDib, typeof(BitmapInfoHeader));
            int         width       = head.Width;
            int         height      = head.Height;
            int         stride      = width * (head.BitCount / 8);
            PixelFormat pixelFormat = PixelFormat.Format24bppRgb;

            switch (head.BitCount)
            {
            case 24: pixelFormat = PixelFormat.Format24bppRgb; break;

            case 32: pixelFormat = PixelFormat.Format32bppRgb; break;

            case 48: pixelFormat = PixelFormat.Format48bppRgb; break;

            default: throw new Exception("Unknown BitCount");
            }

            Bitmap Cap = new Bitmap(width, height, stride, pixelFormat, lpDib);

            Cap.RotateFlip(RotateFlipType.RotateNoneFlipY);
            pictureBox1.Image = Cap;
        }
Example #14
    /// <summary>
    /// Our chance to allocate any storage we may need
    /// </summary>
    /// <returns>Returns always S_OK</returns>
    protected override int InternalAllocateStreamingResources()
    {
      // Reinitialize variables
      for (int i = 0; i < InputPinCount; i++)
      {
        InternalDiscontinuity(i);
      }

      for (int i = 0; i < InputPinCount; i++)
      {
        AMMediaType mediaType = InputType(i);
        VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(mediaType.formatPtr, videoInfoHeader);
        this.inputStreams[i].StreamWidth = videoInfoHeader.BmiHeader.Width;
        this.inputStreams[i].StreamHeight = videoInfoHeader.BmiHeader.Height;
        this.inputStreams[i].StreamBBP = videoInfoHeader.BmiHeader.BitCount / 8;
        this.inputStreams[i].StreamStride = videoInfoHeader.BmiHeader.Width * this.inputStreams[i].StreamBBP;
        this.inputStreams[i].BufferTimeStamp = 0;
      }

      AMMediaType outputMediaType = OutputType(0);
      VideoInfoHeader outputVideoInfoHeader = new VideoInfoHeader();
      Marshal.PtrToStructure(outputMediaType.formatPtr, outputVideoInfoHeader);
      this.outputStream.StreamWidth = outputVideoInfoHeader.BmiHeader.Width;
      this.outputStream.StreamHeight = outputVideoInfoHeader.BmiHeader.Height;
      this.outputStream.StreamBBP = outputVideoInfoHeader.BmiHeader.BitCount / 8;
      this.outputStream.StreamStride = outputVideoInfoHeader.BmiHeader.Width * this.outputStream.StreamBBP;
      this.outputStream.BufferTimeStamp = 0;

      return SOK;
    }
Example #15
        // Save the size parameters for use in SnapShot
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();

            hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            try
            {

                if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
                {
                    throw new NotSupportedException("Unknown Grabber Media Format");
                }

                // Get the struct
                VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
                Marshal.PtrToStructure(media.formatPtr, videoInfoHeader);

                // Grab the size info
                m_videoWidth = videoInfoHeader.BmiHeader.Width;
                m_videoHeight = videoInfoHeader.BmiHeader.Height;
                m_stride = videoInfoHeader.BmiHeader.ImageSize / m_videoHeight;
                m_ImageSize = videoInfoHeader.BmiHeader.ImageSize;
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
        }
Example #16
        // Set the Framerate, and video size
        private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iFrameRate, int iWidth, int iHeight)
        {
            int hr;
            object o;
            AMMediaType media;

            // Find the stream config interface
            hr = capGraph.FindInterface(
                PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o );

            IAMStreamConfig videoStreamConfig = o as IAMStreamConfig;
            if (videoStreamConfig == null)
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }

            // Get the existing format block
            hr = videoStreamConfig.GetFormat( out media);
            DsError.ThrowExceptionForHR( hr );

            // copy out the videoinfoheader
            VideoInfoHeader v = new VideoInfoHeader();
            Marshal.PtrToStructure( media.formatPtr, v );

            // if overriding the framerate, set the frame rate
            if (iFrameRate > 0)
            {
                v.AvgTimePerFrame = 10000000 / iFrameRate;
            }

            // if overriding the width, set the width
            if (iWidth > 0)
            {
                v.BmiHeader.Width = iWidth;
            }

            // if overriding the Height, set the Height
            if (iHeight > 0)
            {
                v.BmiHeader.Height = iHeight;
            }

            // Copy the media structure back
            Marshal.StructureToPtr( v, media.formatPtr, false );

            // Set the new format
            hr = videoStreamConfig.SetFormat( media );
            DsError.ThrowExceptionForHR( hr );

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
Example #17
        /// <summary>
        /// Checks whether an AMMediaType's resolution matches the desired resolution.
        /// </summary>
        /// <param name="media_type">Media type to analyze.</param>
        /// <param name="resolution_desired">Desired resolution. Can be null or have 0 for height or width if it's not important.</param>
        private static bool IsResolutionAppropiate(AMMediaType media_type, Resolution resolution_desired)
        {
            // if we were asked to choose resolution
            if (resolution_desired == null)
                return true;

            VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
            Marshal.PtrToStructure(media_type.formatPtr, videoInfoHeader);

            if (resolution_desired.Width > 0 &&
                videoInfoHeader.BmiHeader.Width != resolution_desired.Width)
            {
                return false;
            }
            if (resolution_desired.Height > 0 &&
                videoInfoHeader.BmiHeader.Height != resolution_desired.Height)
            {
                return false;
            }

            return true;
        }
        /// <summary>
        /// Sets the capture parameters for the video capture device
        /// </summary>
        private bool SetVideoCaptureParameters(ICaptureGraphBuilder2 capGraph, IBaseFilter captureFilter, Guid mediaSubType)
        {
            /* The stream config interface */
            object streamConfig;

            /* Get the stream's configuration interface */
            int hr = capGraph.FindInterface(PinCategory.Capture,
                                            MediaType.Video,
                                            captureFilter,
                                            typeof(IAMStreamConfig).GUID,
                                            out streamConfig);

            DsError.ThrowExceptionForHR(hr);

            var videoStreamConfig = streamConfig as IAMStreamConfig;

            /* If QueryInterface fails... */
            if (videoStreamConfig == null)
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }

            /* The media type of the video */
            AMMediaType media;

            /* Get the AMMediaType for the video out pin */
            hr = videoStreamConfig.GetFormat(out media);
            DsError.ThrowExceptionForHR(hr);

            /* Make the VIDEOINFOHEADER 'readable' */
            var videoInfo = new VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, videoInfo);

            /* Setup the VIDEOINFOHEADER with the parameters we want */
            videoInfo.AvgTimePerFrame = DSHOW_ONE_SECOND_UNIT / FPS;
            videoInfo.BmiHeader.Width = DesiredWidth;
            videoInfo.BmiHeader.Height = DesiredHeight;

            if (mediaSubType != Guid.Empty)
            {
                int fourCC = 0;
                byte[] b = mediaSubType.ToByteArray();
                fourCC = b[0];
                fourCC |= b[1] << 8;
                fourCC |= b[2] << 16;
                fourCC |= b[3] << 24;

                videoInfo.BmiHeader.Compression = fourCC;
                media.subType = mediaSubType;
            }

            /* Copy the data back to unmanaged memory */
            Marshal.StructureToPtr(videoInfo, media.formatPtr, false);

            /* Set the format */
            hr = videoStreamConfig.SetFormat(media);

            /* We don't want any memory leaks, do we? */
            DsUtils.FreeAMMediaType(media);

            if (hr < 0)
                return false;

            return true;
        }
Example #19
        /// <summary>
        /// Gets the resolution stored in an AMMediaType's VIDEOINFOHEADER.
        /// </summary>
        /// <param name="media_type">Media type to analyze.</param>
        private static Resolution GetResolutionForMediaType(AMMediaType media_type)
        {
            VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
            Marshal.PtrToStructure(media_type.formatPtr, videoInfoHeader);

            return new Resolution(videoInfoHeader.BmiHeader.Width, videoInfoHeader.BmiHeader.Height);
        }
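A sketch of how the helper might be used (hypothetical, not from the example source): it assumes an IAMStreamConfig obtained as in the other examples, a using directive for System.Collections.Generic, and the example's own Resolution type. It enumerates the pin's advertised capabilities and collects their resolutions.

        private static List<Resolution> ListSupportedResolutions(IAMStreamConfig videoStreamConfig)
        {
            var resolutions = new List<Resolution>();

            int count, size;
            int hr = videoStreamConfig.GetNumberOfCapabilities(out count, out size);
            DsError.ThrowExceptionForHR(hr);

            IntPtr caps = Marshal.AllocCoTaskMem(size);
            try
            {
                for (int i = 0; i < count; i++)
                {
                    AMMediaType mediaType;
                    hr = videoStreamConfig.GetStreamCaps(i, out mediaType, caps);
                    DsError.ThrowExceptionForHR(hr);

                    // Only VIDEOINFOHEADER formats can be read by GetResolutionForMediaType
                    if (mediaType.formatType == FormatType.VideoInfo && mediaType.formatPtr != IntPtr.Zero)
                    {
                        resolutions.Add(GetResolutionForMediaType(mediaType));
                    }

                    DsUtils.FreeAMMediaType(mediaType);
                }
            }
            finally
            {
                Marshal.FreeCoTaskMem(caps);
            }

            return resolutions;
        }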
Example #20
        private void BuildGraph(DirectShowLib.DsDevice dsDevice)
        {
            int hr = 0;
            pGraph = new FilterGraph() as IFilterGraph2;

            //graph builder
            ICaptureGraphBuilder2 pBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

            try
            {
                hr = pBuilder.SetFiltergraph(pGraph);
                DsError.ThrowExceptionForHR(hr);

                // Add camera
                IBaseFilter camera;
                //hr = pGraph.FindFilterByName(dsDevice.Name, out camera);
                hr = ((IFilterGraph2)pGraph).AddSourceFilterForMoniker(dsDevice.Mon, null, dsDevice.Name, out camera);
                DsError.ThrowExceptionForHR(hr);

                hr = pGraph.AddFilter(camera, "camera");
                DsError.ThrowExceptionForHR(hr);

                // Set format for camera
                AMMediaType pmt = new AMMediaType();
                pmt.majorType = MediaType.Video;
                pmt.subType = MediaSubType.YUY2;
                pmt.formatType = FormatType.VideoInfo;
                pmt.fixedSizeSamples = true;
                pmt.formatSize = 88;
                pmt.sampleSize = 829440;
                pmt.temporalCompression = false;
                VideoInfoHeader format = new VideoInfoHeader();
                format.SrcRect = new DsRect();
                format.TargetRect = new DsRect();
                format.BitRate = 20736000;
                format.AvgTimePerFrame = 400000;
                format.BmiHeader = new BitmapInfoHeader();
                format.BmiHeader.Size = 40;
                format.BmiHeader.Width = 720;
                format.BmiHeader.Height = 576;
                format.BmiHeader.Planes = 1;
                format.BmiHeader.BitCount = 24;
                format.BmiHeader.Compression = 844715353;
                format.BmiHeader.ImageSize = 829440;    // 720 x 576 x 2 bytes (YUY2), matching pmt.sampleSize
                pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(format));
                Marshal.StructureToPtr(format, pmt.formatPtr, false);
                hr = ((IAMStreamConfig)DsFindPin.ByCategory(camera, PinCategory.Capture, 0)).SetFormat(pmt);
                //hr = ((IAMStreamConfig)GetPin(pUSB20Camera, "Capture")).SetFormat(pmt);
                DsUtils.FreeAMMediaType(pmt);
                DsError.ThrowExceptionForHR(hr);

                IAMCrossbar crossBar = null;
                object dummy;
                hr = pBuilder.FindInterface(PinCategory.Capture, MediaType.Video, camera, typeof(IAMCrossbar).GUID, out dummy);
                if( hr >=0)
                {
                    crossBar = (IAMCrossbar)dummy;
                    int oPin, iPin;
                    int ovLink, ivLink;
                    ovLink = ivLink = 0;
                    crossBar.get_PinCounts(out oPin, out iPin);
                    int pIdxRel;
                    PhysicalConnectorType physicalConType;
                    for (int i = 0; i < iPin; i++)
                    {
                        crossBar.get_CrossbarPinInfo(true, i, out pIdxRel, out physicalConType);
                        if (physicalConType == PhysicalConnectorType.Video_Composite)
                            ivLink = i;
                    }
                    for (int i = 0; i < oPin; i++)
                    {
                        crossBar.get_CrossbarPinInfo(false, i, out pIdxRel, out physicalConType);
                        if (physicalConType == PhysicalConnectorType.Video_VideoDecoder)
                            ovLink = i;
                    }

                    try
                    {
                        crossBar.Route(ovLink, ivLink);
                    }
                    catch
                    {

                        throw new Exception("Failed to get IAMCrossbar");
                    }
                }

                //add AVI Decompressor
                IBaseFilter pAVIDecompressor = (IBaseFilter)new AVIDec();
                hr = pGraph.AddFilter(pAVIDecompressor, "AVI Decompressor");

                //add color space converter
                IBaseFilter pColorSpaceConverter = (IBaseFilter)new Colour();
                hr = pGraph.AddFilter(pColorSpaceConverter, "Color space converter");
                DsError.ThrowExceptionForHR(hr);

                // Connect camera and AVI Decomp
                hr = pGraph.ConnectDirect(DsFindPin.ByCategory(camera, PinCategory.Capture, 0), DsFindPin.ByName(pAVIDecompressor, "XForm In"), null);
                DsError.ThrowExceptionForHR(hr);

                // Connect AVI Decomp and color space converter
                hr = pGraph.ConnectDirect(DsFindPin.ByName(pAVIDecompressor, "XForm Out"), DsFindPin.ByName(pColorSpaceConverter, "Input"), null);
                DsError.ThrowExceptionForHR(hr);

                //add SampleGrabber
                //IBaseFilter pSampleGrabber = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(CLSID_SampleGrabber));
                //hr = pGraph.AddFilter(pSampleGrabber, "SampleGrabber");
                IBaseFilter sampleGrabber = new SampleGrabber() as IBaseFilter;
                hr = pGraph.AddFilter(sampleGrabber, "Sample grabber");
                DsError.ThrowExceptionForHR(hr);

                // Configure the samplegrabber
                AMMediaType pSampleGrabber_pmt = new AMMediaType();
                pSampleGrabber_pmt.majorType = MediaType.Video;
                pSampleGrabber_pmt.subType = MediaSubType.ARGB32;
                pSampleGrabber_pmt.formatType = FormatType.VideoInfo;
                pSampleGrabber_pmt.fixedSizeSamples = true;
                pSampleGrabber_pmt.formatSize = 88;
                pSampleGrabber_pmt.sampleSize = 1658880;
                pSampleGrabber_pmt.temporalCompression = false;
                VideoInfoHeader pSampleGrabber_format = new VideoInfoHeader();
                pSampleGrabber_format.SrcRect = new DsRect();
                pSampleGrabber_format.SrcRect.right = 720;
                pSampleGrabber_format.SrcRect.bottom = 576;
                pSampleGrabber_format.TargetRect = new DsRect();
                pSampleGrabber_format.TargetRect.right = 720;
                pSampleGrabber_format.TargetRect.bottom = 576;
                pSampleGrabber_format.BitRate = 331776000;
                pSampleGrabber_format.AvgTimePerFrame = 400000;
                pSampleGrabber_format.BmiHeader = new BitmapInfoHeader();
                pSampleGrabber_format.BmiHeader.Size = 40;
                pSampleGrabber_format.BmiHeader.Width = 720;
                pSampleGrabber_format.BmiHeader.Height = 576;
                pSampleGrabber_format.BmiHeader.Planes = 1;
                pSampleGrabber_format.BmiHeader.BitCount = 32;
                pSampleGrabber_format.BmiHeader.ImageSize = 1658880;

                pSampleGrabber_pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(pSampleGrabber_format));
                Marshal.StructureToPtr(pSampleGrabber_format, pSampleGrabber_pmt.formatPtr, false);
                hr = ((ISampleGrabber)sampleGrabber).SetMediaType(pSampleGrabber_pmt);
                DsError.ThrowExceptionForHR(hr);

                //connect MJPG dec and SampleGrabber
                //hr = pGraph.ConnectDirect(GetPin(pMJPGDecompressor, "XForm Out"), GetPin(pSampleGrabber, "Input"), null);
                hr = pGraph.ConnectDirect(DsFindPin.ByName(pColorSpaceConverter, "XForm Out"), DsFindPin.ByName(sampleGrabber, "Input"), null);
                DsError.ThrowExceptionForHR(hr);

                //set callback
                hr = ((ISampleGrabber)sampleGrabber).SetCallback(this, 1);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                // Clean this mess up!
            }
        }
Example #21
    /// <summary>
    /// Set the Framerate, and video size
    /// </summary>
    /// <param name="videoStreamConfig">The <see cref="IAMStreamConfig"/> of the capture device.</param>
    /// <param name="frameRate">The new framerate to be used.</param>
    /// <param name="width">The new video width to be used.</param>
    /// <param name="height">The new video height to be used.</param>
    private void SetConfigParms(
      IAMStreamConfig videoStreamConfig,
      int frameRate,
      int width,
      int height)
    {
      int hr;
      AMMediaType media = null;

      if (videoStreamConfig == null)
      {
        throw new ArgumentNullException("Error in DXCapture.SetConfigParams(). Failed to get IAMStreamConfig");
      }

      // Get the existing format block
      hr = videoStreamConfig.GetFormat(out media);
      DsError.ThrowExceptionForHR(hr);

      // copy out the videoinfoheader
      var v = new VideoInfoHeader();
      Marshal.PtrToStructure(media.formatPtr, v);

      // if overriding set values
      if (frameRate > 0)
        v.AvgTimePerFrame = 10000000 / frameRate;

      if (width > 0)
        v.BmiHeader.Width = width;

      if (height > 0)
        v.BmiHeader.Height = height;

      // Copy the media structure back
      Marshal.StructureToPtr(v, media.formatPtr, true);

      // Set the new format
      if (videoStreamConfig != null)
      {
        hr = videoStreamConfig.SetFormat(media);
        DsError.ThrowExceptionForHR(hr);
      }

      DsUtils.FreeAMMediaType(media);
      media = null;
    }
Example #22
        /// <summary> Read and store the properties </summary>
        private void SaveSizeInfo(ISampleGrabber sampGrabber)
        {
            int hr;

            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();
            hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            // Grab the size info
            VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, videoInfoHeader);

            //VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
            videoInfoHeader.BmiHeader.Width = 320;
            videoInfoHeader.BmiHeader.Height = 240;
            videoWidth = videoInfoHeader.BmiHeader.Width;
            videoHeight = videoInfoHeader.BmiHeader.Height;
            fStride = videoWidth * (videoInfoHeader.BmiHeader.BitCount / 8);
            fPixelArray = new byte[fStride * videoHeight];

            // Copy the media structure back
            Marshal.StructureToPtr(videoInfoHeader, media.formatPtr, false);
            hr = sampleGrabber.SetMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
        /// <summary>
        /// Initializes the resolution.
        /// </summary>
        /// <param name="capGraph">The cap graph.</param>
        /// <param name="capFilter">The cap filter.</param>
        void InitializeResolution(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter)
        {
            AMMediaType mediaType = null;
            IAMStreamConfig videoStreamConfig = null;
            IntPtr ptr;
            int iCount = 0;
            int iSize = 0;
            int maxWidth = 0;
            int maxHeight = 0;
            int streamID = 0;
            object obj;

            capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out obj);
            videoStreamConfig = obj as IAMStreamConfig;
            videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize);
            ptr = Marshal.AllocCoTaskMem(iSize);

            for (int i = 0; i < iCount; i++)
            {
                videoStreamConfig.GetStreamCaps(i, out mediaType, ptr);
                VideoInfoHeader videoInfo = new VideoInfoHeader();
                Marshal.PtrToStructure(mediaType.formatPtr, videoInfo);
                if (videoInfo.BmiHeader.Width > maxWidth && videoInfo.BmiHeader.Height > maxHeight)
                {
                    streamID = i;
                    maxWidth = videoInfo.BmiHeader.Width;
                    maxHeight = videoInfo.BmiHeader.Height;
                }
            }

            videoStreamConfig.GetStreamCaps(streamID, out mediaType, ptr);
            int hr = videoStreamConfig.SetFormat(mediaType);
            Marshal.FreeCoTaskMem(ptr);

            DsError.ThrowExceptionForHR(hr);
            DsUtils.FreeAMMediaType(mediaType);
            mediaType = null;
        }
Example #24
        public Size GetCaptureScreenSize()
        {
            AMMediaType mt;
            var hr = AMStreamConfig.GetFormat(out mt);

            DsError.ThrowExceptionForHR(hr);
            // copy out the videoinfoheader
            VideoInfoHeader v = new VideoInfoHeader();
            Marshal.PtrToStructure(mt.formatPtr, v);

            var size = new Size(v.BmiHeader.Width, v.BmiHeader.Height);

            // Free the unmanaged format block returned by GetFormat
            DsUtils.FreeAMMediaType(mt);

            return size;
        }
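A companion sketch (hypothetical, reusing the example's own AMStreamConfig member): the same GetFormat call also exposes the frame rate, since VideoInfoHeader.AvgTimePerFrame is expressed in 100-nanosecond units.

        public double GetCaptureFrameRate()
        {
            AMMediaType mt;
            var hr = AMStreamConfig.GetFormat(out mt);
            DsError.ThrowExceptionForHR(hr);

            var v = new VideoInfoHeader();
            Marshal.PtrToStructure(mt.formatPtr, v);

            // AvgTimePerFrame is in 100 ns units, so 10,000,000 / AvgTimePerFrame yields frames per second
            double fps = v.AvgTimePerFrame > 0 ? 10000000.0 / v.AvgTimePerFrame : 0.0;

            DsUtils.FreeAMMediaType(mt);
            return fps;
        }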
Example #25
    /// <summary>
    /// Set the Framerate, and video size
    /// </summary>
    /// <param name="capGraph">The <see cref="ICaptureGraphBuilder2"/> interface.</param>
    /// <param name="capFilter">The <see cref="IBaseFilter"/> of the capture device.</param>
    /// <param name="frameRate">The new framerate to be used.</param>
    /// <param name="width">The new video width to be used.</param>
    /// <param name="height">The new video height to be used.</param>
    private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int frameRate, int width,
                                int height)
    {
      int hr;
      object o;
      AMMediaType media = null;

      // Find the stream config interface
      hr = this.capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter,
                                       typeof(IAMStreamConfig).GUID, out o);

      videoControl = capFilter as IAMVideoControl;
      videoStreamConfig = o as IAMStreamConfig;

      //if (videoStreamConfig == null)
      //    ErrorLogger.WriteLine("Error in Capture.SetConfigParams(). Failed to get IAMStreamConfig");

      // Get the existing format block
      if (videoStreamConfig != null) hr = videoStreamConfig.GetFormat(out media);

      //if (hr != 0)
      //    ErrorLogger.WriteLine("Could not SetConfigParms in Camera.Capture. Message: " + DsError.GetErrorText(hr));

      // copy out the videoinfoheader
      var v = new VideoInfoHeader();
      Marshal.PtrToStructure(media.formatPtr, v);

      // if overriding set values
      if (frameRate > 0)
      {
        v.AvgTimePerFrame = 10000000 / frameRate;
        this.fps = frameRate;
      }
      else
      {
        this.fps = (int)(10000000 / v.AvgTimePerFrame);
      }

      if (width > 0)
        v.BmiHeader.Width = width;

      if (height > 0)
        v.BmiHeader.Height = height;

      // Copy the media structure back
      Marshal.StructureToPtr(v, media.formatPtr, true);

      // Set the new format
      if (videoStreamConfig != null) hr = videoStreamConfig.SetFormat(media);
      //if (hr != 0)
      //    ErrorLogger.WriteLine(
      //        "Error while setting new camera format (videoStreamConfig) in Camera.Capture. Message: " +
      //        DsError.GetErrorText(hr));

      DsUtils.FreeAMMediaType(media);
      media = null;
    }
Example #26
        /// <summary>
        /// Gets the bit count for a media type.
        /// </summary>
        /// <param name="media_type">Media type to analyze.</param>
        private static short GetBitCountForMediaType(AMMediaType media_type)
        {

            VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
            Marshal.PtrToStructure(media_type.formatPtr, videoInfoHeader);

            return videoInfoHeader.BmiHeader.BitCount;
        }
Example #27
        private void OpenVideoFile()
        {
            AMMediaType mediaType = null;

            try
            {
                EnsureMediaDet();

                // retrieve some measurements from the video
                m_MediaDet.get_FrameRate(out m_FrameRate);

                mediaType = new AMMediaType();
                m_MediaDet.get_StreamMediaType(mediaType);
                m_VideoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));
                DsUtils.FreeAMMediaType(mediaType);
                mediaType = null;

                ImageWidth = m_VideoInfo.BmiHeader.Width;
                ImageHeight = m_VideoInfo.BmiHeader.Height;

                m_MediaDet.get_StreamLength(out m_MediaLength);
                m_FrameCount = (int)(m_FrameRate * m_MediaLength);

                m_MediaDet.GetBitmapBits(0, out m_BufferSize, IntPtr.Zero, ImageWidth, ImageHeight);
                m_BufferPtr = Marshal.AllocHGlobal(m_BufferSize);
            }
            catch (Exception ex)
            {
                if (mediaType != null)
                    DsUtils.FreeAMMediaType(mediaType);

                throw;
            }
        }
        int ISampleGrabberCB.SampleCB(double sampleTime, IMediaSample pSample)
        {
            var mediaType = new AMMediaType();

            /* We query for the media type the sample grabber is using */
            int hr = m_sampleGrabber.GetConnectedMediaType(mediaType);

            var videoInfo = new VideoInfoHeader();

            /* 'Cast' the pointer to our managed struct */
            Marshal.PtrToStructure(mediaType.formatPtr, videoInfo);

            /* The stride is "How many bytes across for each pixel line (0 to width)" */
            int stride = Math.Abs(videoInfo.BmiHeader.Width * (videoInfo.BmiHeader.BitCount / 8 /* eight bits per byte */));
            int width = videoInfo.BmiHeader.Width;
            int height = videoInfo.BmiHeader.Height;

            if (m_videoFrame == null)
                InitializeBitmapFrame(width, height);

            if (m_videoFrame == null)
                return 0;

            BitmapData bmpData = m_videoFrame.LockBits(new Rectangle(0, 0, width, height),
                                                       ImageLockMode.ReadWrite,
                                                       PixelFormat.Format24bppRgb);

            /* Get the pointer to the pixels */
            IntPtr pBmp = bmpData.Scan0;

            IntPtr samplePtr;

            /* Get the native pointer to the sample */
            pSample.GetPointer(out samplePtr);

            int pSize = stride * height;

            /* Copy the memory from the sample pointer to our bitmap pixel pointer */
            CopyMemory(pBmp, samplePtr, pSize);

            m_videoFrame.UnlockBits(bmpData);

            InvokeNewVideoSample(new VideoSampleArgs { VideoFrame = m_videoFrame });

            DsUtils.FreeAMMediaType(mediaType);

            /* Dereference the sample COM object */
            Marshal.ReleaseComObject(pSample);
            return 0;
        }
Example #29
        /// <summary>
        /// Applies the configured frame rate, width, and height to the capture filter's output pin.
        /// </summary>
        /// <param name="captureGraph">The capture graph builder used to locate the stream config interface.</param>
        /// <param name="captureFilter">The capture filter to configure.</param>
        private void InitConfigParams(ICaptureGraphBuilder2 captureGraph, IBaseFilter captureFilter)
        {
            object obj;
            AMMediaType media;

            int hr = captureGraph.FindInterface(PinCategory.Capture, MediaType.Video, captureFilter, typeof(IAMStreamConfig).GUID, out obj);
            IAMStreamConfig videoStreamConfig = obj as IAMStreamConfig;
            if (videoStreamConfig == null)
            {
                throw new Exception("Failed to get IAMStreamConfig");
            }

            // Get the existing format block
            hr = videoStreamConfig.GetFormat(out media);
            DsError.ThrowExceptionForHR(hr);

            // copy out the videoinfoheader
            VideoInfoHeader infoHeader = new VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, infoHeader);

            if (mFrameRate > 0)
            {
                infoHeader.AvgTimePerFrame = 10000000 / mFrameRate;
            }

            if (mWidth > 0)
            {
                infoHeader.BmiHeader.Width = mWidth;
            }

            if (mHeight > 0)
            {
                infoHeader.BmiHeader.Height = mHeight;
            }

            // Copy the media structure back
            Marshal.StructureToPtr(infoHeader, media.formatPtr, false);

            // Set the new format
            hr = videoStreamConfig.SetFormat(media);
            DsError.ThrowExceptionForHR(hr);

            DsUtils.FreeAMMediaType(media);
            media = null;
        }
Example #30
        private void SetConfigParms(ICaptureGraphBuilder2 capBuilder, IBaseFilter capFilter, VideoFormatHelper.SupportedVideoFormat selectedFormat, ref float iFrameRate, ref int iWidth, ref int iHeight)
        {
            object o;
            AMMediaType media;
            IAMStreamConfig videoStreamConfig;
            IAMVideoControl videoControl = capFilter as IAMVideoControl;

            int hr = capBuilder.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);

            videoStreamConfig = o as IAMStreamConfig;
            try
            {
                if (videoStreamConfig == null)
                {
                    throw new Exception("Failed to get IAMStreamConfig");
                }

                int iCount = 0, iSize = 0;
                hr = videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize);
                DsError.ThrowExceptionForHR(hr);

                VideoInfoHeader vMatching = null;
                VideoFormatHelper.SupportedVideoFormat entry = null;

                IntPtr taskMemPointer = Marshal.AllocCoTaskMem(iSize);

                AMMediaType pmtConfig = null;
                for (int iFormat = 0; iFormat < iCount; iFormat++)
                {
                    IntPtr ptr = IntPtr.Zero;

                    hr = videoStreamConfig.GetStreamCaps(iFormat, out pmtConfig, taskMemPointer);
                    DsError.ThrowExceptionForHR(hr);

                    vMatching = (VideoInfoHeader)Marshal.PtrToStructure(pmtConfig.formatPtr, typeof(VideoInfoHeader));

                    if (vMatching.BmiHeader.BitCount > 0)
                    {
                        entry = new VideoFormatHelper.SupportedVideoFormat()
                        {
                            Width = vMatching.BmiHeader.Width,
                            Height = vMatching.BmiHeader.Height,
                            BitCount = vMatching.BmiHeader.BitCount,
                            FrameRate = 10000000.0 / vMatching.AvgTimePerFrame
                        };

                        if (entry.Matches(selectedFormat))
                        {
                            // WE FOUND IT !!!
                            break;
                        }
                    }

                    vMatching = null;
                }

                if (vMatching != null)
                {
                    hr = videoStreamConfig.SetFormat(pmtConfig);
                    DsError.ThrowExceptionForHR(hr);

                    iFrameRate = 10000000/vMatching.AvgTimePerFrame;
                    iWidth = vMatching.BmiHeader.Width;
                    iHeight = vMatching.BmiHeader.Height;
                }
                else
                {
                    hr = videoStreamConfig.GetFormat(out media);
                    DsError.ThrowExceptionForHR(hr);

                    // Copy out the videoinfoheader
                    VideoInfoHeader v = new VideoInfoHeader();
                    Marshal.PtrToStructure(media.formatPtr, v);

                    if (selectedFormat != null && iWidth == 0 && iHeight == 0)
                    {
                        // Use the config from the selected format
                        iWidth = selectedFormat.Width;
                        iHeight = selectedFormat.Height;
                        iFrameRate = (float) selectedFormat.FrameRate;
                    }

                    // If overriding the framerate, set the frame rate
                    if (iFrameRate > 0)
                    {
                        int newAvgTimePerFrame = (int)Math.Round(10000000 / iFrameRate);
                        Trace.WriteLine(string.Format("Overwriting VideoInfoHeader.AvgTimePerFrame from {0} to {1}", v.AvgTimePerFrame, newAvgTimePerFrame));
                        v.AvgTimePerFrame = newAvgTimePerFrame;
                    }
                    else
                        iFrameRate = 10000000 / v.AvgTimePerFrame;

                    // If overriding the width, set the width
                    if (iWidth > 0)
                    {
                        Trace.WriteLine(string.Format("Overwriting VideoInfoHeader.BmiHeader.Width from {0} to {1}", v.BmiHeader.Width, iWidth));
                        v.BmiHeader.Width = iWidth;
                    }
                    else
                        iWidth = v.BmiHeader.Width;

                    // If overriding the Height, set the Height
                    if (iHeight > 0)
                    {
                        Trace.WriteLine(string.Format("Overwriting VideoInfoHeader.BmiHeader.Height from {0} to {1}", v.BmiHeader.Height, iHeight));
                        v.BmiHeader.Height = iHeight;
                    }
                    else
                        iHeight = v.BmiHeader.Height;

                    // Copy the media structure back
                    Marshal.StructureToPtr(v, media.formatPtr, false);

                    // Set the new format
                    hr = videoStreamConfig.SetFormat(media);
                    try
                    {
                        DsError.ThrowExceptionForHR(hr);
                    }
                    catch (Exception ex)
                    {
                        // If setting the format failed then log the error but try to continue
                        Trace.WriteLine(ex.GetFullStackTrace());
                    }

                    DsUtils.FreeAMMediaType(media);
                    media = null;
                }

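                // Free the caps buffer and release the selected media type, if any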
                Marshal.FreeCoTaskMem(taskMemPointer);
                DsUtils.FreeAMMediaType(pmtConfig);
                pmtConfig = null;

                // Fix upsidedown video
                if (videoControl != null)
                {
                    // NOTE: Flipping detection and fixing doesn't seem to work!

                    //IPin pPin = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
                    //VideoFormatHelper.FixFlippedVideo(videoControl, pPin);

                    //pPin = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
                    //VideoFormatHelper.FixFlippedVideo(videoControl, pPin);
                }
            }
            finally
            {
                if (videoStreamConfig != null)
                {
                    Marshal.ReleaseComObject(videoStreamConfig);
                }
            }
        }
        /**
         * Releases all of the graph objects, as needed.
         */
        private void CleanupGraphiObjects()
        {
            this.m_video_info = null;
            if (m_FilterGraph != null)
            {
                Marshal.ReleaseComObject(m_FilterGraph);
                m_FilterGraph = null;
            }

            if (m_VidControl != null)
            {
                Marshal.ReleaseComObject(m_VidControl);
                m_VidControl = null;
            }

            if (m_pinStill != null)
            {
                Marshal.ReleaseComObject(m_pinStill);
                m_pinStill = null;
            }
        }
        // Set the Framerate, and video size
        private void SetConfigParms(IPin pStill, int iWidth, int iHeight, short iBPP)
        {
            int hr;
            AMMediaType media;
            VideoInfoHeader v;

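            // The still/capture pin is expected to expose IAMStreamConfig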
            IAMStreamConfig videoStreamConfig = pStill as IAMStreamConfig;

            // Get the existing format block
            hr = videoStreamConfig.GetFormat(out media);
            DsError.ThrowExceptionForHR(hr);

            try
            {
                // copy out the videoinfoheader
                v = new VideoInfoHeader();
                Marshal.PtrToStructure(media.formatPtr, v);

                // if overriding the width, set the width
                if (iWidth > 0)
                {
                    v.BmiHeader.Width = iWidth;
                }

                // if overriding the Height, set the Height
                if (iHeight > 0)
                {
                    v.BmiHeader.Height = iHeight;
                }

                // if overriding the bits per pixel
                if (iBPP > 0)
                {
                    v.BmiHeader.BitCount = iBPP;
                }

                // Copy the media structure back
                Marshal.StructureToPtr(v, media.formatPtr, false);

                // Set the new format
                hr = videoStreamConfig.SetFormat(media);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                DsUtils.FreeAMMediaType(media);
                media = null;
            }
        }
        private void upateVideoInfo(ISampleGrabber sampGrabber)
        {
            // Get the media type from the SampleGrabber
            AMMediaType media = new AMMediaType();

            int hr;
            hr = sampGrabber.GetConnectedMediaType(media);
            DsError.ThrowExceptionForHR(hr);

            if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
            {
                throw new NotSupportedException("Unknown Grabber Media Format");
            }

            // Grab the size info
            this.m_video_info = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
            // After FreeAMMediaType, some fields (including pUnk) are invalidated.
            DsUtils.FreeAMMediaType(media);
            this._capture_mediatype = media;
        }
        void InitResolution(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int targetWidth, int targetHeight)
        {
            object o;
            capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);

            AMMediaType media = null;
            IAMStreamConfig videoStreamConfig = o as IAMStreamConfig;
            IntPtr ptr;
            int iC = 0, iS = 0;

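            // Walk the reported capabilities and remember the index of the one closest to the target resolution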
            videoStreamConfig.GetNumberOfCapabilities(out iC, out iS);
            ptr = Marshal.AllocCoTaskMem(iS);
            int bestDWidth = 999999;
            int bestDHeight = 999999;
            int streamID = 0;
            for (int i = 0; i < iC; i++)
            {
                videoStreamConfig.GetStreamCaps(i, out media, ptr);
                VideoInfoHeader v;
                v = new VideoInfoHeader();
                Marshal.PtrToStructure(media.formatPtr, v);
                int dW = Math.Abs(targetWidth - v.BmiHeader.Width);
                int dH = Math.Abs(targetHeight - v.BmiHeader.Height);
                if (dW < bestDWidth && dH < bestDHeight)
                {
                    streamID = i;
                    bestDWidth = dW;
                    bestDHeight = dH;
                }
                // Free this capability's media type before fetching the next one
                DsUtils.FreeAMMediaType(media);
            }

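            // Re-query the chosen capability and set it as the pin format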
            videoStreamConfig.GetStreamCaps(streamID, out media, ptr);
            int hr = videoStreamConfig.SetFormat(media);
            Marshal.FreeCoTaskMem(ptr);

            DsError.ThrowExceptionForHR(hr);
            DsUtils.FreeAMMediaType(media);
            media = null;
        }
        DSStreamResultCodes InitWithVideoFile(WTVStreamingVideoRequest strq)
        {
            UsingSBEFilter = false;  // Not using stream buffer

            // Init variables
            IPin[] pin = new IPin[1];
            string dPin = string.Empty;
            string sName = string.Empty;
            string dName = string.Empty;
            string sPin = string.Empty;
            FileInfo fiInputFile = new FileInfo(strq.FileName);
            if (
                (fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) ||
                (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
               ) return DSStreamResultCodes.ErrorInvalidFileType;

            int hr = 0;
            try
            {
                // Get the graphbuilder interface
                SendDebugMessage("Creating Graph Object", 0);
                IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

                // Create an ASF writer filter
                SendDebugMessage("Creating ASF Writer", 0);
                WMAsfWriter asf_filter = new WMAsfWriter();
                dc.Add(asf_filter); // CHECK FOR ERRORS
                currentOutputFilter = (IBaseFilter)asf_filter;  // class variable
                // Add the ASF filter to the graph
                hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
                DsError.ThrowExceptionForHR(hr);

                // Set the filename
                SendDebugMessage("Setting filename", 0);
                IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
                string destPathFN = fiInputFile.FullName + ".wmv";
                hr = sinkFilter.SetFileName(destPathFN, null);
                DsError.ThrowExceptionForHR(hr);

                // Handy to have an ACM Wrapper filter hanging around for AVI files with MP3 audio
                SendDebugMessage("Adding ACM Wrapper", 0);
                IBaseFilter ACMFilter =  FilterDefinition.AddToFilterGraph(FilterDefinitions.Other.ACMWrapperFilter, ref graphbuilder);
                dc.Add(ACMFilter);

                // Render file - then build graph
                SendDebugMessage("Rendering file", 0);
                graphbuilder.RenderFile(fiInputFile.FullName, null);
                SendDebugMessage("Saving graph", 0);
                FilterGraphTools.SaveGraphFile(graphbuilder, "C:\\ProgramData\\RemotePotato\\lastfiltergraph.grf");

                // Are both our ASF pins connected?
                IPin ASFVidInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
                IPin ASFAudInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);

                // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
                SendDebugMessage("Run graph for testing purposes", 0);
                IMediaControl tempControl = (IMediaControl)graphbuilder;
                IMediaEvent tempEvent = (IMediaEvent)graphbuilder;
                DsError.ThrowExceptionForHR(tempControl.Pause());
                EventCode pEventCode;
                hr = tempEvent.WaitForCompletion(1000, out pEventCode);

                // Get media type from vid input pin for ASF writer
                AMMediaType pmt = new AMMediaType();
                hr = ASFVidInputPin.ConnectionMediaType(pmt);

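                // Work out the source frame size from the negotiated format block (VIDEOINFOHEADER2, VIDEOINFOHEADER, or a fallback)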
                FrameSize SourceFrameSize = null;
                if (pmt.formatType == FormatType.VideoInfo2)
                {
                    // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
                    VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih2);
                    SourceFrameSize = new FrameSize(pvih2.BmiHeader.Width, pvih2.BmiHeader.Height);
                }
                else if (pmt.formatType == FormatType.VideoInfo)  //{05589f80-c356-11ce-bf01-00aa0055595a}
                {
                    VideoInfoHeader pvih = new VideoInfoHeader();
                    Marshal.PtrToStructure(pmt.formatPtr, pvih);
                    SourceFrameSize = new FrameSize(pvih.BmiHeader.Width, pvih.BmiHeader.Height);
                }
                else
                    SourceFrameSize = new FrameSize(200, 200); // SQUARE

                // Stop graph if necessary
                FilterState pFS;
                hr = tempControl.GetState(1000, out pFS);
                if (pFS != FilterState.Stopped)
                    DsError.ThrowExceptionForHR(tempControl.Stop());
                // Free up media type
                DsUtils.FreeAMMediaType(pmt); pmt = null;

                // (re)Configure the ASF writer with the selected WM Profile
                ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

                // Release pins
                SendDebugMessage("Releasing COM objects (pins)", 0);
                // source
                Marshal.ReleaseComObject(ASFVidInputPin); ASFVidInputPin = null;
                Marshal.ReleaseComObject(ASFAudInputPin); ASFAudInputPin = null;
            }
            catch (Exception ex)
            {
                SendDebugMessageWithException(ex.Message, ex);
                return DSStreamResultCodes.ErrorExceptionOccurred;
            }

            return DSStreamResultCodes.OK;
        }