Example 1
        /// <summary>
        /// Allocates the storage used by the buffer.  After this method call
        /// the buffer is ready to be started.
        /// </summary>
        /// <param name="mt">Video media type describing the frame size and frame duration.</param>
        /// <returns>True once the buffer storage has been allocated.</returns>
        /// <exception cref="OutOfMemoryException">Thrown if the buffer cannot be allocated even after scaling back.</exception>
        public bool Create(UW.CSE.MDShow.MediaType mt)
        {
            myMediaType = mt;

            UW.CSE.MDShow.MediaTypeVideoInfo vi = myMediaType.ToMediaTypeVideoInfo();
            FrameSize     = vi.VideoInfo.BitmapInfo.SizeImage;
            FrameDuration = (uint)vi.VideoInfo.AvgTimePerFrame;

            Debug.Assert(FrameSize > 0, "VideoBuffer received bogus media type");

            // Come up with an integer number of frames per second.  There will be some
            // round-off error which should be ignored; then we'll ceil up to the next int.
            // In fact it turns out we can't really trust the VideoFrameDuration value provided
            // by the filter graph.  We will just do this as a sanity check, but to get the
            // actual frame rate, we will need to count the samples we receive.

            double numerator = 10000000;
            double denom     = FrameDuration;
            double fps       = numerator / denom;
            int    ifps      = (int)(fps * 10000);    // keep 4 decimal places, discarding estimated roundoff error

            fps  = ifps / 10000.0;
            ifps = (int)Math.Ceiling(fps);             // if it's still not an integer, err on the high side.

            Debug.Assert(ifps <= MAX_FPS, "VideoBuffer assumes " + MAX_FPS.ToString() + "fps or less");
            Debug.WriteLine("VideoBuffer.Create calculated fps=" + ifps.ToString() + " framesize=" + FrameSize.ToString());

            this.estimatedFps = (ifps <= 30) ? 30 : MAX_FPS;

            // Start assuming we'll use the maximum buffer size
            FrameCount = MAX_BUFFER / FrameSize;
            if (FrameCount > (this.estimatedFps * SIZE_IN_SECONDS))
            {
                // Scale it back so as not to overkill if framesize is small enough
                FrameCount = this.estimatedFps * SIZE_IN_SECONDS;
            }

            //If we can't get as much memory as we initially request, try scaling back up to a point.
            while (true)
            {
                try {
                    Buffer = new BufferChunk((int)(FrameSize * FrameCount));
                    break;
                }
                catch (OutOfMemoryException) {
                    if (FrameCount <= this.estimatedFps)
                    {
                        throw;
                    }
                    FrameCount = (uint)((double)FrameCount * 0.7);
                    Debug.WriteLine("Warning: VideoBuffer failed to get requested memory.  Scaling buffer down to " + FrameCount.ToString() + " frames.");
                }
            }

            Buffer.Length = (int)(FrameSize * FrameCount);
            PresTime      = new ulong[FrameCount];

            WriteOffset    = 0;
            ReadOffset     = 0;
            FramesReady    = 0;
            streamStopTime = 0;
            TotalFrames    = 0;
            started        = false;
            sampleReceived = false;
            GotVideoPeriod = false;
            bufferOverrun  = false;
            return(true);
        }
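
To make the sizing logic in Create concrete, here is a minimal standalone sketch of the same arithmetic for a 320x240 RGB24 stream at roughly 29.97 fps.  The MAX_BUFFER, MAX_FPS and SIZE_IN_SECONDS values are assumptions for illustration only; the real constants are defined elsewhere in the class.

        using System;

        class BufferSizingSketch
        {
            const uint MAX_BUFFER      = 256 * 1024 * 1024;  // assumed 256 MB cap
            const int  MAX_FPS         = 60;                 // assumed fps ceiling
            const int  SIZE_IN_SECONDS = 10;                 // assumed buffer depth in seconds

            static void Main()
            {
                uint frameSize     = 320 * 240 * 3;  // RGB24: 230,400 bytes per frame
                uint frameDuration = 333667;         // 100 ns units, ~29.97 fps

                // Estimate an integer frame rate from the frame duration, erring on the high side.
                double fps  = 10000000.0 / frameDuration;                       // 29.970...
                int    ifps = (int)Math.Ceiling((int)(fps * 10000) / 10000.0);  // 30
                int    estimatedFps = (ifps <= 30) ? 30 : MAX_FPS;

                // Start from the largest buffer, then cap at estimatedFps * SIZE_IN_SECONDS frames.
                uint frameCount = MAX_BUFFER / frameSize;                 // 1165 frames fit
                if (frameCount > (uint)(estimatedFps * SIZE_IN_SECONDS))
                {
                    frameCount = (uint)(estimatedFps * SIZE_IN_SECONDS);  // capped at 300 frames
                }

                Console.WriteLine("fps=" + ifps + " frames=" + frameCount +
                                  " bytes=" + (frameSize * frameCount));  // 69,120,000 bytes
            }
        }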
Example 2
        /// <summary>
        /// Copy the LST managed MediaType to the Windows Media Interop type.
        /// </summary>
        /// <param name="mt">The managed media type to convert.  May be null, in which case an empty structure is returned.</param>
        /// <returns>The equivalent _WMMediaType.  For audio and video types, pbFormat points to a newly
        /// allocated format block which the caller must eventually free (see the PRI3 notes below).</returns>
        private _WMMediaType ConvertMediaType(UW.CSE.MDShow.MediaType mt)
        {
            _WMMediaType wmt = new _WMMediaType();

            if (mt == null)
            {
                return(wmt);
            }

            if (mt.MajorType == UW.CSE.MDShow.MajorType.Video)
            {
                // Basic video settings:
                //int w=320;
                //int h=240;
                //int fps=30;

                // For RGB24:
                //ushort bpp=24;
                //uint comp=0;
                //GUID stype = WMGuids.ToGUID(WMGuids.WMMEDIASUBTYPE_RGB24);

                // ..or for I420:
                //WORD bpp=12;
                //DWORD comp=0x30323449;
                //GUID stype= WMMEDIASUBTYPE_I420;

                // Settings for the video stream:
                // BITMAPINFOHEADER
                //  DWORD  biSize = size of the struct in bytes.. 40
                //	LONG   biWidth - Frame width
                //	LONG   biHeight	- height could be negative indicating top-down dib.
                //	WORD   biPlanes - must be 1.
                //	WORD   biBitCount 24 in our sample with RGB24
                //	DWORD  biCompression 0 for RGB
                //	DWORD  biSizeImage in bytes.. biWidth*biHeight*biBitCount/8
                //	LONG   biXPelsPerMeter 0
                //	LONG   biYPelsPerMeter 0;
                //	DWORD  biClrUsed must be 0
                //	DWORD  biClrImportant 0
                //
                //	notes:
                //		biCompression may be a packed 'fourcc' code, for example I420 is 0x30323449, IYUV = 0x56555949...
                //		I420 and IYUV are identical formats.  They use 12 bits per pixel, and are planar,  comprised of
                //		nxm Y plane followed by n/2 x m/2 U and V planes.  Each plane is 8bits deep.
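                //		e.g. a 320x240 I420 frame: Y = 320*240 = 76,800 bytes, U = V = 160*120 = 19,200 bytes
                //		each, for 115,200 bytes total = 320*240*12/8 (the biSizeImage with biBitCount = 12).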

                //BitmapInfo bi = new BitmapInfo();
                //bi.Size=(uint)Marshal.SizeOf(bi);
                //bi.Width = w;
                //bi.Height = h;
                //bi.Planes = 1; //always 1.
                //bi.BitCount = bpp;
                //bi.Compression = comp; //RGB is zero.. uncompressed.
                //bi.SizeImage = (uint)(w * h * bpp / 8);
                //bi.XPelsPerMeter = 0;
                //bi.YPelsPerMeter = 0;
                //bi.ClrUsed = 0;
                //bi.ClrImportant = 0;

                // WMVIDEOINFOHEADER
                //  RECT  rcSource;
                //	RECT  rcTarget;
                //	DWORD  dwBitRate.. bps.. Width*Height*BitCount*Rate.. 320*240*24*29.93295=55172414
                //	DWORD  dwBitErrorRate zero in our sample.
                //	LONGLONG  AvgTimePerFrame in 100ns units.. 334080=10000*1000/29.93295
                //	BITMAPINFOHEADER  bmiHeader copy of the above struct.
                //VideoInfo vi = new VideoInfo();
                //vi.Source.left	= 0;
                //vi.Source.top	= 0;
                //vi.Source.bottom = bi.Height;
                //vi.Source.right	= bi.Width;
                //vi.Target		= vi.Source;
                //vi.BitRate		= (uint)(w * h * bpp * fps);
                //vi.BitErrorRate	= 0;
                //vi.AvgTimePerFrame = (UInt64) ((10000 * 1000) / fps);
                //vi.BitmapInfo = bi;

                UW.CSE.MDShow.MediaTypeVideoInfo vi = mt.ToMediaTypeVideoInfo();
                IntPtr viPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(vi.VideoInfo));
                Marshal.StructureToPtr(vi.VideoInfo, viPtr, true);

                // WM_MEDIA_TYPE
                //	GUID  majortype WMMEDIATYPE_Video
                //	GUID  subtype WMMEDIASUBTYPE_RGB24 in our sample
                //	BOOL  bFixedSizeSamples TRUE
                //	BOOL  bTemporalCompression FALSE
                //	ULONG  lSampleSize in bytes This was zero in our sample, but could be 320*240*24/8=230400
                //	GUID  formattype WMFORMAT_VideoInfo
                //	IUnknown*  pUnk NULL
                //	ULONG  cbFormat size of the WMVIDEOINFOHEADER
                //	[size_is(cbFormat)] BYTE  *pbFormat pointer to the WMVIDEOINFOHEADER

                //Note WM_MEDIA_TYPE is the same as Directshow's AM_MEDIA_TYPE.
                //WM_MEDIA_TYPE   mt;
                wmt.majortype            = WMGuids.ToGUID(mt.MajorTypeAsGuid);
                wmt.subtype              = WMGuids.ToGUID(mt.SubTypeAsGuid);
                wmt.bFixedSizeSamples    = mt.FixedSizeSamples?1:0;
                wmt.bTemporalCompression = mt.TemporalCompression?1:0;
                //mt.lSampleSize = w * h * bpp / 8;  // this was zero in avinetwrite!
                wmt.lSampleSize = 0;                 //hmm.  Don't think it matters??
                wmt.formattype  = WMGuids.ToGUID(mt.FormatTypeAsGuid);
                wmt.pUnk        = null;
                wmt.cbFormat    = (uint)Marshal.SizeOf(vi.VideoInfo);
                wmt.pbFormat    = viPtr;

                //PRI3: redesign so that this is freed:
                //Marshal.FreeCoTaskMem(viPtr);
            }
            else if (mt.MajorType == UW.CSE.MDShow.MajorType.Audio)
            {
                //				WaveFormatEx wfex = new WaveFormatEx();
                //
                //				wfex.FormatTag = 1; //1==WAVE_FORMAT_PCM
                //				wfex.Channels = 1;
                //				wfex.SamplesPerSec = 16000;
                //				wfex.AvgBytesPerSec =  32000;
                //				wfex.BlockAlign = 2;
                //				wfex.BitsPerSample = 16;
                //				wfex.Size = 0;

                UW.CSE.MDShow.MediaTypeWaveFormatEx wfex = mt.ToMediaTypeWaveFormatEx();
                IntPtr wfexPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(wfex.WaveFormatEx));
                Marshal.StructureToPtr(wfex.WaveFormatEx, wfexPtr, true);

                wmt.majortype            = WMGuids.ToGUID(mt.MajorTypeAsGuid);    //WMGuids.ToGUID(WMGuids.WMMEDIATYPE_Audio);
                wmt.subtype              = WMGuids.ToGUID(mt.SubTypeAsGuid);      //WMGuids.ToGUID(WMGuids.WMMEDIASUBTYPE_PCM);
                wmt.bFixedSizeSamples    = mt.FixedSizeSamples?1:0;               //1; //true
                wmt.bTemporalCompression = mt.TemporalCompression?1:0;            //0; //false
                wmt.lSampleSize          = (uint)mt.SampleSize;                   //2;
                wmt.formattype           = WMGuids.ToGUID(mt.FormatTypeAsGuid);   //WMGuids.ToGUID(WMGuids.WMFORMAT_WaveFormatEx);  //This is the only value permitted.
                wmt.pUnk     = null;
                wmt.cbFormat = (uint)Marshal.SizeOf(wfex.WaveFormatEx) + wfex.WaveFormatEx.Size;
                wmt.pbFormat = wfexPtr;

                //try
                //{
                //  Used GetMediaType to sanity check the managed structs:
                //uint size = 0;
                //audioProps.GetMediaType(IntPtr.Zero,ref size);
                //IntPtr mtPtr = Marshal.AllocCoTaskMem((int)size);
                //audioProps.GetMediaType(mtPtr,ref size);
                //_WMMediaType mt2 = (_WMMediaType)Marshal.PtrToStructure(mtPtr,typeof(_WMMediaType));
                //WMMediaType.WaveFormatEx wfex2 = (WMMediaType.WaveFormatEx)Marshal.PtrToStructure(mt2.pbFormat,typeof(WMMediaType.WaveFormatEx));
                //  Examine here.
                //Marshal.StructureToPtr(mt,mtPtr,true);
                //audioProps.SetMediaType( mtPtr );
                //}
                //catch (Exception e)
                //{
                //	Debug.WriteLine("Failed to set audio properties: " + e.ToString());
                //	return wmt;
                //}

                //PRI3: redesign so that this is freed:
                //Marshal.FreeCoTaskMem(wfexPtr);
            }

            return(wmt);
        }
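
The PRI3 notes above leave the format block allocated by ConvertMediaType unfreed.  A minimal sketch of how a caller might release it once the structure has been handed to the interop layer, assuming System and System.Runtime.InteropServices are imported as in the method above; the SetMediaType-style call is only a placeholder.

            _WMMediaType wmt = ConvertMediaType(mt);
            try
            {
                // Hand the structure to the interop layer here, e.g. a SetMediaType-style
                // call on the writer's input props (placeholder, not shown).
            }
            finally
            {
                // Release the WMVIDEOINFOHEADER / WAVEFORMATEX block allocated by ConvertMediaType.
                if (wmt.pbFormat != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(wmt.pbFormat);
                    wmt.pbFormat = IntPtr.Zero;
                    wmt.cbFormat = 0;
                }
            }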