Example #1
        /// <summary>
        /// Allocate storage for all buffers.  This should be called after CreateBufferIndex
        /// and SetMediaTypes, and before Start.  The audio mixer is also created here.
        /// </summary>
        /// <returns>True if all buffers were allocated; false if a media type or the video buffer is missing, or if allocation fails.</returns>
        public bool CreateBuffers()
        {
            if ((audioMediaType == null) || (videoMediaType == null))
            {
                return(false);
            }

            UW.CSE.MDShow.MediaTypeWaveFormatEx wf = audioMediaType.ToMediaTypeWaveFormatEx();
            audioMixer = new AudioMixer(wf.WaveFormatEx.BitsPerSample, wf.WaveFormatEx.AvgBytesPerSec, wf.WaveFormatEx.Channels);

            if (videoBuffer != null)
            {
                if (!videoBuffer.Create(videoMediaType))
                {
                    return(false);
                }
            }
            else
            {
                return(false);
            }

            lock (this)
            {
                foreach (AudioBuffer ab in audioBuffers.Values)
                {
                    if (!ab.Create())
                    {
                        return(false);
                    }
                }
            }
            return(true);
        }
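
A minimal call-order sketch implied by the summary above; the owning class name (StreamMixer here) and the exact signatures of SetMediaTypes, CreateBufferIndex and Start are assumptions, not the confirmed API:

        // Hypothetical usage, following the documented ordering:
        StreamMixer mixer = new StreamMixer();                // assumed owning class
        mixer.SetMediaTypes(audioMediaType, videoMediaType);  // assumed signature
        mixer.CreateBufferIndex();                            // assumed signature
        if (mixer.CreateBuffers())                            // allocates buffers and the audio mixer
        {
            mixer.Start();                                    // safe to start once buffers exist
        }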
Example #2
        /// <summary>
        /// Allocates the storage used by the buffer.  After this method call
        /// the buffer is ready to be started.
        /// </summary>
        /// <returns>True if allocation succeeded; false if the byte rate or buffer length is zero.</returns>
        public bool Create()
        {
            Debug.Assert(myMediaType != null);

            UW.CSE.MDShow.MediaTypeWaveFormatEx wf = myMediaType.ToMediaTypeWaveFormatEx();
            uint bytesPerSec = wf.WaveFormatEx.AvgBytesPerSec;

            currentChannels = wf.WaveFormatEx.Channels;

            if ((bytesPerSec == 0) || (SIZE_IN_SECONDS == 0))
            {
                return(false);
            }

            //Use a dummy audio sample of about 1/4 second to correct for lost audio data or to resync.
            // The size must be evenly divisible by the BlockAlign value, which we assume to be 2 or 4;
            // a size divisible by 4 works for 2 as well.
            SilenceSize = (uint)(bytesPerSec / 16) * 4;
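            // Worked example: for 16-bit, 16 kHz mono PCM, AvgBytesPerSec = 32000,
            // so SilenceSize = (32000 / 16) * 4 = 8000 bytes, i.e. 1/4 second,
            // which is divisible by both BlockAlign 2 and 4.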

            Buffer        = new BufferChunk((int)bytesPerSec * SIZE_IN_SECONDS);
            Buffer.Length = (int)bytesPerSec * SIZE_IN_SECONDS;

            PresTime    = new ulong[SIZE_IN_SECONDS];
            QuietBuffer = new byte[SilenceSize];

            AddAudio       = 0;
            SkipAudio      = 0;
            SampleSize     = bytesPerSec;
            SampleCount    = SIZE_IN_SECONDS;
            WriteOffset    = 0;
            ReadOffset     = 0;
            SamplesReady   = 0;
            BufferSize     = (uint)bytesPerSec * SIZE_IN_SECONDS;
            BufferPos      = 0;
            LeftoverBytes  = 0;
            samplesWritten = 0;
            streamStopTime = 0;
            bytesBuffered  = 0;
            started        = false;
            sampleReceived = false;
            return(true);
        }
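
For context, a self-contained sketch of the ring-buffer arithmetic these fields set up (one SampleSize-sized slot per second of audio, with offsets wrapping at BufferSize); this illustrates the bookkeeping only, not the class's actual read/write methods:

        // Illustrative slot arithmetic; the concrete values are assumptions:
        uint sampleSize = 32000, sampleCount = 5;   // five one-second slots
        uint bufferSize = sampleSize * sampleCount;
        uint writeOffset = 0, readOffset = 0, samplesReady = 0;

        // Writer: advance one slot, wrapping at the end of the buffer.
        writeOffset = (writeOffset + sampleSize) % bufferSize;
        if (samplesReady < sampleCount) samplesReady++;

        // Reader: consume a slot and advance the same way.
        if (samplesReady > 0)
        {
            readOffset = (readOffset + sampleSize) % bufferSize;
            samplesReady--;
        }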
Example #3
        /// <summary>
        /// Copy the LST managed MediaType to the Windows Media Interop type.
        /// </summary>
        /// <param name="mt">The managed media type to convert.</param>
        /// <returns>The populated _WMMediaType; an empty structure if mt is null.</returns>
        private _WMMediaType ConvertMediaType(UW.CSE.MDShow.MediaType mt)
        {
            _WMMediaType wmt = new _WMMediaType();

            if (mt == null)
            {
                return(wmt);
            }

            if (mt.MajorType == UW.CSE.MDShow.MajorType.Video)
            {
                // Basic video settings:
                //int w=320;
                //int h=240;
                //int fps=30;

                // For RGB24:
                //ushort bpp=24;
                //uint comp=0;
                //GUID stype = WMGuids.ToGUID(WMGuids.WMMEDIASUBTYPE_RGB24);

                // ..or for I420:
                //WORD bpp=12;
                //DWORD comp=0x30323449;
                //GUID stype= WMMEDIASUBTYPE_I420;

                // Settings for the video stream:
                // BITMAPINFOHEADER
                //  DWORD  biSize - size of the struct in bytes (40)
                //  LONG   biWidth - frame width
                //  LONG   biHeight - height; may be negative, indicating a top-down DIB
                //  WORD   biPlanes - must be 1
                //  WORD   biBitCount - 24 in our sample with RGB24
                //  DWORD  biCompression - 0 for RGB
                //  DWORD  biSizeImage - in bytes: biWidth*biHeight*biBitCount/8
                //  LONG   biXPelsPerMeter - 0
                //  LONG   biYPelsPerMeter - 0
                //  DWORD  biClrUsed - must be 0
                //  DWORD  biClrImportant - 0
                //
                //  notes:
                //      biCompression may be a packed 'fourcc' code, for example I420 is 0x30323449, IYUV = 0x56555949...
                //      I420 and IYUV are identical formats.  They use 12 bits per pixel and are planar, comprised of an
                //      nxm Y plane followed by n/2 x m/2 U and V planes.  Each plane is 8 bits deep.
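                //
                //      For reference, a fourcc packs four ASCII characters little-endian; a quick sketch:
                //        uint i420 = (uint)'I' | ((uint)'4' << 8) | ((uint)'2' << 16) | ((uint)'0' << 24);  // = 0x30323449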

                //BitmapInfo bi = new BitmapInfo();
                //bi.Size=(uint)Marshal.SizeOf(bi);
                //bi.Width = w;
                //bi.Height = h;
                //bi.Planes = 1; //always 1.
                //bi.BitCount = bpp;
                //bi.Compression = comp; //RGB is zero.. uncompressed.
                //bi.SizeImage = (uint)(w * h * bpp / 8);
                //bi.XPelsPerMeter = 0;
                //bi.YPelsPerMeter = 0;
                //bi.ClrUsed = 0;
                //bi.ClrImportant = 0;

                // WMVIDEOINFOHEADER
                //  RECT      rcSource
                //  RECT      rcTarget
                //  DWORD     dwBitRate - bps: Width*Height*BitCount*Rate, e.g. 320*240*24*29.93295=55172414
                //  DWORD     dwBitErrorRate - zero in our sample
                //  LONGLONG  AvgTimePerFrame - in 100ns units: 334080=10000*1000/29.93295
                //  BITMAPINFOHEADER  bmiHeader - copy of the above struct
                //VideoInfo vi = new VideoInfo();
                //vi.Source.left   = 0;
                //vi.Source.top    = 0;
                //vi.Source.bottom = bi.Height;
                //vi.Source.right  = bi.Width;
                //vi.Target        = vi.Source;
                //vi.BitRate       = (uint)(w * h * bpp * fps);
                //vi.BitErrorRate  = 0;
                //vi.AvgTimePerFrame = (UInt64)((10000 * 1000) / fps);
                //vi.BitmapInfo    = bi;

                UW.CSE.MDShow.MediaTypeVideoInfo vi = mt.ToMediaTypeVideoInfo();
                IntPtr viPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(vi.VideoInfo));
                Marshal.StructureToPtr(vi.VideoInfo, viPtr, true);
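                //viPtr now holds an unmanaged copy of the VIDEOINFOHEADER; wmt.pbFormat below points at it,
                //and the caller must eventually release it with Marshal.FreeCoTaskMem (see the PRI3 note).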

                // WM_MEDIA_TYPE
                //  GUID   majortype - WMMEDIATYPE_Video
                //  GUID   subtype - WMMEDIASUBTYPE_RGB24 in our sample
                //  BOOL   bFixedSizeSamples - TRUE
                //  BOOL   bTemporalCompression - FALSE
                //  ULONG  lSampleSize - in bytes; zero in our sample, but could be 320*240*24/8=230400
                //  GUID   formattype - WMFORMAT_VideoInfo
                //  IUnknown*  pUnk - NULL
                //  ULONG  cbFormat - size of the WMVIDEOINFOHEADER
                //  [size_is(cbFormat)] BYTE *pbFormat - pointer to the WMVIDEOINFOHEADER

                //Note: WM_MEDIA_TYPE is the same as DirectShow's AM_MEDIA_TYPE.
                //WM_MEDIA_TYPE   mt;
                wmt.majortype            = WMGuids.ToGUID(mt.MajorTypeAsGuid);
                wmt.subtype              = WMGuids.ToGUID(mt.SubTypeAsGuid);
                wmt.bFixedSizeSamples    = mt.FixedSizeSamples?1:0;
                wmt.bTemporalCompression = mt.TemporalCompression?1:0;
                //mt.lSampleSize = w * h * bpp / 8;  // this was zero in avinetwrite!
                wmt.lSampleSize = 0;                 //Zero in the reference sample; it does not appear to matter for video.
                wmt.formattype  = WMGuids.ToGUID(mt.FormatTypeAsGuid);
                wmt.pUnk        = null;
                wmt.cbFormat    = (uint)Marshal.SizeOf(vi.VideoInfo);
                wmt.pbFormat    = viPtr;

                //PRI3: redesign so that this is freed:
                //Marshal.FreeCoTaskMem(viPtr);
            }
            else if (mt.MajorType == UW.CSE.MDShow.MajorType.Audio)
            {
                //WaveFormatEx wfex = new WaveFormatEx();
                //
                //wfex.FormatTag      = 1; //1==WAVE_FORMAT_PCM
                //wfex.Channels       = 1;
                //wfex.SamplesPerSec  = 16000;
                //wfex.AvgBytesPerSec = 32000;
                //wfex.BlockAlign     = 2;
                //wfex.BitsPerSample  = 16;
                //wfex.Size           = 0;

                UW.CSE.MDShow.MediaTypeWaveFormatEx wfex = mt.ToMediaTypeWaveFormatEx();
                IntPtr wfexPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(wfex.WaveFormatEx));
                Marshal.StructureToPtr(wfex.WaveFormatEx, wfexPtr, true);

                wmt.majortype            = WMGuids.ToGUID(mt.MajorTypeAsGuid);    //WMGuids.ToGUID(WMGuids.WMMEDIATYPE_Audio);
                wmt.subtype              = WMGuids.ToGUID(mt.SubTypeAsGuid);      //WMGuids.ToGUID(WMGuids.WMMEDIASUBTYPE_PCM);
                wmt.bFixedSizeSamples    = mt.FixedSizeSamples?1:0;               //1; //true
                wmt.bTemporalCompression = mt.TemporalCompression?1:0;            //0; //false
                wmt.lSampleSize          = (uint)mt.SampleSize;                   //2;
                wmt.formattype           = WMGuids.ToGUID(mt.FormatTypeAsGuid);   //WMGuids.ToGUID(WMGuids.WMFORMAT_WaveFormatEx);  //This is the only value permitted.
                wmt.pUnk     = null;
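                //cbFormat follows the WAVEFORMATEX convention: the base struct size plus
                //WaveFormatEx.Size (cbSize) bytes of codec-specific data appended after it.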
                wmt.cbFormat = (uint)Marshal.SizeOf(wfex.WaveFormatEx) + wfex.WaveFormatEx.Size;
                wmt.pbFormat = wfexPtr;

                //try
                //{
                //  Used GetMediaType to sanity check the managed structs:
                //uint size = 0;
                //audioProps.GetMediaType(IntPtr.Zero,ref size);
                //IntPtr mtPtr = Marshal.AllocCoTaskMem((int)size);
                //audioProps.GetMediaType(mtPtr,ref size);
                //_WMMediaType mt2 = (_WMMediaType)Marshal.PtrToStructure(mtPtr,typeof(_WMMediaType));
                //WMMediaType.WaveFormatEx wfex2 = (WMMediaType.WaveFormatEx)Marshal.PtrToStructure(mt2.pbFormat,typeof(WMMediaType.WaveFormatEx));
                //  Examine here.
                //Marshal.StructureToPtr(mt,mtPtr,true);
                //audioProps.SetMediaType( mtPtr );
                //}
                //catch (Exception e)
                //{
                //	Debug.WriteLine("Failed to set audio properties: " + e.ToString());
                //	return wmt;
                //}

                //PRI3: redesign so that this is freed:
                //Marshal.FreeCoTaskMem(wfexPtr);
            }

            return(wmt);
        }
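
A minimal cleanup sketch for the PRI3 notes above, assuming the caller owns wmt.pbFormat once ConvertMediaType returns; FreeMediaTypeFormat is a hypothetical helper, not part of the original class:

        private static void FreeMediaTypeFormat(ref _WMMediaType wmt)
        {
            if (wmt.pbFormat != IntPtr.Zero)
            {
                //Release the unmanaged VIDEOINFOHEADER/WAVEFORMATEX copy allocated in ConvertMediaType.
                Marshal.FreeCoTaskMem(wmt.pbFormat);
                wmt.pbFormat = IntPtr.Zero;
                wmt.cbFormat = 0;
            }
        }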