Example #1
        /// <summary>
        /// Hardcode audio config for testing.
        /// </summary>
        /// <returns>True if the audio input was configured; otherwise false.</returns>
        public bool ConfigAudio()
        {
            //make up some media types for testing

            WAVEFORMATEX wfex = new WAVEFORMATEX();

            wfex.FormatTag      = 1;        //1==WAVE_FORMAT_PCM
            wfex.Channels       = 1;
            wfex.SamplesPerSec  = 16000;
            wfex.AvgBytesPerSec = 32000;
            wfex.BlockAlign     = 2;
            wfex.BitsPerSample  = 16;
            wfex.Size           = 0;

            IntPtr wfexPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(wfex));

            Marshal.StructureToPtr(wfex, wfexPtr, true);

            _WMMediaType mt = new _WMMediaType();

            mt.majortype            = WMGuids.ToGUID(WMGuids.WMMEDIATYPE_Audio);
            mt.subtype              = WMGuids.ToGUID(WMGuids.WMMEDIASUBTYPE_PCM);
            mt.bFixedSizeSamples    = 1;                                             //true
            mt.bTemporalCompression = 0;                                             //false
            mt.lSampleSize          = 2;
            mt.formattype           = WMGuids.ToGUID(WMGuids.WMFORMAT_WaveFormatEx); //This is the only value permitted.
            mt.pUnk     = null;
            mt.cbFormat = (uint)Marshal.SizeOf(wfex) + wfex.Size;
            mt.pbFormat = wfexPtr;

            try
            {
                //  Used GetMediaType to sanity check the managed structs:
                //uint size = 0;
                //audioProps.GetMediaType(IntPtr.Zero,ref size);
                //IntPtr mtPtr = Marshal.AllocCoTaskMem((int)size);
                //audioProps.GetMediaType(mtPtr,ref size);
                //_WMMediaType mt2 = (_WMMediaType)Marshal.PtrToStructure(mtPtr,typeof(_WMMediaType));
                //WMMediaType.WaveFormatEx wfex2 = (WMMediaType.WaveFormatEx)Marshal.PtrToStructure(mt2.pbFormat,typeof(WMMediaType.WaveFormatEx));
                //  Examine here.
                //Marshal.StructureToPtr(mt,mtPtr,true);
                //audioProps.SetMediaType( mtPtr );
            }
            catch (Exception e)
            {
                eventLog.WriteEntry("Failed to set audio properties: " + e.ToString(), EventLogEntryType.Error, 1000);
                Debug.WriteLine("Failed to set audio properties: " + e.ToString());
                return(false);
            }

            bool ret = ConfigAudio(mt);

            Marshal.FreeCoTaskMem(wfexPtr);
            return(ret);
        }
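The hardcoded PCM values above are related by two identities: BlockAlign = Channels * BitsPerSample / 8 and AvgBytesPerSec = SamplesPerSec * BlockAlign. A minimal sketch of that arithmetic for the 16 kHz mono 16-bit settings used in ConfigAudio() (plain local variables only; the interop struct's exact field types are not assumed):

            // PCM format arithmetic behind the values hardcoded in ConfigAudio().
            int channels       = 1;
            int bitsPerSample  = 16;
            int samplesPerSec  = 16000;

            int blockAlign     = channels * bitsPerSample / 8;   // = 2 bytes per sample frame
            int avgBytesPerSec = samplesPerSec * blockAlign;     // = 32000 bytes per second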
Example #2
        /// <summary>
        /// Copy the LST managed MediaType to the Windows Media Interop type
        /// </summary>
        /// <param name="mt">The managed media type to convert.</param>
        /// <returns>The equivalent _WMMediaType.</returns>
        private _WMMediaType ConvertMediaType(UW.CSE.MDShow.MediaType mt)
        {
            _WMMediaType wmt = new _WMMediaType();

            if (mt == null)
            {
                return(wmt);
            }

            if (mt.MajorType == UW.CSE.MDShow.MajorType.Video)
            {
                // Basic video settings:
                //int w=320;
                //int h=240;
                //int fps=30;

                // For RGB24:
                //ushort bpp=24;
                //uint comp=0;
                //GUID stype = WMGuids.ToGUID(WMGuids.WMMEDIASUBTYPE_RGB24);

                // ..or for I420:
                //WORD bpp=12;
                //DWORD comp=0x30323449;
                //GUID stype= WMMEDIASUBTYPE_I420;

                // Settings for the video stream:
                // BITMAPINFOHEADER
                //  DWORD  biSize = size of the struct in bytes.. 40
                //	LONG   biWidth - Frame width
                //	LONG   biHeight	- height could be negative indicating top-down dib.
                //	WORD   biPlanes - must be 1.
                //	WORD   biBitCount 24 in our sample with RGB24
                //	DWORD  biCompression 0 for RGB
                //	DWORD  biSizeImage in bytes.. biWidth*biHeight*biBitCount/8
                //	LONG   biXPelsPerMeter 0
                //	LONG   biYPelsPerMeter 0;
                //	DWORD  biClrUsed must be 0
                //	DWORD  biClrImportant 0
                //
                //	notes:
                //		biCompression may be a packed 'fourcc' code, for example I420 is 0x30323449, IYUV = 0x56555949...
                //		I420 and IYUV are identical formats.  They use 12 bits per pixel and are planar, consisting of
                //		an n x m Y plane followed by n/2 x m/2 U and V planes.  Each plane is 8 bits deep.

                //BitmapInfo bi = new BitmapInfo();
                //bi.Size=(uint)Marshal.SizeOf(bi);
                //bi.Width = w;
                //bi.Height = h;
                //bi.Planes = 1; //always 1.
                //bi.BitCount = bpp;
                //bi.Compression = comp; //RGB is zero.. uncompressed.
                //bi.SizeImage = (uint)(w * h * bpp / 8);
                //bi.XPelsPerMeter = 0;
                //bi.YPelsPerMeter = 0;
                //bi.ClrUsed = 0;
                //bi.ClrImportant = 0;

                // WMVIDEOINFOHEADER
                //  RECT  rcSource;
                //	RECT  rcTarget;
                //	DWORD  dwBitRate.. bps.. Width*Height*BitCount*Rate.. 320*240*24*29.93295=55172414
                //	DWORD  dwBitErrorRate zero in our sample.
                //	LONGLONG  AvgTimePerFrame in 100ns units.. 334080=10000*1000/29.93295
                //	BITMAPINFOHEADER  bmiHeader copy of the above struct.
                //VideoInfo vi = new VideoInfo();
                //vi.Source.left	= 0;
                //vi.Source.top	= 0;
                //vi.Source.bottom = bi.Height;
                //vi.Source.right	= bi.Width;
                //vi.Target		= vi.Source;
                //vi.BitRate		= (uint)(w * h * bpp * fps);
                //vi.BitErrorRate	= 0;
                //vi.AvgTimePerFrame = (UInt64) ((10000 * 1000) / fps);
                //vi.BitmapInfo = bi;

                UW.CSE.MDShow.MediaTypeVideoInfo vi = mt.ToMediaTypeVideoInfo();
                IntPtr viPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(vi.VideoInfo));
                Marshal.StructureToPtr(vi.VideoInfo, viPtr, true);

                // WM_MEDIA_TYPE
                //	GUID  majortype WMMEDIATYPE_Video
                //	GUID  subtype WMMEDIASUBTYPE_RGB24 in our sample
                //	BOOL  bFixedSizeSamples TRUE
                //	BOOL  bTemporalCompression FALSE
                //	ULONG  lSampleSize in bytes This was zero in our sample, but could be 320*240*24/8=230400
                //	GUID  formattype WMFORMAT_VideoInfo
                //	IUnknown*  pUnk NULL
                //	ULONG  cbFormat size of the WMVIDEOINFOHEADER
                //	[size_is(cbFormat)] BYTE  *pbFormat pointer to the WMVIDEOINFOHEADER

                //Note WM_MEDIA_TYPE is the same as Directshow's AM_MEDIA_TYPE.
                //WM_MEDIA_TYPE   mt;
                wmt.majortype            = WMGuids.ToGUID(mt.MajorTypeAsGuid);
                wmt.subtype              = WMGuids.ToGUID(mt.SubTypeAsGuid);
                wmt.bFixedSizeSamples    = mt.FixedSizeSamples?1:0;
                wmt.bTemporalCompression = mt.TemporalCompression?1:0;
                //mt.lSampleSize = w * h * bpp / 8;  // this was zero in avinetwrite!
                wmt.lSampleSize = 0;                 //hmm.  Don't think it matters??
                wmt.formattype  = WMGuids.ToGUID(mt.FormatTypeAsGuid);
                wmt.pUnk        = null;
                wmt.cbFormat    = (uint)Marshal.SizeOf(vi.VideoInfo);
                wmt.pbFormat    = viPtr;

                //PRI3: redesign so that this is freed (one possible caller-side pattern is sketched after this method):
                //Marshal.FreeCoTaskMem(viPtr);
            }
            else if (mt.MajorType == UW.CSE.MDShow.MajorType.Audio)
            {
                //				WaveFormatEx wfex = new WaveFormatEx();
                //
                //				wfex.FormatTag = 1; //1==WAVE_FORMAT_PCM
                //				wfex.Channels = 1;
                //				wfex.SamplesPerSec = 16000;
                //				wfex.AvgBytesPerSec =  32000;
                //				wfex.BlockAlign = 2;
                //				wfex.BitsPerSample = 16;
                //				wfex.Size = 0;

                UW.CSE.MDShow.MediaTypeWaveFormatEx wfex = mt.ToMediaTypeWaveFormatEx();
                IntPtr wfexPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(wfex.WaveFormatEx));
                Marshal.StructureToPtr(wfex.WaveFormatEx, wfexPtr, true);

                wmt.majortype            = WMGuids.ToGUID(mt.MajorTypeAsGuid);    //WMGuids.ToGUID(WMGuids.WMMEDIATYPE_Audio);
                wmt.subtype              = WMGuids.ToGUID(mt.SubTypeAsGuid);      //WMGuids.ToGUID(WMGuids.WMMEDIASUBTYPE_PCM);
                wmt.bFixedSizeSamples    = mt.FixedSizeSamples?1:0;               //1; //true
                wmt.bTemporalCompression = mt.TemporalCompression?1:0;            //0; //false
                wmt.lSampleSize          = (uint)mt.SampleSize;                   //2;
                wmt.formattype           = WMGuids.ToGUID(mt.FormatTypeAsGuid);   //WMGuids.ToGUID(WMGuids.WMFORMAT_WaveFormatEx);  //This is the only value permitted.
                wmt.pUnk     = null;
                wmt.cbFormat = (uint)Marshal.SizeOf(wfex.WaveFormatEx) + wfex.WaveFormatEx.Size;
                wmt.pbFormat = wfexPtr;

                //try
                //{
                //  Used GetMediaType to sanity check the managed structs:
                //uint size = 0;
                //audioProps.GetMediaType(IntPtr.Zero,ref size);
                //IntPtr mtPtr = Marshal.AllocCoTaskMem((int)size);
                //audioProps.GetMediaType(mtPtr,ref size);
                //_WMMediaType mt2 = (_WMMediaType)Marshal.PtrToStructure(mtPtr,typeof(_WMMediaType));
                //WMMediaType.WaveFormatEx wfex2 = (WMMediaType.WaveFormatEx)Marshal.PtrToStructure(mt2.pbFormat,typeof(WMMediaType.WaveFormatEx));
                //  Examine here.
                //Marshal.StructureToPtr(mt,mtPtr,true);
                //audioProps.SetMediaType( mtPtr );
                //}
                //catch (Exception e)
                //{
                //	Debug.WriteLine("Failed to set audio properties: " + e.ToString());
                //	return wmt;
                //}

                //PRI3: redesign so that this is freed:
                //Marshal.FreeCoTaskMem(wfexPtr);
            }

            return(wmt);
        }
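The PRI3 notes above flag that the unmanaged format block placed in wmt.pbFormat (allocated with Marshal.AllocCoTaskMem) is never released. One possible redesign, sketched here under the assumption that the writer copies the media type when it is applied (the same assumption ConfigAudio() in the first example makes when it frees its own buffer), is to have the caller free pbFormat once the converted type has been handed off. The caller name is hypothetical:

        private bool ConfigFromManagedType(UW.CSE.MDShow.MediaType mt)
        {
            _WMMediaType wmt = ConvertMediaType(mt);
            try
            {
                // ...pass wmt to the writer input here...
                return true;
            }
            finally
            {
                //Free the format block that ConvertMediaType allocated.
                if (wmt.pbFormat != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(wmt.pbFormat);
                }
            }
        }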
Example #3
        /// <summary>
        /// Hardcode video config for testing.
        /// </summary>
        /// <returns>True if the video input was configured; otherwise false.</returns>
        public bool ConfigVideo()
        {
            // Basic video settings:
            int w   = 320;
            int h   = 240;
            int fps = 30;

            // For RGB24:
            ushort bpp   = 24;
            uint   comp  = 0;
            GUID   stype = WMGuids.ToGUID(WMGuids.WMMEDIASUBTYPE_RGB24);

            // ..or for I420:
            //WORD bpp=12;
            //DWORD comp=0x30323449;
            //GUID stype= WMMEDIASUBTYPE_I420;

            // Settings for the video stream:
            // BITMAPINFOHEADER
            //  DWORD  biSize = size of the struct in bytes.. 40
            //	LONG   biWidth - Frame width
            //	LONG   biHeight	- height could be negative indicating top-down dib.
            //	WORD   biPlanes - must be 1.
            //	WORD   biBitCount 24 in our sample with RGB24
            //	DWORD  biCompression 0 for RGB
            //	DWORD  biSizeImage in bytes.. biWidth*biHeight*biBitCount/8
            //	LONG   biXPelsPerMeter 0
            //	LONG   biYPelsPerMeter 0;
            //	DWORD  biClrUsed must be 0
            //	DWORD  biClrImportant 0
            //
            //	notes:
            //		biCompression may be a packed 'fourcc' code, for example I420 is 0x30323449, IYUV = 0x56555949...
            //		I420 and IYUV are identical formats.  They use 12 bits per pixel and are planar, consisting of
            //		an n x m Y plane followed by n/2 x m/2 U and V planes.  Each plane is 8 bits deep.

            BITMAPINFOHEADER bi = new BITMAPINFOHEADER();

            bi.Size          = (uint)Marshal.SizeOf(bi);
            bi.Width         = w;
            bi.Height        = h;
            bi.Planes        = 1;      //always 1.
            bi.BitCount      = bpp;
            bi.Compression   = comp;   //RGB is zero.. uncompressed.
            bi.SizeImage     = (uint)(w * h * bpp / 8);
            bi.XPelsPerMeter = 0;
            bi.YPelsPerMeter = 0;
            bi.ClrUsed       = 0;
            bi.ClrImportant  = 0;

            // WMVIDEOINFOHEADER
            //  RECT  rcSource;
            //	RECT  rcTarget;
            //	DWORD  dwBitRate.. bps.. Width*Height*BitCount*Rate.. 320*240*24*29.93295=55172414
            //	DWORD  dwBitErrorRate zero in our sample.
            //	LONGLONG  AvgTimePerFrame in 100ns units.. 334080=10000*1000/29.93295
            //	BITMAPINFOHEADER  bmiHeader copy of the above struct.
            VIDEOINFOHEADER vi = new VIDEOINFOHEADER();
            RECT            r  = new RECT();

            r.Left    = r.Top = 0;
            r.Bottom  = bi.Height;
            r.Right   = bi.Width;
            vi.Source = r;
            //			vi.Source.Left	= 0;
            //			vi.Source.Top	= 0;
            //			vi.Source.Bottom = bi.Height;
            //			vi.Source.Right	= bi.Width;
            vi.Target          = vi.Source;
            vi.BitRate         = (uint)(w * h * bpp * fps);
            vi.BitErrorRate    = 0;
            vi.AvgTimePerFrame = (long)((10000 * 1000) / fps);
            vi.BitmapInfo      = bi;

            IntPtr viPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(vi));

            Marshal.StructureToPtr(vi, viPtr, true);

            // WM_MEDIA_TYPE
            //	GUID  majortype WMMEDIATYPE_Video
            //	GUID  subtype WMMEDIASUBTYPE_RGB24 in our sample
            //	BOOL  bFixedSizeSamples TRUE
            //	BOOL  bTemporalCompression FALSE
            //	ULONG  lSampleSize in bytes This was zero in our sample, but could be 320*240*24/8=230400
            //	GUID  formattype WMFORMAT_VideoInfo
            //	IUnknown*  pUnk NULL
            //	ULONG  cbFormat size of the WMVIDEOINFOHEADER
            //	[size_is(cbFormat)] BYTE  *pbFormat pointer to the WMVIDEOINFOHEADER

            //Note WM_MEDIA_TYPE is the same as Directshow's AM_MEDIA_TYPE.
            //WM_MEDIA_TYPE   mt;
            _WMMediaType mt = new _WMMediaType();

            mt.majortype            = WMGuids.ToGUID(WMGuids.WMMEDIATYPE_Video);
            mt.subtype              = stype;
            mt.bFixedSizeSamples    = 1;
            mt.bTemporalCompression = 0;
            //mt.lSampleSize = w * h * bpp / 8;  // this was zero in avinetwrite!
            mt.lSampleSize = 0;             //hmm.  Don't think it matters??
            mt.formattype  = WMGuids.ToGUID(WMGuids.WMFORMAT_VideoInfo);
            mt.pUnk        = null;
            mt.cbFormat    = (uint)Marshal.SizeOf(vi);
            mt.pbFormat    = viPtr;

            bool ret = ConfigVideo(mt);

            Marshal.FreeCoTaskMem(viPtr);
            return(ret);
        }
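The numbers in the comments above follow from simple arithmetic: a FOURCC such as 'I420' is its four ASCII bytes packed little-endian, dwBitRate for uncompressed video is width * height * bits-per-pixel * frame rate, and AvgTimePerFrame is the frame period in 100 ns units. A short sketch of those computations for the 320x240 RGB24 at 30 fps settings used in ConfigVideo() (the helper name is illustrative, not part of the original class):

        private static uint MakeFourCC(char c0, char c1, char c2, char c3)
        {
            //Pack four ASCII characters little-endian: 'I','4','2','0' -> 0x30323449.
            return (uint)c0 | ((uint)c1 << 8) | ((uint)c2 << 16) | ((uint)c3 << 24);
        }

        // For the hardcoded settings in ConfigVideo():
        //   uint i420       = MakeFourCC('I', '4', '2', '0');   // 0x30323449
        //   uint bitRate    = 320u * 240 * 24 * 30;             // 55,296,000 bits/sec (RGB24, uncompressed)
        //   long frame100ns = (10000 * 1000L) / 30;             // 333,333 hundred-nanosecond units per frame
        //   uint i420Size   = 320u * 240 * 12 / 8;              // 115,200 bytes per I420 frame (12 bpp)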
Example #4
        /// <summary>
        /// Load a WM Profile (system or custom).
        /// </summary>
        /// <param name="prxFile">Path to a custom .prx profile file, or an empty string to use a system profile.</param>
        /// <param name="prIndex">System profile index (used only when prxFile is empty).</param>
        /// <returns>True if the profile was loaded and applied to the writer; otherwise false.</returns>
        public bool ConfigProfile(String prxFile, uint prIndex)
        {
            IWMProfile profile;

            uint hr = WMFSDKFunctions.WMCreateProfileManager(out profileManager);

            if (prxFile == "")
            {
                //use system profile
                Guid prg = ProfileIndexToGuid(prIndex);
                if (prg == Guid.Empty)
                {
                    profile = null;
                    Debug.WriteLine("Unsupported Profile index.");
                    return(false);
                }

                try
                {
                    GUID prG = WMGuids.ToGUID(prg);
                    profileManager.LoadProfileByID(ref prG, out profile);
                }
                catch (Exception e)
                {
                    eventLog.WriteEntry("Failed to load system profile: " + e.ToString(), EventLogEntryType.Error, 1000);
                    Debug.WriteLine("Failed to load system profile: " + e.ToString());
                    profile = null;
                    return(false);
                }
            }
            else
            {
                //use custom profile
                profile = LoadCustomProfile(prxFile);
                if (profile == null)
                {
                    return(false);
                }
            }

            // Tell the writer to use this profile.
            try
            {
                writer.SetProfile(profile);
                string name = GetProfileName(profile);
                Debug.WriteLine("Using profile: " + name);
            }
            catch (Exception e)
            {
                eventLog.WriteEntry("Failed to set writer profile: " + e.ToString(), EventLogEntryType.Error, 1000);
                Debug.WriteLine("Failed to set writer profile: " + e.ToString());
                profile = null;
                return(false);
            }

            // A slightly confusing point:  Streams are subobjects of the profile,
            // while inputs are subobjects of the Writer.  The difference matters in the
            // multi-bitrate scenario, where there may be multiple streams per input.
            // Stream numbers start at 1, while input numbers and stream indexes begin at 0.
            // If the profile supports scripts, we need the stream number of
            // the script stream.  For audio and video, we just need the input number.
            scriptBitrate      = 0;
            audioInput         = videoInput = 0;
            scriptStreamNumber = 0;
            audioProps         = videoProps = null;

            // If the profile has a script stream, find the bitrate and stream number.
            uint            cStreams;
            IWMStreamConfig streamConfig;
            GUID            streamType;

            profile.GetStreamCount(out cStreams);
            for (uint i = 0; i < cStreams; i++)
            {
                profile.GetStream(i, out streamConfig);
                streamConfig.GetStreamType(out streamType);
                if (WMGuids.ToGuid(streamType) == WMGuids.WMMEDIATYPE_Script)
                {
                    streamConfig.GetStreamNumber(out scriptStreamNumber);
                    streamConfig.GetBitrate(out scriptBitrate);
                }
            }

            // Iterate over writer inputs, holding on to the IWMInputMediaProps* for each,
            // so we can later configure them.  Also save input numbers for audio and video here.
            uint cInputs;

            writer.GetInputCount(out cInputs);
            GUID guidInputType;
            IWMInputMediaProps inputProps = null;

            for (uint i = 0; i < cInputs; i++)
            {
                writer.GetInputProps(i, out inputProps);
                inputProps.GetType(out guidInputType);
                if (WMGuids.ToGuid(guidInputType) == WMGuids.WMMEDIATYPE_Audio)
                {
                    audioProps = inputProps;
                    audioInput = i;
                }
                else if (WMGuids.ToGuid(guidInputType) == WMGuids.WMMEDIATYPE_Video)
                {
                    videoProps = inputProps;
                    videoInput = i;
                }
                else if (WMGuids.ToGuid(guidInputType) == WMGuids.WMMEDIATYPE_Script)
                {
                    //Nothing to configure for the script input; its stream number and bitrate were captured above.
                }
                else
                {
                    Debug.WriteLine("Profile contains unrecognized media type.");
                    return(false);
                }
            }

            // We require an audio input, since that drives the timing for the whole stream.
            if (audioProps == null)
            {
                Debug.WriteLine("Profile should contain at least one audio input.");
                return(false);
            }

            return(true);
        }
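ConfigProfile is what locates the writer's audio and video inputs (audioProps, videoProps, audioInput, videoInput), so it presumably has to run before the per-input configuration calls. A hedged usage sketch; the surrounding method and the system profile index are illustrative assumptions, not part of the listing:

        public bool ConfigureWriter(string customPrxPath)
        {
            //Empty path means "use a system profile"; the index is mapped to a GUID by ProfileIndexToGuid().
            bool ok = (customPrxPath.Length == 0)
                ? ConfigProfile("", 1)              //illustrative index only
                : ConfigProfile(customPrxPath, 0);  //index is ignored for custom profiles

            if (!ok)
            {
                return false;
            }

            //The audio and video inputs are known only after the profile has been applied.
            return ConfigAudio() && ConfigVideo();
        }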