/// <summary>
/// Reads the reader's current (uncompressed) audio media type and maps it onto a WaveFormat.
/// </summary>
/// <param name="reader">Source reader positioned on an audio stream.</param>
/// <returns>A PCM or IEEE-float WaveFormat matching the current media type.</returns>
/// <exception cref="InvalidDataException">The current subtype is neither PCM nor IEEE float.</exception>
private WaveFormat GetCurrentWaveFormat(IMFSourceReader reader)
{
    // Two ways to query it, first is to ask for properties
    // (second is to convert into WaveFormatEx using MFCreateWaveFormatExFromMFMediaType)
    IMFMediaType rawMediaType;
    reader.GetCurrentMediaType(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, out rawMediaType);
    var currentType = new MediaType(rawMediaType);

    Guid majorType = currentType.MajorType;
    Debug.Assert(majorType == MediaTypes.MFMediaType_Audio);

    Guid subType = currentType.SubType;
    int channelCount = currentType.ChannelCount;
    int bitDepth = currentType.BitsPerSample;
    int samplesPerSecond = currentType.SampleRate;

    if (subType == AudioSubtypes.MFAudioFormat_PCM)
    {
        return new WaveFormat(samplesPerSecond, bitDepth, channelCount);
    }
    if (subType == AudioSubtypes.MFAudioFormat_Float)
    {
        return WaveFormat.CreateIeeeFloatWaveFormat(samplesPerSecond, channelCount);
    }

    var subTypeDescription = FieldDescriptionHelper.Describe(typeof(AudioSubtypes), subType);
    throw new InvalidDataException(String.Format("Unsupported audio sub Type {0}", subTypeDescription));
}
/// <summary>
/// Wraps the reader's current media type for the first audio stream in a MediaType helper.
/// </summary>
/// <param name="reader">Source reader to query.</param>
private static MediaType GetCurrentMediaType(IMFSourceReader reader)
{
    IMFMediaType rawType;
    reader.GetCurrentMediaType(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, out rawType);
    return new MediaType(rawType);
}
/// <summary>
/// Wraps the reader's current media type for the first audio stream in a MediaType helper.
/// </summary>
/// <param name="reader">Source reader to query.</param>
private static MediaType GetCurrentMediaType(IMFSourceReader reader)
{
    // MF_SOURCE_READER_FIRST_AUDIO_STREAM is defined as 0xFFFFFFFD, i.e. -3 as a
    // signed int; name the constant instead of passing the bare magic number.
    const int MF_SOURCE_READER_FIRST_AUDIO_STREAM = -3;
    IMFMediaType mediaType;
    reader.GetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM, out mediaType);
    return new MediaType(mediaType);
}
/// <summary>
/// Gets the current media type for a stream.
/// </summary>
/// <param name="sourceReader">A valid IMFSourceReader instance.</param>
/// <param name="streamIndex">The stream to query.</param>
/// <param name="mediaType">Receives an instance of the IMFMediaType interface.</param>
/// <returns>S_OK if the function succeeds; otherwise another HResult member describing the error.</returns>
public static HResult GetCurrentMediaType(this IMFSourceReader sourceReader, SourceReaderFirstStream streamIndex, out IMFMediaType mediaType) { if (sourceReader == null) { throw new ArgumentNullException("sourceReader"); } return(sourceReader.GetCurrentMediaType((int)streamIndex, out mediaType)); }
/// <summary>
/// Reads the reader's current audio media type and converts it to a WaveFormat.
/// Supports PCM and IEEE-float output; any other subtype is rejected.
/// </summary>
/// <param name="reader">Source reader positioned on an audio stream.</param>
/// <exception cref="InvalidDataException">The current subtype is neither PCM nor IEEE float.</exception>
private WaveFormat GetCurrentWaveFormat(IMFSourceReader reader)
{
    // 0xFFFFFFFD == MF_SOURCE_READER_FIRST_AUDIO_STREAM (-3 as a signed int).
    const int MF_SOURCE_READER_FIRST_AUDIO_STREAM = -3;
    IMFMediaType rawMediaType;
    reader.GetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM, out rawMediaType);
    var currentType = new MediaType(rawMediaType);

    Guid subType = currentType.SubType;
    int channelCount = currentType.ChannelCount;
    int bitsPerSample = currentType.BitsPerSample;
    int sampleRate = currentType.SampleRate;

    if (subType == AudioSubtypes.MFAudioFormat_PCM)
    {
        return new WaveFormat(sampleRate, bitsPerSample, channelCount);
    }
    if (subType == AudioSubtypes.MFAudioFormat_Float)
    {
        return WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channelCount);
    }
    // Previously any non-PCM subtype was silently assumed to be IEEE float, which
    // produced a bogus WaveFormat for other encodings; fail loudly instead.
    throw new InvalidDataException(string.Format("Unsupported audio sub Type {0}", subType));
}
/// <summary>
/// Builds a WaveFormat from the reader's current audio media type, optionally
/// overriding the queried sample rate, channel count and bit depth.
/// </summary>
/// <param name="reader">Source reader positioned on an audio stream.</param>
/// <param name="useOverrides">When true, use <paramref name="rate"/>, <paramref name="chan"/> and <paramref name="bps"/> instead of the queried values.</param>
/// <param name="rate">Override sample rate (only used when <paramref name="useOverrides"/> is true).</param>
/// <param name="chan">Override channel count.</param>
/// <param name="bps">Override bits per sample.</param>
/// <exception cref="InvalidDataException">The current subtype is neither PCM nor IEEE float.</exception>
private WaveFormat GetCurrentWaveFormat(IMFSourceReader reader, bool useOverrides, int rate, int chan, int bps)
{
    IMFMediaType uncompressedMediaType;
    reader.GetCurrentMediaType(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, out uncompressedMediaType);
    // Two ways to query it, first is to ask for properties
    // (second is to convert into WaveFormatEx using MFCreateWaveFormatExFromMFMediaType)
    var outputMediaType = new MediaType(uncompressedMediaType);
    Guid actualMajorType = outputMediaType.MajorType;
    Debug.Assert(actualMajorType == MediaTypes.MFMediaType_Audio);
    Guid audioSubType = outputMediaType.SubType;
    int channels = useOverrides ? chan : outputMediaType.ChannelCount;
    int bits = useOverrides ? bps : outputMediaType.BitsPerSample;
    int sampleRate = useOverrides ? rate : outputMediaType.SampleRate;

    if (audioSubType == AudioSubtypes.MFAudioFormat_PCM)
    {
        return new WaveFormat(sampleRate, bits, channels);
    }
    if (audioSubType == AudioSubtypes.MFAudioFormat_Float)
    {
        return WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
    }
    // Previously any non-PCM subtype was treated as IEEE float, yielding a wrong
    // WaveFormat for other encodings; reject unsupported subtypes explicitly.
    throw new InvalidDataException(string.Format("Unsupported audio sub Type {0}", audioSubType));
}
/// <summary>
/// Queries the frame size and frame rate of the reader's first video stream.
/// The media type COM object is always released, even when a query fails.
/// </summary>
/// <param name="reader">Source reader with a video stream.</param>
/// <param name="width">Receives the frame width in pixels.</param>
/// <param name="height">Receives the frame height in pixels.</param>
/// <param name="fps">Receives the frame rate rounded to the nearest integer.</param>
private static void GetSourceReaderAttributes(IMFSourceReader reader, out int width, out int height, out int fps)
{
    IMFMediaType videoMediaType;
    Marshal.ThrowExceptionForHR((int)reader.GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, out videoMediaType));
    try
    {
        uint frameWidth, frameHeight;
        Marshal.ThrowExceptionForHR((int)videoMediaType.GetSize(MFAttributesClsid.MF_MT_FRAME_SIZE, out frameWidth, out frameHeight));
        uint rateNumerator, rateDenominator;
        Marshal.ThrowExceptionForHR((int)videoMediaType.GetRatio(MFAttributesClsid.MF_MT_FRAME_RATE, out rateNumerator, out rateDenominator));

        width = (int)frameWidth;
        height = (int)frameHeight;
        // Round the ratio to the nearest integer: add half the denominator before dividing.
        fps = ((int)rateNumerator + (int)rateDenominator / 2) / (int)rateDenominator;
    }
    finally
    {
        Marshal.ReleaseComObject(videoMediaType);
    }
}
/// <summary>
/// Determines the WaveFormat corresponding to the reader's current audio media type.
/// </summary>
/// <param name="reader">Source reader positioned on an audio stream.</param>
/// <exception cref="InvalidDataException">The current subtype is neither PCM nor IEEE float.</exception>
private WaveFormat GetCurrentWaveFormat(IMFSourceReader reader)
{
    // -3 == MF_SOURCE_READER_FIRST_AUDIO_STREAM (0xFFFFFFFD as a signed int).
    IMFMediaType rawMediaType;
    reader.GetCurrentMediaType(-3, out rawMediaType);
    MediaType currentType = new MediaType(rawMediaType);

    // Major type is read (and discarded) exactly as the original code did.
    Guid majorType = currentType.MajorType;
    Guid subType = currentType.SubType;
    int channelCount = currentType.ChannelCount;
    int bitsPerSample = currentType.BitsPerSample;
    int sampleRate = currentType.SampleRate;

    if (subType == AudioSubtypes.MFAudioFormat_PCM)
    {
        return new WaveFormat(sampleRate, bitsPerSample, channelCount);
    }
    if (subType == AudioSubtypes.MFAudioFormat_Float)
    {
        return WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channelCount);
    }

    string subTypeDescription = FieldDescriptionHelper.Describe(typeof(AudioSubtypes), subType);
    throw new InvalidDataException(string.Format("Unsupported audio sub Type {0}", subTypeDescription));
}
/// <summary>
/// Determines the uncompressed WaveFormat that Media Foundation will deliver for the
/// audio data in <paramref name="buffer"/>, without decoding the whole stream.
/// </summary>
/// <param name="buffer">The complete contents of an audio file.</param>
/// <returns>The PCM or IEEE-float WaveFormat of the decoded output.</returns>
/// <exception cref="InvalidDataException">The negotiated subtype is neither PCM nor IEEE float.</exception>
public static WaveFormat GetCurrentWaveFormat(byte[] buffer)
{
    MediaFoundationApi.Startup();
    var settings = new MediaFoundationReaderSettings();
    IMFSourceReader reader = MediaFoundationApi.CreateSourceReaderFromByteStream(
        MediaFoundationApi.CreateByteStream(new ComStream(new MemoryStream(buffer))));
    try
    {
        reader.SetStreamSelection(MediaFoundationInterop.MF_SOURCE_READER_ALL_STREAMS, false);
        reader.SetStreamSelection(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, true);

        // Create a partial media type indicating that we want uncompressed PCM audio
        var partialMediaType = new MediaType();
        partialMediaType.MajorType = MediaTypes.MFMediaType_Audio;
        partialMediaType.SubType = settings.RequestFloatOutput ? AudioSubtypes.MFAudioFormat_Float : AudioSubtypes.MFAudioFormat_PCM;

        var currentMediaType = GetCurrentMediaType(reader);
        // mono, low sample rate files can go wrong on Windows 10 unless we specify here
        partialMediaType.ChannelCount = currentMediaType.ChannelCount;
        partialMediaType.SampleRate = currentMediaType.SampleRate;
        try
        {
            // set the media type
            // can return MF_E_INVALIDMEDIATYPE if not supported
            reader.SetCurrentMediaType(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, IntPtr.Zero, partialMediaType.MediaFoundationObject);
        }
        catch (COMException ex) when (ex.GetHResult() == MediaFoundationErrors.MF_E_INVALIDMEDIATYPE)
        {
            // HE-AAC (and v2) seems to halve the samplerate
            if (currentMediaType.SubType == AudioSubtypes.MFAudioFormat_AAC && currentMediaType.ChannelCount == 1)
            {
                partialMediaType.SampleRate = currentMediaType.SampleRate *= 2;
                partialMediaType.ChannelCount = currentMediaType.ChannelCount *= 2;
                reader.SetCurrentMediaType(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, IntPtr.Zero, partialMediaType.MediaFoundationObject);
            }
            else
            {
                throw;
            }
        }

        // Two ways to query it, first is to ask for properties
        // (second is to convert into WaveFormatEx using MFCreateWaveFormatExFromMFMediaType)
        IMFMediaType uncompressedMediaType;
        reader.GetCurrentMediaType(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, out uncompressedMediaType);
        var outputMediaType = new MediaType(uncompressedMediaType);
        Guid actualMajorType = outputMediaType.MajorType;
        Debug.Assert(actualMajorType == MediaTypes.MFMediaType_Audio);
        Guid audioSubType = outputMediaType.SubType;
        int channels = outputMediaType.ChannelCount;
        int bits = outputMediaType.BitsPerSample;
        int sampleRate = outputMediaType.SampleRate;

        // Release the COM media types once their properties have been read.
        // (uncompressedMediaType was previously leaked.)
        Marshal.ReleaseComObject(uncompressedMediaType);
        Marshal.ReleaseComObject(currentMediaType.MediaFoundationObject);

        if (audioSubType == AudioSubtypes.MFAudioFormat_PCM)
        {
            return new WaveFormat(sampleRate, bits, channels);
        }
        if (audioSubType == AudioSubtypes.MFAudioFormat_Float)
        {
            return WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
        }
        var subTypeDescription = FieldDescriptionHelper.Describe(typeof(AudioSubtypes), audioSubType);
        throw new InvalidDataException(String.Format("Unsupported audio sub Type {0}", subTypeDescription));
    }
    finally
    {
        // The reader was previously never released (the release call was commented out).
        Marshal.ReleaseComObject(reader);
    }
}
/// <summary>
/// Converts the reader's current (uncompressed) audio media type into a WaveFormat.
/// </summary>
/// <param name="reader">Source reader positioned on an audio stream.</param>
/// <exception cref="InvalidDataException">The current subtype is neither PCM nor IEEE float.</exception>
private WaveFormat GetCurrentWaveFormat(IMFSourceReader reader)
{
    IMFMediaType rawType;
    reader.GetCurrentMediaType(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, out rawType);

    // Query via the property helpers; the alternative would be
    // MFCreateWaveFormatExFromMFMediaType, which converts to a WAVEFORMATEX.
    var current = new MediaType(rawType);
    Guid majorType = current.MajorType;
    Debug.Assert(majorType == MediaTypes.MFMediaType_Audio);

    Guid subType = current.SubType;
    int channels = current.ChannelCount;
    int bitDepth = current.BitsPerSample;
    int rate = current.SampleRate;

    if (subType == AudioSubtypes.MFAudioFormat_PCM)
    {
        return new WaveFormat(rate, bitDepth, channels);
    }
    if (subType == AudioSubtypes.MFAudioFormat_Float)
    {
        return WaveFormat.CreateIeeeFloatWaveFormat(rate, channels);
    }

    var subTypeDescription = FieldDescriptionHelper.Describe(typeof (AudioSubtypes), subType);
    throw new InvalidDataException(String.Format("Unsupported audio sub Type {0}", subTypeDescription));
}
/// <summary>
/// Fetches the current media type of the first audio stream and wraps it in a MediaType.
/// </summary>
/// <param name="reader">Source reader to query.</param>
private static MediaType GetCurrentMediaType(IMFSourceReader reader)
{
    IMFMediaType current;
    reader.GetCurrentMediaType(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, out current);
    var wrapped = new MediaType(current);
    return wrapped;
}
/// <summary>
/// Reads the reader's current (uncompressed) audio media type and maps it to a WaveFormat.
/// Supports PCM and IEEE-float output; any other subtype is rejected.
/// </summary>
/// <param name="reader">Source reader positioned on an audio stream.</param>
/// <exception cref="InvalidDataException">The current subtype is neither PCM nor IEEE float.</exception>
private WaveFormat GetCurrentWaveFormat(IMFSourceReader reader)
{
    IMFMediaType uncompressedMediaType;
    reader.GetCurrentMediaType(MediaFoundationInterop.MF_SOURCE_READER_FIRST_AUDIO_STREAM, out uncompressedMediaType);
    // Two ways to query it, first is to ask for properties
    // (second is to convert into WaveFormatEx using MFCreateWaveFormatExFromMFMediaType)
    var outputMediaType = new MediaType(uncompressedMediaType);
    Guid actualMajorType = outputMediaType.MajorType;
    Debug.Assert(actualMajorType == MediaTypes.MFMediaType_Audio);
    Guid audioSubType = outputMediaType.SubType;
    int channels = outputMediaType.ChannelCount;
    int bits = outputMediaType.BitsPerSample;
    int sampleRate = outputMediaType.SampleRate;

    if (audioSubType == AudioSubtypes.MFAudioFormat_PCM)
    {
        return new WaveFormat(sampleRate, bits, channels);
    }
    if (audioSubType == AudioSubtypes.MFAudioFormat_Float)
    {
        return WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
    }
    // Previously any non-PCM subtype was assumed to be IEEE float, which silently
    // produced a wrong WaveFormat for other encodings; fail loudly instead.
    throw new InvalidDataException(string.Format("Unsupported audio sub Type {0}", audioSubType));
}
/// <summary>
/// Opens the given MP4 file with a Media Foundation source reader and asserts that its
/// duration, frame size, frame rate and average bitrate match the expected encoder settings.
/// </summary>
/// <param name="mp4filepath">Path of the MP4 file to validate.</param>
void ValidateMP4OutputFile(string mp4filepath)
{
    ulong duration = 0;
    uint videoWidth = 0;
    uint videoHeight = 0;
    double videoFPS = 0.0;
    uint videoBitrate = 0;
    try
    {
        API.MFStartup();
        try
        {
            // Resolve the file path into a media source.
            IMFSourceResolver sourceResolver;
            API.MFCreateSourceResolver(out sourceResolver);
            uint objectType;
            object objectSource;
            sourceResolver.CreateObjectFromURL(mp4filepath, Consts.MF_RESOLUTION_MEDIASOURCE, null, out objectType, out objectSource);
            IMFMediaSource mediaSource = (IMFMediaSource)objectSource;

            IMFSourceReader sourceReader;
            API.MFCreateSourceReaderFromMediaSource(mediaSource, null, out sourceReader);

            // The duration lives on the presentation descriptor.
            IMFPresentationDescriptor presentationDescriptor;
            mediaSource.CreatePresentationDescriptor(out presentationDescriptor);
            presentationDescriptor.GetUINT64(new Guid(Consts.MF_PD_DURATION), out duration);

            // MF_MT_FRAME_SIZE packs width into the upper 32 bits, height into the lower 32.
            MFHelper.IMFMediaType mediaType;
            sourceReader.GetCurrentMediaType(0, out mediaType);
            ulong videoSize;
            mediaType.GetUINT64(Guid.Parse(Consts.MF_MT_FRAME_SIZE), out videoSize);
            videoWidth = (uint)(videoSize >> 32);
            videoHeight = (uint)(videoSize & 0x00000000FFFFFFFF);

            // MF_MT_FRAME_RATE packs numerator/denominator the same way; guard against
            // a zero denominator before dividing.
            ulong frameRate;
            mediaType.GetUINT64(Guid.Parse(Consts.MF_MT_FRAME_RATE), out frameRate);
            if ((frameRate & 0x00000000FFFFFFFF) != 0)
            {
                videoFPS = (double)(frameRate >> 32) / (double)(frameRate & 0x00000000FFFFFFFF);
            }

            // Get the encoding bitrate.
            mediaType.GetUINT32(new Guid(Consts.MF_MT_AVG_BITRATE), out videoBitrate);
        }
        finally
        {
            // Previously MFShutdown was skipped whenever anything above threw.
            API.MFShutdown();
        }
    }
    catch (Exception exception)
    {
        Console.WriteLine("Exception failure: {0}", exception.ToString());
        // Was Assert.IsFalse(true), which hid the reason for the failure.
        Assert.Fail("ValidateMP4OutputFile threw: " + exception.Message);
    }

    // Positive assertions (behaviorally equivalent to the original inverted IsFalse checks).
    Assert.IsTrue(Math.Abs((double)duration - (double)VIDEO_DURATION_IN_100_NS) <= (double)VIDEO_DURATION_VAR_IN_100_NS);
    Assert.IsTrue(videoWidth == VIDEO_WIDTH);
    Assert.IsTrue(videoHeight == VIDEO_HEIGHT);
    Assert.IsTrue(Math.Abs(videoFPS - VIDEO_FPS) <= VIDEO_FPS_VAR);
    Assert.IsTrue(Math.Abs((int)videoBitrate - VIDEO_ENCODE_BITRATE) <= VIDEO_ENCODE_BITRATE_VAR);
}