/// <summary>
/// Gets a list of all attributes contained in a media type and displays
/// them as a human readable name. More or less just for practice
///
/// Adapted from
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee663602(v=vs.85).aspx
/// </summary>
/// <returns>S_OK for success, nz for fail</returns>
/// <param name="mediaTypeObj">the media type object</param>
/// <param name="maxAttributes">the maximum number of attributes</param>
/// <param name="outSb">The output string</param>
/// <param name="ignoreMajorType">if true we ignore the major type attribute</param>
/// <param name="ignoreSubType">if true we ignore the sub type attribute</param>
public static HResult EnumerateAllAttributeNamesInMediaTypeAsText(IMFMediaType mediaTypeObj, bool ignoreMajorType, bool ignoreSubType, int maxAttributes, out StringBuilder outSb)
{
    // the caller always gets a usable (possibly empty) StringBuilder back
    outSb = new StringBuilder();

    // sanity check - we need a non-null object that also exposes IMFAttributes.
    // the pattern match below fails for null as well, covering both checks
    if (!(mediaTypeObj is IMFAttributes attributesObj)) return (HResult.E_FAIL);

    // build the list of attribute names the enumerator should skip
    List<string> attributesToIgnore = new List<string>();
    if (ignoreMajorType) attributesToIgnore.Add("MF_MT_MAJOR_TYPE");
    if (ignoreSubType) attributesToIgnore.Add("MF_MT_SUBTYPE");

    // hand off to the generic WMFUtils attribute enumerator
    return (WMFUtils.EnumerateAllAttributeNamesAsText(attributesObj, attributesToIgnore, maxAttributes, out outSb));
}
// ########################################################################
// ##### MFTBase_Sync Overrides, all child classes must implement these
// ########################################################################

/// <summary>
/// The MFT defines a list of available media types for each input stream
/// and orders them by preference. This method enumerates the available
/// media types for an input stream.
///
/// Many clients will just "try it on" with their preferred media type
/// and if/when that gets rejected will start enumerating the types the
/// transform prefers in order to see if they have one in common
///
/// An override of the virtual version in MFTBase_Sync.
/// </summary>
/// <param name="dwTypeIndex">The (zero-based) index of the type.</param>
/// <param name="pInputType">The input type supported by the MFT.</param>
/// <returns>S_Ok unless error.</returns>
protected override HResult OnEnumInputTypes(int dwTypeIndex, out IMFMediaType pInputType)
{
    // MF.Net Sample comments...
    // I'd like to skip implementing this, but while some clients
    // don't require it (PlaybackFX), some do (MEPlayer/IMFMediaEngine).
    // Although frame counting should be able to run against any type,
    // we must at a minimum provide a major type.
    HResult hr = WMFUtils.CreatePartialMediaType(dwTypeIndex, MFMediaType.Video, m_MediaSubtypes, out pInputType);
    return (hr);
}
/// <summary>
/// Enumerates the video capture devices known to Media Foundation and
/// returns their friendly (display) names.
/// </summary>
/// <returns>a list of device friendly names, possibly empty, never null</returns>
public override List<string> Scan()
{
    List<string> deviceNames = new List<string>();

    // ask WMFUtils for every device in the video capture category
    foreach (MFDevice captureDevice in WMFUtils.GetDevicesByCategory(MFAttributesClsid.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, CLSID.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID))
    {
        deviceNames.Add(captureDevice.FriendlyName);
    }

    return (deviceNames);
}
/// <summary>
/// Returns a value indicating if the proposed input type is acceptable to
/// this MFT.
///
/// An override of the abstract version in MFTBase_Sync.
/// </summary>
/// <param name="pmt">The type to check. Should never be null.</param>
/// <returns>S_Ok if the type is valid or MF_E_INVALIDTYPE.</returns>
override protected HResult OnCheckInputType(IMFMediaType pmt)
{
    // until an output type has been fixed, any input type is acceptable
    if (OutputType == null) return (HResult.S_OK);

    // once the output type is set, the proposed input must be identical to it
    return (WMFUtils.IsMediaTypeIdentical(pmt, OutputType));
}
/// <summary>
/// Gets the major media type of a IMFMediaType as a text string
///
/// Adapted from
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee663602(v=vs.85).aspx
/// </summary>
/// <returns>S_OK for success, nz for fail</returns>
/// <param name="mediaTypeObj">the media type object</param>
/// <param name="outSb">The output string</param>
public static HResult GetMediaMajorTypeAsText(IMFMediaType mediaTypeObj, out StringBuilder outSb)
{
    // callers always receive a usable (possibly empty) StringBuilder
    outSb = new StringBuilder();

    // sanity check
    if (mediaTypeObj == null) return (HResult.E_FAIL);

    // MF_MT_MAJOR_TYPE is the major type GUID; we translate it to a
    // human readable name. Nothing is appended if the lookup fails -
    // we only report success
    HResult lookupResult = mediaTypeObj.GetMajorType(out Guid majorTypeGuid);
    if (lookupResult == HResult.S_OK)
    {
        outSb.Append("MF_MT_MAJOR_TYPE=" + WMFUtils.ConvertGuidToName(majorTypeGuid));
    }

    return (HResult.S_OK);
}
/// <summary>
/// Gets the sub media type of a IMFMediaType as a text string
///
/// Adapted from
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee663602(v=vs.85).aspx
/// </summary>
/// <returns>S_OK for success, nz for fail</returns>
/// <param name="mediaTypeObj">the media type object</param>
/// <param name="outSb">The output string</param>
public static HResult GetMediaSubTypeAsText(IMFMediaType mediaTypeObj, out StringBuilder outSb)
{
    // callers always receive a usable (possibly empty) StringBuilder
    outSb = new StringBuilder();

    // sanity check
    if (mediaTypeObj == null) return (HResult.E_FAIL);

    // MF_MT_SUBTYPE is the subtype GUID describing the basic media type;
    // we translate it to a human readable name. Nothing is appended if
    // the lookup fails - we only report success
    HResult lookupResult = mediaTypeObj.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out Guid subTypeGuid);
    if (lookupResult == HResult.S_OK)
    {
        outSb.Append("MF_MT_SUBTYPE=" + WMFUtils.ConvertGuidToName(subTypeGuid));
    }

    return (HResult.S_OK);
}
/// <summary>
/// Initializes Media Foundation and binds this object to the capture
/// device whose friendly name matches FriendlyName, then records the
/// requested stream and snapshot format indices.
/// </summary>
/// <param name="FriendlyName">the friendly (display) name of the capture device</param>
/// <param name="FrameRate">the requested frame rate</param>
/// <param name="Height">the requested frame height in pixels</param>
/// <param name="Width">the requested frame width in pixels</param>
/// <param name="Encoding">the requested encoding (subtype) name</param>
public override void Load(string FriendlyName, int FrameRate, int Height, int Width, string Encoding)
{
    // spin up Media Foundation. 0x00020070 is the MF version constant
    HResult Return = MFExtern.MFStartup(0x00020070, MFStartup.Full);
    if (Return != 0)
    {
        Console.WriteLine("Constructor: call to MFExtern.MFStartup returned " + Return.ToString());
    }

    // find the capture device with the matching friendly name
    List<MFDevice> vcDevices = WMFUtils.GetDevicesByCategory(MFAttributesClsid.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, CLSID.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
    foreach (MFDevice device in vcDevices)
    {
        if (device.FriendlyName == FriendlyName)
        {
            UnderlyingDevice = device;
            break;
        }
    }

    // BUGFIX: previously a FriendlyName that matched no device was silently
    // ignored, leaving UnderlyingDevice unset and causing obscure failures
    // later. We now at least report the problem (consistent with the
    // Console.WriteLine error reporting style used above)
    if (UnderlyingDevice == null)
    {
        Console.WriteLine("Load: no video capture device found with FriendlyName '" + FriendlyName + "'");
    }

    // resolve the requested formats to stream indices for later use
    IMFStreamIndex = GetRequestedStreamFormat(FrameRate, Width, Height, Encoding);
    IMFSnapIndex = GetRequestedSnapFormat(Width, Height, Encoding);
}
/// <summary>
/// Starts the process of recording. creates the sink writer. We do not
/// check to see if the filename is viable or already exists. This is
/// assumed to have been done before this call.
/// </summary>
/// <param name="outputFileName">the output file name</param>
/// <param name="incomingVideoMediaType">the incoming media type</param>
/// <param name="wantTimebaseRebaseIn">if true we rebase all incoming sample
/// times to zero from the point we started recording and send a copy of the
/// sample to the sink writer instead of the input sample</param>
/// <returns>z success, nz fail</returns>
public int StartRecording(string outputFileName, IMFMediaType incomingVideoMediaType, bool wantTimebaseRebaseIn)
{
    HResult hr;
    LogMessage("MFTSampleGrabber_Sync, StartRecording called");

    // first stop any recordings now
    StopRecording();

    // check the output file name for sanity
    if ((outputFileName == null) || (outputFileName.Length == 0))
    {
        LogMessage("StartRecording (outputFileName==null)|| (outputFileName.Length==0)");
        return (100);
    }

    // check the media type for sanity
    if (incomingVideoMediaType == null)
    {
        LogMessage("StartRecording videoMediaType == null");
        return (150);
    }

    lock (sinkWriterLockObject)
    {
        // create the sink writer
        workingSinkWriter = OpenSinkWriter(outputFileName, true);
        if (workingSinkWriter == null)
        {
            LogMessage("StartRecording failed to create sink writer");
            return (200);
        }

        // now configure the SinkWriter. This sets up the sink writer so that it knows what format
        // the output data should be written in. The format we give the writer does not
        // need to be the same as the format it receives as input - however to make life easier for
        // ourselves we will copy a lot of the settings from the incoming video type

        // create a new empty media type for us to populate
        hr = MFExtern.MFCreateMediaType(out IMFMediaType encoderType);
        if (hr != HResult.S_OK)
        {
            throw new Exception("Failed on call to MFCreateMediaType, retVal=" + hr.ToString());
        }

        // The major type defines the overall category of the media data. Major types include video, audio, script & etc.
        hr = encoderType.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video);
        if (hr != HResult.S_OK)
        {
            throw new Exception("Failed setting the MF_MT_MAJOR_TYPE, retVal=" + hr.ToString());
        }

        // The subtype GUID defines a specific media format type within a major type. For example, within video,
        // the subtypes include MFMediaType.H264 (MP4), MFMediaType.WMV3 (WMV), MJPEG & etc. Within audio, the
        // subtypes include PCM audio, Windows Media Audio 9, & etc.
        hr = encoderType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MEDIA_TYPETO_WRITE);
        if (hr != HResult.S_OK)
        {
            throw new Exception("Failed setting the MF_MT_SUBTYPE, retVal=" + hr.ToString());
        }

        // this is the approximate data rate of the video stream, in bits per second, for a
        // video media type. The choice here is somewhat arbitrary but seems to work well.
        hr = encoderType.SetUINT32(MFAttributesClsid.MF_MT_AVG_BITRATE, TARGET_BIT_RATE);
        if (hr != HResult.S_OK)
        {
            throw new Exception("Failed setting the MF_MT_AVG_BITRATE, retVal=" + hr.ToString());
        }

        // populate our new encoding type with the frame size of the incoming video type
        hr = WMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_FRAME_SIZE);
        if (hr != HResult.S_OK)
        {
            throw new Exception("Failed copying the MF_MT_FRAME_SIZE, retVal=" + hr.ToString());
        }

        // populate our new encoding type with the frame rate of the incoming video type
        hr = WMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_FRAME_RATE);
        if (hr != HResult.S_OK)
        {
            throw new Exception("Failed copying the MF_MT_FRAME_RATE, retVal=" + hr.ToString());
        }

        // populate our new encoding type with the pixel aspect ratio of the incoming video type
        hr = WMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO);
        if (hr != HResult.S_OK)
        {
            throw new Exception("Failed copying the MF_MT_PIXEL_ASPECT_RATIO, retVal=" + hr.ToString());
        }

        // populate our new encoding type with the interlace mode of the incoming video type
        hr = WMFUtils.CopyAttributeData(incomingVideoMediaType, encoderType, MFAttributesClsid.MF_MT_INTERLACE_MODE);
        if (hr != HResult.S_OK)
        {
            throw new Exception("Failed copying the MF_MT_INTERLACE_MODE, retVal=" + hr.ToString());
        }

        // add a stream to the sink writer for the output media type. Note that the
        // format of the samples written to the file does not necessarily need to
        // match the input format
        hr = workingSinkWriter.AddStream(encoderType, out sinkWriterVideoStreamId);
        if (hr != HResult.S_OK)
        {
            throw new Exception("StartRecording Failed adding the output stream(v), retVal=" + hr.ToString());
        }

        // Windows 10, by default, provides an adequate set of codecs which the Sink Writer can
        // find to write out the MP4 file. This is not true on Windows 7.
        // If we are not on Windows 10 we register (locally) a codec
        // the Sink Writer can find and use. The ColorConvertDMO is supplied by
        // microsoft it is just not available to enumerate on Win7 etc.
        // Making it available locally does not require administrator privs
        // but only this process can see it and it disappears when the process
        // closes
        OperatingSystem os = Environment.OSVersion;
        int versionID = ((os.Version.Major * 10) + os.Version.Minor);
        if (versionID < 62)
        {
            Guid ColorConverterDMOGUID = new Guid("98230571-0087-4204-b020-3282538e57d3");

            // Register the color converter DSP for this process, in the video
            // processor category. This will enable the sink writer to enumerate
            // the color converter when the sink writer attempts to match the
            // media types.
            hr = MFExtern.MFTRegisterLocalByCLSID(
                ColorConverterDMOGUID,
                MFTransformCategory.MFT_CATEGORY_VIDEO_PROCESSOR,
                "",
                MFT_EnumFlag.SyncMFT,
                0,
                null,
                0,
                null
                );
            // BUGFIX: this result was previously assigned but never checked - a
            // failed registration would only surface later as an obscure
            // SetInputMediaType failure. Throwing here (consistent with every
            // other failure in this method) makes the cause explicit
            if (hr != HResult.S_OK)
            {
                throw new Exception("StartRecording Failed on call to MFTRegisterLocalByCLSID, retVal=" + hr.ToString());
            }
        }

        // Set the input format for a stream on the sink writer. Note the use of the stream index here.
        // The input format does not have to match the target format that is written to the media sink.
        // If the formats do not match, this call attempts to load a transform
        // that can convert from the input format to the target format. If it cannot find one, (and this is not
        // a sure thing), it will throw an exception.
        hr = workingSinkWriter.SetInputMediaType(sinkWriterVideoStreamId, incomingVideoMediaType, null);
        if (hr != HResult.S_OK)
        {
            throw new Exception("StartRecording Failed on calling SetInputMediaType(v) on the writer, retVal=" + hr.ToString());
        }

        // set this flag now
        wantTimebaseRebase = wantTimebaseRebaseIn;

        // now we initialize the sink writer for writing. We call this method after configuring the
        // input streams but before we send any data to the sink writer. The underlying media sink must
        // have at least one input stream and we know it does because we set it up above
        hr = workingSinkWriter.BeginWriting();
        if (hr != HResult.S_OK)
        {
            throw new Exception("StartRecording Failed on calling BeginWriting on the writer, retVal=" + hr.ToString());
        }
    }
    return (0);
}
/// <summary>
/// This is the routine that performs the transform. Unless the sinkWriter object
/// is set all we do is pass the sample on. If the sink writer object is set
/// we give the sample to it for writing. There are two modes - one where we just
/// give the sinkwriter the input sample and the other where we clone the input
/// sample and rebase the timestamps.
///
/// An override of the abstract version in MFTBase_Sync.
/// </summary>
/// <param name="outputSampleDataStruct">The structure to populate with output values.</param>
/// <returns>S_Ok unless error.</returns>
protected override HResult OnProcessOutput(ref MFTOutputDataBuffer outputSampleDataStruct)
{
    HResult hr = HResult.S_OK;
    IMFMediaBuffer inputMediaBuffer = null;
    IMFSample sinkWriterSample = null;
    IMFAttributes sampleAttributes;

    // in this MFT we are processing in place, the input sample is the output sample, the media buffer of the
    // input sample is the media buffer of the output sample. Thats for the pipeline. If a sink writer exists
    // we also write the sample data out to the sink writer. This provides the effect of displaying on the
    // screen and simultaneously recording.

    // There are two ways the sink writer can be given the media sample data. It can just be given the
    // input sample directly or a copy of the sample can be made and that copy given to the sink writer.

    // There is also an additional complication - the sample has a timestamp and video cameras tend
    // to just use the current date and time as a timestamp. There are several reports that MP4 files
    // need to have their first frame starting at zero and then every subsequent frame adjusted to that
    // new base time. Certainly the Microsoft supplied example code (and see the
    // CaptureToFileViaReaderWriter also) take great care to do this. This requirement does not
    // seem to exist - my tests indicate it is not necessary to start from 0 in the mp4 file. Maybe the
    // Sink Writer has been improved and now does this automatically. For demonstration purposes
    // the timebase-rebase functionality has been included and choosing that mode copies the sample
    // and resets the time. If the user does not rebase the time we simply send the input sample
    // to the Sink Writer as-is.
    try
    {
        // Set status flags.
        outputSampleDataStruct.dwStatus = MFTOutputDataBufferFlags.None;

        // The output sample is the input sample. We get a new IUnknown for the Input
        // sample since we are going to release it below. The client will release this
        // new IUnknown
        outputSampleDataStruct.pSample = Marshal.GetIUnknownForObject(InputSample);

        // are we recording?
        if (workingSinkWriter != null)
        {
            // we do everything in a lock
            lock (sinkWriterLockObject)
            {
                // are we in timebase rebase mode?
                if (wantTimebaseRebase == false)
                {
                    // we are not. Just give the input sample to the Sink Writer which will
                    // write it out.
                    hr = workingSinkWriter.WriteSample(sinkWriterVideoStreamId, InputSample);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("OnProcessOutput call to WriteSample(a) failed. Err=" + hr.ToString());
                    }
                }
                else
                {
                    // the timebase rebase option has been chosen. We need to create a copy of the input sample
                    // so we can adjust the time on it.

                    // Get the data buffer from the input sample. If the sample contains more than one buffer,
                    // this method copies the data from the original buffers into a new buffer, and replaces
                    // the original buffer list with the new buffer. The new buffer is returned in the inputMediaBuffer parameter.
                    // If the sample contains a single buffer, this method returns a pointer to the original buffer.
                    // In typical use, most samples do not contain multiple buffers.
                    hr = InputSample.ConvertToContiguousBuffer(out inputMediaBuffer);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("OnProcessOutput call to InputSample.ConvertToContiguousBuffer failed. Err=" + hr.ToString());
                    }

                    // get some other things from the input sample
                    hr = InputSample.GetSampleDuration(out long sampleDuration);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("OnProcessOutput call to InputSample.GetSampleDuration failed. Err=" + hr.ToString());
                    }

                    hr = InputSample.GetTotalLength(out int sampleSize);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("OnProcessOutput call to InputSample.GetTotalLength failed. Err=" + hr.ToString());
                    }

                    hr = InputSample.GetSampleTime(out long sampleTimeStamp);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("OnProcessOutput call to InputSample.GetSampleTime failed. Err=" + hr.ToString());
                    }

                    // get the attributes from the input sample
                    if (InputSample is IMFAttributes)
                    {
                        sampleAttributes = (InputSample as IMFAttributes);
                    }
                    else
                    {
                        sampleAttributes = null;
                    }

                    // we have all the information we need to create a new output sample
                    sinkWriterSample = WMFUtils.CreateMediaSampleFromBuffer(sampleTimeStamp, sampleDuration, inputMediaBuffer, sampleSize, sampleAttributes);
                    if (sinkWriterSample == null)
                    {
                        throw new Exception("OnProcessOutput, Error on call to CreateMediaSampleFromBuffer sinkWriterSample == null");
                    }

                    // we have a sample, if so is it the first non null one?
                    if (isFirstSample)
                    {
                        // yes it is set up our timestamp
                        firstSampleBaseTime = sampleTimeStamp;
                        isFirstSample = false;
                    }

                    // rebase the time stamp
                    sampleTimeStamp -= firstSampleBaseTime;

                    hr = sinkWriterSample.SetSampleTime(sampleTimeStamp);
                    if (hr != HResult.S_OK)
                    {
                        // BUGFIX: this message previously blamed InputSample.SetSampleTime;
                        // the failing call is on sinkWriterSample
                        throw new Exception("OnProcessOutput call to sinkWriterSample.SetSampleTime failed. Err=" + hr.ToString());
                    }

                    // write the sample out
                    hr = workingSinkWriter.WriteSample(sinkWriterVideoStreamId, sinkWriterSample);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("OnProcessOutput call to WriteSample(b) failed. Err=" + hr.ToString());
                    }
                }
            }
        }
    }
    finally
    {
        // clean up
        if (inputMediaBuffer != null)
        {
            Marshal.ReleaseComObject(inputMediaBuffer);
        }
        if (sinkWriterSample != null)
        {
            Marshal.ReleaseComObject(sinkWriterSample);
        }

        // Release the current input sample so we can get another one.
        // the act of setting it to null releases it because the property
        // is coded that way
        InputSample = null;
    }

    return (HResult.S_OK);
}
/// <summary>
/// Starts streaming from the capture device on a background task: creates
/// the media session and its async callback handler, builds the source
/// reader, applies the previously resolved stream/snap formats, and kicks
/// off the first asynchronous ReadSample on both streams. Errors are
/// reported to the console rather than thrown to the caller, since this
/// runs on a Task.
/// </summary>
public override void StartStream()
{
    Task.Run(() =>
    {
        StreamReaderHandler = new StreamHandler(ProcessFrame, MFBPressed);
        HResult hr;
        IMFSourceResolver pSourceResolver = null;
        IMFPresentationDescriptor sourcePresentationDescriptor = null;
        IMFStreamDescriptor videoStreamDescriptor = null;
        try
        {
            // reset everything
            CloseAllMediaDevices();

            // Create the media session.
            hr = MFExtern.MFCreateMediaSession(null, out mediaSession);
            if (hr != HResult.S_OK)
            {
                throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateMediaSession failed. Err=" + hr.ToString());
            }

            // set up our media session call back handler.
            mediaSessionAsyncCallbackHandler = new AsyncCallbackHandler();
            mediaSessionAsyncCallbackHandler.Initialize();
            mediaSessionAsyncCallbackHandler.MediaSession = mediaSession;
            mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackError = HandleMediaSessionAsyncCallBackErrors;
            mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackEvent = HandleMediaSessionAsyncCallBackEvent;

            // Register the callback handler with the session and tell it that events can
            // start. This does not actually trigger an event it just lets the media session
            // know that it can now send them if it wishes to do so.
            hr = mediaSession.BeginGetEvent(mediaSessionAsyncCallbackHandler, null);
            if (hr != HResult.S_OK)
            {
                throw new Exception("PrepareSessionAndTopology call to mediaSession.BeginGetEvent failed. Err=" + hr.ToString());
            }

            StreamReader = WMFUtils.CreateSourceReaderAsyncFromDevice(UnderlyingDevice, StreamReaderHandler);

            // apply the previously resolved formats; best-effort - the reader
            // falls back to its current types if a format cannot be set
            try
            {
                SetCurrentMediaType(StreamReader, IMFStreamIndex, 0);
            }
            catch
            {
            }
            try
            {
                SetCurrentMediaType(StreamReader, IMFSnapIndex, 1);
            }
            catch
            {
            }
            StreamReaderHandler.StreamReader = StreamReader;

            // diagnostic dump: current and native media types for both streams
            StreamReader.GetCurrentMediaType(1, out IMFMediaType ppMediaType);
            StreamReader.GetNativeMediaType(1, 0, out IMFMediaType PPMediaType);
            StreamReader.GetCurrentMediaType(0, out IMFMediaType ppMediaType1);
            StreamReader.GetNativeMediaType(0, 0, out IMFMediaType PPMediaType1);
            ppMediaType.GetMajorType(out Guid ppMediaTypeMGUID);
            PPMediaType.GetMajorType(out Guid PPMediaTypeMGUID);
            ppMediaType.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out Guid ppMediaTypeSGUID);
            PPMediaType.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out Guid PPMediaTypeSGUID);
            Console.WriteLine(WMFUtils.ConvertGuidToName(ppMediaTypeMGUID));
            Console.WriteLine(WMFUtils.ConvertGuidToName(PPMediaTypeMGUID));
            Console.WriteLine(WMFUtils.ConvertGuidToName(ppMediaTypeSGUID));
            Console.WriteLine(WMFUtils.ConvertGuidToName(PPMediaTypeSGUID));
            ppMediaType1.GetMajorType(out Guid ppMediaTypeMGUID1);
            PPMediaType1.GetMajorType(out Guid PPMediaTypeMGUID1);
            ppMediaType1.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out Guid ppMediaTypeSGUID1);
            PPMediaType1.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out Guid PPMediaTypeSGUID1);
            Console.WriteLine(WMFUtils.ConvertGuidToName(ppMediaTypeMGUID1));
            Console.WriteLine(WMFUtils.ConvertGuidToName(PPMediaTypeMGUID1));
            Console.WriteLine(WMFUtils.ConvertGuidToName(ppMediaTypeSGUID1));
            Console.WriteLine(WMFUtils.ConvertGuidToName(PPMediaTypeSGUID1));

            Paused = false;
            StreamReader.SetStreamSelection(0, true);
            StreamReader.SetStreamSelection(1, true);

            // kick off the async read pipeline - the callback handler
            // receives the samples and requests subsequent ones
            hr = StreamReader.ReadSample(
                0,
                MediaFoundation.ReadWrite.MF_SOURCE_READER_CONTROL_FLAG.None,
                IntPtr.Zero,
                IntPtr.Zero,
                IntPtr.Zero,
                IntPtr.Zero
                );
            // BUGFIX: this result was previously overwritten by the second
            // ReadSample call before being checked, silently losing any
            // stream 0 failure
            if (hr != HResult.S_OK)
            {
                // we failed
                throw new Exception("Failed on calling the first ReadSample on the reader, retVal=" + hr.ToString());
            }

            hr = StreamReader.ReadSample(1,
                MediaFoundation.ReadWrite.MF_SOURCE_READER_CONTROL_FLAG.None,
                IntPtr.Zero,
                IntPtr.Zero,
                IntPtr.Zero,
                IntPtr.Zero
                );
            if (hr != HResult.S_OK)
            {
                // we failed
                throw new Exception("Failed on calling the first ReadSample on the reader, retVal=" + hr.ToString());
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine("PrepareSessionAndTopology Error: " + ex.Message);
        }
        finally
        {
            // Clean up
            if (pSourceResolver != null)
            {
                Marshal.ReleaseComObject(pSourceResolver);
            }
            if (sourcePresentationDescriptor != null)
            {
                Marshal.ReleaseComObject(sourcePresentationDescriptor);
            }
            if (videoStreamDescriptor != null)
            {
                Marshal.ReleaseComObject(videoStreamDescriptor);
            }
        }
    });
}
/// <summary>
/// Builds a list of the video formats supported by the stream at
/// SourceIndex on the capture device with the given friendly name.
/// Returns an empty list (never null) if the device is not found, the
/// stream is not video, or the stream is not selected.
/// </summary>
/// <param name="SourceIndex">the (zero-based) stream descriptor index</param>
/// <param name="FriendlyName">the friendly (display) name of the capture device</param>
/// <returns>a list of format containers, possibly empty, never null</returns>
private List<MFVideoFormatContainer> GetSupportedFormats(int SourceIndex, string FriendlyName)
{
    // find the capture device with the matching friendly name
    MFDevice UnderlyingDevice = null;
    List<MFDevice> vcDevices = WMFUtils.GetDevicesByCategory(MFAttributesClsid.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, CLSID.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
    foreach (MFDevice device in vcDevices)
    {
        if (device.FriendlyName == FriendlyName)
        {
            UnderlyingDevice = device;
            break;
        }
    }

    if (UnderlyingDevice != null)
    {
        IMFPresentationDescriptor sourcePresentationDescriptor = null;
        IMFStreamDescriptor videoStreamDescriptor = null;
        IMFMediaTypeHandler typeHandler = null;
        List<MFVideoFormatContainer> formatList = new List<MFVideoFormatContainer>();
        HResult hr;
        IMFMediaSource mediaSource = null;
        try
        {
            // use the device symbolic name to create the media source for the video device. Media sources are objects that generate media data.
            // For example, the data might come from a video file, a network stream, or a hardware device, such as a camera. Each
            // media source contains one or more streams, and each stream delivers data of one type, such as audio or video.
            mediaSource = WMFUtils.GetMediaSourceFromDevice(UnderlyingDevice);
            if (mediaSource == null)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource == null");
            }

            // A presentation is a set of related media streams that share a common presentation time.
            // we don't need that functionality in this app but we do need the presentation descriptor
            // to find out the stream descriptors, these will give us the media types on offer
            hr = mediaSource.CreatePresentationDescriptor(out sourcePresentationDescriptor);
            if (hr != HResult.S_OK)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. Err=" + hr.ToString());
            }
            if (sourcePresentationDescriptor == null)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. sourcePresentationDescriptor == null");
            }

            // Now we get the number of stream descriptors in the presentation.
            // A presentation descriptor contains a list of one or more
            // stream descriptors.
            hr = sourcePresentationDescriptor.GetStreamDescriptorCount(out int sourceStreamCount);
            if (hr != HResult.S_OK)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. Err=" + hr.ToString());
            }
            if (sourceStreamCount == 0)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. sourceStreamCount == 0");
            }

            // we require the major type of the requested stream to be video
            Guid guidMajorType = WMFUtils.GetMajorMediaTypeFromPresentationDescriptor(sourcePresentationDescriptor, SourceIndex);
            if (guidMajorType != MFMediaType.Video)
            {
                return (new List<MFVideoFormatContainer>());
            }

            // we also require the stream to be enabled.
            // BUGFIX: this previously selected hard-coded stream 1 regardless of
            // SourceIndex, which left stream 0 requests dependent on the device's
            // default selection state - select the stream we are actually querying
            sourcePresentationDescriptor.SelectStream(SourceIndex);
            hr = sourcePresentationDescriptor.GetStreamDescriptorByIndex(SourceIndex, out bool streamIsSelected, out videoStreamDescriptor);
            if (hr != HResult.S_OK)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. Err=" + hr.ToString());
            }
            if (videoStreamDescriptor == null)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. videoStreamDescriptor == null");
            }

            // if the stream is not selected (enabled) there are no usable formats
            if (streamIsSelected == false)
            {
                Marshal.ReleaseComObject(videoStreamDescriptor);
                videoStreamDescriptor = null;
                return (new List<MFVideoFormatContainer>());
            }

            // Get the media type handler for the stream. IMFMediaTypeHandler
            // interface is a standard way of looking at the media types on an stream
            hr = videoStreamDescriptor.GetMediaTypeHandler(out typeHandler);
            if (hr != HResult.S_OK)
            {
                throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. Err=" + hr.ToString());
            }
            if (typeHandler == null)
            {
                throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. typeHandler == null");
            }

            // Now we get the number of media types in the stream descriptor.
            hr = typeHandler.GetMediaTypeCount(out int mediaTypeCount);
            if (hr != HResult.S_OK)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. Err=" + hr.ToString());
            }
            if (mediaTypeCount == 0)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. mediaTypeCount == 0");
            }

            // now loop through each media type
            for (int mediaTypeId = 0; mediaTypeId < mediaTypeCount; mediaTypeId++)
            {
                // Now we have the handler, get the media type.
                hr = typeHandler.GetMediaTypeByIndex(mediaTypeId, out IMFMediaType workingMediaType);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. Err=" + hr.ToString());
                }
                if (workingMediaType == null)
                {
                    throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. workingMediaType == null");
                }

                MFVideoFormatContainer tmpContainer = MediaTypeInfo.GetVideoFormatContainerFromMediaTypeObject(workingMediaType, UnderlyingDevice);
                if (tmpContainer == null)
                {
                    // we failed
                    throw new Exception("GetSupportedVideoFormatsFromSourceReaderInFormatContainers failed on call to GetVideoFormatContainerFromMediaTypeObject");
                }

                // now add it and release the media type COM object
                formatList.Add(tmpContainer);
                Marshal.ReleaseComObject(workingMediaType);
                workingMediaType = null;
            }
            return (formatList);
        }
        finally
        {
            // close and release
            if (mediaSource != null)
            {
                Marshal.ReleaseComObject(mediaSource);
            }
            if (sourcePresentationDescriptor != null)
            {
                Marshal.ReleaseComObject(sourcePresentationDescriptor);
            }
            if (videoStreamDescriptor != null)
            {
                Marshal.ReleaseComObject(videoStreamDescriptor);
            }
            if (typeHandler != null)
            {
                Marshal.ReleaseComObject(typeHandler);
            }
        }
    }
    return (new List<MFVideoFormatContainer>());
}
/// <summary>
/// Gets a list of all supported video formats from a media type
/// as a nice displayable bit of text. outSb will never be null but can be
/// empty.
///
/// Adapted from
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee663602(v=vs.85).aspx
/// </summary>
/// <returns>S_OK for success, nz for fail</returns>
/// <param name="mediaTypeObj">the media type object</param>
/// <param name="outSb">The output string</param>
public static HResult GetSupportedFormatsFromMediaTypeAsText(IMFMediaType mediaTypeObj, out StringBuilder outSb)
{
    // we always return something here
    outSb = new StringBuilder();

    // sanity check
    if (mediaTypeObj == null)
    {
        return (HResult.E_FAIL);
    }

    // Retrieves the number of attributes that are set on this object.
    HResult hr = mediaTypeObj.GetCount(out int attributeCount);
    if (hr != HResult.S_OK)
    {
        // if we failed here, bail out
        outSb.Append("failed getting attributeCount, retVal=" + hr.ToString());
        outSb.Append("\r\n");
        return (HResult.E_FAIL);
    }

    // MF_MT_MAJOR_TYPE
    // Major type GUID, we return this as human readable text
    hr = mediaTypeObj.GetMajorType(out Guid majorType);
    if (hr == HResult.S_OK)
    {
        // only report success
        outSb.Append("MF_MT_MAJOR_TYPE=" + WMFUtils.ConvertGuidToName(majorType) + ", ");
    }

    // MF_MT_SUBTYPE
    // Subtype GUID which describes the basic media type, we return this as human readable text
    hr = mediaTypeObj.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out Guid subType);
    if (hr == HResult.S_OK)
    {
        // only report success
        outSb.Append("MF_MT_SUBTYPE=" + WMFUtils.ConvertGuidToName(subType) + ", ");
    }

    // MF_MT_FRAME_SIZE
    // the Width and height of a video frame, in pixels
    hr = MFExtern.MFGetAttributeSize(mediaTypeObj, MFAttributesClsid.MF_MT_FRAME_SIZE, out int frameSizeWidth, out int frameSizeHeight);
    if (hr == HResult.S_OK)
    {
        // only report success
        outSb.Append("MF_MT_FRAME_SIZE (W,H)=(" + frameSizeWidth.ToString() + "," + frameSizeHeight.ToString() + "), ");
    }

    // MF_MT_FRAME_RATE
    // The frame rate is expressed as a ratio. The upper 32 bits of the attribute value contain the numerator and the lower 32 bits contain the denominator.
    // For example, if the frame rate is 30 frames per second (fps), the ratio is 30/1. If the frame rate is 29.97 fps, the ratio is 30,000/1001.
    // we report this back to the user as a decimal
    hr = MFExtern.MFGetAttributeRatio(mediaTypeObj, MFAttributesClsid.MF_MT_FRAME_RATE, out int frameRate, out int frameRateDenominator);
    if (hr == HResult.S_OK)
    {
        // only report success
        // BUGFIX: the guard was previously "< 0", which let a zero denominator
        // through to the division below and threw DivideByZeroException
        if (frameRateDenominator <= 0)
        {
            outSb.Append("MF_MT_FRAME_RATE (frames/s)=(undefined),");
        }
        else
        {
            outSb.Append("MF_MT_FRAME_RATE=" + ((decimal)frameRate / (decimal)frameRateDenominator).ToString() + "f/s, ");
        }
    }

    // MF_MT_FRAME_RATE_RANGE_MIN
    // The frame rate is expressed as a ratio. The upper 32 bits of the attribute value contain the numerator and the lower 32 bits contain the denominator.
    // we report this back to the user as a decimal
    hr = MFExtern.MFGetAttributeRatio(mediaTypeObj, MFAttributesClsid.MF_MT_FRAME_RATE_RANGE_MIN, out int frameRateMin, out int frameRateMinDenominator);
    if (hr == HResult.S_OK)
    {
        // only report success
        // BUGFIX: zero-denominator guard, see MF_MT_FRAME_RATE above
        if (frameRateMinDenominator <= 0)
        {
            outSb.Append("MF_MT_FRAME_RATE_RANGE_MIN (frames/s)=(undefined),");
        }
        else
        {
            outSb.Append("MF_MT_FRAME_RATE_RANGE_MIN=" + ((decimal)frameRateMin / (decimal)frameRateMinDenominator).ToString() + "f/s, ");
        }
    }

    // MF_MT_FRAME_RATE_RANGE_MAX
    // The frame rate is expressed as a ratio. The upper 32 bits of the attribute value contain the numerator and the lower 32 bits contain the denominator.
    // we report this back to the user as a decimal
    hr = MFExtern.MFGetAttributeRatio(mediaTypeObj, MFAttributesClsid.MF_MT_FRAME_RATE_RANGE_MAX, out int frameRateMax, out int frameRateMaxDenominator);
    if (hr == HResult.S_OK)
    {
        // only report success
        // BUGFIX: zero-denominator guard, see MF_MT_FRAME_RATE above
        if (frameRateMaxDenominator <= 0)
        {
            outSb.Append("MF_MT_FRAME_RATE_RANGE_MAX (frames/s)=(undefined),");
        }
        else
        {
            outSb.Append("MF_MT_FRAME_RATE_RANGE_MAX=" + ((decimal)frameRateMax / (decimal)frameRateMaxDenominator).ToString() + "f/s, ");
        }
    }

    // enumerate all of the possible Attributes so we can see which ones are present that we did not report on
    hr = EnumerateAllAttributeNamesInMediaTypeAsText(mediaTypeObj, attributeCount, out StringBuilder allAttrs);
    if (hr == HResult.S_OK)
    {
        outSb.Append("\r\n");
        outSb.Append(" AllAttrs=" + allAttrs.ToString());
    }

    return (HResult.S_OK);
}