public HResult GetMediaTypeHandler(out IMFMediaTypeHandler ppHandler)
{
    // Returns the media type handler for this stream sink. The sink acts as
    // its own type handler, so we hand back a casted self-reference.
    // Returns E_NOINTERFACE if the cast fails, or the shutdown error if the
    // sink has already been shut down.
    Debug.WriteLine("StreamSink:GetMediaTypeHandler");
    ppHandler = null;
    HResult hr;
    lock (this)
    {
        hr = CheckShutdown();
        if (Failed(hr))
        {
            return (hr);
        }
        try
        {
            ppHandler = (IMFMediaTypeHandler)this;
            hr = S_OK;
        }
        catch (InvalidCastException) // fixed: was capturing an unused 'ex' (CS0168 warning)
        {
            hr = E_NOINTERFACE;
        }
        LogIfFailed(hr);
    }
    return (hr);
}
public void SetDeviceFormat(int dwFormatIndex)
{
    // Selects the capture format with the given index as the current media
    // type on the device's first stream. Throws (via MFError) if any Media
    // Foundation call fails.
    if (m_pSource == null)
    {
        CreateVideoCaptureSource();
    }

    IMFPresentationDescriptor pPD = null;
    IMFStreamDescriptor pSD = null;
    IMFMediaTypeHandler pHandler = null;
    IMFMediaType pType = null;

    try
    {
        int hr = m_pSource.CreatePresentationDescriptor(out pPD);
        MFError.ThrowExceptionForHR(hr);

        bool fSelected;
        hr = pPD.GetStreamDescriptorByIndex(0, out fSelected, out pSD);
        MFError.ThrowExceptionForHR(hr);

        // The media type handler is the standard way to inspect and set the
        // media types offered on a stream.
        hr = pSD.GetMediaTypeHandler(out pHandler);
        MFError.ThrowExceptionForHR(hr);

        hr = pHandler.GetMediaTypeByIndex(dwFormatIndex, out pType);
        MFError.ThrowExceptionForHR(hr);

        hr = pHandler.SetCurrentMediaType(pType);
        MFError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // fixed: the COM references leaked if any call above threw; release
        // whatever was actually acquired.
        if (pPD != null) Marshal.FinalReleaseComObject(pPD);
        if (pSD != null) Marshal.FinalReleaseComObject(pSD);
        if (pHandler != null) Marshal.FinalReleaseComObject(pHandler);
        if (pType != null) Marshal.FinalReleaseComObject(pType);
    }
}
public CaptureFormat[] GetCaptureFormats()
{
    // Enumerates every media type offered on the device's first stream and
    // returns them as an array of CaptureFormat descriptions.
    if (m_pSource == null)
    {
        CreateVideoCaptureSource();
    }

    IMFPresentationDescriptor pPD = null;
    IMFStreamDescriptor pSD = null;
    IMFMediaTypeHandler pHandler = null;
    IMFMediaType pType = null;

    try
    {
        int hr = m_pSource.CreatePresentationDescriptor(out pPD);
        MFError.ThrowExceptionForHR(hr);

        bool fSelected;
        hr = pPD.GetStreamDescriptorByIndex(0, out fSelected, out pSD);
        MFError.ThrowExceptionForHR(hr);

        hr = pSD.GetMediaTypeHandler(out pHandler);
        MFError.ThrowExceptionForHR(hr);

        int cTypes = 0;
        hr = pHandler.GetMediaTypeCount(out cTypes);
        MFError.ThrowExceptionForHR(hr);

        CaptureFormat[] captureFormats = new CaptureFormat[cTypes];
        for (int i = 0; i < cTypes; i++)
        {
            hr = pHandler.GetMediaTypeByIndex(i, out pType);
            MFError.ThrowExceptionForHR(hr);

            CaptureFormat mediatype = LogMediaType(pType);
            Trace.WriteLine(mediatype);
            Trace.WriteLine("Media Type " + i.ToString());
            captureFormats[i] = mediatype;

            // fixed: release each iteration's type and clear the reference.
            // The old code also called FinalReleaseComObject(pType) again
            // AFTER the loop — a double release of the last element's already
            // separated RCW, which throws InvalidComObjectException.
            Marshal.FinalReleaseComObject(pType);
            pType = null;
        }

        return (captureFormats);
    }
    finally
    {
        // fixed: releases now run even when an MF call throws mid-way.
        if (pPD != null) Marshal.FinalReleaseComObject(pPD);
        if (pSD != null) Marshal.FinalReleaseComObject(pSD);
        if (pHandler != null) Marshal.FinalReleaseComObject(pHandler);
        if (pType != null) Marshal.FinalReleaseComObject(pType);
    }
}
//-------------------------------------------------------------------
// Name: GetMediaTypeHandler
// Description: Returns a media type handler for this stream.
//
// The stream object implements IMFMediaTypeHandler itself, so the
// handler returned is simply a casted reference to this instance.
//-------------------------------------------------------------------
public int GetMediaTypeHandler(out IMFMediaTypeHandler ppHandler)
{
    TRACE("CWavStream::GetMediaTypeHandler");

    lock (this)
    {
        CheckShutdown();

        // This stream acts as its own type handler; hand back ourselves.
        IMFMediaTypeHandler selfAsHandler = (IMFMediaTypeHandler)this;
        ppHandler = selfAsHandler;
    }

    return S_Ok;
}
private static HResult CreateOutputNode(IMFStreamDescriptor streamDescriptor, out IMFMediaSinkAlt mediaSink, out IMFTopologyNode node)
{
    // Builds a topology output node for the given stream descriptor.
    // Video streams are routed into a new ExternalMediaSink (returned via
    // mediaSink); audio streams get the standard audio renderer activate.
    // On failure the out parameters are null and the failing HRESULT is
    // returned.
    HResult hr = S_OK;
    mediaSink = null;
    node = null;

    IMFMediaTypeHandler mediaTypeHandler = null;
    IMFActivate activate = null;
    IMFStreamSinkAlt streamSink = null;
    Guid majorType = Guid.Empty;
    int streamSinkCount = 0;

    hr = streamDescriptor.GetMediaTypeHandler(out mediaTypeHandler);
    if (Failed(hr))
    {
        return (hr);
    }

    hr = mediaTypeHandler.GetMajorType(out majorType);
    if (Failed(hr))
    {
        SafeRelease(mediaTypeHandler);
        return (hr);
    }

    hr = MF.CreateTopologyNode(MFTopologyType.OutputNode, out node);
    if (Failed(hr))
    {
        SafeRelease(mediaTypeHandler);
        return (hr);
    }

    if (majorType == MFMediaType.Video)
    {
        ExternalMediaSink extMediaSink = new ExternalMediaSink();
        mediaSink = (IMFMediaSinkAlt)extMediaSink;

        hr = mediaSink.GetStreamSinkCount(out streamSinkCount);
        if (Failed(hr))
        {
            // fixed: these failure paths leaked mediaTypeHandler and node.
            SafeRelease(mediaTypeHandler);
            SafeRelease(node);
            node = null;
            mediaSink = null;
            return (hr);
        }

        hr = mediaSink.GetStreamSinkByIndex(0, out streamSink);
        if (Failed(hr))
        {
            SafeRelease(mediaTypeHandler);
            SafeRelease(node);
            node = null;
            mediaSink = null;
            return (hr);
        }

        hr = node.SetObject(streamSink);
        if (Failed(hr))
        {
            SafeRelease(mediaTypeHandler);
            SafeRelease(node);
            node = null;
            mediaSink = null;
            return (hr);
        }
    }
    else if (majorType == MFMediaType.Audio)
    {
        hr = MF.CreateAudioRendererActivate(out activate);
        if (Failed(hr))
        {
            // fixed: these failure paths leaked mediaTypeHandler and node.
            SafeRelease(mediaTypeHandler);
            SafeRelease(node);
            node = null;
            return (hr);
        }

        hr = node.SetObject(activate);
        if (Failed(hr))
        {
            SafeRelease(mediaTypeHandler);
            SafeRelease(node);
            node = null;
            return (hr);
        }
    }

    // fixed: the handler was merely nulled before, leaking its COM reference.
    SafeRelease(mediaTypeHandler);
    mediaTypeHandler = null;
    // NOTE(review): activate/streamSink are only nulled, as in the original —
    // the node holds its own reference after SetObject; confirm whether an
    // explicit release is wanted here.
    activate = null;
    streamSink = null;
    return (hr);
}
/// <summary>
/// Creates a topology output (renderer) node for the given source stream
/// descriptor. Audio streams get a freshly created audio renderer activate;
/// video streams reuse the renderer object pulled off the mIMFTopologyNode
/// field. The created node is returned via ppNode.
/// </summary>
/// <param name="pSourceSD">stream descriptor of the source stream</param>
/// <param name="ppNode">receives the created output topology node</param>
protected void CreateOutputNode(
    IMFStreamDescriptor pSourceSD,
    out IMFTopologyNode ppNode
    )
{
    IMFTopologyNode pNode = null;
    IMFMediaTypeHandler pHandler = null;
    IMFActivate pRendererActivate = null;
    Guid guidMajorType = Guid.Empty;
    // Assigning an MFError forces a throw on a failed HRESULT.
    MFError throwonhr;

    // Get the stream ID.
    int streamID = 0;

    try
    {
        HResult hr;
        hr = pSourceSD.GetStreamIdentifier(out streamID);
        // Just for debugging, ignore any failures.
        if (MFError.Failed(hr))
        {
            //TRACE("IMFStreamDescriptor::GetStreamIdentifier" + hr.ToString());
        }

        // Get the media type handler for the stream.
        throwonhr = pSourceSD.GetMediaTypeHandler(out pHandler);

        // Get the major media type.
        throwonhr = pHandler.GetMajorType(out guidMajorType);

        // Create a downstream node.
        throwonhr = MFExtern.MFCreateTopologyNode(MFTopologyType.OutputNode, out pNode);

        // Create an IMFActivate object for the renderer, based on the media type.
        if (MFMediaType.Audio == guidMajorType)
        {
            // Create the audio renderer.
            //TRACE(string.Format("Stream {0}: audio stream", streamID));
            throwonhr = MFExtern.MFCreateAudioRendererActivate(out pRendererActivate);

            // Set the IActivate object on the output node.
            throwonhr = pNode.SetObject(pRendererActivate);
        }
        else if (MFMediaType.Video == guidMajorType)
        {
            // Create the video renderer.
            //TRACE(string.Format("Stream {0}: video stream", streamID));
            //throwonhr = MFExtern.MFCreateVideoRendererActivate(m_hwndVideo, out pRendererActivate);
            // NOTE(review): instead of creating a new video renderer activate,
            // this reuses the object held by mIMFTopologyNode — presumably a
            // pre-built renderer node elsewhere in the class; confirm.
            mIMFTopologyNode.GetObject(out pRendererActivate);
            throwonhr = pNode.SetObject(pRendererActivate);
        }
        else
        {
            //TRACE(string.Format("Stream {0}: Unknown format", streamID));
            //throw new COMException("Unknown format", (int)HResult.E_FAIL);
            // NOTE(review): unknown major types fall through silently and the
            // node is returned with no renderer object set — confirm intended.
        }

        // Return the IMFTopologyNode pointer to the caller.
        ppNode = pNode;
    }
    catch
    {
        // If we failed, release the pNode
        //SafeRelease(pNode);
        throw;
    }
    finally
    {
        // Clean up.
        //SafeRelease(pHandler);
        //SafeRelease(pRendererActivate);
        // NOTE(review): the SafeRelease calls are commented out, so pHandler
        // and pRendererActivate are never released here — possible COM leak;
        // confirm whether this is deliberate.
    }
}
/// <summary>
/// Creates a topology output (renderer) node for the given source stream
/// descriptor. Audio streams get the standard audio renderer activate.
/// Video streams get the EVR activate plus a custom EVR presenter
/// (EvrPresenter) wired for allocator frame/surface callbacks, with the
/// video window attached and a buffer count of 5. The node is returned
/// via ppNode. Throws COMException for unknown major types.
/// </summary>
/// <param name="pSourceSD">stream descriptor of the source stream</param>
/// <param name="ppNode">receives the created output topology node</param>
protected void CreateOutputNode(
    IMFStreamDescriptor pSourceSD,
    out IMFTopologyNode ppNode
    )
{
    IMFTopologyNode pNode = null;
    IMFMediaTypeHandler pHandler = null;
    IMFActivate pRendererActivate = null;
    Guid guidMajorType = Guid.Empty;
    int hr = S_Ok;

    // Get the stream ID.
    int streamID = 0;

    try
    {
        try
        {
            pSourceSD.GetStreamIdentifier(out streamID);
            // Just for debugging, ignore any failures.
        }
        catch
        {
            TRACE("IMFStreamDescriptor::GetStreamIdentifier" + hr.ToString());
        }

        // Get the media type handler for the stream.
        pSourceSD.GetMediaTypeHandler(out pHandler);

        // Get the major media type.
        pHandler.GetMajorType(out guidMajorType);

        // Create a downstream node.
        MFExtern.MFCreateTopologyNode(MFTopologyType.OutputNode, out pNode);

        // Create an IMFActivate object for the renderer, based on the media type.
        if (MFMediaType.Audio == guidMajorType)
        {
            // Create the audio renderer.
            TRACE(string.Format("Stream {0}: audio stream", streamID));
            MFExtern.MFCreateAudioRendererActivate(out pRendererActivate);
        }
        else if (MFMediaType.Video == guidMajorType)
        {
            // Create the video renderer.
            TRACE(string.Format("Stream {0}: video stream", streamID));
            MFExtern.MFCreateVideoRendererActivate(m_hwndVideo, out pRendererActivate);

            // Activate the renderer now so the custom presenter can be
            // installed on it before the topology is resolved.
            object ppv;
            pRendererActivate.ActivateObject(typeof(IMFVideoRenderer).GUID, out ppv);
            // NOTE(review): 'as' cast — renderer is null if the activated
            // object does not implement IMFVideoRenderer; a null here would
            // throw NullReferenceException below. Confirm this cannot happen.
            var renderer = ppv as IMFVideoRenderer;

            // Build the custom EVR presenter and hook the allocator callbacks.
            m_customPresenter = EvrPresenter.CreateNew();
            m_customPresenter.NewAllocatorFrame += m_customPresenter_NewAllocatorFrame;
            m_customPresenter.NewAllocatorSurface += m_customPresenter_NewAllocatorSurface;

            var presenter = m_customPresenter.VideoPresenter as IMFVideoDisplayControl;
            // NOTE(review): the HRESULTs below are stored in 'hr' but never
            // checked — failures here are silently ignored; confirm intended.
            hr = presenter.SetVideoWindow(m_hwndVideo);
            hr = renderer.InitializeRenderer(null, m_customPresenter.VideoPresenter);

            var settings = presenter as IEVRPresenterSettings;
            settings.SetBufferCount(5);
        }
        else
        {
            TRACE(string.Format("Stream {0}: Unknown format", streamID));
            throw new COMException("Unknown format", E_Fail);
        }

        // Set the IActivate object on the output node.
        pNode.SetObject(pRendererActivate);

        // Return the IMFTopologyNode pointer to the caller.
        ppNode = pNode;
    }
    catch
    {
        // If we failed, release the pNode
        SafeRelease(pNode);
        throw;
    }
    finally
    {
        // Clean up.
        SafeRelease(pHandler);
        SafeRelease(pRendererActivate);
    }
}
protected IMFTopologyNode CreateOutputNode(IMFStreamDescriptor pSourceSD)
{
    // Creates and returns a topology output (renderer) node for the given
    // source stream descriptor. Audio streams additionally activate the
    // streaming audio renderer sink and store it in StreamingAudioRenderer;
    // video streams use the activate returned by CreateVideoRenderer().
    // Throws COMException for unknown major types; rethrows any MF failure
    // after releasing the partially built node.
    IMFTopologyNode pNode = null;
    IMFMediaTypeHandler pHandler = null;
    IMFActivate pRendererActivate = null;
    Guid guidMajorType = Guid.Empty;
    int hr = 0;

    // Get the stream ID.
    int streamID = 0;
    try
    {
        try
        {
            hr = pSourceSD.GetStreamIdentifier(out streamID);
            // Just for debugging, ignore any failures.
            MFError.ThrowExceptionForHR(hr);
        }
        catch
        {
            //TRACE("IMFStreamDescriptor::GetStreamIdentifier" + hr.ToString());
        }

        // Get the media type handler for the stream.
        hr = pSourceSD.GetMediaTypeHandler(out pHandler);
        MFError.ThrowExceptionForHR(hr);

        // Get the major media type.
        hr = pHandler.GetMajorType(out guidMajorType);
        MFError.ThrowExceptionForHR(hr);

        // Create a downstream node.
        hr = MFExtern.MFCreateTopologyNode(MFTopologyType.OutputNode, out pNode);
        MFError.ThrowExceptionForHR(hr);

        // Create an IMFActivate object for the renderer, based on the media type.
        if (MFMediaType.Audio == guidMajorType)
        {
            // Create the audio renderer.
            hr = MFExtern.MFCreateAudioRendererActivate(out pRendererActivate);
            MFError.ThrowExceptionForHR(hr);

            object sar;
            pRendererActivate.ActivateObject(typeof(IMFMediaSink).GUID, out sar);
            // NOTE(review): 'as' yields null if the activated object is not an
            // IMFMediaSink; consumers of StreamingAudioRenderer must tolerate
            // null — confirm.
            StreamingAudioRenderer = sar as IMFMediaSink;
        }
        else if (MFMediaType.Video == guidMajorType)
        {
            // Create the video renderer.
            pRendererActivate = CreateVideoRenderer();
        }
        else
        {
            //TRACE(string.Format("Stream {0}: Unknown format", streamID));
            throw new COMException("Unknown format");
        }

        // Set the IActivate object on the output node.
        hr = pNode.SetObject(pRendererActivate);
        MFError.ThrowExceptionForHR(hr);
    }
    catch // fixed: was 'catch (Exception ex)' with 'ex' never used (CS0168 warning)
    {
        // If we failed, release the pNode
        COMBase.SafeRelease(pNode);
        throw;
    }
    finally
    {
        // Clean up.
        COMBase.SafeRelease(pHandler);
        COMBase.SafeRelease(pRendererActivate);
    }
    return (pNode);
}
/// <summary>
/// P/Invoke declaration of the Media Foundation MFCreateSimpleTypeHandler
/// function, which creates a simple media type handler that holds a list of
/// media types.
/// NOTE(review): the DllImport attribute is outside this view — confirm it
/// targets mf.dll and that the native HRESULT is handled (PreserveSig).
/// </summary>
/// <param name="ppHandler">receives the created IMFMediaTypeHandler</param>
public static extern void MFCreateSimpleTypeHandler(
    out IMFMediaTypeHandler ppHandler
    );
/// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
/// <summary>
/// Displays the video formats for the currently selected video device. This
/// is more complicated than it looks. We have to open the video source, convert
/// that to a Media Source and then interrogate that source to find a list
/// of video formats.
///
/// NOTE: this function will throw exceptions - caller must trap them
/// </summary>
/// <history>
///    01 Nov 18  Cynic - Started
/// </history>
private void DisplayVideoFormatsForCurrentCaptureDevice()
{
    IMFPresentationDescriptor sourcePresentationDescriptor = null;
    int sourceStreamCount = 0;
    bool streamIsSelected = false;
    IMFStreamDescriptor videoStreamDescriptor = null;
    IMFMediaTypeHandler typeHandler = null;
    int mediaTypeCount = 0;
    List<TantaMFVideoFormatContainer> formatList = new List<TantaMFVideoFormatContainer>();
    HResult hr;
    IMFMediaSource mediaSource = null;

    try
    {
        // clear what we have now
        listViewSupportedFormats.Clear();
        // reset this
        listViewSupportedFormats.ListViewItemSorter = null;

        // get the currently selected device
        TantaMFDevice currentDevice = (TantaMFDevice)comboBoxCaptureDevices.SelectedItem;
        if (currentDevice == null)
        {
            throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice currentDevice == null");
        }

        // use the device symbolic name to create the media source for the video
        // device. Media sources are objects that generate media data. For example,
        // the data might come from a video file, a network stream, or a hardware
        // device, such as a camera. Each media source contains one or more streams,
        // and each stream delivers data of one type, such as audio or video.
        mediaSource = TantaWMFUtils.GetMediaSourceFromTantaDevice(currentDevice);
        if (mediaSource == null)
        {
            throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource == null");
        }

        // A presentation is a set of related media streams that share a common
        // presentation time. We don't need that functionality in this app but we
        // do need the presentation descriptor to find out the stream descriptors,
        // these will give us the media types on offer.
        // fixed: the exception-message string literals in this method were broken
        // across physical lines (invalid C#); they are rejoined onto single lines.
        hr = mediaSource.CreatePresentationDescriptor(out sourcePresentationDescriptor);
        if (hr != HResult.S_OK)
        {
            throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. Err=" + hr.ToString());
        }
        if (sourcePresentationDescriptor == null)
        {
            throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. sourcePresentationDescriptor == null");
        }

        // Now we get the number of stream descriptors in the presentation.
        // A presentation descriptor contains a list of one or more stream descriptors.
        hr = sourcePresentationDescriptor.GetStreamDescriptorCount(out sourceStreamCount);
        if (hr != HResult.S_OK)
        {
            throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. Err=" + hr.ToString());
        }
        if (sourceStreamCount == 0)
        {
            throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. sourceStreamCount == 0");
        }

        // look for the video stream
        for (int i = 0; i < sourceStreamCount; i++)
        {
            // we require the major type to be video
            Guid guidMajorType = TantaWMFUtils.GetMajorMediaTypeFromPresentationDescriptor(sourcePresentationDescriptor, i);
            if (guidMajorType != MFMediaType.Video) continue;

            // we also require the stream to be enabled
            hr = sourcePresentationDescriptor.GetStreamDescriptorByIndex(i, out streamIsSelected, out videoStreamDescriptor);
            if (hr != HResult.S_OK)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. Err=" + hr.ToString());
            }
            if (videoStreamDescriptor == null)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. videoStreamDescriptor == null");
            }

            // if the stream is not selected (enabled) look for the next
            if (streamIsSelected == false)
            {
                Marshal.ReleaseComObject(videoStreamDescriptor);
                videoStreamDescriptor = null;
                continue;
            }

            // Get the media type handler for the stream. IMFMediaTypeHandler
            // interface is a standard way of looking at the media types on a stream
            hr = videoStreamDescriptor.GetMediaTypeHandler(out typeHandler);
            if (hr != HResult.S_OK)
            {
                throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. Err=" + hr.ToString());
            }
            if (typeHandler == null)
            {
                throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. typeHandler == null");
            }

            // Now we get the number of media types in the stream descriptor.
            hr = typeHandler.GetMediaTypeCount(out mediaTypeCount);
            if (hr != HResult.S_OK)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. Err=" + hr.ToString());
            }
            if (mediaTypeCount == 0)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. mediaTypeCount == 0");
            }

            // now loop through each media type
            for (int mediaTypeId = 0; mediaTypeId < mediaTypeCount; mediaTypeId++)
            {
                // Now we have the handler, get the media type.
                IMFMediaType workingMediaType = null;
                hr = typeHandler.GetMediaTypeByIndex(mediaTypeId, out workingMediaType);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. Err=" + hr.ToString());
                }
                if (workingMediaType == null)
                {
                    throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. workingMediaType == null");
                }

                TantaMFVideoFormatContainer tmpContainer = TantaMediaTypeInfo.GetVideoFormatContainerFromMediaTypeObject(workingMediaType, currentDevice);
                if (tmpContainer == null)
                {
                    // we failed
                    // fixed: release the media type before throwing (it leaked here).
                    Marshal.ReleaseComObject(workingMediaType);
                    throw new Exception("GetSupportedVideoFormatsFromSourceReaderInFormatContainers failed on call to GetVideoFormatContainerFromMediaTypeObject");
                }

                // now add it
                formatList.Add(tmpContainer);
                Marshal.ReleaseComObject(workingMediaType);
                workingMediaType = null;
            }

            // NOTE: we only do the first enabled video stream we find.
            // it is possible to have more but our control cannot cope with that
            break;
        }

        // now display the formats
        foreach (TantaMFVideoFormatContainer videoFormat in formatList)
        {
            ListViewItem lvi = new ListViewItem(new[] { videoFormat.SubTypeAsString, videoFormat.FrameSizeAsString, videoFormat.FrameRateAsString, videoFormat.FrameRateMaxAsString, videoFormat.AllAttributes });
            lvi.Tag = videoFormat;
            listViewSupportedFormats.Items.Add(lvi);
        }

        listViewSupportedFormats.Columns.Add("Type", 70);
        listViewSupportedFormats.Columns.Add("FrameSize WxH", 100);
        listViewSupportedFormats.Columns.Add("FrameRate f/s", 100);
        listViewSupportedFormats.Columns.Add("FrameRateMax f/s", 100);
        listViewSupportedFormats.Columns.Add("All Attributes", 2500);
    }
    finally
    {
        // close and release
        if (mediaSource != null)
        {
            Marshal.ReleaseComObject(mediaSource);
            mediaSource = null;
        }
        if (sourcePresentationDescriptor != null)
        {
            Marshal.ReleaseComObject(sourcePresentationDescriptor);
            sourcePresentationDescriptor = null;
        }
        if (videoStreamDescriptor != null)
        {
            Marshal.ReleaseComObject(videoStreamDescriptor);
            videoStreamDescriptor = null;
        }
        if (typeHandler != null)
        {
            Marshal.ReleaseComObject(typeHandler);
            typeHandler = null;
        }
    }
}
/// <summary>
/// Returns the supported video formats for the capture device with the given
/// friendly name, interrogating the stream at SourceIndex. Returns an empty
/// list when the device is unknown, the stream is not video, or the stream
/// is not selected. Throws on Media Foundation failures.
/// </summary>
/// <param name="SourceIndex">index of the stream to interrogate</param>
/// <param name="FriendlyName">friendly name of the capture device</param>
private List<MFVideoFormatContainer> GetSupportedFormats(int SourceIndex, string FriendlyName)
{
    // find the underlying device by friendly name
    MFDevice UnderlyingDevice = null;
    List<MFDevice> vcDevices = WMFUtils.GetDevicesByCategory(MFAttributesClsid.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, CLSID.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
    foreach (MFDevice device in vcDevices)
    {
        if (device.FriendlyName == FriendlyName)
        {
            UnderlyingDevice = device;
            break;
        }
    }

    if (UnderlyingDevice != null)
    {
        IMFPresentationDescriptor sourcePresentationDescriptor = null;
        IMFStreamDescriptor videoStreamDescriptor = null;
        IMFMediaTypeHandler typeHandler = null;
        List<MFVideoFormatContainer> formatList = new List<MFVideoFormatContainer>();
        HResult hr;
        IMFMediaSource mediaSource = null;

        try
        {
            // use the device symbolic name to create the media source for the video
            // device. Media sources are objects that generate media data. For example,
            // the data might come from a video file, a network stream, or a hardware
            // device, such as a camera. Each media source contains one or more streams,
            // and each stream delivers data of one type, such as audio or video.
            mediaSource = WMFUtils.GetMediaSourceFromDevice(UnderlyingDevice);
            if (mediaSource == null)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource == null");
            }

            // A presentation is a set of related media streams that share a common
            // presentation time. We don't need that functionality in this app but we
            // do need the presentation descriptor to find out the stream descriptors,
            // these will give us the media types on offer.
            // fixed: the exception-message string literals in this method were broken
            // across physical lines (invalid C#); they are rejoined onto single lines.
            hr = mediaSource.CreatePresentationDescriptor(out sourcePresentationDescriptor);
            if (hr != HResult.S_OK)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. Err=" + hr.ToString());
            }
            if (sourcePresentationDescriptor == null)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. sourcePresentationDescriptor == null");
            }

            // Now we get the number of stream descriptors in the presentation.
            // A presentation descriptor contains a list of one or more stream descriptors.
            hr = sourcePresentationDescriptor.GetStreamDescriptorCount(out int sourceStreamCount);
            if (hr != HResult.S_OK)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. Err=" + hr.ToString());
            }
            if (sourceStreamCount == 0)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. sourceStreamCount == 0");
            }

            // look for the video stream
            // we require the major type to be video
            Guid guidMajorType = WMFUtils.GetMajorMediaTypeFromPresentationDescriptor(sourcePresentationDescriptor, SourceIndex);
            if (guidMajorType != MFMediaType.Video)
            {
                return (new List<MFVideoFormatContainer>());
            }

            // we also require the stream to be enabled
            // NOTE(review): the return value of SelectStream is ignored and the
            // stream index is hard-coded to 1 rather than SourceIndex — confirm
            // this is intentional.
            sourcePresentationDescriptor.SelectStream(1);
            hr = sourcePresentationDescriptor.GetStreamDescriptorByIndex(SourceIndex, out bool streamIsSelected, out videoStreamDescriptor);
            if (hr != HResult.S_OK)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. Err=" + hr.ToString());
            }
            if (videoStreamDescriptor == null)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. videoStreamDescriptor == null");
            }

            // if the stream is not selected (enabled) look for the next
            if (streamIsSelected == false)
            {
                Marshal.ReleaseComObject(videoStreamDescriptor);
                videoStreamDescriptor = null;
                return (new List<MFVideoFormatContainer>());
            }

            // Get the media type handler for the stream. IMFMediaTypeHandler
            // interface is a standard way of looking at the media types on a stream
            hr = videoStreamDescriptor.GetMediaTypeHandler(out typeHandler);
            if (hr != HResult.S_OK)
            {
                throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. Err=" + hr.ToString());
            }
            if (typeHandler == null)
            {
                throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. typeHandler == null");
            }

            // Now we get the number of media types in the stream descriptor.
            hr = typeHandler.GetMediaTypeCount(out int mediaTypeCount);
            if (hr != HResult.S_OK)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. Err=" + hr.ToString());
            }
            if (mediaTypeCount == 0)
            {
                throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. mediaTypeCount == 0");
            }

            // now loop through each media type
            for (int mediaTypeId = 0; mediaTypeId < mediaTypeCount; mediaTypeId++)
            {
                // Now we have the handler, get the media type.
                hr = typeHandler.GetMediaTypeByIndex(mediaTypeId, out IMFMediaType workingMediaType);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. Err=" + hr.ToString());
                }
                if (workingMediaType == null)
                {
                    throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. workingMediaType == null");
                }

                MFVideoFormatContainer tmpContainer = MediaTypeInfo.GetVideoFormatContainerFromMediaTypeObject(workingMediaType, UnderlyingDevice);
                if (tmpContainer == null)
                {
                    // we failed
                    // fixed: release the media type before throwing (it leaked here).
                    Marshal.ReleaseComObject(workingMediaType);
                    throw new Exception("GetSupportedVideoFormatsFromSourceReaderInFormatContainers failed on call to GetVideoFormatContainerFromMediaTypeObject");
                }

                // now add it
                formatList.Add(tmpContainer);
                Marshal.ReleaseComObject(workingMediaType);
                workingMediaType = null;
            }

            return (formatList);
        }
        finally
        {
            // close and release
            if (mediaSource != null)
            {
                Marshal.ReleaseComObject(mediaSource);
            }
            if (sourcePresentationDescriptor != null)
            {
                Marshal.ReleaseComObject(sourcePresentationDescriptor);
            }
            if (videoStreamDescriptor != null)
            {
                Marshal.ReleaseComObject(videoStreamDescriptor);
            }
            if (typeHandler != null)
            {
                Marshal.ReleaseComObject(typeHandler);
            }
        }
    }

    return (new List<MFVideoFormatContainer>());
}