/// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
/// <summary>
/// Gets the media subtype of an IMFMediaType as a text string
///
/// Adapted from
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee663602(v=vs.85).aspx
/// </summary>
/// <returns>S_OK for success, nz for fail</returns>
/// <param name="mediaTypeObj">the media type object</param>
/// <param name="outSb">The output string</param>
/// <history>
///    01 Nov 18  Cynic - Started
/// </history>
public static HResult GetMediaSubTypeAsText(IMFMediaType mediaTypeObj, out StringBuilder outSb)
{
    Guid subType;
    HResult hr;

    // we always return something here
    outSb = new StringBuilder();

    // sanity check
    if (mediaTypeObj == null)
    {
        return(HResult.E_FAIL);
    }

    // MF_MT_SUBTYPE
    // Subtype GUID which describes the basic media type, we return this as human readable text
    hr = mediaTypeObj.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out subType);
    if (hr == HResult.S_OK)
    {
        // only report success
        outSb.Append("MF_MT_SUBTYPE=" + TantaWMFUtils.ConvertGuidToName(subType));
    }

    return(HResult.S_OK);
}
/// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+= /// <summary> /// A centralized place to close down all media devices. /// </summary> /// <history> /// 01 Nov 18 Cynic - Started /// </history> private void CloseAllMediaDevices() { //HResult hr; LogMessage("CloseAllMediaDevices"); // Shut down the source reader if (sourceReader != null) { Marshal.ReleaseComObject(sourceReader); sourceReader = null; } // close the sink writer if (sinkWriter != null) { // note we could Finalize_() this here but there // is no need. That is done when the stream ends Marshal.ReleaseComObject(sinkWriter); sinkWriter = null; } if (sourceReaderNativeVideoMediaType != null) { Marshal.ReleaseComObject(sourceReaderNativeVideoMediaType); sourceReaderNativeVideoMediaType = null; } if (sourceReaderNativeAudioMediaType != null) { Marshal.ReleaseComObject(sourceReaderNativeAudioMediaType); sourceReaderNativeAudioMediaType = null; } }
/// <summary>
/// Gets all the available media types for a particular audio subtype
/// </summary>
/// <param name="audioSubtype">Audio subtype - a value from the AudioSubtypes class</param>
/// <returns>An array of available media types that can be encoded with this subtype</returns>
public static MediaType[] GetOutputMediaTypes(Guid audioSubtype)
{
    IMFCollection imfcollection;
    try
    {
        MediaFoundationInterop.MFTranscodeGetAudioOutputAvailableTypes(audioSubtype, _MFT_ENUM_FLAG.MFT_ENUM_FLAG_ALL, null, out imfcollection);
    }
    catch (COMException exception)
    {
        // MF_E_NOT_FOUND (0xC00D36D5) simply means no encoder is available for this subtype
        if (exception.GetHResult() == -1072875819)
        {
            return(new MediaType[0]);
        }
        throw;
    }
    int num;
    imfcollection.GetElementCount(out num);
    List<MediaType> list = new List<MediaType>(num);
    for (int i = 0; i < num; i++)
    {
        object obj;
        imfcollection.GetElement(i, out obj);
        IMFMediaType mediaType = (IMFMediaType)obj;
        list.Add(new MediaType(mediaType));
    }
    Marshal.ReleaseComObject(imfcollection);
    return(list.ToArray());
}
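// --- Illustrative usage sketch (not part of the original source). Shows how the
// GetOutputMediaTypes helper above might be called to list the encoder output formats
// available for a subtype. It assumes an NAudio-style environment where the
// AudioSubtypes class and the MediaType wrapper (SampleRate, ChannelCount,
// BitsPerSample, AverageBytesPerSecond) are in scope; adjust the names to your wrapper.
public static void ListAacOutputTypes()
{
    // Media Foundation must already be initialised (e.g. MediaFoundationApi.Startup()).
    foreach (MediaType mt in GetOutputMediaTypes(AudioSubtypes.MFAudioFormat_AAC))
    {
        Console.WriteLine("{0} Hz, {1} ch, {2} bit, {3} B/s",
            mt.SampleRate, mt.ChannelCount, mt.BitsPerSample, mt.AverageBytesPerSecond);
    }
}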
/// <summary>
/// Check Major type and Subtype
/// </summary>
/// <param name="pmt">IMFMediaType to check</param>
/// <param name="gMajorType">MajorType to check for.</param>
/// <param name="gSubTypes">Array of subTypes to check for.</param>
/// <returns>S_Ok if match, else MF_E_INVALIDTYPE.</returns>
protected static HResult CheckMediaType(IMFMediaType pmt, Guid gMajorType, Guid[] gSubTypes)
{
    Guid major_type;

    // The major type must match the requested major type.
    HResult hr = HResult.S_OK;
    MFError throwonhr;

    throwonhr = pmt.GetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, out major_type);

    if (major_type == gMajorType)
    {
        Guid subtype;

        // Get the subtype GUID.
        throwonhr = pmt.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out subtype);

        // Look for the subtype in our list of accepted types.
        hr = HResult.MF_E_INVALIDTYPE;
        for (int i = 0; i < gSubTypes.Length; i++)
        {
            if (subtype == gSubTypes[i])
            {
                hr = HResult.S_OK;
                break;
            }
        }
    }
    else
    {
        hr = HResult.MF_E_INVALIDTYPE;
    }

    return(hr);
}
/// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
/// <summary>
/// Gets the major media type of an IMFMediaType as a text string
///
/// Adapted from
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee663602(v=vs.85).aspx
/// </summary>
/// <returns>S_OK for success, nz for fail</returns>
/// <param name="mediaTypeObj">the media type object</param>
/// <param name="outSb">The output string</param>
/// <history>
///    01 Nov 18  Cynic - Started
/// </history>
public static HResult GetMediaMajorTypeAsText(IMFMediaType mediaTypeObj, out StringBuilder outSb)
{
    Guid majorType;
    HResult hr;

    // we always return something here
    outSb = new StringBuilder();

    // sanity check
    if (mediaTypeObj == null)
    {
        return(HResult.E_FAIL);
    }

    // MF_MT_MAJOR_TYPE
    // Major type GUID, we return this as human readable text
    hr = mediaTypeObj.GetMajorType(out majorType);
    if (hr == HResult.S_OK)
    {
        // only report success
        outSb.Append("MF_MT_MAJOR_TYPE=" + TantaWMFUtils.ConvertGuidToName(majorType));
    }

    return(HResult.S_OK);
}
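// --- Illustrative usage sketch (not part of the original source). Shows how the two
// helpers above (GetMediaMajorTypeAsText and GetMediaSubTypeAsText) might be combined
// to build a one-line description of a media type. It assumes this method lives
// alongside the helpers above in the same Tanta-style utility class; the method name
// is illustrative only.
public static string DescribeMajorAndSubType(IMFMediaType mediaTypeObj)
{
    StringBuilder majorSb;
    StringBuilder subSb;

    if (GetMediaMajorTypeAsText(mediaTypeObj, out majorSb) != HResult.S_OK) return "";
    if (GetMediaSubTypeAsText(mediaTypeObj, out subSb) != HResult.S_OK) return "";

    // produces something like "MF_MT_MAJOR_TYPE=<major>, MF_MT_SUBTYPE=<sub>"
    return majorSb.ToString() + ", " + subSb.ToString();
}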
public HResult GetOutputAvailableType( int dwOutputStreamID, int dwTypeIndex, // 0-based out IMFMediaType ppType ) { ppType = null; HResult hr = HResult.S_OK; try { Trace(string.Format("GetOutputAvailableType (stream = {0}, type index = {1})", dwOutputStreamID, dwTypeIndex)); CheckValidStream(dwOutputStreamID); lock (m_TransformLockObject) { // Get the output media type from the derived class. // No need to pass dwOutputStreamID, since it must // always be zero. hr = OnEnumOutputTypes(dwTypeIndex, out ppType); } } catch (Exception e) { hr = (HResult)Marshal.GetHRForException(e); } return(CheckReturn(hr)); }
/// <summary> /// Validates a media type for this transform. /// </summary> /// <param name="pmt">The media type to validate.</param> /// <returns>S_Ok or MF_E_INVALIDTYPE.</returns> /// <remarks>Since both input and output types must be /// the same, they both call this routine.</remarks> private HResult OnCheckMediaType(IMFMediaType pmt) { HResult hr = HResult.S_OK; hr = CheckMediaType(pmt, MFMediaType.Video, m_MediaSubtypes); if (Succeeded(hr)) { int interlace; // Video must be progressive frames. m_MightBeInterlaced = false; MFError throwonhr = pmt.GetUINT32(MFAttributesClsid.MF_MT_INTERLACE_MODE, out interlace); MFVideoInterlaceMode im = (MFVideoInterlaceMode)interlace; // Mostly we only accept Progressive. if (im != MFVideoInterlaceMode.Progressive) { // If the type MIGHT be interlaced, we'll accept it. if (im != MFVideoInterlaceMode.MixedInterlaceOrProgressive) { hr = HResult.MF_E_INVALIDTYPE; } else { // But we will check to see if any samples actually // are interlaced, and reject them. m_MightBeInterlaced = true; } } } return(hr); }
/// <summary> /// Converts the specified wave format into the appropriate Media Foundation audio media type. /// </summary> /// <param name="format">Input wave format to convert.</param> /// <returns>Media Foundation type resulting from conversion.</returns> internal static IMFMediaType CreateMediaType(WaveFormat format) { IMFMediaType mediaType = null; Guid guidSubType = GetMediaSubtype(format); if (guidSubType != Guid.Empty) { // Create the empty media type. mediaType = NativeMethods.MFCreateMediaType(); // Calculate derived values. uint blockAlign = (uint)(format.Channels * (format.BitsPerSample / 8)); uint bytesPerSecond = (uint)(blockAlign * format.SamplesPerSec); // Set attributes on the type. mediaType.SetGUID(Guids.MFMTMajorType, Guids.MFMediaTypeAudio); mediaType.SetGUID(Guids.MFMTSubType, guidSubType); mediaType.SetUINT32(Guids.MFMTAudioNumChannels, format.Channels); mediaType.SetUINT32(Guids.MFMTAudioSamplesPerSecond, format.SamplesPerSec); mediaType.SetUINT32(Guids.MFMTAudioBlockAlignment, blockAlign); mediaType.SetUINT32(Guids.MFMTAudioAvgBytesPerSecond, bytesPerSecond); mediaType.SetUINT32(Guids.MFMTAudioBitsPerSample, format.BitsPerSample); mediaType.SetUINT32(Guids.MFMTAllSamplesIndependent, 1); } return(mediaType); }
public HResult GetOutputCurrentType( int dwOutputStreamID, out IMFMediaType ppType ) { ppType = null; HResult hr = HResult.S_OK; try { Trace("GetOutputCurrentType"); CheckValidStream(dwOutputStreamID); lock (m_TransformLockObject) { if (m_pOutputType != null) { ppType = CloneMediaType(m_pOutputType); } else { // No output type set hr = HResult.MF_E_TRANSFORM_TYPE_NOT_SET; } } } catch (Exception e) { hr = (HResult)Marshal.GetHRForException(e); } return(CheckReturn(hr)); }
public void SetDeviceFormat(int dwFormatIndex) { if (m_pSource == null) { CreateVideoCaptureSource(); } IMFPresentationDescriptor pPD = null; IMFStreamDescriptor pSD = null; IMFMediaTypeHandler pHandler = null; IMFMediaType pType = null; int hr = m_pSource.CreatePresentationDescriptor(out pPD); MFError.ThrowExceptionForHR(hr); bool fSelected; hr = pPD.GetStreamDescriptorByIndex(0, out fSelected, out pSD); MFError.ThrowExceptionForHR(hr); hr = pSD.GetMediaTypeHandler(out pHandler); MFError.ThrowExceptionForHR(hr); hr = pHandler.GetMediaTypeByIndex(dwFormatIndex, out pType); MFError.ThrowExceptionForHR(hr); hr = pHandler.SetCurrentMediaType(pType); MFError.ThrowExceptionForHR(hr); Marshal.FinalReleaseComObject(pPD); Marshal.FinalReleaseComObject(pSD); Marshal.FinalReleaseComObject(pHandler); Marshal.FinalReleaseComObject(pType); }
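// --- Illustrative companion sketch (not part of the original source). Before calling
// SetDeviceFormat above, a caller usually needs to know how many formats the device
// exposes; this walks the same presentation descriptor and media type handler and
// returns the count. It assumes the same members (m_pSource, CreateVideoCaptureSource)
// used above; the method name is illustrative only.
public int GetDeviceFormatCount()
{
    if (m_pSource == null)
    {
        CreateVideoCaptureSource();
    }

    IMFPresentationDescriptor pPD = null;
    IMFStreamDescriptor pSD = null;
    IMFMediaTypeHandler pHandler = null;

    int hr = m_pSource.CreatePresentationDescriptor(out pPD);
    MFError.ThrowExceptionForHR(hr);

    bool fSelected;
    hr = pPD.GetStreamDescriptorByIndex(0, out fSelected, out pSD);
    MFError.ThrowExceptionForHR(hr);

    hr = pSD.GetMediaTypeHandler(out pHandler);
    MFError.ThrowExceptionForHR(hr);

    // number of media types (formats) the capture device offers on this stream
    int count;
    hr = pHandler.GetMediaTypeCount(out count);
    MFError.ThrowExceptionForHR(hr);

    Marshal.FinalReleaseComObject(pPD);
    Marshal.FinalReleaseComObject(pSD);
    Marshal.FinalReleaseComObject(pHandler);

    return count;
}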
override protected void OnSetInputType() { IMFMediaType pmt = InputType; if (pmt != null) { m_Alignment = MFExtern.MFGetAttributeUINT32(pmt, MFAttributesClsid.MF_MT_AUDIO_BLOCK_ALIGNMENT, 0); m_AvgBytesPerSec = MFExtern.MFGetAttributeUINT32(pmt, MFAttributesClsid.MF_MT_AUDIO_AVG_BYTES_PER_SECOND, 0); m_SamplesPerSec = MFExtern.MFGetAttributeUINT32(pmt, MFAttributesClsid.MF_MT_AUDIO_SAMPLES_PER_SECOND, 0); m_BitsPerSample = MFExtern.MFGetAttributeUINT32(pmt, MFAttributesClsid.MF_MT_AUDIO_BITS_PER_SAMPLE, 0); m_NumChannels = MFExtern.MFGetAttributeUINT32(pmt, MFAttributesClsid.MF_MT_AUDIO_NUM_CHANNELS, 0); // If the output type isn't set yet, we can pre-populate it, // since output must always exactly equal input. This can // save a (tiny) bit of time in negotiating types. OnSetOutputType(); } else { m_Alignment = 0; m_AvgBytesPerSec = 0; m_SamplesPerSec = 0; m_BitsPerSample = 0; m_NumChannels = 0; // Since the input must be set before the output, nulling the // input must also clear the output. Note that nulling the // input is only valid if we are not actively streaming. OutputType = null; } }
public IMediaEncodingProperties GetAvailableInputTypes(uint streamID, uint typeIndex) { IMFMediaType mediaType = null; _mft.GetInputAvailableType(streamID, typeIndex, out mediaType); return(PropertiesFromMediaType(mediaType)); }
private IMFMediaType MediaTypeFromProperties(IMediaEncodingProperties properties) { IMFMediaType mediaType = null; MFPlat.MFCreateMediaTypeFromProperties(properties, out mediaType); return(mediaType); }
/// <summary>
/// Check Major type and Subtype
/// </summary>
/// <param name="pmt">IMFMediaType to check</param>
/// <param name="gMajorType">MajorType to check for.</param>
/// <param name="gSubtype">SubType to check for.</param>
/// <returns>S_Ok if match, else MF_E_INVALIDTYPE.</returns>
protected static HResult CheckMediaType(IMFMediaType pmt, Guid gMajorType, Guid gSubtype)
{
    Guid major_type;

    // The major type must match the requested major type.
    HResult hr = HResult.S_OK;
    MFError throwonhr;

    throwonhr = pmt.GetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, out major_type);

    if (major_type == gMajorType)
    {
        Guid subtype;

        // Get the subtype GUID.
        throwonhr = pmt.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out subtype);

        if (subtype != gSubtype)
        {
            hr = HResult.MF_E_INVALIDTYPE;
        }
    }
    else
    {
        hr = HResult.MF_E_INVALIDTYPE;
    }

    return(hr);
}
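// --- Illustrative usage sketch (not part of the original source). Shows how a derived
// transform might call the single-subtype helper above from its input-type check to
// accept only uncompressed RGB32 video. MFMediaType.Video / MFMediaType.RGB32 are
// assumed to be the major-type / subtype Guids exposed by the MediaFoundation.Net-style
// wrapper used above; substitute the Guids your wrapper defines.
private HResult CheckForRgb32Video(IMFMediaType pmt)
{
    // Anything that is not video/RGB32 comes back as MF_E_INVALIDTYPE.
    return CheckMediaType(pmt, MFMediaType.Video, MFMediaType.RGB32);
}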
/// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
/// <summary>
/// Gets a list of all attributes contained in a media type and displays
/// them as human readable names. More or less just for practice
///
/// Adapted from
/// https://msdn.microsoft.com/en-us/library/windows/desktop/ee663602(v=vs.85).aspx
/// </summary>
/// <returns>S_OK for success, nz for fail</returns>
/// <param name="mediaTypeObj">the media type object</param>
/// <param name="maxAttributes">the maximum number of attributes</param>
/// <param name="outSb">The output string</param>
/// <param name="ignoreMajorType">if true we ignore the major type attribute</param>
/// <param name="ignoreSubType">if true we ignore the sub type attribute</param>
/// <history>
///    01 Nov 18  Cynic - Started
/// </history>
public static HResult EnumerateAllAttributeNamesInMediaTypeAsText(IMFMediaType mediaTypeObj, bool ignoreMajorType, bool ignoreSubType, int maxAttributes, out StringBuilder outSb)
{
    // we always return something here
    outSb = new StringBuilder();

    // sanity check
    if (mediaTypeObj == null)
    {
        return(HResult.E_FAIL);
    }
    if ((mediaTypeObj is IMFAttributes) == false)
    {
        return(HResult.E_FAIL);
    }

    // set up to ignore
    List<string> attributesToIgnore = new List<string>();
    if (ignoreMajorType == true)
    {
        attributesToIgnore.Add("MF_MT_MAJOR_TYPE");
    }
    if (ignoreSubType == true)
    {
        attributesToIgnore.Add("MF_MT_SUBTYPE");
    }

    // just call the generic TantaWMFUtils Attribute Enumerator
    return(TantaWMFUtils.EnumerateAllAttributeNamesAsText((mediaTypeObj as IMFAttributes), attributesToIgnore, maxAttributes, out outSb));
}
/// <summary>
/// Report whether a proposed output type is accepted by the MFT.
/// </summary>
/// <param name="pmt">The type to check. Never null here; null types (used to clear the type) are always valid and are handled before this is called.</param>
/// <returns>S_Ok if the type is valid or MF_E_INVALIDTYPE.</returns>
/// <remarks>The default behavior is to assume that the input type
/// must be set before the output type, and that the proposed output
/// type must exactly equal the value returned from the virtual
/// CreateOutputFromInput. Override as necessary.
/// </remarks>
virtual protected HResult OnCheckOutputType(IMFMediaType pmt)
{
    HResult hr = HResult.S_OK;

    // If the input type is set, see if they match.
    if (m_pInputType != null)
    {
        IMFMediaType pCheck = CreateOutputFromInput();

        try
        {
            hr = IsIdentical(pmt, pCheck);
        }
        finally
        {
            SafeRelease(pCheck);
        }
    }
    else
    {
        // Input type is not set.
        hr = HResult.MF_E_TRANSFORM_TYPE_NOT_SET;
    }

    return(hr);
}
/// <summary> /// Override to allow the client to retrieve the MFT's list of supported Output Types. /// </summary> /// <param name="dwTypeIndex">The (zero-based) index of the type.</param> /// <param name="pOutputType">The output type supported by the MFT.</param> /// <returns>S_Ok or MFError.MF_E_NO_MORE_TYPES.</returns> /// <remarks>By default, assume the input type must be set first, and /// that the output type is the single entry returned from the virtual /// CreateOutputFromInput. Override as needed.</remarks> virtual protected HResult OnEnumOutputTypes(int dwTypeIndex, out IMFMediaType pOutputType) { HResult hr = HResult.S_OK; // If the input type is specified, the output type must be the same. if (m_pInputType != null) { // If the input type is specified, there can be only one output type. if (dwTypeIndex == 0) { pOutputType = CreateOutputFromInput(); } else { pOutputType = null; hr = HResult.MF_E_NO_MORE_TYPES; } } else { pOutputType = null; hr = HResult.MF_E_TRANSFORM_TYPE_NOT_SET; } return(hr); }
public HResult SetCurrentMediaType(IMFMediaType pMediaType)
{
    Debug.WriteLine("StreamSink:SetCurrentMediaType");
    HResult hr;
    lock (this)
    {
        hr = CheckShutdown();
        if (Failed(hr))
        {
            return(hr);
        }

        hr = IsMediaTypeSupported(pMediaType, IntPtr.Zero);
        if (Failed(hr))
        {
            // do not replace the current type with one we cannot support
            return(hr);
        }

        SafeRelease(MediaType);
        MediaType = pMediaType;

        //TODO: Send Width and Height data to Render
        if (State != StreamState.Paused)
        {
            State = StreamState.Ready;
        }
    }
    return(hr);
}
/// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
/// <summary>
/// Gets a list of all supported video formats from a video source device
/// as a list of TantaMFVideoFormatContainer's
///
/// </summary>
/// <returns>S_OK for success, nz for fail</returns>
/// <param name="currentDevice">the video device that created the source reader</param>
/// <param name="sourceReader">the source reader object</param>
/// <param name="maxFormatsToTestFor">the max number of formats we test for</param>
/// <param name="formatList">the list of video formats supported by the SourceReader</param>
/// <history>
///    01 Nov 18  Cynic - Started
/// </history>
public static HResult GetSupportedVideoFormatsFromSourceReaderInFormatContainers(TantaMFDevice currentDevice, IMFSourceReaderAsync sourceReader, int maxFormatsToTestFor, out List<TantaMFVideoFormatContainer> formatList)
{
    IMFMediaType mediaTypeObj = null;
    HResult hr;

    // init this, we never return null here
    formatList = new List<TantaMFVideoFormatContainer>();

    // sanity check
    if (currentDevice == null)
    {
        return(HResult.E_FAIL);
    }
    if (sourceReader == null)
    {
        return(HResult.E_FAIL);
    }

    try
    {
        for (int typeIndex = 0; typeIndex < maxFormatsToTestFor; typeIndex++)
        {
            // test this
            hr = sourceReader.GetNativeMediaType(TantaWMFUtils.MF_SOURCE_READER_FIRST_VIDEO_STREAM, typeIndex, out mediaTypeObj);
            if (hr == HResult.MF_E_NO_MORE_TYPES)
            {
                // we are all done. The formatList has been populated
                return(HResult.S_OK);
            }
            else if (hr != HResult.S_OK)
            {
                // we failed
                throw new Exception("GetSupportedVideoFormatsFromSourceReaderInFormatContainers failed on call to GetNativeMediaType, retVal=" + hr.ToString());
            }

            // get a format container from the media type
            TantaMFVideoFormatContainer tmpContainer = GetVideoFormatContainerFromMediaTypeObject(mediaTypeObj, currentDevice);
            if (tmpContainer == null)
            {
                // we failed
                throw new Exception("GetSupportedVideoFormatsFromSourceReaderInFormatContainers failed on call to GetVideoFormatContainerFromMediaTypeObject");
            }
            // now add it
            formatList.Add(tmpContainer);
        }
    }
    finally
    {
        // always release our mediaType if we have one
        if (mediaTypeObj != null)
        {
            Marshal.ReleaseComObject(mediaTypeObj);
            mediaTypeObj = null;
        }
    }

    // all done
    return(HResult.S_OK);
}
/// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+= /// <summary> /// Gets a list of all supported video formats from a video source device /// as a nice displayable bit of text. outSb will never be null but can be /// empty. There will be one line per mediaType /// /// </summary> /// <returns>S_OK for success, nz for fail</returns> /// <param name="sourceReader">the source reader object</param> /// <param name="maxFormatsToTestFor">the max number of formats we test for</param> /// <history> /// 01 Nov 18 Cynic - Started /// </history> public static HResult GetSupportedVideoFormatsFromSourceReaderAsText(IMFSourceReader sourceReader, int maxFormatsToTestFor, out StringBuilder outSb) { IMFMediaType mediaTypeObj = null; HResult hr; // we always return something here outSb = new StringBuilder(); // sanity check if (sourceReader == null) { return(HResult.E_FAIL); } try { for (int typeIndex = 0; typeIndex < maxFormatsToTestFor; typeIndex++) { // test this hr = sourceReader.GetNativeMediaType(TantaWMFUtils.MF_SOURCE_READER_FIRST_VIDEO_STREAM, typeIndex, out mediaTypeObj); if (hr == HResult.MF_E_NO_MORE_TYPES) { // we are all done. The outSb container has been populated return(HResult.S_OK); } else if (hr != HResult.S_OK) { // we failed throw new Exception("GetSupportedVideoFormatsFromSourceReaderAsText failed on call to GetNativeMediaType, retVal=" + hr.ToString()); } // get the formats for this type StringBuilder tmpSb; hr = GetSupportedFormatsFromMediaTypeAsText(mediaTypeObj, out tmpSb); if (hr != HResult.S_OK) { // we failed throw new Exception("GetSupportedVideoFormatsFromSourceReaderAsText failed on call to GetSupportedFormatsFromMediaTypeAsText, retVal=" + hr.ToString()); } // add it here outSb.Append(typeIndex.ToString() + " "); outSb.Append(tmpSb); outSb.Append("\r\n"); outSb.Append("\r\n"); } } finally { // always release our mediaType if we have one if (mediaTypeObj != null) { Marshal.ReleaseComObject(mediaTypeObj); mediaTypeObj = null; } } // all done return(HResult.S_OK); }
public HRESULT IsEqual(IMFMediaType pIMediaType, out uint pdwFlags) { var hr = _type.IsEqual(pIMediaType, out pdwFlags); var flags = (MF_MEDIATYPE_EQUAL_FLAGS)pdwFlags; Trace("type: " + pIMediaType + " flags: " + flags + " hr: " + hr); return(hr); }
private IMediaEncodingProperties PropertiesFromMediaType(IMFMediaType type) { Guid iidAudioEncodingProperties = new Guid("B4002AF6-ACD4-4E5A-A24B-5D7498A8B8C4"); Object properties = null; MFPlat.MFCreatePropertiesFromMediaType(type, ref iidAudioEncodingProperties, out properties); return(properties as IMediaEncodingProperties); }
HResult ConfigureEncoder( EncodingParameters eparams, IMFMediaType pType, IMFSinkWriter pWriter, out int pdwStreamIndex ) { HResult hr = HResult.S_OK; IMFMediaType pType2 = null; hr = MFExtern.MFCreateMediaType(out pType2); if (Succeeded(hr)) { hr = pType2.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video); } if (Succeeded(hr)) { hr = pType2.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, eparams.subtype); } if (Succeeded(hr)) { hr = pType2.SetUINT32(MFAttributesClsid.MF_MT_AVG_BITRATE, eparams.bitrate); } if (Succeeded(hr)) { hr = CopyAttribute(pType, pType2, MFAttributesClsid.MF_MT_FRAME_SIZE); } if (Succeeded(hr)) { hr = CopyAttribute(pType, pType2, MFAttributesClsid.MF_MT_FRAME_RATE); } if (Succeeded(hr)) { hr = CopyAttribute(pType, pType2, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO); } if (Succeeded(hr)) { hr = CopyAttribute(pType, pType2, MFAttributesClsid.MF_MT_INTERLACE_MODE); } pdwStreamIndex = 0; if (Succeeded(hr)) { hr = pWriter.AddStream(pType2, out pdwStreamIndex); } SafeRelease(pType2); return(hr); }
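// --- Illustrative usage sketch (not part of the original source). Shows the typical
// order of calls around the ConfigureEncoder helper above, loosely following the
// MFCaptureToFile pattern: create a sink writer, add the encoded stream, then give the
// writer the reader's current (uncompressed) type as its input. pReader, outputUrl and
// the EncodingParameters value are assumptions of this sketch, as is the method name.
HResult OpenMediaSinkExample(IMFSourceReader pReader, EncodingParameters eparams, string outputUrl, out IMFSinkWriter pWriter, out int streamIndex)
{
    pWriter = null;
    streamIndex = 0;
    IMFMediaType pType = null;

    // The reader's current video type describes the uncompressed input frames.
    HResult hr = pReader.GetCurrentMediaType((int)MF_SOURCE_READER.FirstVideoStream, out pType);
    if (Succeeded(hr))
    {
        hr = MFExtern.MFCreateSinkWriterFromURL(outputUrl, null, null, out pWriter);
    }
    if (Succeeded(hr))
    {
        // Adds the encoded (compressed) output stream to the writer.
        hr = ConfigureEncoder(eparams, pType, pWriter, out streamIndex);
    }
    if (Succeeded(hr))
    {
        // The sink writer converts from this input type to the encoded stream type.
        hr = pWriter.SetInputMediaType(streamIndex, pType, null);
    }
    if (Succeeded(hr))
    {
        hr = pWriter.BeginWriting();
    }

    SafeRelease(pType);
    return hr;
}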
private void CloseAllMediaDevices() { HResult hr; Console.WriteLine("CloseAllMediaDevices"); // close and release our call back handler if (mediaSessionAsyncCallbackHandler != null) { // stop any messaging or events in the call back handler mediaSessionAsyncCallbackHandler.ShutDown(); mediaSessionAsyncCallbackHandler = null; } // Shut down the source reader if (StreamReader != null) { Marshal.ReleaseComObject(StreamReader); StreamReader = null; } // close the session (this is NOT the same as shutting it down) if (mediaSession != null) { hr = mediaSession.Close(); if (hr != HResult.S_OK) { // just log it Console.WriteLine("CloseAllMediaDevices call to mediaSession.Close failed. Err=" + hr.ToString()); } } // Shut down the media source if (MediaSource != null) { hr = MediaSource.Shutdown(); if (hr != HResult.S_OK) { // just log it Console.WriteLine("CloseAllMediaDevices call to mediaSource.Shutdown failed. Err=" + hr.ToString()); } Marshal.ReleaseComObject(MediaSource); MediaSource = null; } // Shut down the media session (note we only closed it before). if (mediaSession != null) { hr = mediaSession.Shutdown(); if (hr != HResult.S_OK) { // just log it Console.WriteLine("CloseAllMediaDevices call to mediaSession.Shutdown failed. Err=" + hr.ToString()); } Marshal.ReleaseComObject(mediaSession); mediaSession = null; } if (currentVideoMediaType != null) { Marshal.ReleaseComObject(currentVideoMediaType); currentVideoMediaType = null; } }
/// <summary> /// Wrap this media type into another media type. /// </summary> /// <param name="mediaType">A valid IMFMediaType instance.</param> /// <param name="majorType">Define the major type of the new media type.</param> /// <param name="subType">Define the subtype of the new media type.</param> /// <param name="wrap">Receives a new media type that wrap the current instance.</param> /// <returns>If this function succeeds, it returns the S_OK member. Otherwise, it returns another HResult's member that describe the error.</returns> public static HResult WrapMediaType(this IMFMediaType mediaType, Guid majorType, Guid subType, out IMFMediaType wrap) { if (mediaType == null) { throw new ArgumentNullException("mediaType"); } return(MFExtern.MFWrapMediaType(mediaType, majorType, subType, out wrap)); }
/// <summary> /// Retrieve the media type wrapped into this instance. /// </summary> /// <param name="mediaType">A valid IMFMediaType instance.</param> /// <param name="original">Receives the original media type.</param> /// <returns>If this function succeeds, it returns the S_OK member. Otherwise, it returns another HResult's member that describe the error.</returns> public static HResult UnwrapMediaType(this IMFMediaType mediaType, out IMFMediaType original) { if (mediaType == null) { throw new ArgumentNullException("mediaType"); } return(MFExtern.MFUnwrapMediaType(mediaType, out original)); }
/// <summary> /// Compares this media type to a partial media type. /// </summary> /// <param name="mediaType">A valid IMFMediaType instance.</param> /// <param name="partialMediaType">A partial media type.</param> /// <returns>True if <paramref name="partialMediaType"/> have the same major type and if all attributes from the partial type exists in the current instance with the same value ; False otherwise.</returns> public static bool CompareToPartialMediaType(this IMFMediaType mediaType, IMFMediaType partialMediaType) { if (mediaType == null) { throw new ArgumentNullException("mediaType"); } return(MFExtern.MFCompareFullToPartialMediaType(mediaType, partialMediaType)); }
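// --- Illustrative usage sketch (not part of the original source). Builds a partial
// media type (major type and subtype only) and uses the CompareToPartialMediaType
// extension above to test whether a full type matches it. MFExtern, MFAttributesClsid
// and MFMediaType.RGB32 are assumed from the same MediaFoundation.Net-style wrapper
// used by the extension methods above; the method name is illustrative only.
public static bool IsRgb32Video(IMFMediaType fullType)
{
    IMFMediaType partial;
    HResult hr = MFExtern.MFCreateMediaType(out partial);
    if (hr != HResult.S_OK)
    {
        return false;
    }

    try
    {
        // A partial type only needs the attributes we want to match on.
        partial.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video);
        partial.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MFMediaType.RGB32);

        return fullType.CompareToPartialMediaType(partial);
    }
    finally
    {
        Marshal.ReleaseComObject(partial);
    }
}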
public static KeyValuePair <Guid, object>[] ToValues(this IMFMediaType input) { if (input == null) { throw new ArgumentNullException(nameof(input)); } return(Enumerate(input).Select(kv => new KeyValuePair <Guid, object>(kv.Key, GetValue(input, kv.Key))).ToArray()); }
protected override HResult OnEnumInputTypes(int dwTypeIndex, out IMFMediaType pInputType) { // I'd like to skip implementing this, but while some clients // don't require it (PlaybackFX), some do (MEPlayer/IMFMediaEngine). // Although frame counting should be able to run against any type, // we must at a minimum provide a major type. return(CreatePartialType(dwTypeIndex, MFMediaType.Video, m_MediaSubtypes, out pInputType)); }
public static T Get <T>(this IMFMediaType input, Guid key, T defaultValue = default, IFormatProvider provider = null) { if (!TryGet(input, key, provider, out T value)) { return(defaultValue); } return(value); }
public void CopyFrom(IMFMediaType pType) { if (pType == null) { throw new Exception("E_POINTER"); } int hr = pType.CopyAllItems(m_pType); MFError.ThrowExceptionForHR(hr); }
private void GetInterface() { IMFMediaType[] pmt = new IMFMediaType[1]; int hr = MFExtern.MFCreateMediaType(out pmt[0]); MFError.ThrowExceptionForHR(hr); hr = MFExtern.MFCreateStreamDescriptor(333, 1, pmt, out m_sd); MFError.ThrowExceptionForHR(hr); }
private string DescribeMediaType(IMFMediaType mediaType) { int attributeCount; mediaType.GetCount(out attributeCount); var sb = new StringBuilder(); for (int n = 0; n < attributeCount; n++) { DescribeAttribute(mediaType, n, sb); } return sb.ToString(); }
// Construct from an existing media type. public MediaTypeBuilder(IMFMediaType pType) { Debug.Assert(pType != null); if (pType != null) { m_pType = pType; } else { throw new Exception("E_POINTER"); } }
private static void CheckAudioFormat(IMFMediaType mediaType, IDictionary<FormatCheckElement, Tuple<bool, object>> ret) { var subType = mediaType.GetSubType(); var isAac = subType == MFMediaType.AAC; ret.Add(FormatCheckElement.AudioCodec, Tuple.Create<bool, object>(isAac, subType)); if (isAac) { var aacObjectType = mediaType.GetAacObjectType(); ret.Add(FormatCheckElement.AudioAacObjectType, Tuple.Create<bool, object>(aacObjectType == 2, aacObjectType)); var channels = mediaType.GetChannel(); ret.Add(FormatCheckElement.AudioChannel, Tuple.Create<bool, object>(channels == 1 || channels == 2, channels)); } }
private void GetInterface() { IMFStreamDescriptor m_sd; IMFMediaType[] pmt = new IMFMediaType[1]; int hr = MFExtern.MFCreateMediaType(out pmt[0]); MFError.ThrowExceptionForHR(hr); pmt[0].SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video); hr = MFExtern.MFCreateStreamDescriptor(333, 1, pmt, out m_sd); MFError.ThrowExceptionForHR(hr); hr = m_sd.GetMediaTypeHandler(out m_mth); MFError.ThrowExceptionForHR(hr); }
private string DescribeMediaType(IMFMediaType mediaType) { int attributeCount; mediaType.GetCount(out attributeCount); var sb = new StringBuilder(); for (int n = 0; n < attributeCount; n++) { Guid key; var val = new PropVariant(); mediaType.GetItemByIndex(n, out key, ref val); string propertyName = FieldDescriptionHelper.Describe(typeof(MediaFoundationAttributes), key); sb.AppendFormat("{0}={1}\r\n", propertyName, val.Value); val.Clear(); } return sb.ToString(); }
private static void DescribeAttribute(IMFMediaType mediaType, int n, StringBuilder sb) { var variantPtr = Marshal.AllocHGlobal(MarshalHelpers.SizeOf<PropVariant>()); try { Guid key; mediaType.GetItemByIndex(n, out key, variantPtr); var val = MarshalHelpers.PtrToStructure<PropVariant>(variantPtr); string propertyName = FieldDescriptionHelper.Describe(typeof (MediaFoundationAttributes), key); sb.AppendFormat("{0}={1}\r\n", propertyName, val.Value); } finally { PropVariant.Clear(variantPtr); Marshal.FreeHGlobal(variantPtr); } }
public static void MFCreateMediaType(out IMFMediaType mediaType) { int result = ExternMFCreateMediaType(out mediaType); if (result < 0) { throw new COMException("Exception from HRESULT: 0x" + result.ToString("X", System.Globalization.NumberFormatInfo.InvariantInfo) + " (MFCreateMediaType)", result); } }
protected int m_TokenCounter; // Counter. Incremented whenever we create new samples.

#endregion Fields

#region Constructors

/// <summary>
/// Constructor
/// </summary>
public EVRCustomPresenter()
{
    if (System.Threading.Thread.CurrentThread.GetApartmentState() != System.Threading.ApartmentState.MTA)
    {
        throw new Exception("Unsupported threading model");
    }

    m_iDiscarded = 0;
    m_pClock = null;
    m_pMixer = null;
    m_pMediaEventSink = null;
    m_h2 = null;
    m_pMediaType = null;
    m_bSampleNotify = false;
    m_bRepaint = false;
    m_bEndStreaming = false;
    m_bPrerolled = false;
    m_RenderState = RenderState.Shutdown;
    m_fRate = 1.0f;
    m_TokenCounter = 0;

    m_pD3DPresentEngine = new D3DPresentEngine();
    m_FrameStep = new FrameStep(); // Frame-stepping information.

    m_nrcSource = new MFVideoNormalizedRect(0.0f, 0.0f, 1.0f, 1.0f);
    m_scheduler = new Scheduler(D3DPresentEngine.PRESENTER_BUFFER_COUNT, m_pD3DPresentEngine); // Manages scheduling of samples.
    m_SamplePool = new SamplePool(D3DPresentEngine.PRESENTER_BUFFER_COUNT); // Pool of allocated samples.

    // Force load of mf.dll now, rather than when we try to start streaming
    DllCanUnloadNow();
}
protected void SetMediaType(IMFMediaType pMediaType)
{
    // Note: pMediaType can be NULL (to clear the type)
    int hr;

    // Clearing the media type is allowed in any state (including shutdown).
    if (pMediaType == null)
    {
        SafeRelease(m_pMediaType);
        m_pMediaType = null;
        ReleaseResources();
        return;
    }

    try
    {
        MFRatio fps;
        Queue<IMFSample> sampleQueue = new Queue<IMFSample>();

        // Cannot set the media type after shutdown.
        CheckShutdown();

        // Check if the new type is actually different.
        // Note: This function safely handles NULL input parameters.
        if (Utils.AreMediaTypesEqual(m_pMediaType, pMediaType))
        {
            return; // Nothing more to do.
        }

        // We're really changing the type. First get rid of the old type.
        SafeRelease(m_pMediaType);
        m_pMediaType = null;
        ReleaseResources();

        // Initialize the presenter engine with the new media type.
        // The presenter engine allocates the samples.
        m_pD3DPresentEngine.CreateVideoSamples(pMediaType, sampleQueue);

        // Mark each sample with our token counter. If this batch of samples becomes
        // invalid, we increment the counter, so that we know they should be discarded.
        foreach (IMFSample pSample1 in sampleQueue)
        {
            hr = pSample1.SetUINT32(MFSamplePresenter_SampleCounter, m_TokenCounter);
            MFError.ThrowExceptionForHR(hr);
        }

        // Add the samples to the sample pool.
        m_SamplePool.Initialize(sampleQueue); // Initialize takes over the queue
        sampleQueue = null;

        // Set the frame rate on the scheduler.
        fps = Utils.GetFrameRate(pMediaType);

        if ((fps.Numerator != 0) && (fps.Denominator != 0))
        {
            m_scheduler.SetFrameRate(fps);
        }
        else
        {
            // NOTE: The mixer's proposed type might not have a frame rate, in which case
            // we'll use an arbitrary default. (Although it's unlikely the video source
            // does not have a frame rate.)
            m_scheduler.SetFrameRate(g_DefaultFrameRate);
        }

        // Store the media type.
        if (pMediaType != m_pMediaType)
        {
            m_pMediaType = pMediaType;
        }
    }
    catch
    {
        ReleaseResources();
        throw;
    }
}
protected HRESULT SetMediaType(IMFMediaType pMediaType)
{
    // Note: pMediaType can be NULL (to clear the type)

    // Clearing the media type is allowed in any state (including shutdown).
    if (pMediaType == null)
    {
        m_pMediaType = null;
        ReleaseResources();
        return S_OK;
    }

    HRESULT hr = S_OK;
    MFRatio fps = new MFRatio(0, 0);

    // Cannot set the media type after shutdown.
    hr = CheckShutdown();
    if (hr.Succeeded)
    {
        // Check if the new type is actually different.
        // Note: This function safely handles NULL input parameters.
        if (MFHelper.AreMediaTypesEqual(m_pMediaType, pMediaType))
        {
            return S_OK; // Nothing more to do.
        }

        // We're really changing the type. First get rid of the old type.
        m_pMediaType = null;
        ReleaseResources();

        // Initialize the presenter engine with the new media type.
        // The presenter engine allocates the samples.
        hr = CreateVideoSamples(pMediaType);
        if (hr.Succeeded)
        {
            // Set the frame rate on the scheduler.
            if (SUCCEEDED(MFHelper.GetFrameRate(pMediaType, out fps)) && (fps.Numerator != 0) && (fps.Denominator != 0))
            {
                m_scheduler.SetFrameRate(fps);
            }
            else
            {
                // NOTE: The mixer's proposed type might not have a frame rate, in which case
                // we'll use an arbitrary default. (Although it's unlikely the video source
                // does not have a frame rate.)
                m_scheduler.SetFrameRate(g_DefaultFrameRate);
            }

            // Store the media type.
            ASSERT(pMediaType != NULL);
            m_pMediaType = pMediaType;
        }
    }

    if (hr.Failed)
    {
        ReleaseResources();
    }

    return hr;
}
//------------------------------------------------------------------- // Name: AddStreamSink // Description: Adds a new stream to the sink. // // Note: This sink has a fixed number of streams, so this method // always returns MF_E_STREAMSINKS_FIXED. //------------------------------------------------------------------- public int AddStreamSink( int dwStreamSinkIdentifier, IMFMediaType pMediaType, out IMFStreamSinkAlt ppStreamSink) { // Make sure we *never* leave this entry point with an exception try { TRACE("CWavSink::AddStreamSink"); throw new COMException("Fixed streams", MFError.MF_E_STREAMSINKS_FIXED); } catch (Exception e) { ppStreamSink = null; return Marshal.GetHRForException(e); } }
//------------------------------------------------------------------- // Name: GetCurrentMediaType // Description: Return the current media type, if any. //------------------------------------------------------------------- public int GetCurrentMediaType(out IMFMediaType ppMediaType) { TRACE("CWavStream::GetCurrentMediaType"); lock (this) { CheckShutdown(); if (m_pCurrentType == null) { throw new COMException("no type set", MFError.MF_E_NOT_INITIALIZED); } ppMediaType = m_pCurrentType; } return S_Ok; }
//------------------------------------------------------------------- // Name: SetCurrentMediaType // Description: Set the current media type. //------------------------------------------------------------------- public int SetCurrentMediaType(IMFMediaType pMediaType) { TRACE("CWavStream::SetCurrentMediaType"); lock (this) { if (pMediaType == null) { throw new COMException("Null media type", E_InvalidArgument); } CheckShutdown(); // We don't allow format changes after streaming starts, // because this would invalidate the .wav file. ValidateOperation(CAsyncOperation.StreamOperation.OpSetMediaType); IsMediaTypeSupported(pMediaType, IntPtr.Zero); //SAFE_RELEASE(m_pCurrentType); m_pCurrentType = pMediaType; m_state = State.Ready; } return S_Ok; }
protected HRESULT IsMediaTypeSupported(IMFMediaType pMediaType) { MFHelper.VideoTypeBuilder pProposed = null; HRESULT hr = S_OK; Format d3dFormat = Format.Unknown; bool bCompressed = false; MFVideoInterlaceMode InterlaceMode = MFVideoInterlaceMode.Unknown; MFVideoArea VideoCropArea; int width = 0, height = 0; // Helper object for reading the proposed type. hr = MFHelper.MediaTypeBuilder.Create(pMediaType, out pProposed); if (hr.Failed) return hr; // Reject compressed media types. hr = pProposed.IsCompressedFormat(out bCompressed); if (hr.Failed) return hr; if (bCompressed) { return MFHelper.MF_E_INVALIDMEDIATYPE; } // Validate the format. int nFcc; hr = pProposed.GetFourCC(out nFcc); if (hr.Failed) return hr; d3dFormat = (Format)nFcc; // The D3DPresentEngine checks whether the format can be used as // the back-buffer format for the swap chains. hr = CheckFormat(d3dFormat); if (hr.Failed) return hr; // Reject interlaced formats. hr = pProposed.GetInterlaceMode(out InterlaceMode); if (hr.Failed) return hr; if (InterlaceMode != MFVideoInterlaceMode.Progressive) { return MFHelper.MF_E_INVALIDMEDIATYPE; } hr = pProposed.GetFrameDimensions(out width, out height); if (hr.Failed) return hr; // Validate the various apertures (cropping regions) against the frame size. // Any of these apertures may be unspecified in the media type, in which case // we ignore it. We just want to reject invalid apertures. if (SUCCEEDED(pProposed.GetPanScanAperture(out VideoCropArea))) { MFHelper.ValidateVideoArea(VideoCropArea, width, height); } if (SUCCEEDED(pProposed.GetGeometricAperture(out VideoCropArea))) { MFHelper.ValidateVideoArea(VideoCropArea, width, height); } if (SUCCEEDED(pProposed.GetMinDisplayAperture(out VideoCropArea))) { MFHelper.ValidateVideoArea(VideoCropArea, width, height); } return hr; }
public override void Dispose() { base.Dispose(); if (m_DeviceManager != null) { Marshal.ReleaseComObject(m_DeviceManager); m_DeviceManager = null; } m_pClock = null; if (m_pMixer != IntPtr.Zero) { Marshal.Release(m_pMixer); m_pMixer = IntPtr.Zero; } m_pMediaEventSink = null; m_pMediaType = null; }
protected HRESULT GetSwapChainPresentParameters(IMFMediaType pType, out PresentParameters pPP) { pPP = null; HRESULT hr; int width = 0, height = 0; UInt32 d3dFormat = 0; Guid _subtype; hr = (HRESULT)pType.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out _subtype); hr.Assert(); if (hr.Succeeded) { hr = MFHelper.MFGetAttribute2UINT32asUINT64(pType, MFAttributesClsid.MF_MT_FRAME_SIZE, out width, out height); hr.Assert(); if (hr.Succeeded) { FOURCC _fourcc = new FOURCC(_subtype); d3dFormat = _fourcc; pPP = new PresentParameters(); pPP.BackBufferWidth = width; pPP.BackBufferHeight = height; pPP.Windowed = true; pPP.SwapEffect = SwapEffect.Copy; pPP.BackBufferFormat = (Format)d3dFormat; pPP.DeviceWindowHandle = m_VideoControl != null ? m_VideoControl.Handle : m_Device.CreationParameters.Window; pPP.PresentFlags = PresentFlags.Video; pPP.PresentationInterval = PresentInterval.Default; if (m_Device.CreationParameters.DeviceType != DeviceType.Hardware) { pPP.PresentFlags = pPP.PresentFlags | PresentFlags.LockableBackBuffer; } } } return hr; }
protected HRESULT CreateVideoSamples(IMFMediaType pFormat) { if (pFormat == null) { return MFHelper.MF_E_UNEXPECTED; } HRESULT hr = S_OK; PresentParameters pp; lock (m_ObjectLock) { hr = GetSwapChainPresentParameters(pFormat, out pp); hr.Assert(); if (hr.Failed) { return hr; } // Create the video samples. for (int i = 0; i < PRESENTER_BUFFER_COUNT; i++) { Surface _surface = null; if (m_bUseSwapChain) { // Create a new swap chain. SwapChain pSwapChain = new SwapChain(m_Device, pp); if (pSwapChain == null) { return E_UNEXPECTED; } _surface = pSwapChain.GetBackBuffer(0); if (_surface == null) return E_FAIL; m_Device.ColorFill(_surface, new Color4(Color.Black)); } else { _surface = Surface.CreateRenderTarget(m_Device, pp.BackBufferWidth, pp.BackBufferHeight, pp.BackBufferFormat, pp.Multisample, pp.MultisampleQuality, true); } // Create the video sample from the swap chain. MFSample pVideoSample = new MFSample(); pVideoSample.Target = _surface; hr = (HRESULT)MFHelper.MFCreateVideoSampleFromSurface(Marshal.GetObjectForIUnknown(_surface.ComPointer), out pVideoSample.Sample); // Add it to the list. m_SamplePool.AddSample(pVideoSample); } } return NOERROR; }
int ConfigureEncoder( IMFMediaType pType, IMFSinkWriter pWriter, out int pdwStreamIndex ) { int hr = S_Ok; IMFMediaType pType2 = null; hr = MFExtern.MFCreateMediaType(out pType2); if (Succeeded(hr)) { hr = pType2.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video); } if (Succeeded(hr)) { hr = pType2.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MEDIATYPE); } if (Succeeded(hr)) { hr = pType2.SetUINT32(MFAttributesClsid.MF_MT_AVG_BITRATE, TARGET_BIT_RATE); } if (Succeeded(hr)) { hr = CopyAttribute(pType, pType2, MFAttributesClsid.MF_MT_FRAME_SIZE); } if (Succeeded(hr)) { hr = CopyAttribute(pType, pType2, MFAttributesClsid.MF_MT_FRAME_RATE); } if (Succeeded(hr)) { hr = CopyAttribute(pType, pType2, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO); } if (Succeeded(hr)) { hr = CopyAttribute(pType, pType2, MFAttributesClsid.MF_MT_INTERLACE_MODE); } pdwStreamIndex = 0; if (Succeeded(hr)) { hr = pWriter.AddStream(pType2, out pdwStreamIndex); } SafeRelease(pType2); return hr; }
public void Dispose() { TRACE("CWavStream::Dispose"); SafeRelease(m_pEventQueue); SafeRelease(m_pByteStream); SafeRelease(m_pCurrentType); SafeRelease(m_pFinalizeResult); m_pEventQueue = null; m_pByteStream = null; m_pCurrentType = null; m_pFinalizeResult = null; //m_pSink.Dispose(); // break deadly embrace m_pSink = null; GC.SuppressFinalize(this); }
int m_WorkQueueId; // ID of the work queue for asynchronous operations. #endregion Fields #region Constructors public CWavStream(CWavSink pParent, IMFByteStream pByteStream) { int hr; TRACE("CWavStream::CWavStream"); m_SampleQueue = new Queue(); m_state = State.TypeNotSet; m_IsShutdown = false; m_pSink = null; m_pEventQueue = null; m_pByteStream = null; m_pCurrentType = null; m_pFinalizeResult = null; m_StartTime = 0; m_cbDataWritten = 0; m_WorkQueueId = 0; Debug.Assert(pParent != null); Debug.Assert(pByteStream != null); MFByteStreamCapabilities dwCaps = MFByteStreamCapabilities.None; const MFByteStreamCapabilities dwRequiredCaps = (MFByteStreamCapabilities.IsWritable | MFByteStreamCapabilities.IsSeekable); // Make sure the byte stream has the necessary caps bits. hr = pByteStream.GetCapabilities(out dwCaps); MFError.ThrowExceptionForHR(hr); if ((dwCaps & dwRequiredCaps) != dwRequiredCaps) { throw new COMException("stream doesn't have required caps", E_Fail); } // Move the file pointer to leave room for the RIFF headers. hr = pByteStream.SetCurrentPosition(Marshal.SizeOf(typeof(WAV_FILE_HEADER))); MFError.ThrowExceptionForHR(hr); // Create the event queue helper. hr = MFExternAlt.MFCreateEventQueue(out m_pEventQueue); MFError.ThrowExceptionForHR(hr); // Allocate a new work queue for async operations. hr = MFExtern.MFAllocateWorkQueue(out m_WorkQueueId); MFError.ThrowExceptionForHR(hr); m_pByteStream = pByteStream; m_pSink = pParent; }
//------------------------------------------------------------------- // Name: GetMediaTypeByIndex // Description: Return a preferred media type by index. //------------------------------------------------------------------- public int GetMediaTypeByIndex( /* [in] */ int dwIndex, /* [out] */ out IMFMediaType ppType) { int hr; TRACE("CWavStream::GetMediaTypeByIndex"); lock (this) { CheckShutdown(); if (dwIndex >= CWavSink.g_AudioFormats.Length) { throw new COMException("No more types", MFError.MF_E_NO_MORE_TYPES); } WaveFormatEx wav; InitializePCMWaveFormat(out wav, CWavSink.g_AudioFormats[dwIndex]); hr = MFExtern.MFCreateMediaType(out ppType); MFError.ThrowExceptionForHR(hr); hr = MFExtern.MFInitMediaTypeFromWaveFormatEx(ppType, wav, Marshal.SizeOf(typeof(WaveFormatEx))); MFError.ThrowExceptionForHR(hr); } return S_Ok; }
protected void IsMediaTypeSupported(IMFMediaType pMediaType) { VideoTypeBuilder pProposed = null; bool bCompressed = false; MFVideoInterlaceMode InterlaceMode = MFVideoInterlaceMode.Unknown; MFVideoArea VideoCropArea; int width = 0, height = 0; try { // Helper object for reading the proposed type. pProposed = new VideoTypeBuilder(pMediaType); // Reject compressed media types. pProposed.IsCompressedFormat(out bCompressed); if (bCompressed) { throw new COMException("Compressed formats not supported", MFError.MF_E_INVALIDMEDIATYPE); } // Validate the format. int i; pProposed.GetFourCC(out i); // The D3DPresentEngine checks whether the format can be used as // the back-buffer format for the swap chains. m_pD3DPresentEngine.CheckFormat(i); // Reject interlaced formats. pProposed.GetInterlaceMode(out InterlaceMode); if (InterlaceMode != MFVideoInterlaceMode.Progressive) { throw new COMException("Interlaced formats not supported", MFError.MF_E_INVALIDMEDIATYPE); } pProposed.GetFrameDimensions(out width, out height); // Validate the various apertures (cropping regions) against the frame size. // Any of these apertures may be unspecified in the media type, in which case // we ignore it. We just want to reject invalid apertures. try { pProposed.GetPanScanAperture(out VideoCropArea); ValidateVideoArea(VideoCropArea, width, height); } catch { } try { pProposed.GetGeometricAperture(out VideoCropArea); ValidateVideoArea(VideoCropArea, width, height); } catch { } try { pProposed.GetMinDisplayAperture(out VideoCropArea); ValidateVideoArea(VideoCropArea, width, height); } catch { } } finally { pProposed.Dispose(); //SafeRelease(pMediaType); } }
// use IntPtr since this can be NULL //------------------------------------------------------------------- // Name: IsMediaTypeSupported // Description: Check if a media type is supported. // // pMediaType: The media type to check. // ppMediaType: Optionally, receives a "close match" media type. //------------------------------------------------------------------- public int IsMediaTypeSupported( /* [in] */ IMFMediaType pMediaType, // /* [out] */ out IMFMediaType ppMediaType) IntPtr ppMediaType) { TRACE("CWavStream::IsMediaTypeSupported"); int hr; Guid majorType; WaveFormatEx pWav; int cbSize; lock (this) { CheckShutdown(); hr = pMediaType.GetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, out majorType); MFError.ThrowExceptionForHR(hr); // First make sure it's audio. if (majorType != MFMediaType.Audio) { throw new COMException("type not audio", MFError.MF_E_INVALIDTYPE); } // Get a WAVEFORMATEX structure to validate against. hr = MFExtern.MFCreateWaveFormatExFromMFMediaType(pMediaType, out pWav, out cbSize, MFWaveFormatExConvertFlags.Normal); MFError.ThrowExceptionForHR(hr); // Validate the WAVEFORMATEX structure. ValidateWaveFormat(pWav, cbSize); // We don't return any "close match" types. if (ppMediaType != IntPtr.Zero) { Marshal.WriteIntPtr(ppMediaType, IntPtr.Zero); } } //CoTaskMemFree(pWav); return S_Ok; }
public void Dispose() { m_pType = null; }
//------------------------------------------------------------------- // Name: Shutdown // Description: Shuts down the stream sink. //------------------------------------------------------------------- public void Shutdown() { int hr; Debug.Assert(!m_IsShutdown); GC.SuppressFinalize(this); if (m_pEventQueue != null) { hr = m_pEventQueue.Shutdown(); MFError.ThrowExceptionForHR(hr); } hr = MFExtern.MFUnlockWorkQueue(m_WorkQueueId); MFError.ThrowExceptionForHR(hr); m_SampleQueue.Clear(); SafeRelease(m_pSink); SafeRelease(m_pEventQueue); SafeRelease(m_pByteStream); SafeRelease(m_pCurrentType); SafeRelease(m_pFinalizeResult); m_pSink = null; m_pEventQueue = null; m_pByteStream = null; m_pCurrentType = null; m_pFinalizeResult = null; m_IsShutdown = true; }
protected void CalculateOutputRectangle(IMFMediaType pProposed, out MFRect prcOutput) { int srcWidth = 0, srcHeight = 0; MFRatio inputPAR; MFRatio outputPAR; MFRect rcOutput = new MFRect(); MFVideoArea displayArea; VideoTypeBuilder pmtProposed = null; // Helper object to read the media type. pmtProposed = new VideoTypeBuilder(pProposed); // Get the source's frame dimensions. pmtProposed.GetFrameDimensions(out srcWidth, out srcHeight); // Get the source's display area. pmtProposed.GetVideoDisplayArea(out displayArea); // Calculate the x,y offsets of the display area. int offsetX = (int)displayArea.OffsetX.GetOffset(); int offsetY = (int)displayArea.OffsetY.GetOffset(); // Use the display area if valid. Otherwise, use the entire frame. if (displayArea.Area.Width != 0 && displayArea.Area.Height != 0 && offsetX + displayArea.Area.Width <= (srcWidth) && offsetY + displayArea.Area.Height <= (srcHeight)) { rcOutput.left = offsetX; rcOutput.right = offsetX + displayArea.Area.Width; rcOutput.top = offsetY; rcOutput.bottom = offsetY + displayArea.Area.Height; } else { rcOutput.left = 0; rcOutput.top = 0; rcOutput.right = srcWidth; rcOutput.bottom = srcHeight; } // rcOutput is now either a sub-rectangle of the video frame, or the entire frame. // If the pixel aspect ratio of the proposed media type is different from the monitor's, // letterbox the video. We stretch the image rather than shrink it. inputPAR = pmtProposed.GetPixelAspectRatio(); // Defaults to 1:1 outputPAR.Denominator = outputPAR.Numerator = 1; // This is an assumption of the sample. // Adjust to get the correct picture aspect ratio. prcOutput = CorrectAspectRatio(rcOutput, inputPAR, outputPAR); pmtProposed.Dispose(); }
//------------------------------------------------------------------- // TryMediaType // // Test a proposed video format. //------------------------------------------------------------------- protected int TryMediaType(IMFMediaType pType) { int hr = S_Ok; bool bFound = false; Guid subtype; hr = pType.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out subtype); if (Failed(hr)) { return hr; } // Do we support this type directly? if (m_draw.IsFormatSupported(subtype)) { bFound = true; } else { // Can we decode this media type to one of our supported // output formats? for (int i = 0; ; i++) { // Get the i'th format. hr = m_draw.GetFormat(i, out subtype); if (Failed(hr)) { break; } hr = pType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, subtype); if (Failed(hr)) { break; } // Try to set this type on the source reader. hr = m_pReader.SetCurrentMediaType((int)MF_SOURCE_READER.FirstVideoStream, IntPtr.Zero, pType); if (Succeeded(hr)) { bFound = true; break; } } } if (bFound) { hr = m_draw.SetVideoType(pType); } return hr; }
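//-------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source). Shows how TryMediaType
// above is typically driven: walk the source reader's native types for the first video
// stream until one is accepted. This mirrors the MFCaptureD3D-style sample the method
// appears to come from; m_pReader, SafeRelease and the Failed/Succeeded helpers are
// assumed to be the same members used above, and the method name is illustrative only.
//-------------------------------------------------------------------
protected int SelectVideoType()
{
    int hr = S_Ok;
    IMFMediaType pType = null;

    for (int i = 0; ; i++)
    {
        // Get the i'th native type reported by the device.
        hr = m_pReader.GetNativeMediaType((int)MF_SOURCE_READER.FirstVideoStream, i, out pType);
        if (Failed(hr))
        {
            // MF_E_NO_MORE_TYPES ends the enumeration; nothing was accepted.
            break;
        }

        hr = TryMediaType(pType);

        SafeRelease(pType);
        pType = null;

        if (Succeeded(hr))
        {
            // Found a format that m_draw can render (directly or via conversion).
            break;
        }
    }

    return hr;
}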
// Formats
protected void CreateOptimalVideoType(IMFMediaType pProposed, out IMFMediaType ppOptimal)
{
    try
    {
        MFRect rcOutput;
        MFVideoArea displayArea;
        IMFMediaType pOptimalType = null;

        // Create the helper object to manipulate the optimal type.
        VideoTypeBuilder pmtOptimal = new VideoTypeBuilder();

        // Clone the proposed type.
        pmtOptimal.CopyFrom(pProposed);

        // Modify the new type.

        // For purposes of this SDK sample, we assume
        // 1) The monitor's pixels are square.
        // 2) The presenter always preserves the pixel aspect ratio.

        // Set the pixel aspect ratio (PAR) to 1:1 (see assumption #1, above)
        pmtOptimal.SetPixelAspectRatio(1, 1);

        // Get the output rectangle.
        rcOutput = m_pD3DPresentEngine.GetDestinationRect();
        if (rcOutput.IsEmpty())
        {
            // Calculate the output rectangle based on the media type.
            CalculateOutputRectangle(pProposed, out rcOutput);
        }

        // Set the extended color information: Use BT.709
        pmtOptimal.SetYUVMatrix(MFVideoTransferMatrix.BT709);
        pmtOptimal.SetTransferFunction(MFVideoTransferFunction.Func709);
        pmtOptimal.SetVideoPrimaries(MFVideoPrimaries.BT709);
        pmtOptimal.SetVideoNominalRange(MFNominalRange.MFNominalRange_16_235);
        pmtOptimal.SetVideoLighting(MFVideoLighting.Dim);

        // Set the target rect dimensions.
        pmtOptimal.SetFrameDimensions(rcOutput.right, rcOutput.bottom);

        // Set the geometric aperture, and disable pan/scan.
        displayArea = new MFVideoArea(0, 0, rcOutput.right, rcOutput.bottom);

        pmtOptimal.SetPanScanEnabled(false);

        pmtOptimal.SetGeometricAperture(displayArea);

        // Set the pan/scan aperture and the minimum display aperture. We don't care
        // about them per se, but the mixer will reject the type if these exceed the
        // frame dimensions.
        pmtOptimal.SetPanScanAperture(displayArea);
        pmtOptimal.SetMinDisplayAperture(displayArea);

        // Return the pointer to the caller.
        pmtOptimal.GetMediaType(out pOptimalType);

        pmtOptimal.Dispose();
        ppOptimal = pOptimalType;
    }
    finally
    {
        //SafeRelease(pOptimalType);
        //SafeRelease(pmtOptimal);
    }
}