/// <summary>
/// Creates a source resolver by calling the native MFCreateSourceResolver function
/// and converts a failure HRESULT into a COMException.
/// </summary>
/// <param name="sourceResolver">Receives the new IMFSourceResolver instance.</param>
/// <exception cref="COMException">Thrown when the native call returns a failure HRESULT.</exception>
public static void MFCreateSourceResolver(out IMFSourceResolver sourceResolver)
{
    int hr = ExternMFCreateSourceResolver(out sourceResolver);
    if (hr >= 0)
    {
        return;
    }

    // Failure: surface the HRESULT both in the message (hex) and on the exception itself.
    string hex = hr.ToString("X", System.Globalization.NumberFormatInfo.InvariantInfo);
    throw new COMException("Exception from HRESULT: 0x" + hex + " (MFCreateSourceResolver)", hr);
}
/// <summary>
/// Creates a byte stream from a URL.
/// </summary>
/// <param name="sourceResolver">A valid IMFSourceResolver instance.</param>
/// <param name="url">A string that contains the URL to resolve.</param>
/// <param name="flags">One or more members of the MFResolution enumeration.</param>
/// <param name="properties">An instance of the IPropertyStore interface of a property store. The method passes the property store to the scheme handler or byte-stream handler that creates the object.</param>
/// <param name="byteStream">Receives a byte stream that can handle the media file targeted by <paramref name="url"/>.</param>
/// <returns>If this function succeeds, it returns the S_OK member. Otherwise, it returns another HResult's member that describe the error.</returns>
public static HResult CreateObjectFromURL(this IMFSourceResolver sourceResolver, string url, MFResolution flags, IPropertyStore properties, out IMFByteStream byteStream)
{
    if (sourceResolver == null)
    {
        throw new ArgumentNullException("sourceResolver");
    }

    // Force byte-stream resolution: clear the media-source bit, set the byte-stream bit.
    // The caller's other flags are preserved.
    MFResolution effectiveFlags = (flags & ~MFResolution.MediaSource) | MFResolution.ByteStream;

    MFObjectType objectType;
    object resolved;
    HResult hr = sourceResolver.CreateObjectFromURL(url, effectiveFlags, properties, out objectType, out resolved);

    if (hr.Succeeded())
    {
        // "as" keeps the original behavior: a successful call that yields an
        // unexpected object type results in a null byteStream.
        byteStream = resolved as IMFByteStream;
    }
    else
    {
        byteStream = null;
    }
    return hr;
}
/// <summary>
/// Creates a media source for the given video file via a temporary source resolver.
/// </summary>
/// <param name="videoFile">Path or URL of the media file to open.</param>
/// <param name="source">Receives the media source, or null on failure.</param>
/// <returns>S_OK on success, otherwise the failing HResult.</returns>
private static HResult CreateMediaSource(string videoFile, out IMFMediaSource source)
{
    source = null;
    IMFSourceResolver resolver = null;

    HResult hr = MF.CreateSourceResolver(out resolver);
    if (!Failed(hr))
    {
        hr = resolver.CreateObjectFromURL(videoFile, MFResolution.MediaSource, null, out source);
        if (Failed(hr))
        {
            // Drop any partially created source on failure.
            SafeRelease(source);
        }
        // The resolver is only needed for the duration of this call.
        SafeRelease(resolver);
    }
    return hr;
}
/// <summary>
/// Shows the audio/video metadata of the currently selected list-view item by
/// resolving it to a media source and reading its shell property store.
/// Based on https://docs.microsoft.com/en-us/windows/desktop/medfound/shell-metadata-providers
/// </summary>
/// <param name="sender">Standard event-handler sender (unused).</param>
/// <param name="e">Standard event arguments (unused).</param>
private void infoToolStripMenuItem_Click(object sender, EventArgs e)
{
    IMFSourceResolver pSourceResolver = null;
    IMFMediaSource pSource = null;
    object pPropsObject = null;
    // NOTE(review): assumes at least one item is selected — this throws if
    // SelectedItems is empty; confirm the menu item is disabled without a selection.
    var url = this.listView1.SelectedItems[0].Text;
    try
    {
        // Create the source resolver.
        HResult hr = MFExtern.MFCreateSourceResolver(out pSourceResolver);
        Validate(hr);
        if (pSourceResolver == null)
        {
            throw new Exception("pSourceResolver is null");
        }

        // Get a pointer to the IMFMediaSource interface of the media source.
        hr = pSourceResolver.CreateObjectFromURL(url, MFResolution.MediaSource, null, out pSource);
        Validate(hr);
        if (pSource == null)
        {
            throw new Exception("pSource is null");
        }

        // Ask the media source for its property handler service; the store
        // exposes the file's metadata as (key, value) pairs.
        hr = MFExtern.MFGetService(pSource, MFServices.MF_PROPERTY_HANDLER_SERVICE, typeof(IPropertyStore).GUID, out pPropsObject);
        Validate(hr);
        if (pPropsObject == null)
        {
            throw new Exception("pPropsObject is null");
        }
        IPropertyStore pProps = pPropsObject as IPropertyStore;

        hr = pProps.GetCount(out int cProps);
        Validate(hr);

        var audioInfo = new AudioInfo();
        var videoInfo = new VideoInfo();
        // Walk every property in the store and route it to both accumulators;
        // each Fill* helper picks out only the keys it cares about.
        for (int i = 0; i < cProps; ++i)
        {
            var key = new MediaFoundation.Misc.PropertyKey();
            hr = pProps.GetAt(i, key);
            Validate(hr);
            using (PropVariant pv = new PropVariant())
            {
                hr = pProps.GetValue(key, pv);
                Validate(hr);
                FillAudioProperty(audioInfo, key, pv);
                FillVideoProperty(videoInfo, key, pv);
            }
        }
        MessageBox.Show("Audio =\n" + audioInfo + ";\nVideo =\n" + videoInfo);
    }
    finally
    {
        // Release the COM objects regardless of which step failed.
        if (pSource != null)
        {
            Marshal.ReleaseComObject(pSource);
        }
        if (pSourceResolver != null)
        {
            Marshal.ReleaseComObject(pSourceResolver);
        }
        if (pPropsObject != null)
        {
            Marshal.ReleaseComObject(pPropsObject);
        }
    }
    // MessageBox.Show(url);
}
/// <summary>
/// P/Invoke declaration for the native Media Foundation MFCreateSourceResolver
/// function, which creates a source resolver.
/// NOTE(review): declared as returning void, so the native HRESULT is only
/// surfaced if the (not visible here) DllImport attribute sets
/// PreserveSig = false — confirm against the attribute on this declaration.
/// </summary>
/// <param name="ppISourceResolver">Receives the new IMFSourceResolver instance.</param>
public static extern void MFCreateSourceResolver(out IMFSourceResolver ppISourceResolver);
/// <summary>
/// Creates a media source from a Uri.
/// </summary>
/// <param name="sourceResolver">A valid IMFSourceResolver instance.</param>
/// <param name="url">A Uri that contains the URL to resolve.</param>
/// <param name="flags">One or more members of the MFResolution enumeration.</param>
/// <param name="properties">An instance of the IPropertyStore interface of a property store. The method passes the property store to the scheme handler or byte-stream handler that creates the object.</param>
/// <param name="mediaSource">Receives a media source that can handle the media file targeted by <paramref name="url"/>.</param>
/// <returns>If this function succeeds, it returns the S_OK member. Otherwise, it returns another HResult's member that describe the error.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="url"/> is null.</exception>
public static HResult CreateObjectFromURL(this IMFSourceResolver sourceResolver, Uri url, MFResolution flags, IPropertyStore properties, out IMFMediaSource mediaSource)
{
    // Fail fast with a meaningful ArgumentNullException instead of letting
    // url.ToString() below raise a bare NullReferenceException; this matches
    // the argument validation style of the sibling overloads.
    if (url == null)
    {
        throw new ArgumentNullException("url");
    }
    return (CreateObjectFromURL(sourceResolver, url.ToString(), flags, properties, out mediaSource));
}
/// <summary>
/// Creates a media source from a byte stream.
/// </summary>
/// <param name="sourceResolver">A valid IMFSourceResolver instance.</param>
/// <param name="byteStream">An instance of the byte stream's IMFByteStream interface.</param>
/// <param name="flags">One or more members of the MFResolution enumeration.</param>
/// <param name="properties">An instance of the IPropertyStore interface of a property store. The method passes the property store to the scheme handler or byte-stream handler that creates the object.</param>
/// <param name="mediaSource">Receives a media source that can handle the provided byte stream.</param>
/// <returns>If this function succeeds, it returns the S_OK member. Otherwise, it returns another HResult's member that describe the error.</returns>
public static HResult CreateObjectFromByteStream(this IMFSourceResolver sourceResolver, IMFByteStream byteStream, MFResolution flags, IPropertyStore properties, out IMFMediaSource mediaSource)
{
    // A raw byte stream has no URL to help the resolver pick a handler, so a
    // typed null string is forwarded to the URL-aware overload.
    const string noUrl = null;
    return CreateObjectFromByteStream(sourceResolver, byteStream, noUrl, flags, properties, out mediaSource);
}
/// <summary>
/// Opens the given MP4 file with Media Foundation and asserts that its
/// duration, frame size, frame rate and average bitrate match the expected
/// test constants (within the configured tolerances).
/// </summary>
/// <param name="mp4filepath">Path of the MP4 file to validate.</param>
void ValidateMP4OutputFile(string mp4filepath)
{
    ulong duration = 0;
    uint videoWidth = 0;
    uint videoHeight = 0;
    double videoFPS = 0.0;
    uint videoBitrate = 0;
    try
    {
        IMFMediaSource mediaSource = null;
        IMFSourceReader sourceReader = null;
        ulong videoSize = 0;
        ulong frameRate = 0;
        MFHelper.IMFMediaType mediaType = null;
        IMFPresentationDescriptor presentationDescriptor = null;
        uint objectType = default(uint);
        object objectSource = null;

        API.MFStartup();

        // Create the media source using source resolver and the input URL
        IMFSourceResolver sourceResolver = null;
        API.MFCreateSourceResolver(out sourceResolver);
        // sourceResolver.CreateObjectFromURL("..\\..\\Apps\\SmartCam\\SmartRecorder\\Output\\VideoWriterTest\\CreateTestWMVFile_640x480_24fps_15s\\TestMP4File_640x480_24fps_15s.mp4", Consts.MF_RESOLUTION_MEDIASOURCE, null, out objectType, out objectSource);
        sourceResolver.CreateObjectFromURL(mp4filepath, Consts.MF_RESOLUTION_MEDIASOURCE, null, out objectType, out objectSource);
        mediaSource = (IMFMediaSource)objectSource;
        API.MFCreateSourceReaderFromMediaSource(mediaSource, null, out sourceReader);
        mediaSource.CreatePresentationDescriptor(out presentationDescriptor);

        // Get the duration
        presentationDescriptor.GetUINT64(new Guid(Consts.MF_PD_DURATION), out duration);

        // Get the video width and height
        sourceReader.GetCurrentMediaType(0, out mediaType);
        mediaType.GetUINT64(Guid.Parse(Consts.MF_MT_FRAME_SIZE), out videoSize);
        // Frame size is packed into one UINT64: width in the high 32 bits,
        // height in the low 32 bits.
        videoWidth = (uint)(videoSize >> 32);
        videoHeight = (uint)(videoSize & 0x00000000FFFFFFFF);

        // Get the Frame Rate
        mediaType.GetUINT64(Guid.Parse(Consts.MF_MT_FRAME_RATE), out frameRate);
        // Frame rate is a packed ratio (high 32 bits / low 32 bits); guard
        // against a zero denominator before dividing.
        if ((frameRate & 0x00000000FFFFFFFF) != 0)
        {
            videoFPS = (double)(frameRate >> 32) / (double)(frameRate & 0x00000000FFFFFFFF);
        }

        // Get the encoding bitrate
        mediaType.GetUINT32(new Guid(Consts.MF_MT_AVG_BITRATE), out videoBitrate);

        API.MFShutdown();
    }
    catch (Exception exception)
    {
        // NOTE(review): the test is failed here, but execution then falls
        // through to the assertions below with all metrics still zero —
        // COM objects created above are also never explicitly released.
        Console.WriteLine("Exception failure: {0}", exception.ToString());
        Assert.IsFalse(true);
    }

    // Verify the extracted metrics against the expected recording parameters.
    Assert.IsFalse(Math.Abs((double)duration - (double)VIDEO_DURATION_IN_100_NS) > (double)VIDEO_DURATION_VAR_IN_100_NS);
    Assert.IsFalse(videoWidth != VIDEO_WIDTH);
    Assert.IsFalse(videoHeight != VIDEO_HEIGHT);
    Assert.IsFalse(Math.Abs(videoFPS - VIDEO_FPS) > VIDEO_FPS_VAR);
    Assert.IsFalse(Math.Abs((int)videoBitrate - VIDEO_ENCODE_BITRATE) > VIDEO_ENCODE_BITRATE_VAR);
}
/// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
/// <summary>
/// Prepares the media session and topology and opens the media source
/// and media sink.
///
/// Once the session and topology are setup, a MESessionTopologySet event
/// will be triggered in the callback handler. After that the events there
/// trigger other events and everything rolls along automatically.
/// </summary>
/// <param name="sourceFileName">the source file name</param>
/// <param name="outputFileName">the name of the output file</param>
/// <history>
///    01 Nov 18 Cynic - Originally Written
/// </history>
public void PrepareSessionAndTopology(string sourceFileName, string outputFileName)
{
    HResult hr;
    // NOTE(review): pSourceResolver is declared and released in the finally
    // block but never assigned in this method — appears to be leftover code.
    IMFSourceResolver pSourceResolver = null;
    IMFTopology pTopology = null;
    IMFPresentationDescriptor sourcePresentationDescriptor = null;
    int sourceStreamCount = 0;
    IMFStreamDescriptor audioStreamDescriptor = null;
    bool streamIsSelected = false;
    IMFTopologyNode sourceAudioNode = null;
    IMFTopologyNode outputSinkNode = null;
    IMFMediaType currentAudioMediaType = null;
    int audioStreamIndex = -1;

    LogMessage("PrepareSessionAndTopology ");

    // we sanity check the filenames - the existence of the path and if the file already exists
    // should have been checked before this call
    if ((sourceFileName == null) || (sourceFileName.Length == 0))
    {
        throw new Exception("PrepareSessionAndTopology: source file name is invalid. Cannot continue.");
    }
    if ((outputFileName == null) || (outputFileName.Length == 0))
    {
        throw new Exception("PrepareSessionAndTopology: output file name is invalid. Cannot continue.");
    }

    try
    {
        // reset everything
        CloseAllMediaDevices();

        // Create the media session.
        hr = MFExtern.MFCreateMediaSession(null, out mediaSession);
        if (hr != HResult.S_OK)
        {
            throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateMediaSession failed. 
Err=" + hr.ToString());
        }
        if (mediaSession == null)
        {
            throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateMediaSession failed. mediaSession == null");
        }

        // set up our media session call back handler.
        mediaSessionAsyncCallbackHandler = new TantaAsyncCallbackHandler();
        mediaSessionAsyncCallbackHandler.Initialize();
        mediaSessionAsyncCallbackHandler.MediaSession = mediaSession;
        mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackError = HandleMediaSessionAsyncCallBackErrors;
        mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackEvent = HandleMediaSessionAsyncCallBackEvent;

        // Register the callback handler with the session and tell it that events can
        // start. This does not actually trigger an event it just lets the media session
        // know that it can now send them if it wishes to do so.
        hr = mediaSession.BeginGetEvent(mediaSessionAsyncCallbackHandler, null);
        if (hr != HResult.S_OK)
        {
            throw new Exception("PrepareSessionAndTopology call to mediaSession.BeginGetEvent failed. Err=" + hr.ToString());
        }

        // Create a new topology. A topology describes a collection of media sources, sinks, and transforms that are
        // connected in a certain order. These objects are represented within the topology by topology nodes,
        // which expose the IMFTopologyNode interface. A topology describes the path of multimedia data through these nodes.
        hr = MFExtern.MFCreateTopology(out pTopology);
        if (hr != HResult.S_OK)
        {
            throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateTopology failed. Err=" + hr.ToString());
        }
        if (pTopology == null)
        {
            throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateTopology failed. pTopology == null");
        }

        // ####
        // #### we now create the media source, this is an audio file
        // ####

        // use the file name to create the media source for the audio device. Media sources are objects that generate media data.
        // For example, the data might come from a video file, a network stream, or a hardware device, such as a camera. Each
        // media source contains one or more streams, and each stream delivers data of one type, such as audio or video.
        mediaSource = TantaWMFUtils.GetMediaSourceFromFile(sourceFileName);
        if (mediaSource == null)
        {
            throw new Exception("PrepareSessionAndTopology call to mediaSource == null");
        }

        // A presentation is a set of related media streams that share a common presentation time. We now get a copy of the media
        // source's presentation descriptor. Applications can use the presentation descriptor to select streams
        // and to get information about the source content.
        hr = mediaSource.CreatePresentationDescriptor(out sourcePresentationDescriptor);
        if (hr != HResult.S_OK)
        {
            throw new Exception("PrepareSessionAndTopology call to mediaSource.CreatePresentationDescriptor failed. Err=" + hr.ToString());
        }
        if (sourcePresentationDescriptor == null)
        {
            throw new Exception("PrepareSessionAndTopology call to mediaSource.CreatePresentationDescriptor failed. sourcePresentationDescriptor == null");
        }

        // Now we get the number of stream descriptors in the presentation. A presentation descriptor contains a list of one or more
        // stream descriptors. These describe the streams in the presentation. Streams can be either selected or deselected. Only the
        // selected streams produce data. Deselected streams are not active and do not produce any data.
        hr = sourcePresentationDescriptor.GetStreamDescriptorCount(out sourceStreamCount);
        if (hr != HResult.S_OK)
        {
            throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. Err=" + hr.ToString());
        }
        if (sourceStreamCount == 0)
        {
            throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. sourceStreamCount == 0");
        }

        // Look at each stream, there can be more than one stream here
        // Usually only one is enabled. This app uses the first "selected"
        // stream we come to which has the appropriate media type
        for (int i = 0; i < sourceStreamCount; i++)
        {
            // we require the major type to be audio
            Guid guidMajorType = TantaWMFUtils.GetMajorMediaTypeFromPresentationDescriptor(sourcePresentationDescriptor, i);
            if (guidMajorType != MFMediaType.Audio)
            {
                continue;
            }

            // we also require the stream to be enabled
            hr = sourcePresentationDescriptor.GetStreamDescriptorByIndex(i, out streamIsSelected, out audioStreamDescriptor);
            if (hr != HResult.S_OK)
            {
                throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. Err=" + hr.ToString());
            }
            if (audioStreamDescriptor == null)
            {
                throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. audioStreamDescriptor == null");
            }

            // if the stream is selected, leave now we will release the audioStream descriptor later
            if (streamIsSelected == true)
            {
                audioStreamIndex = i; // record this
                break;
            }

            // release the one we are not using
            if (audioStreamDescriptor != null)
            {
                Marshal.ReleaseComObject(audioStreamDescriptor);
                audioStreamDescriptor = null;
            }
            audioStreamIndex = -1;
        }

        // by the time we get here we should have a audioStreamDescriptor if
        // we do not, then we cannot proceed
        if (audioStreamDescriptor == null)
        {
            throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. audioStreamDescriptor == null");
        }
        if (audioStreamIndex < 0)
        {
            throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. 
audioStreamIndex < 0");
        }

        // ####
        // #### we now create the media sink, we need the type from the stream to do
        // #### this which is why we wait until now to set it up
        // ####

        currentAudioMediaType = TantaWMFUtils.GetCurrentMediaTypeFromStreamDescriptor(audioStreamDescriptor);
        if (currentAudioMediaType == null)
        {
            throw new Exception("PrepareSessionAndTopology call to currentAudioMediaType == null");
        }

        mediaSink = OpenMediaFileSink(outputFileName);
        if (mediaSink == null)
        {
            throw new Exception("PrepareSessionAndTopology call to mediaSink == null");
        }

        // ####
        // #### we now make up a topology branch for the audio stream
        // ####

        // Create a source node for this stream.
        sourceAudioNode = TantaWMFUtils.CreateSourceNodeForStream(mediaSource, sourcePresentationDescriptor, audioStreamDescriptor);
        if (sourceAudioNode == null)
        {
            throw new Exception("PrepareSessionAndTopology call to CreateSourceNodeForStream failed. pSourceNode == null");
        }

        // Create the output node - this is a file sink in this case.
        outputSinkNode = TantaWMFUtils.CreateSinkNodeForStream(mediaSink);
        if (outputSinkNode == null)
        {
            throw new Exception("PrepareSessionAndTopology call to CreateOutputNodeForStream failed. outputSinkNode == null");
        }

        // Add the nodes to the topology. First the source
        hr = pTopology.AddNode(sourceAudioNode);
        if (hr != HResult.S_OK)
        {
            throw new Exception("PrepareSessionAndTopology call to pTopology.AddNode(sourceAudioNode) failed. Err=" + hr.ToString());
        }

        // then add the output
        hr = pTopology.AddNode(outputSinkNode);
        if (hr != HResult.S_OK)
        {
            throw new Exception("PrepareSessionAndTopology call to pTopology.AddNode(outputSinkNode) failed. Err=" + hr.ToString());
        }

        // Connect the output stream from the source node to the input stream of the output node. The parameters are:
        // dwOutputIndex  -  Zero-based index of the output stream on this node.
        // *pDownstreamNode  -  Pointer to the IMFTopologyNode interface of the node to connect to.
        // dwInputIndexOnDownstreamNode  -  Zero-based index of the input stream on the other node.
        hr = sourceAudioNode.ConnectOutput(0, outputSinkNode, 0);
        if (hr != HResult.S_OK)
        {
            throw new Exception("PrepareSessionAndTopology call to pSourceNode.ConnectOutput failed. Err=" + hr.ToString());
        }

        // Set the topology on the media session.
        // If SetTopology succeeds, the media session will queue an
        // MESessionTopologySet event.
        hr = mediaSession.SetTopology(0, pTopology);
        MFError.ThrowExceptionForHR(hr);

        // Release the topology
        // NOTE(review): this release only happens on the success path; if an
        // exception is thrown above, pTopology is not released in the finally
        // block below — confirm whether that leak is intentional.
        if (pTopology != null)
        {
            Marshal.ReleaseComObject(pTopology);
        }
    }
    catch (Exception ex)
    {
        LogMessage("Error: " + ex.Message);
        OISMessageBox(ex.Message);
    }
    finally
    {
        // Clean up
        if (pSourceResolver != null)
        {
            Marshal.ReleaseComObject(pSourceResolver);
        }
        if (sourcePresentationDescriptor != null)
        {
            Marshal.ReleaseComObject(sourcePresentationDescriptor);
        }
        if (audioStreamDescriptor != null)
        {
            Marshal.ReleaseComObject(audioStreamDescriptor);
        }
        if (sourceAudioNode != null)
        {
            Marshal.ReleaseComObject(sourceAudioNode);
        }
        if (outputSinkNode != null)
        {
            Marshal.ReleaseComObject(outputSinkNode);
        }
        if (currentAudioMediaType != null)
        {
            Marshal.ReleaseComObject(currentAudioMediaType);
        }
    }
}
/// <summary>
/// Starts the asynchronous encode operation.
/// </summary>
/// <param name="inputURL">Source filename</param>
/// <param name="outputURL">Target filename</param>
/// <param name="audioOutput">Audio format that will be used for audio streams</param>
/// <param name="videoOutput">Video format that will be used for video streams</param>
/// <param name="startPosition">Starting position of the content</param>
/// <param name="endPosition">Position where the new content will end (0 means encode to the end)</param>
public void Encode(string inputURL, string outputURL, AudioFormat audioOutput, VideoFormat videoOutput, ulong startPosition, ulong endPosition)
{
    // If busy with other operation ignore and return
    if (this.IsBusy())
    {
        return;
    }
    try
    {
        this.presentationClock = null;
        this.startPosition = startPosition;
        this.endPosition = endPosition;
        object objectSource = null;

        // Create the media source using source resolver and the input URL
        uint objectType = default(uint);
        this.mediaSource = null;

        // Init source resolver
        // NOTE(review): the resolver (and the other COM objects created below)
        // are not explicitly released here — confirm cleanup happens elsewhere.
        IMFSourceResolver sourceResolver = null;
        MFHelper.MFCreateSourceResolver(out sourceResolver);
        sourceResolver.CreateObjectFromURL(inputURL, Consts.MF_RESOLUTION_MEDIASOURCE, null, out objectType, out objectSource);
        this.mediaSource = (IMFMediaSource)objectSource;

        // Create the media session using a global start time so MF_TOPOLOGY_PROJECTSTOP can be used to stop the session
        this.mediaSession = null;
        IMFAttributes mediaSessionAttributes = null;
        MFHelper.MFCreateAttributes(out mediaSessionAttributes, 1);
        mediaSessionAttributes.SetUINT32(new Guid(Consts.MF_SESSION_GLOBAL_TIME), 1);
        MFHelper.MFCreateMediaSession(mediaSessionAttributes, out this.mediaSession);

        // Create the event handler
        AsyncEventHandler mediaEventHandler = new AsyncEventHandler(this.mediaSession);
        mediaEventHandler.MediaEvent += this.MediaEvent;

        // Get the stream descriptor
        IMFPresentationDescriptor presentationDescriptor = null;
        mediaSource.CreatePresentationDescriptor(out presentationDescriptor);

        // Get the duration
        presentationDescriptor.GetUINT64(new Guid(Consts.MF_PD_DURATION), out this.duration);

        IMFTranscodeProfile transcodeProfile = null;
        // Container defaults to MPEG4; WMV/WMA targets get the ASF container instead.
        Guid containerType = new Guid(Consts.MFTranscodeContainerType_MPEG4);
        if (outputURL.EndsWith(".wmv", StringComparison.OrdinalIgnoreCase) || outputURL.EndsWith(".wma", StringComparison.OrdinalIgnoreCase))
        {
            containerType = new Guid(Consts.MFTranscodeContainerType_ASF);
        }

        // Generate the transcoding profile
        transcodeProfile = SimpleFastEncode.CreateProfile(audioOutput, videoOutput, containerType);

        // Create the MF topology using the profile
        IMFTopology topology = null;
        MFHelper.MFCreateTranscodeTopology(this.mediaSource, outputURL, transcodeProfile, out topology);

        // Set the end position
        topology.SetUINT64(new Guid(Consts.MF_TOPOLOGY_PROJECTSTART), 0);
        topology.SetUINT64(new Guid(Consts.MF_TOPOLOGY_PROJECTSTOP), (endPosition == 0) ? this.duration : endPosition);

        // Set the session topology
        this.mediaSession.SetTopology((uint)Enums.MFSESSION_SETTOPOLOGY_FLAGS.None, topology);
    }
    catch (Exception ex)
    {
        this.mediaSession = null;

        // Fire the EncodeError event
        if (this.EncodeError != null)
        {
            this.EncodeError(new Exception(ex.Message, ex));
        }
    }
}
/// <summary>
/// Starts streaming from the underlying device on a background task: creates
/// the media session, registers the async callback handler, configures the
/// source reader's media types, and issues the first ReadSample calls to kick
/// off the asynchronous read loop.
/// </summary>
public override void StartStream()
{
    Task.Run(() =>
    {
        // check our source filename is correct and usable
        StreamReaderHandler = new StreamHandler(ProcessFrame, MFBPressed);
        HResult hr;
        // NOTE(review): these three locals are declared and released in the
        // finally block but never assigned in this method — likely leftovers.
        IMFSourceResolver pSourceResolver = null;
        IMFPresentationDescriptor sourcePresentationDescriptor = null;
        IMFStreamDescriptor videoStreamDescriptor = null;
        try
        {
            // reset everything
            CloseAllMediaDevices();

            // Create the media session.
            hr = MFExtern.MFCreateMediaSession(null, out mediaSession);
            if (hr != HResult.S_OK)
            {
                throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateMediaSession failed. Err=" + hr.ToString());
            }

            // set up our media session call back handler.
            mediaSessionAsyncCallbackHandler = new AsyncCallbackHandler();
            mediaSessionAsyncCallbackHandler.Initialize();
            mediaSessionAsyncCallbackHandler.MediaSession = mediaSession;
            mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackError = HandleMediaSessionAsyncCallBackErrors;
            mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackEvent = HandleMediaSessionAsyncCallBackEvent;

            // Register the callback handler with the session and tell it that events can
            // start. This does not actually trigger an event it just lets the media session
            // know that it can now send them if it wishes to do so.
            hr = mediaSession.BeginGetEvent(mediaSessionAsyncCallbackHandler, null);
            if (hr != HResult.S_OK)
            {
                throw new Exception("PrepareSessionAndTopology call to mediaSession.BeginGetEvent failed. 
Err=" + hr.ToString());
            }

            StreamReader = WMFUtils.CreateSourceReaderAsyncFromDevice(UnderlyingDevice, StreamReaderHandler);
            // Try to set the preferred media types on both streams; failures are
            // deliberately ignored so the device's current types are kept.
            try
            {
                SetCurrentMediaType(StreamReader, IMFStreamIndex, 0);
            }
            catch { }
            try
            {
                SetCurrentMediaType(StreamReader, IMFSnapIndex, 1);
            }
            catch { }
            StreamReaderHandler.StreamReader = StreamReader;

            // Dump the current and native media types (major type and subtype)
            // for both streams to the console for diagnostics.
            StreamReader.GetCurrentMediaType(1, out IMFMediaType ppMediaType);
            StreamReader.GetNativeMediaType(1, 0, out IMFMediaType PPMediaType);
            StreamReader.GetCurrentMediaType(0, out IMFMediaType ppMediaType1);
            StreamReader.GetNativeMediaType(0, 0, out IMFMediaType PPMediaType1);
            ppMediaType.GetMajorType(out Guid ppMediaTypeMGUID);
            PPMediaType.GetMajorType(out Guid PPMediaTypeMGUID);
            ppMediaType.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out Guid ppMediaTypeSGUID);
            PPMediaType.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out Guid PPMediaTypeSGUID);
            Console.WriteLine(WMFUtils.ConvertGuidToName(ppMediaTypeMGUID));
            Console.WriteLine(WMFUtils.ConvertGuidToName(PPMediaTypeMGUID));
            Console.WriteLine(WMFUtils.ConvertGuidToName(ppMediaTypeSGUID));
            Console.WriteLine(WMFUtils.ConvertGuidToName(PPMediaTypeSGUID));
            ppMediaType1.GetMajorType(out Guid ppMediaTypeMGUID1);
            PPMediaType1.GetMajorType(out Guid PPMediaTypeMGUID1);
            ppMediaType1.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out Guid ppMediaTypeSGUID1);
            PPMediaType1.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out Guid PPMediaTypeSGUID1);
            Console.WriteLine(WMFUtils.ConvertGuidToName(ppMediaTypeMGUID1));
            Console.WriteLine(WMFUtils.ConvertGuidToName(PPMediaTypeMGUID1));
            Console.WriteLine(WMFUtils.ConvertGuidToName(ppMediaTypeSGUID1));
            Console.WriteLine(WMFUtils.ConvertGuidToName(PPMediaTypeSGUID1));

            Paused = false;
            StreamReader.SetStreamSelection(0, true);
            StreamReader.SetStreamSelection(1, true);
            // Kick off the asynchronous read loop on both streams.
            // NOTE(review): the HResult of the first ReadSample is overwritten
            // by the second before it is checked — a failure on stream 0 goes
            // unnoticed here.
            hr = StreamReader.ReadSample(
                0,
                MediaFoundation.ReadWrite.MF_SOURCE_READER_CONTROL_FLAG.None,
                IntPtr.Zero,
                IntPtr.Zero,
                IntPtr.Zero,
                IntPtr.Zero
                );
            hr = StreamReader.ReadSample(1, MediaFoundation.ReadWrite.MF_SOURCE_READER_CONTROL_FLAG.None, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero
                );
            if (hr != HResult.S_OK)
            {
                // we failed
                throw new Exception("Failed on calling the first ReadSample on the reader, retVal=" + hr.ToString());
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine("PrepareSessionAndTopology Error: " + ex.Message);
        }
        finally
        {
            // Clean up
            if (pSourceResolver != null)
            {
                Marshal.ReleaseComObject(pSourceResolver);
            }
            if (sourcePresentationDescriptor != null)
            {
                Marshal.ReleaseComObject(sourcePresentationDescriptor);
            }
            if (videoStreamDescriptor != null)
            {
                Marshal.ReleaseComObject(videoStreamDescriptor);
            }
        }
    });
}
/// <summary>
/// P/Invoke declaration for the native Media Foundation MFCreateSourceResolver
/// function, returning the raw HRESULT as an int. The managed
/// MFCreateSourceResolver wrapper calls this and converts failure codes
/// (negative values) into a COMException.
/// </summary>
/// <param name="ppISourceResolver">Receives the new IMFSourceResolver instance.</param>
/// <returns>The raw HRESULT of the native call; negative values indicate failure.</returns>
private static extern int ExternMFCreateSourceResolver( [Out, MarshalAs(UnmanagedType.Interface)] out IMFSourceResolver ppISourceResolver);
/// <summary>
/// P/Invoke declaration for the native Media Foundation MFCreateSourceResolver
/// function. Returns the HRESULT directly; callers are responsible for
/// checking the result for failure.
/// </summary>
/// <param name="ppISourceResolver">Receives the new IMFSourceResolver instance.</param>
/// <returns>S_OK on success, otherwise a failure HResult.</returns>
static extern HResult MFCreateSourceResolver(
    out IMFSourceResolver ppISourceResolver
    );