/// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+= /// <summary> /// Opens prepares the media session and topology and opens the media source /// and media sink. /// /// Once the session and topology are setup, a MESessionTopologySet event /// will be triggered in the callback handler. After that the events there /// trigger other events and everything rolls along automatically. /// </summary> /// <param name="sourceFileName">the source file name</param> /// <param name="outputFileName">the name of the output file</param> /// <history> /// 01 Nov 18 Cynic - Originally Written /// </history> public void PrepareSessionAndTopology(string sourceFileName, string outputFileName) { HResult hr; IMFSourceResolver pSourceResolver = null; IMFTopology pTopology = null; IMFPresentationDescriptor sourcePresentationDescriptor = null; int sourceStreamCount = 0; IMFStreamDescriptor audioStreamDescriptor = null; bool streamIsSelected = false; IMFTopologyNode sourceAudioNode = null; IMFTopologyNode outputSinkNode = null; IMFMediaType currentAudioMediaType = null; int audioStreamIndex = -1; LogMessage("PrepareSessionAndTopology "); // we sanity check the filenames - the existence of the path and if the file already exists // should have been checked before this call if ((sourceFileName == null) || (sourceFileName.Length == 0)) { throw new Exception("PrepareSessionAndTopology: source file name is invalid. Cannot continue."); } if ((outputFileName == null) || (outputFileName.Length == 0)) { throw new Exception("PrepareSessionAndTopology: output file name is invalid. Cannot continue."); } try { // reset everything CloseAllMediaDevices(); // Create the media session. hr = MFExtern.MFCreateMediaSession(null, out mediaSession); if (hr != HResult.S_OK) { throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateMediaSession failed. Err=" + hr.ToString()); } if (mediaSession == null) { throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateMediaSession failed. mediaSession == null"); } // set up our media session call back handler. mediaSessionAsyncCallbackHandler = new TantaAsyncCallbackHandler(); mediaSessionAsyncCallbackHandler.Initialize(); mediaSessionAsyncCallbackHandler.MediaSession = mediaSession; mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackError = HandleMediaSessionAsyncCallBackErrors; mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackEvent = HandleMediaSessionAsyncCallBackEvent; // Register the callback handler with the session and tell it that events can // start. This does not actually trigger an event it just lets the media session // know that it can now send them if it wishes to do so. hr = mediaSession.BeginGetEvent(mediaSessionAsyncCallbackHandler, null); if (hr != HResult.S_OK) { throw new Exception("PrepareSessionAndTopology call to mediaSession.BeginGetEvent failed. Err=" + hr.ToString()); } // Create a new topology. A topology describes a collection of media sources, sinks, and transforms that are // connected in a certain order. These objects are represented within the topology by topology nodes, // which expose the IMFTopologyNode interface. A topology describes the path of multimedia data through these nodes. hr = MFExtern.MFCreateTopology(out pTopology); if (hr != HResult.S_OK) { throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateTopology failed. Err=" + hr.ToString()); } if (pTopology == null) { throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateTopology failed. pTopology == null"); } // #### // #### we now create the media source, this is an audio file // #### // use the file name to create the media source for the audio device. Media sources are objects that generate media data. // For example, the data might come from a video file, a network stream, or a hardware device, such as a camera. Each // media source contains one or more streams, and each stream delivers data of one type, such as audio or video. mediaSource = TantaWMFUtils.GetMediaSourceFromFile(sourceFileName); if (mediaSource == null) { throw new Exception("PrepareSessionAndTopology call to mediaSource == null"); } // A presentation is a set of related media streams that share a common presentation time. We now get a copy of the media // source's presentation descriptor. Applications can use the presentation descriptor to select streams // and to get information about the source content. hr = mediaSource.CreatePresentationDescriptor(out sourcePresentationDescriptor); if (hr != HResult.S_OK) { throw new Exception("PrepareSessionAndTopology call to mediaSource.CreatePresentationDescriptor failed. Err=" + hr.ToString()); } if (sourcePresentationDescriptor == null) { throw new Exception("PrepareSessionAndTopology call to mediaSource.CreatePresentationDescriptor failed. sourcePresentationDescriptor == null"); } // Now we get the number of stream descriptors in the presentation. A presentation descriptor contains a list of one or more // stream descriptors. These describe the streams in the presentation. Streams can be either selected or deselected. Only the // selected streams produce data. Deselected streams are not active and do not produce any data. hr = sourcePresentationDescriptor.GetStreamDescriptorCount(out sourceStreamCount); if (hr != HResult.S_OK) { throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. Err=" + hr.ToString()); } if (sourceStreamCount == 0) { throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. sourceStreamCount == 0"); } // Look at each stream, there can be more than one stream here // Usually only one is enabled. This app uses the first "selected" // stream we come to which has the appropriate media type for (int i = 0; i < sourceStreamCount; i++) { // we require the major type to be audio Guid guidMajorType = TantaWMFUtils.GetMajorMediaTypeFromPresentationDescriptor(sourcePresentationDescriptor, i); if (guidMajorType != MFMediaType.Audio) { continue; } // we also require the stream to be enabled hr = sourcePresentationDescriptor.GetStreamDescriptorByIndex(i, out streamIsSelected, out audioStreamDescriptor); if (hr != HResult.S_OK) { throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. Err=" + hr.ToString()); } if (audioStreamDescriptor == null) { throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. audioStreamDescriptor == null"); } // if the stream is selected, leave now we will release the audioStream descriptor later if (streamIsSelected == true) { audioStreamIndex = i; // record this break; } // release the one we are not using if (audioStreamDescriptor != null) { Marshal.ReleaseComObject(audioStreamDescriptor); audioStreamDescriptor = null; } audioStreamIndex = -1; } // by the time we get here we should have a audioStreamDescriptor if // we do not, then we cannot proceed if (audioStreamDescriptor == null) { throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. audioStreamDescriptor == null"); } if (audioStreamIndex < 0) { throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. audioStreamIndex < 0"); } // #### // #### we now create the media sink, we need the type from the stream to do // #### this which is why we wait until now to set it up // #### currentAudioMediaType = TantaWMFUtils.GetCurrentMediaTypeFromStreamDescriptor(audioStreamDescriptor); if (currentAudioMediaType == null) { throw new Exception("PrepareSessionAndTopology call to currentAudioMediaType == null"); } mediaSink = OpenMediaFileSink(outputFileName); if (mediaSink == null) { throw new Exception("PrepareSessionAndTopology call to mediaSink == null"); } // #### // #### we now make up a topology branch for the audio stream // #### // Create a source node for this stream. sourceAudioNode = TantaWMFUtils.CreateSourceNodeForStream(mediaSource, sourcePresentationDescriptor, audioStreamDescriptor); if (sourceAudioNode == null) { throw new Exception("PrepareSessionAndTopology call to CreateSourceNodeForStream failed. pSourceNode == null"); } // Create the output node - this is a file sink in this case. outputSinkNode = TantaWMFUtils.CreateSinkNodeForStream(mediaSink); if (outputSinkNode == null) { throw new Exception("PrepareSessionAndTopology call to CreateOutputNodeForStream failed. outputSinkNode == null"); } // Add the nodes to the topology. First the source hr = pTopology.AddNode(sourceAudioNode); if (hr != HResult.S_OK) { throw new Exception("PrepareSessionAndTopology call to pTopology.AddNode(sourceAudioNode) failed. Err=" + hr.ToString()); } // then add the output hr = pTopology.AddNode(outputSinkNode); if (hr != HResult.S_OK) { throw new Exception("PrepareSessionAndTopology call to pTopology.AddNode(outputSinkNode) failed. Err=" + hr.ToString()); } // Connect the output stream from the source node to the input stream of the output node. The parameters are: // dwOutputIndex - Zero-based index of the output stream on this node. // *pDownstreamNode - Pointer to the IMFTopologyNode interface of the node to connect to. // dwInputIndexOnDownstreamNode - Zero-based index of the input stream on the other node. hr = sourceAudioNode.ConnectOutput(0, outputSinkNode, 0); if (hr != HResult.S_OK) { throw new Exception("PrepareSessionAndTopology call to pSourceNode.ConnectOutput failed. Err=" + hr.ToString()); } // Set the topology on the media session. // If SetTopology succeeds, the media session will queue an // MESessionTopologySet event. hr = mediaSession.SetTopology(0, pTopology); MFError.ThrowExceptionForHR(hr); // Release the topology if (pTopology != null) { Marshal.ReleaseComObject(pTopology); } } catch (Exception ex) { LogMessage("Error: " + ex.Message); OISMessageBox(ex.Message); } finally { // Clean up if (pSourceResolver != null) { Marshal.ReleaseComObject(pSourceResolver); } if (sourcePresentationDescriptor != null) { Marshal.ReleaseComObject(sourcePresentationDescriptor); } if (audioStreamDescriptor != null) { Marshal.ReleaseComObject(audioStreamDescriptor); } if (sourceAudioNode != null) { Marshal.ReleaseComObject(sourceAudioNode); } if (outputSinkNode != null) { Marshal.ReleaseComObject(outputSinkNode); } if (currentAudioMediaType != null) { Marshal.ReleaseComObject(currentAudioMediaType); } } }