private void createSession(string sFilePath)
        {
            try
            {
                MFError throwonhr = null;

                if (m_pSession == null)
                {
                    throwonhr = MFExtern.MFCreateMediaSession(null, out m_pSession);
                }
                else
                {
                    Stop();
                }

                // Create the media source.

                CreateMediaSource(sFilePath);

                if (m_pSource == null)
                {
                    return;
                }

                IMFPresentationDescriptor lPresentationDescriptor = null;

                m_pSource.CreatePresentationDescriptor(out lPresentationDescriptor);

                if (lPresentationDescriptor == null)
                {
                    return;
                }

                lPresentationDescriptor.GetUINT64(MFAttributesClsid.MF_PD_DURATION, out mMediaDuration);

                IMFTopology pTopology = null;

                // Create a partial topology.
                CreateTopologyFromSource(out pTopology);

                // Set the topology on the media session and check the result.
                HResult hr = m_pSession.SetTopology(MFSessionSetTopologyFlags.Immediate, pTopology);
                MFError.ThrowExceptionForHR(hr);

                StartPlayback();
            }
            catch (Exception)
            {
                // Errors are swallowed here; on failure the session simply does not start.
            }
        }
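
One detail worth noting in the example above: MF_PD_DURATION is expressed in 100-nanosecond units. A minimal sketch of converting the value read into mMediaDuration (a hypothetical helper, not part of the original class):

        // Convert an MF_PD_DURATION value (100-ns units) to a TimeSpan.
        // A TimeSpan tick is also 100 ns, so the conversion is direct.
        private static TimeSpan DurationToTimeSpan(long hnsDuration)
        {
            return TimeSpan.FromTicks(hnsDuration);
        }
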
        private static HResult CreateTopology(out IMFTopology topology, out IMFMediaSinkAlt mediaSink, IMFMediaSource source)
        {
            HResult hr = S_OK;

            topology  = null;
            mediaSink = null;
            IMFPresentationDescriptor presentationDescriptor = null;

            hr = MF.CreateTopology(out topology);
            if (Failed(hr))
            {
                return(hr);
            }

            hr = source.CreatePresentationDescriptor(out presentationDescriptor);
            if (Failed(hr))
            {
                topology = null;
                return(hr);
            }

            hr = BuildTopology(out mediaSink, topology, presentationDescriptor, source);
            if (Failed(hr))
            {
                mediaSink = null;
                topology  = null;
                return(hr);
            }

            return(hr);
        }
Example #3
        static IMFMetadata GetMetadata(IMFMediaSource mediaSource)
        {
            HResult hr;

            // Get IMFPresentationDescriptor.
            IMFPresentationDescriptor presentationDescriptor;

            hr = mediaSource.CreatePresentationDescriptor(out presentationDescriptor);
            MFError.ThrowExceptionForHR(hr);

            // Get IMFMetadataProvider.
            object provider;

            hr = MFExtern.MFGetService(mediaSource, MFServices.MF_METADATA_PROVIDER_SERVICE, typeof(IMFMetadataProvider).GUID, out provider);
            MFError.ThrowExceptionForHR(hr);
            IMFMetadataProvider metadataProvider = (IMFMetadataProvider)provider;

            // Get IMFMetadata.
            IMFMetadata metadata;

            hr = metadataProvider.GetMFMetadata(presentationDescriptor, 0, 0, out metadata);
            MFError.ThrowExceptionForHR(hr);
            COMBase.SafeRelease(presentationDescriptor);
            presentationDescriptor = null;
            COMBase.SafeRelease(metadataProvider);
            metadataProvider = null;

            return(metadata);
        }
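
A sketch of how the returned IMFMetadata might then be queried. This assumes the MediaFoundation.Net PropVariant wrapper and the standard "Title" property name; which properties actually exist depends on the source file:

        static string TryGetTitle(IMFMetadata metadata)
        {
            // The PropVariant receives the property value; GetProperty fails
            // if the named property is not present in the source's metadata.
            PropVariant value = new PropVariant();
            HResult hr = metadata.GetProperty("Title", value);
            if (hr != HResult.S_OK)
            {
                return null;
            }
            return value.GetString();
        }
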
Example #4
        ////////////////////////////////////////////////////////////////////////////////////////
        //  Name: CPlayer::CreateTopology (Private)
        //  Description:
        //      Creates a topology for the media source
        //  Parameter:
        //      pMediaSource: [in] Pointer to the media source
        //      pTopology: [in] Receives the partial topology
        /////////////////////////////////////////////////////////////////////////////////////////

        private void CreateTopology(
            IMFMediaSource pMediaSource,
            IMFTopology pTopology)
        {
            Debug.WriteLine("CPlayer::CreateTopology");

            //The caller needs to pass a valid media source.
            //The media source is required because it is used to set the source node attribute on the topology.

            if (pMediaSource == null || pTopology == null)
            {
                throw new COMException("null pointer", (int)HResult.E_POINTER);
            }

            IMFPresentationDescriptor pPresentationDescriptor;

            //Create Presentation Descriptor for the media source
            HResult hr = pMediaSource.CreatePresentationDescriptor(out pPresentationDescriptor);

            MFError.ThrowExceptionForHR(hr);

            try
            {
                CreateNodesForStream(pPresentationDescriptor, pMediaSource, pTopology);
            }
            finally
            {
                SafeRelease(pPresentationDescriptor);
            }
        }
Example #5
        static int GetBitRate(IMFMediaSource mediaSource)
        {
            HResult hr;

            // Get IMFPresentationDescriptor.
            IMFPresentationDescriptor presentationDescriptor;

            hr = mediaSource.CreatePresentationDescriptor(out presentationDescriptor);
            MFError.ThrowExceptionForHR(hr);

            // Get IMFStreamDescriptor.
            bool isStreamSelected;
            IMFStreamDescriptor streamDescriptor;

            hr = presentationDescriptor.GetStreamDescriptorByIndex(0, out isStreamSelected, out streamDescriptor);
            MFError.ThrowExceptionForHR(hr);

            // Get bit rate.
            int bitRate;

            hr = streamDescriptor.GetUINT32(MFAttributesClsid.MF_SD_ASF_EXTSTRMPROP_AVG_DATA_BITRATE, out bitRate);
            MFError.ThrowExceptionForHR(hr);
            bitRate /= 1000;
            if (bitRate <= 0)
            {
                throw new ApplicationException($"Invalid bit rate: {bitRate}");
            }

            return(bitRate);
        }
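
Note that MF_SD_ASF_EXTSTRMPROP_AVG_DATA_BITRATE is only populated for ASF (WMA/WMV) sources. A hedged alternative for other containers, assuming MediaFoundation.Net exposes MF_PD_AUDIO_ENCODING_BITRATE as the native headers do (not every source sets this attribute either):

        // Fallback sketch: read the presentation-level audio bitrate attribute.
        // Returns 0 if the attribute is absent rather than throwing.
        static int GetAudioEncodingBitRate(IMFPresentationDescriptor presentationDescriptor)
        {
            int bitsPerSecond;
            HResult hr = presentationDescriptor.GetUINT32(MFAttributesClsid.MF_PD_AUDIO_ENCODING_BITRATE, out bitsPerSecond);
            return hr == HResult.S_OK ? bitsPerSecond / 1000 : 0;   // kbit/s
        }
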
        protected IMFTopology CreateTopologyFromSource()
        {
            int hr = 0;

            //TRACE("CPlayer::CreateTopologyFromSource");

            //Assert(m_pSession != null);
            //Debug.Assert(m_pSource != null);

            IMFTopology pTopology = null;
            IMFPresentationDescriptor pSourcePD = null;
            int cSourceStreams = 0;

            try
            {
                // Create a new topology.
                hr = MFExtern.MFCreateTopology(out pTopology);
                MFError.ThrowExceptionForHR(hr);

                // Create the presentation descriptor for the media source.
                hr = m_pSource.CreatePresentationDescriptor(out pSourcePD);
                MFError.ThrowExceptionForHR(hr);

                // Get the number of streams in the media source.
                hr = pSourcePD.GetStreamDescriptorCount(out cSourceStreams);
                MFError.ThrowExceptionForHR(hr);

                //TRACE(string.Format("Stream count: {0}", cSourceStreams));

                // For each stream, create the topology nodes and add them to the topology.
                for (int i = 0; i < cSourceStreams; i++)
                {
                    AddBranchToPartialTopology(pTopology, pSourcePD, i);
                }
            }
            catch
            {
                // If we failed, release the topology
                COMBase.SafeRelease(pTopology);
                throw;
            }
            finally
            {
                COMBase.SafeRelease(pSourcePD);
            }
            return(pTopology);
        }
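
AddBranchToPartialTopology is not shown in this example. A sketch of the usual shape of that helper, written against MediaFoundation.Net and assuming playback through the standard renderer activates (SAR for audio, EVR for video); the method name, hwndVideo parameter, and error handling are illustrative:

        // Sketch: build one topology branch (source node -> output node) for a stream.
        private void AddBranchToPartialTopologySketch(IMFTopology topology, IMFPresentationDescriptor pd, int streamIndex, IntPtr hwndVideo)
        {
            bool selected;
            IMFStreamDescriptor sd;
            MFError.ThrowExceptionForHR(pd.GetStreamDescriptorByIndex(streamIndex, out selected, out sd));
            if (!selected)
            {
                COMBase.SafeRelease(sd);
                return;
            }

            // Pick a renderer based on the stream's major type.
            IMFMediaTypeHandler handler;
            Guid majorType;
            MFError.ThrowExceptionForHR(sd.GetMediaTypeHandler(out handler));
            MFError.ThrowExceptionForHR(handler.GetMajorType(out majorType));

            IMFActivate rendererActivate;
            if (majorType == MFMediaType.Audio)
                MFError.ThrowExceptionForHR(MFExtern.MFCreateAudioRendererActivate(out rendererActivate));
            else
                MFError.ThrowExceptionForHR(MFExtern.MFCreateVideoRendererActivate(hwndVideo, out rendererActivate));

            // Source node: points at the media source, presentation descriptor and stream descriptor.
            IMFTopologyNode sourceNode;
            MFError.ThrowExceptionForHR(MFExtern.MFCreateTopologyNode(MFTopologyType.SourcestreamNode, out sourceNode));
            MFError.ThrowExceptionForHR(sourceNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_SOURCE, m_pSource));
            MFError.ThrowExceptionForHR(sourceNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_PRESENTATION_DESCRIPTOR, pd));
            MFError.ThrowExceptionForHR(sourceNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_STREAM_DESCRIPTOR, sd));
            MFError.ThrowExceptionForHR(topology.AddNode(sourceNode));

            // Output node: wraps the renderer activation object.
            IMFTopologyNode outputNode;
            MFError.ThrowExceptionForHR(MFExtern.MFCreateTopologyNode(MFTopologyType.OutputNode, out outputNode));
            MFError.ThrowExceptionForHR(outputNode.SetObject(rendererActivate));
            MFError.ThrowExceptionForHR(topology.AddNode(outputNode));

            // Connect source -> sink; the session inserts decoders as needed when resolving the topology.
            // (Release sd/handler with COMBase.SafeRelease in production code.)
            MFError.ThrowExceptionForHR(sourceNode.ConnectOutput(0, outputNode, 0));
        }
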
Example #7
        /// <summary>
        /// Create media Topology
        /// </summary>
        /// <param name="ppTopology"></param>
        protected void CreateTopologyFromSource(out IMFTopology ppTopology)
        {
            TRACE("Media::CreateTopologyFromSource");
            Debug.Assert(m_pSession != null);
            Debug.Assert(m_pSource != null);

            IMFTopology pTopology = null;
            IMFPresentationDescriptor pSourcePD = null;
            int cSourceStreams = 0;

            try
            {
                // Create a new topology.
                MFExtern.MFCreateTopology(out pTopology);

                // Create the presentation descriptor for the media source.
                m_pSource.CreatePresentationDescriptor(out pSourcePD);

                // Get the number of streams in the media source.
                pSourcePD.GetStreamDescriptorCount(out cSourceStreams);

                TRACE(string.Format("Stream count: {0}", cSourceStreams));

                // For each stream, create the topology nodes and add them to the topology.
                for (int i = 0; i < cSourceStreams; i++)
                {
                    AddBranchToPartialTopology(pTopology, pSourcePD, i);
                }

                // Return the IMFTopology pointer to the caller.
                ppTopology = pTopology;
            }
            catch
            {
                // If we failed, release the topology
                SafeRelease(pTopology);
                throw;
            }
            finally
            {
                SafeRelease(pSourcePD);
            }
        }
Example #8
        ////////////////////////////////////////////////////////////////////////////////////////
        //  Name: CPlayer::AddTopologyToSequencer (Private)
        //  Description:
        //      Adds the topology to the sequencer
        //  Parameter:
        //      sURL: [in] File URL
        //      pMediaSource: [in] Pointer to the media source
        //      pTopology: [in] Pointer to the topology
        //      pSegmentId: [out] Receives the segment id returned by the sequencer source
        /////////////////////////////////////////////////////////////////////////////////////////

        private void AddTopologyToSequencer(
            string sURL,
            IMFMediaSource pMediaSource,
            IMFTopology pTopology,
            out int pSegmentId)
        {
            Debug.WriteLine("CPlayer::AddTopologyToSequencer");

            if (sURL == null || pMediaSource == null || pTopology == null)
            {
                throw new COMException("null pointer", (int)HResult.E_POINTER);
            }

            long    hnsSegmentDuration = 0;
            long    TopologyID         = 0;
            HResult hr;

            IMFPresentationDescriptor pPresentationDescriptor;

            hr = m_pSequencerSource.AppendTopology(pTopology, 0, out pSegmentId);
            MFError.ThrowExceptionForHR(hr);

            hr = pTopology.GetTopologyID(out TopologyID);
            MFError.ThrowExceptionForHR(hr);

            //create a presentation descriptor
            hr = pMediaSource.CreatePresentationDescriptor(out pPresentationDescriptor);
            MFError.ThrowExceptionForHR(hr);

            //get the segment duration
            hr = pPresentationDescriptor.GetUINT64(MFAttributesClsid.MF_PD_DURATION, out hnsSegmentDuration);
            MFError.ThrowExceptionForHR(hr);

            Debug.Assert(hnsSegmentDuration > 0);

            //store the segment info: SegmentId, SegmentDuration, TopoID in the linked list.
            m_Segments.AddNewSegmentEntry(pSegmentId, hnsSegmentDuration, TopologyID, sURL);
        }
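
For context, m_pSequencerSource is created elsewhere in the player. A minimal sketch of that setup, assuming MediaFoundation.Net (the method name is illustrative):

        // Create the sequencer source that AppendTopology is called on above.
        private void CreateSequencerSourceSketch()
        {
            HResult hr = MFExtern.MFCreateSequencerSource(null, out m_pSequencerSource);
            MFError.ThrowExceptionForHR(hr);

            // The sequencer source object also implements IMFMediaSource; the media
            // session plays the queued segments through that interface, so use it to
            // create the session's presentation descriptor and topologies as needed.
            IMFMediaSource sequencerMediaSource = (IMFMediaSource)m_pSequencerSource;
        }
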
Example #9
        ////////////////////////////////////////////////////////////////////////////////////////
        //  Name: CPlayer::AddTopologyToSequencer (Private)
        //  Description:
        //      Adds the topology to the sequencer
        //  Parameter:
        //      sURL: [in] File URL
        //      pMediaSource: [in] Pointer to the media source
        //      pTopology: [in] Pointer to the topology
        //      pSegmentId: [out] Receives the segment id returned by the sequencer source
        /////////////////////////////////////////////////////////////////////////////////////////
        private void AddTopologyToSequencer(
                            string sURL,
                            IMFMediaSource pMediaSource,
                            IMFTopology pTopology,
                            out int pSegmentId)
        {
            Debug.WriteLine("CPlayer::AddTopologyToSequencer");

            if (sURL == null || pMediaSource == null || pTopology == null)
            {
                throw new COMException("null pointer", E_Pointer);
            }

            long hnsSegmentDuration = 0;
            long TopologyID = 0;
            int hr;

            IMFPresentationDescriptor pPresentationDescriptor;

            hr = m_pSequencerSource.AppendTopology(pTopology, 0, out pSegmentId);
            MFError.ThrowExceptionForHR(hr);

            hr = pTopology.GetTopologyID(out TopologyID);
            MFError.ThrowExceptionForHR(hr);

            //create a presentation descriptor
            hr = pMediaSource.CreatePresentationDescriptor(out pPresentationDescriptor);
            MFError.ThrowExceptionForHR(hr);

            //get the segment duration
            hr = pPresentationDescriptor.GetUINT64(MFAttributesClsid.MF_PD_DURATION, out hnsSegmentDuration);
            MFError.ThrowExceptionForHR(hr);

            Debug.Assert(hnsSegmentDuration > 0);

            //store the segment info: SegmentId, SegmentDuration, TopoID in the linked list.
            m_Segments.AddNewSegmentEntry(pSegmentId, hnsSegmentDuration, TopologyID, sURL);
        }
        private void Load()
        {
            MediaFoundationInterop.MFCreateSourceResolver(out IMFSourceResolver resolver);
            object unknown;

            try
            {
                resolver.CreateObjectFromURL(URL, SourceResolverFlags.MF_RESOLUTION_MEDIASOURCE | SourceResolverFlags.MF_RESOLUTION_CONTENT_DOES_NOT_HAVE_TO_MATCH_EXTENSION_OR_MIME_TYPE,
                                             null, out _, out unknown);
            }
            catch (Exception ex)
            {
                // Preserve the original resolver failure as the inner exception.
                throw new ArgumentException("Unsupported type.", ex);
            }
            MediaFoundationInterop.MFCreateMediaSession(IntPtr.Zero, out m_Session);
            MediaFoundationInterop.MFCreateTopology(out IMFTopology topo);
            IMFMediaSource source = unknown as IMFMediaSource;

            source.CreatePresentationDescriptor(out m_pDescriptor);
            m_pDescriptor.GetUINT64(MediaFoundationAttributes.MF_PD_DURATION, out long dur);
            m_Duration = dur / 10000000;
            m_pDescriptor.GetStreamDescriptorCount(out uint sdcount);
            m_streamcount = (int)sdcount;
            for (uint i = 0; i < m_streamcount; i++)
            {
                m_pDescriptor.GetStreamDescriptorByIndex(i, out bool IsSelected, out IMFStreamDescriptor sd);
                m_DefaultStreamSelect.Add(IsSelected);
                switch (StreamSelectFlag)
                {
                case StreamSelectFlags.SelectAllStream:
                    if (!IsSelected)
                    {
                        m_pDescriptor.SelectStream(i);
                    }
                    break;

                case StreamSelectFlags.SelectNone:
                    if (IsSelected)
                    {
                        m_pDescriptor.DeselectStream(i);
                    }
                    break;
                }
                sd.GetMediaTypeHandler(out IMFMediaTypeHandler typeHandler);
                typeHandler.GetMediaTypeByIndex(0, out IMFMediaType mediaType);
                mediaType.GetMajorType(out Guid streamtype);
                IMFActivate renderer;
                if (streamtype == MediaTypes.MFMediaType_Audio)
                {
                    MediaFoundationInterop.MFCreateAudioRendererActivate(out renderer);
                    mediaType.GetUINT32(MediaFoundationAttributes.MF_MT_AUDIO_SAMPLES_PER_SECOND, out int rate);//SampleRate
                    mediaType.GetUINT32(MediaFoundationAttributes.MF_MT_AUDIO_NUM_CHANNELS, out int channelcount);
                    int samplesize;
                    try
                    {
                        mediaType.GetUINT32(MediaFoundationAttributes.MF_MT_AUDIO_BITS_PER_SAMPLE, out samplesize);
                    }
                    catch (COMException e)
                    {
                        // 0xC00D36E6 is MF_E_ATTRIBUTENOTFOUND: bits-per-sample is optional,
                        // so fall back to 8 bits; rethrow anything else.
                        if ((uint)e.HResult != 0xC00D36E6)
                        {
                            throw;
                        }
                        samplesize = 8;
                    }
                    m_format.Add(new WaveFormat(rate, samplesize, channelcount));
                }
                else
                {
                    continue;
                }
                MediaFoundationInterop.MFCreateTopologyNode(MF_TOPOLOGY_TYPE.MF_TOPOLOGY_SOURCESTREAM_NODE, out IMFTopologyNode sourcenode);
                sourcenode.SetUnknown(MediaFoundationAttributes.MF_TOPONODE_SOURCE, source);
                sourcenode.SetUnknown(MediaFoundationAttributes.MF_TOPONODE_PRESENTATION_DESCRIPTOR, m_pDescriptor);
                sourcenode.SetUnknown(MediaFoundationAttributes.MF_TOPONODE_STREAM_DESCRIPTOR, sd);
                topo.AddNode(sourcenode);
                MediaFoundationInterop.MFCreateTopologyNode(MF_TOPOLOGY_TYPE.MF_TOPOLOGY_OUTPUT_NODE, out IMFTopologyNode outputnode);
                outputnode.SetObject(renderer);
                topo.AddNode(outputnode);
                sourcenode.ConnectOutput(0, outputnode, 0);
            }
            m_Session.SetTopology(0, topo);
            m_Eventthread = new Thread(ProcessEvent);
            m_Eventthread.Start();
        }
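
The ProcessEvent thread started at the end of Load is not shown. A sketch of a typical session event loop, written here with MediaFoundation.Net names (the example's own MediaFoundationInterop wrapper will differ in signatures):

        private void ProcessEventSketch()
        {
            bool done = false;
            while (!done)
            {
                // Blocks until the session queues the next event.
                IMFMediaEvent mediaEvent;
                if (m_Session.GetEvent(MFEventFlag.None, out mediaEvent) != HResult.S_OK)
                {
                    break;
                }

                MediaEventType eventType;
                mediaEvent.GetType(out eventType);

                if (eventType == MediaEventType.MESessionTopologyStatus)
                {
                    // Topology resolved: start playback from the beginning.
                    int status;
                    mediaEvent.GetUINT32(MFAttributesClsid.MF_EVENT_TOPOLOGY_STATUS, out status);
                    if (status == (int)MFTopoStatus.Ready)
                    {
                        m_Session.Start(Guid.Empty, new PropVariant());
                    }
                }
                else if (eventType == MediaEventType.MESessionEnded || eventType == MediaEventType.MESessionClosed)
                {
                    done = true;
                }

                Marshal.ReleaseComObject(mediaEvent);
            }
        }
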
Example #11
        ///////////////////////////////////////////////////////////////////////
        //  Name: CreateTopology
        //  Description:  Creates the topology.
        //
        //  Note: The first audio stream is connected to the media sink.
        //        Other streams are deselected.
        ///////////////////////////////////////////////////////////////////////

        static void CreateTopology(IMFMediaSource pSource, IMFMediaSinkAlt pSink, out IMFTopology ppTopology)
        {
            HResult hr;
            IMFPresentationDescriptor pPD = null;
            IMFStreamDescriptor       pSD = null;

            int  cStreams   = 0;
            bool fConnected = false;

            hr = MFExtern.MFCreateTopology(out ppTopology);
            MFError.ThrowExceptionForHR(hr);

            hr = pSource.CreatePresentationDescriptor(out pPD);
            MFError.ThrowExceptionForHR(hr);

            try
            {
                hr = pPD.GetStreamDescriptorCount(out cStreams);
                MFError.ThrowExceptionForHR(hr);

                Guid majorType;
                bool fSelected = false;

                for (int iStream = 0; iStream < cStreams; iStream++)
                {
                    hr = pPD.GetStreamDescriptorByIndex(iStream, out fSelected, out pSD);
                    MFError.ThrowExceptionForHR(hr);

                    try
                    {
                        // If the stream is not selected by default, ignore it.
                        if (!fSelected)
                        {
                            continue;
                        }

                        // Get the major media type.
                        GetStreamMajorType(pSD, out majorType);

                        // If it's not audio, deselect it and continue.
                        if (majorType != MFMediaType.Audio)
                        {
                            // Deselect this stream
                            hr = pPD.DeselectStream(iStream);
                            MFError.ThrowExceptionForHR(hr);

                            continue;
                        }

                        // It's an audio stream, so try to create the topology branch.
                        CreateTopologyBranch(ppTopology, pSource, pPD, pSD, pSink);
                    }
                    finally
                    {
                        if (pSD != null)
                        {
                            Marshal.ReleaseComObject(pSD);
                        }
                    }

                    // Set our status flag.
                    fConnected = true;

                    // At this point we have reached the first audio stream in the
                    // source, so we can stop looking (whether we succeeded or failed).
                    break;
                }
            }
            finally
            {
                if (pPD != null)
                {
                    Marshal.ReleaseComObject(pPD);
                }
            }

            // Even if we succeeded, if we didn't connect any streams, it's a failure.
            // (For example, it might be a video-only source.)
            if (!fConnected)
            {
                throw new Exception("No audio streams");
            }
        }
Example #12
        void ValidateMP4OutputFile(string mp4filepath)
        {
            ulong  duration     = 0;
            uint   videoWidth   = 0;
            uint   videoHeight  = 0;
            double videoFPS     = 0.0;
            uint   videoBitrate = 0;

            try
            {
                IMFMediaSource            mediaSource            = null;
                IMFSourceReader           sourceReader           = null;
                ulong                     videoSize              = 0;
                ulong                     frameRate              = 0;
                MFHelper.IMFMediaType     mediaType              = null;
                IMFPresentationDescriptor presentationDescriptor = null;
                uint   objectType   = default(uint);
                object objectSource = null;

                API.MFStartup();

                // Create the media source using source resolver and the input URL

                IMFSourceResolver sourceResolver = null;
                API.MFCreateSourceResolver(out sourceResolver);

                // sourceResolver.CreateObjectFromURL("..\\..\\Apps\\SmartCam\\SmartRecorder\\Output\\VideoWriterTest\\CreateTestWMVFile_640x480_24fps_15s\\TestMP4File_640x480_24fps_15s.mp4", Consts.MF_RESOLUTION_MEDIASOURCE, null, out objectType, out objectSource);
                sourceResolver.CreateObjectFromURL(mp4filepath, Consts.MF_RESOLUTION_MEDIASOURCE, null, out objectType, out objectSource);

                mediaSource = (IMFMediaSource)objectSource;

                API.MFCreateSourceReaderFromMediaSource(mediaSource, null, out sourceReader);

                mediaSource.CreatePresentationDescriptor(out presentationDescriptor);

                // Get the duration
                presentationDescriptor.GetUINT64(new Guid(Consts.MF_PD_DURATION), out duration);


                // Get the video width and height
                sourceReader.GetCurrentMediaType(0, out mediaType);

                mediaType.GetUINT64(Guid.Parse(Consts.MF_MT_FRAME_SIZE), out videoSize);

                videoWidth  = (uint)(videoSize >> 32);
                videoHeight = (uint)(videoSize & 0x00000000FFFFFFFF);

                // Get the Frame Rate
                mediaType.GetUINT64(Guid.Parse(Consts.MF_MT_FRAME_RATE), out frameRate);

                if ((frameRate & 0x00000000FFFFFFFF) != 0)
                {
                    videoFPS = (double)(frameRate >> 32) / (double)(frameRate & 0x00000000FFFFFFFF);
                }

                // Get the encoding bitrate
                mediaType.GetUINT32(new Guid(Consts.MF_MT_AVG_BITRATE), out videoBitrate);

                API.MFShutdown();
            }
            catch (Exception exception)
            {
                Console.WriteLine("Exception failure: {0}", exception.ToString());
                Assert.IsFalse(true);
            }

            Assert.IsFalse(Math.Abs((double)duration - (double)VIDEO_DURATION_IN_100_NS) > (double)VIDEO_DURATION_VAR_IN_100_NS);
            Assert.IsFalse(videoWidth != VIDEO_WIDTH);
            Assert.IsFalse(videoHeight != VIDEO_HEIGHT);
            Assert.IsFalse(Math.Abs(videoFPS - VIDEO_FPS) > VIDEO_FPS_VAR);
            Assert.IsFalse(Math.Abs((int)videoBitrate - VIDEO_ENCODE_BITRATE) > VIDEO_ENCODE_BITRATE_VAR);
        }
Example #13
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Prepares the media session and topology and opens the media source
        /// and media sink.
        ///
        /// Once the session and topology are set up, an MESessionTopologySet event
        /// will be triggered in the callback handler. From there the events trigger
        /// each other and everything rolls along automatically.
        /// </summary>
        /// <param name="sourceFileName">the source file name</param>
        /// <param name="outputFileName">the name of the output file</param>
        /// <history>
        ///    01 Nov 18  Cynic - Originally Written
        /// </history>
        public void PrepareSessionAndTopology(string sourceFileName, string outputFileName)
        {
            HResult                   hr;
            IMFSourceResolver         pSourceResolver = null;
            IMFTopology               pTopology       = null;
            IMFPresentationDescriptor sourcePresentationDescriptor = null;
            int sourceStreamCount = 0;
            IMFStreamDescriptor audioStreamDescriptor = null;
            bool            streamIsSelected          = false;
            IMFTopologyNode sourceAudioNode           = null;
            IMFTopologyNode outputSinkNode            = null;
            IMFMediaType    currentAudioMediaType     = null;
            int             audioStreamIndex          = -1;

            LogMessage("PrepareSessionAndTopology ");

            // we sanity check the filenames - the existence of the path and if the file already exists
            // should have been checked before this call
            if ((sourceFileName == null) || (sourceFileName.Length == 0))
            {
                throw new Exception("PrepareSessionAndTopology: source file name is invalid. Cannot continue.");
            }

            if ((outputFileName == null) || (outputFileName.Length == 0))
            {
                throw new Exception("PrepareSessionAndTopology: output file name is invalid. Cannot continue.");
            }

            try
            {
                // reset everything
                CloseAllMediaDevices();

                // Create the media session.
                hr = MFExtern.MFCreateMediaSession(null, out mediaSession);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateMediaSession failed. Err=" + hr.ToString());
                }
                if (mediaSession == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateMediaSession failed. mediaSession == null");
                }

                // set up our media session call back handler.
                mediaSessionAsyncCallbackHandler = new TantaAsyncCallbackHandler();
                mediaSessionAsyncCallbackHandler.Initialize();
                mediaSessionAsyncCallbackHandler.MediaSession = mediaSession;
                mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackError = HandleMediaSessionAsyncCallBackErrors;
                mediaSessionAsyncCallbackHandler.MediaSessionAsyncCallBackEvent = HandleMediaSessionAsyncCallBackEvent;

                // Register the callback handler with the session and tell it that events can
                // start. This does not actually trigger an event it just lets the media session
                // know that it can now send them if it wishes to do so.
                hr = mediaSession.BeginGetEvent(mediaSessionAsyncCallbackHandler, null);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to mediaSession.BeginGetEvent failed. Err=" + hr.ToString());
                }

                // Create a new topology.  A topology describes a collection of media sources, sinks, and transforms that are
                // connected in a certain order. These objects are represented within the topology by topology nodes,
                // which expose the IMFTopologyNode interface. A topology describes the path of multimedia data through these nodes.
                hr = MFExtern.MFCreateTopology(out pTopology);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateTopology failed. Err=" + hr.ToString());
                }
                if (pTopology == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to MFExtern.MFCreateTopology failed. pTopology == null");
                }

                // ####
                // #### we now create the media source, this is an audio file
                // ####

                // use the file name to create the media source for the audio device. Media sources are objects that generate media data.
                // For example, the data might come from a video file, a network stream, or a hardware device, such as a camera. Each
                // media source contains one or more streams, and each stream delivers data of one type, such as audio or video.
                mediaSource = TantaWMFUtils.GetMediaSourceFromFile(sourceFileName);
                if (mediaSource == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to mediaSource == null");
                }

                // A presentation is a set of related media streams that share a common presentation time.  We now get a copy of the media
                // source's presentation descriptor. Applications can use the presentation descriptor to select streams
                // and to get information about the source content.
                hr = mediaSource.CreatePresentationDescriptor(out sourcePresentationDescriptor);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to mediaSource.CreatePresentationDescriptor failed. Err=" + hr.ToString());
                }
                if (sourcePresentationDescriptor == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to mediaSource.CreatePresentationDescriptor failed. sourcePresentationDescriptor == null");
                }

                // Now we get the number of stream descriptors in the presentation. A presentation descriptor contains a list of one or more
                // stream descriptors. These describe the streams in the presentation. Streams can be either selected or deselected. Only the
                // selected streams produce data. Deselected streams are not active and do not produce any data.
                hr = sourcePresentationDescriptor.GetStreamDescriptorCount(out sourceStreamCount);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. Err=" + hr.ToString());
                }
                if (sourceStreamCount == 0)
                {
                    throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. sourceStreamCount == 0");
                }

                // Look at each stream, there can be more than one stream here
                // Usually only one is enabled. This app uses the first "selected"
                // stream we come to which has the appropriate media type
                for (int i = 0; i < sourceStreamCount; i++)
                {
                    // we require the major type to be audio
                    Guid guidMajorType = TantaWMFUtils.GetMajorMediaTypeFromPresentationDescriptor(sourcePresentationDescriptor, i);
                    if (guidMajorType != MFMediaType.Audio)
                    {
                        continue;
                    }

                    // we also require the stream to be enabled
                    hr = sourcePresentationDescriptor.GetStreamDescriptorByIndex(i, out streamIsSelected, out audioStreamDescriptor);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. Err=" + hr.ToString());
                    }
                    if (audioStreamDescriptor == null)
                    {
                        throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. audioStreamDescriptor == null");
                    }
                    // if the stream is selected, leave now; we will release the audioStreamDescriptor later
                    if (streamIsSelected == true)
                    {
                        audioStreamIndex = i;  // record this
                        break;
                    }

                    // release the one we are not using
                    if (audioStreamDescriptor != null)
                    {
                        Marshal.ReleaseComObject(audioStreamDescriptor);
                        audioStreamDescriptor = null;
                    }
                    audioStreamIndex = -1;
                }

                // by the time we get here we should have an audioStreamDescriptor; if
                // we do not, then we cannot proceed
                if (audioStreamDescriptor == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. audioStreamDescriptor == null");
                }
                if (audioStreamIndex < 0)
                {
                    throw new Exception("PrepareSessionAndTopology call to sourcePresentationDescriptor.GetStreamDescriptorByIndex failed. audioStreamIndex < 0");
                }

                // ####
                // #### we now create the media sink, we need the type from the stream to do
                // #### this which is why we wait until now to set it up
                // ####

                currentAudioMediaType = TantaWMFUtils.GetCurrentMediaTypeFromStreamDescriptor(audioStreamDescriptor);
                if (currentAudioMediaType == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to currentAudioMediaType == null");
                }

                mediaSink = OpenMediaFileSink(outputFileName);
                if (mediaSink == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to mediaSink == null");
                }

                // ####
                // #### we now make up a topology branch for the audio stream
                // ####

                // Create a source node for this stream.
                sourceAudioNode = TantaWMFUtils.CreateSourceNodeForStream(mediaSource, sourcePresentationDescriptor, audioStreamDescriptor);
                if (sourceAudioNode == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to CreateSourceNodeForStream failed. pSourceNode == null");
                }

                // Create the output node - this is a file sink in this case.
                outputSinkNode = TantaWMFUtils.CreateSinkNodeForStream(mediaSink);
                if (outputSinkNode == null)
                {
                    throw new Exception("PrepareSessionAndTopology call to CreateOutputNodeForStream failed. outputSinkNode == null");
                }

                // Add the nodes to the topology. First the source
                hr = pTopology.AddNode(sourceAudioNode);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to pTopology.AddNode(sourceAudioNode) failed. Err=" + hr.ToString());
                }

                // then add the output
                hr = pTopology.AddNode(outputSinkNode);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to pTopology.AddNode(outputSinkNode) failed. Err=" + hr.ToString());
                }

                // Connect the output stream from the source node to the input stream of the output node. The parameters are:
                //    dwOutputIndex  -  Zero-based index of the output stream on this node.
                //    *pDownstreamNode  -  Pointer to the IMFTopologyNode interface of the node to connect to.
                //    dwInputIndexOnDownstreamNode  -  Zero-based index of the input stream on the other node.
                hr = sourceAudioNode.ConnectOutput(0, outputSinkNode, 0);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("PrepareSessionAndTopology call to  pSourceNode.ConnectOutput failed. Err=" + hr.ToString());
                }

                // Set the topology on the media session.
                // If SetTopology succeeds, the media session will queue an
                // MESessionTopologySet event.
                hr = mediaSession.SetTopology(0, pTopology);
                MFError.ThrowExceptionForHR(hr);

                // Release the topology
                if (pTopology != null)
                {
                    Marshal.ReleaseComObject(pTopology);
                }
            }
            catch (Exception ex)
            {
                LogMessage("Error: " + ex.Message);
                OISMessageBox(ex.Message);
            }
            finally
            {
                // Clean up
                if (pSourceResolver != null)
                {
                    Marshal.ReleaseComObject(pSourceResolver);
                }
                if (sourcePresentationDescriptor != null)
                {
                    Marshal.ReleaseComObject(sourcePresentationDescriptor);
                }
                if (audioStreamDescriptor != null)
                {
                    Marshal.ReleaseComObject(audioStreamDescriptor);
                }
                if (sourceAudioNode != null)
                {
                    Marshal.ReleaseComObject(sourceAudioNode);
                }
                if (outputSinkNode != null)
                {
                    Marshal.ReleaseComObject(outputSinkNode);
                }
                if (currentAudioMediaType != null)
                {
                    Marshal.ReleaseComObject(currentAudioMediaType);
                }
            }
        }
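
The interesting work then happens in the callback handler registered above. A sketch of the state machine such a handler typically implements (the actual TantaAsyncCallbackHandler plumbing is not shown here and differs in detail):

        // Sketch: react to the key session events once SetTopology has been called.
        private void HandleSessionEventSketch(MediaEventType eventType, IMFMediaEvent mediaEvent)
        {
            switch (eventType)
            {
                case MediaEventType.MESessionTopologyStatus:
                {
                    int status;
                    mediaEvent.GetUINT32(MFAttributesClsid.MF_EVENT_TOPOLOGY_STATUS, out status);
                    if (status == (int)MFTopoStatus.Ready)
                    {
                        // Topology is resolved: start the transcode from position 0.
                        MFError.ThrowExceptionForHR(mediaSession.Start(Guid.Empty, new PropVariant()));
                    }
                    break;
                }
                case MediaEventType.MESessionEnded:
                    // Source fully read and written to the sink: close the session.
                    MFError.ThrowExceptionForHR(mediaSession.Close());
                    break;
                case MediaEventType.MESessionClosed:
                    // Safe to shut down the source and finalize the sink now.
                    mediaSource.Shutdown();
                    break;
            }
        }
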
        /// <summary>
        ///     Starts the asynchronous encode operation
        /// </summary>
        /// <param name="inputURL">Source filename</param>
        /// <param name="outputURL">Target filename</param>
        /// <param name="audioOutput">Audio format that will be used for audio streams</param>
        /// <param name="videoOutput">Video format that will be used for video streams</param>
        /// <param name="startPosition">Starting position of the content</param>
        /// <param name="endPosition">Position where the new content will end</param>
        public void Encode(string inputURL, string outputURL, AudioFormat audioOutput, VideoFormat videoOutput, ulong startPosition, ulong endPosition)
        {
            // If busy with other operation ignore and return
            if (this.IsBusy())
            {
                return;
            }

            try
            {
                this.presentationClock = null;
                this.startPosition     = startPosition;
                this.endPosition       = endPosition;

                object objectSource = null;

                // Create the media source using source resolver and the input URL
                uint objectType = default(uint);
                this.mediaSource = null;

                // Init source resolver
                IMFSourceResolver sourceResolver = null;
                MFHelper.MFCreateSourceResolver(out sourceResolver);

                sourceResolver.CreateObjectFromURL(inputURL, Consts.MF_RESOLUTION_MEDIASOURCE, null, out objectType, out objectSource);

                this.mediaSource = (IMFMediaSource)objectSource;

                // Create the media session using a global start time so MF_TOPOLOGY_PROJECTSTOP can be used to stop the session
                this.mediaSession = null;
                IMFAttributes mediaSessionAttributes = null;

                MFHelper.MFCreateAttributes(out mediaSessionAttributes, 1);
                mediaSessionAttributes.SetUINT32(new Guid(Consts.MF_SESSION_GLOBAL_TIME), 1);

                MFHelper.MFCreateMediaSession(mediaSessionAttributes, out this.mediaSession);

                // Create the event handler
                AsyncEventHandler mediaEventHandler = new AsyncEventHandler(this.mediaSession);
                mediaEventHandler.MediaEvent += this.MediaEvent;

                // Get the stream descriptor
                IMFPresentationDescriptor presentationDescriptor = null;
                mediaSource.CreatePresentationDescriptor(out presentationDescriptor);

                // Get the duration
                presentationDescriptor.GetUINT64(new Guid(Consts.MF_PD_DURATION), out this.duration);
                IMFTranscodeProfile transcodeProfile = null;

                Guid containerType = new Guid(Consts.MFTranscodeContainerType_MPEG4);
                if (outputURL.EndsWith(".wmv", StringComparison.OrdinalIgnoreCase) || outputURL.EndsWith(".wma", StringComparison.OrdinalIgnoreCase))
                {
                    containerType = new Guid(Consts.MFTranscodeContainerType_ASF);
                }

                // Generate the transcoding profile
                transcodeProfile = SimpleFastEncode.CreateProfile(audioOutput, videoOutput, containerType);

                // Create the MF topology using the profile
                IMFTopology topology = null;
                MFHelper.MFCreateTranscodeTopology(this.mediaSource, outputURL, transcodeProfile, out topology);

                // Set the end position
                topology.SetUINT64(new Guid(Consts.MF_TOPOLOGY_PROJECTSTART), 0);
                topology.SetUINT64(new Guid(Consts.MF_TOPOLOGY_PROJECTSTOP), (endPosition == 0) ? this.duration : endPosition);

                // Set the session topology
                this.mediaSession.SetTopology((uint)Enums.MFSESSION_SETTOPOLOGY_FLAGS.None, topology);
            }
            catch (Exception ex)
            {
                this.mediaSession = null;

                // Fire the EncodeError event
                if (this.EncodeError != null)
                {
                    this.EncodeError(new Exception(ex.Message, ex));
                }
            }
        }
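
SetTopology only queues the work; the encode actually starts when the session reports that the topology is ready. A sketch of that step in MediaFoundation.Net terms (the example's MFHelper wrapper differs; passing an empty PropVariant starts at the beginning, while a VT_I8 value in 100-ns units starts mid-stream):

        // Start the session once MESessionTopologyStatus reports MFTopoStatus.Ready.
        private void OnTopologyReadySketch()
        {
            // Empty PropVariant = start from the beginning/current position.
            // new PropVariant((long)startPosition) would start at a 100-ns offset instead;
            // MF_TOPOLOGY_PROJECTSTOP (set above) ends the session at endPosition.
            MFError.ThrowExceptionForHR(this.mediaSession.Start(Guid.Empty, new PropVariant()));
        }
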
Example #15
        /// +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=
        /// <summary>
        /// Displays the video formats for the currently selected video device. This
        /// is more complicated than it looks. We have to open the video source, convert
        /// that to a Media Source and then interrogate that source to find a list
        /// of video formats.
        ///
        /// NOTE: this function will throw exceptions - caller must trap them
        /// </summary>
        /// <history>
        ///    01 Nov 18  Cynic - Started
        /// </history>
        private void DisplayVideoFormatsForCurrentCaptureDevice()
        {
            IMFPresentationDescriptor sourcePresentationDescriptor = null;
            int  sourceStreamCount = 0;
            bool streamIsSelected  = false;
            IMFStreamDescriptor videoStreamDescriptor = null;
            IMFMediaTypeHandler typeHandler           = null;
            int mediaTypeCount = 0;

            List <TantaMFVideoFormatContainer> formatList = new List <TantaMFVideoFormatContainer>();
            HResult        hr;
            IMFMediaSource mediaSource = null;

            try
            {
                // clear what we have now
                listViewSupportedFormats.Clear();
                // reset this
                listViewSupportedFormats.ListViewItemSorter = null;

                // get the currently selected device
                TantaMFDevice currentDevice = (TantaMFDevice)comboBoxCaptureDevices.SelectedItem;
                if (currentDevice == null)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice currentDevice == null");
                }

                // use the device symbolic name to create the media source for the video device. Media sources are objects that generate media data.
                // For example, the data might come from a video file, a network stream, or a hardware device, such as a camera. Each
                // media source contains one or more streams, and each stream delivers data of one type, such as audio or video.
                mediaSource = TantaWMFUtils.GetMediaSourceFromTantaDevice(currentDevice);
                if (mediaSource == null)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource == null");
                }

                // A presentation is a set of related media streams that share a common presentation time.
                // we don't need that functionality in this app but we do need the presentation descriptor
                // to find the stream descriptors; these will give us the media types on offer
                hr = mediaSource.CreatePresentationDescriptor(out sourcePresentationDescriptor);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. Err=" + hr.ToString());
                }
                if (sourcePresentationDescriptor == null)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. sourcePresentationDescriptor == null");
                }

                // Now we get the number of stream descriptors in the presentation.
                // A presentation descriptor contains a list of one or more
                // stream descriptors.
                hr = sourcePresentationDescriptor.GetStreamDescriptorCount(out sourceStreamCount);
                if (hr != HResult.S_OK)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. Err=" + hr.ToString());
                }
                if (sourceStreamCount == 0)
                {
                    throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. sourceStreamCount == 0");
                }

                // look for the video stream
                for (int i = 0; i < sourceStreamCount; i++)
                {
                    // we require the major type to be video
                    Guid guidMajorType = TantaWMFUtils.GetMajorMediaTypeFromPresentationDescriptor(sourcePresentationDescriptor, i);
                    if (guidMajorType != MFMediaType.Video)
                    {
                        continue;
                    }

                    // we also require the stream to be enabled
                    hr = sourcePresentationDescriptor.GetStreamDescriptorByIndex(i, out streamIsSelected, out videoStreamDescriptor);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. Err=" + hr.ToString());
                    }
                    if (videoStreamDescriptor == null)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. videoStreamDescriptor == null");
                    }
                    // if the stream is not selected (enabled) look for the next
                    if (streamIsSelected == false)
                    {
                        Marshal.ReleaseComObject(videoStreamDescriptor);
                        videoStreamDescriptor = null;
                        continue;
                    }

                    // Get the media type handler for the stream. IMFMediaTypeHandler
                    // interface is a standard way of looking at the media types on a stream
                    hr = videoStreamDescriptor.GetMediaTypeHandler(out typeHandler);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. Err=" + hr.ToString());
                    }
                    if (typeHandler == null)
                    {
                        throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. typeHandler == null");
                    }
                    // Now we get the number of media types in the stream descriptor.
                    hr = typeHandler.GetMediaTypeCount(out mediaTypeCount);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. Err=" + hr.ToString());
                    }
                    if (mediaTypeCount == 0)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. mediaTypeCount == 0");
                    }

                    // now loop through each media type
                    for (int mediaTypeId = 0; mediaTypeId < mediaTypeCount; mediaTypeId++)
                    {
                        // Now we have the handler, get the media type.
                        IMFMediaType workingMediaType = null;
                        hr = typeHandler.GetMediaTypeByIndex(mediaTypeId, out workingMediaType);
                        if (hr != HResult.S_OK)
                        {
                            throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. Err=" + hr.ToString());
                        }
                        if (workingMediaType == null)
                        {
                            throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. workingMediaType == null");
                        }
                        TantaMFVideoFormatContainer tmpContainer = TantaMediaTypeInfo.GetVideoFormatContainerFromMediaTypeObject(workingMediaType, currentDevice);
                        if (tmpContainer == null)
                        {
                            // we failed
                            throw new Exception("GetSupportedVideoFormatsFromSourceReaderInFormatContainers failed on call to GetVideoFormatContainerFromMediaTypeObject");
                        }
                        // now add it
                        formatList.Add(tmpContainer);
                        Marshal.ReleaseComObject(workingMediaType);
                        workingMediaType = null;
                    }

                    // NOTE: we only do the first enabled video stream we find.
                    // it is possible to have more but our control
                    // cannot cope with that
                    break;
                }

                // now display the formats
                foreach (TantaMFVideoFormatContainer videoFormat in formatList)
                {
                    ListViewItem lvi = new ListViewItem(new[] { videoFormat.SubTypeAsString, videoFormat.FrameSizeAsString, videoFormat.FrameRateAsString, videoFormat.FrameRateMaxAsString, videoFormat.AllAttributes });
                    lvi.Tag = videoFormat;
                    listViewSupportedFormats.Items.Add(lvi);
                }

                listViewSupportedFormats.Columns.Add("Type", 70);
                listViewSupportedFormats.Columns.Add("FrameSize WxH", 100);
                listViewSupportedFormats.Columns.Add("FrameRate f/s", 100);
                listViewSupportedFormats.Columns.Add("FrameRateMax f/s", 100);
                listViewSupportedFormats.Columns.Add("All Attributes", 2500);
            }
            finally
            {
                // close and release
                if (mediaSource != null)
                {
                    Marshal.ReleaseComObject(mediaSource);
                    mediaSource = null;
                }
                if (sourcePresentationDescriptor != null)
                {
                    Marshal.ReleaseComObject(sourcePresentationDescriptor);
                    sourcePresentationDescriptor = null;
                }
                if (videoStreamDescriptor != null)
                {
                    Marshal.ReleaseComObject(videoStreamDescriptor);
                    videoStreamDescriptor = null;
                }
                if (typeHandler != null)
                {
                    Marshal.ReleaseComObject(typeHandler);
                    typeHandler = null;
                }
            }
        }
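
GetVideoFormatContainerFromMediaTypeObject is a Tanta helper that is not shown. A sketch of the kind of extraction it performs for each media type, assuming MediaFoundation.Net; frame size and frame rate are packed UINT64 attributes with the high 32 bits holding the width/numerator and the low 32 bits the height/denominator:

        // Sketch: pull subtype, frame size and frame rate out of one IMFMediaType.
        private static string DescribeVideoMediaType(IMFMediaType mediaType)
        {
            Guid subType;
            long frameSize;
            long frameRate;
            MFError.ThrowExceptionForHR(mediaType.GetGUID(MFAttributesClsid.MF_MT_SUBTYPE, out subType));
            MFError.ThrowExceptionForHR(mediaType.GetUINT64(MFAttributesClsid.MF_MT_FRAME_SIZE, out frameSize));
            MFError.ThrowExceptionForHR(mediaType.GetUINT64(MFAttributesClsid.MF_MT_FRAME_RATE, out frameRate));

            int width  = (int)(frameSize >> 32);
            int height = (int)(frameSize & 0xFFFFFFFF);
            double fps = (frameRate & 0xFFFFFFFF) != 0
                ? (double)(frameRate >> 32) / (frameRate & 0xFFFFFFFF)
                : 0.0;

            return string.Format("{0} {1}x{2} @ {3:0.##} fps", subType, width, height, fps);
        }
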
Example #16
        private List <MFVideoFormatContainer> GetSupportedFormats(int SourceIndex, string FriendlyName)
        {
            MFDevice        UnderlyingDevice = null;
            List <MFDevice> vcDevices        = WMFUtils.GetDevicesByCategory(MFAttributesClsid.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, CLSID.MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);

            foreach (MFDevice device in vcDevices)
            {
                if (device.FriendlyName == FriendlyName)
                {
                    UnderlyingDevice = device;
                    break;
                }
            }
            if (UnderlyingDevice != null)
            {
                IMFPresentationDescriptor     sourcePresentationDescriptor = null;
                IMFStreamDescriptor           videoStreamDescriptor        = null;
                IMFMediaTypeHandler           typeHandler = null;
                List <MFVideoFormatContainer> formatList  = new List <MFVideoFormatContainer>();
                HResult        hr;
                IMFMediaSource mediaSource = null;
                try
                {
                    // use the device symbolic name to create the media source for the video device. Media sources are objects that generate media data.
                    // For example, the data might come from a video file, a network stream, or a hardware device, such as a camera. Each
                    // media source contains one or more streams, and each stream delivers data of one type, such as audio or video.
                    mediaSource = WMFUtils.GetMediaSourceFromDevice(UnderlyingDevice);
                    if (mediaSource == null)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource == null");
                    }
                    // A presentation is a set of related media streams that share a common presentation time.
                    // we don't need that functionality in this app but we do need the presentation descriptor
                    // to find the stream descriptors; these will give us the media types on offer
                    hr = mediaSource.CreatePresentationDescriptor(out sourcePresentationDescriptor);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. Err=" + hr.ToString());
                    }
                    if (sourcePresentationDescriptor == null)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to mediaSource.CreatePresentationDescriptor failed. sourcePresentationDescriptor == null");
                    }
                    // Now we get the number of stream descriptors in the presentation.
                    // A presentation descriptor contains a list of one or more
                    // stream descriptors.
                    hr = sourcePresentationDescriptor.GetStreamDescriptorCount(out int sourceStreamCount);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. Err=" + hr.ToString());
                    }
                    if (sourceStreamCount == 0)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorCount failed. sourceStreamCount == 0");
                    }
                    // look for the video stream
                    // we require the major type to be video
                    Guid guidMajorType = WMFUtils.GetMajorMediaTypeFromPresentationDescriptor(sourcePresentationDescriptor, SourceIndex);
                    if (guidMajorType != MFMediaType.Video)
                    {
                        return(new List <MFVideoFormatContainer>());
                    }
                    // we also require the stream to be enabled, so make sure the stream at SourceIndex is selected
                    sourcePresentationDescriptor.SelectStream(SourceIndex);
                    hr = sourcePresentationDescriptor.GetStreamDescriptorByIndex(SourceIndex, out bool streamIsSelected, out videoStreamDescriptor);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. Err=" + hr.ToString());
                    }
                    if (videoStreamDescriptor == null)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to sourcePresentationDescriptor.GetStreamDescriptorByIndex(v) failed. videoStreamDescriptor == null");
                    }
                    // if the stream is not selected (enabled) we cannot enumerate its formats, so return an empty list
                    if (streamIsSelected == false)
                    {
                        Marshal.ReleaseComObject(videoStreamDescriptor);
                        videoStreamDescriptor = null;
                        return(new List <MFVideoFormatContainer>());
                    }
                    // Get the media type handler for the stream. The IMFMediaTypeHandler
                    // interface is a standard way of looking at the media types on a stream
                    hr = videoStreamDescriptor.GetMediaTypeHandler(out typeHandler);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. Err=" + hr.ToString());
                    }
                    if (typeHandler == null)
                    {
                        throw new Exception("call to videoStreamDescriptor.GetMediaTypeHandler failed. typeHandler == null");
                    }
                    // Now we get the number of media types in the stream descriptor.
                    hr = typeHandler.GetMediaTypeCount(out int mediaTypeCount);
                    if (hr != HResult.S_OK)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. Err=" + hr.ToString());
                    }
                    if (mediaTypeCount == 0)
                    {
                        throw new Exception("DisplayVideoFormatsForCurrentCaptureDevice call to typeHandler.GetMediaTypeCount failed. mediaTypeCount == 0");
                    }
                    // now loop through each media type
                    for (int mediaTypeId = 0; mediaTypeId < mediaTypeCount; mediaTypeId++)
                    {
                        // Now we have the handler, get the media type.
                        hr = typeHandler.GetMediaTypeByIndex(mediaTypeId, out IMFMediaType workingMediaType);
                        if (hr != HResult.S_OK)
                        {
                            throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. Err=" + hr.ToString());
                        }
                        if (workingMediaType == null)
                        {
                            throw new Exception("GetMediaTypeFromStreamDescriptorById call to typeHandler.GetMediaTypeByIndex failed. workingMediaType == null");
                        }
                        MFVideoFormatContainer tmpContainer = MediaTypeInfo.GetVideoFormatContainerFromMediaTypeObject(workingMediaType, UnderlyingDevice);
                        if (tmpContainer == null)
                        {
                            // we failed
                            throw new Exception("GetSupportedVideoFormatsFromSourceReaderInFormatContainers failed on call to GetVideoFormatContainerFromMediaTypeObject");
                        }
                        // now add it
                        formatList.Add(tmpContainer);
                        Marshal.ReleaseComObject(workingMediaType);
                        workingMediaType = null;
                    }
                    return(formatList);
                }
                finally
                {
                    // close and release
                    if (mediaSource != null)
                    {
                        Marshal.ReleaseComObject(mediaSource);
                    }
                    if (sourcePresentationDescriptor != null)
                    {
                        Marshal.ReleaseComObject(sourcePresentationDescriptor);
                    }
                    if (videoStreamDescriptor != null)
                    {
                        Marshal.ReleaseComObject(videoStreamDescriptor);
                    }
                    if (typeHandler != null)
                    {
                        Marshal.ReleaseComObject(typeHandler);
                    }
                }
            }
            return(new List <MFVideoFormatContainer>());
        }
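The example above only enumerates the native formats; a minimal caller sketch follows. The DumpFormatsForDevice helper, the use of stream index 0, and the MFVideoFormatContainer.ToString() call are illustrative assumptions and are not part of the sample.

        // Hypothetical caller for the example above: list the formats offered by
        // stream 0 of a capture device identified by its friendly name.
        private void DumpFormatsForDevice(string friendlyName)
        {
            List<MFVideoFormatContainer> formatList = GetSupportedFormats(0, friendlyName);
            if (formatList.Count == 0)
            {
                Debug.WriteLine("No video formats reported for " + friendlyName);
                return;
            }
            foreach (MFVideoFormatContainer formatContainer in formatList)
            {
                // each container describes one native media type offered by the device
                Debug.WriteLine(formatContainer.ToString());
            }
        }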
Example #17
        ////////////////////////////////////////////////////////////////////////////////////////
        //  Name: CPlayer::CreateTopology (Private)
        //  Description:
        //      Creates a topology for the media source
        //  Parameters:
        //      pMediaSource: [in] Pointer to the media source
        //      pTopology: [in] Topology to which the source and output nodes are added
        /////////////////////////////////////////////////////////////////////////////////////////
        private void CreateTopology(
                            IMFMediaSource pMediaSource,
                            IMFTopology pTopology)
        {
            Debug.WriteLine("CPlayer::CreateTopology");

            // The caller needs to pass a valid media source.
            // The media source is required in order to set the source node attributes on the topology.

            if (pMediaSource == null || pTopology == null)
            {
                throw new COMException("null pointer", E_Pointer);
            }

            IMFPresentationDescriptor pPresentationDescriptor;

            //Create Presentation Descriptor for the media source
            int hr = pMediaSource.CreatePresentationDescriptor(out pPresentationDescriptor);
            MFError.ThrowExceptionForHR(hr);

            try
            {
                CreateNodesForStream(pPresentationDescriptor, pMediaSource, pTopology);
            }
            finally
            {
                SafeRelease(pPresentationDescriptor);
            }
        }
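The CreateTopology method above delegates the node creation to CreateNodesForStream, which is not shown on this page. The sketch below only illustrates the typical MF.Net shape of such a helper (one source node plus one default-renderer output node per selected stream); the method name, the extra hVideoWindow parameter, and the renderer-selection policy are assumptions rather than the original code.

        // Illustrative sketch only: a plausible MF.Net implementation of the
        // CreateNodesForStream helper referenced above. The hVideoWindow handle is an
        // assumed extra parameter; SafeRelease is the COMBase helper used in these samples.
        private void CreateNodesForStreamSketch(
                            IMFPresentationDescriptor pPresentationDescriptor,
                            IMFMediaSource pMediaSource,
                            IMFTopology pTopology,
                            IntPtr hVideoWindow)
        {
            int hr;

            hr = pPresentationDescriptor.GetStreamDescriptorCount(out int streamCount);
            MFError.ThrowExceptionForHR(hr);

            for (int i = 0; i < streamCount; i++)
            {
                hr = pPresentationDescriptor.GetStreamDescriptorByIndex(i, out bool selected, out IMFStreamDescriptor pStreamDescriptor);
                MFError.ThrowExceptionForHR(hr);

                try
                {
                    // Only selected (enabled) streams get a branch in the topology.
                    if (!selected)
                    {
                        continue;
                    }

                    // Source node: points at the source, the presentation descriptor
                    // and the stream descriptor so the session can resolve the stream.
                    hr = MFExtern.MFCreateTopologyNode(MFTopologyType.SourcestreamNode, out IMFTopologyNode pSourceNode);
                    MFError.ThrowExceptionForHR(hr);
                    hr = pSourceNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_SOURCE, pMediaSource);
                    MFError.ThrowExceptionForHR(hr);
                    hr = pSourceNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_PRESENTATION_DESCRIPTOR, pPresentationDescriptor);
                    MFError.ThrowExceptionForHR(hr);
                    hr = pSourceNode.SetUnknown(MFAttributesClsid.MF_TOPONODE_STREAM_DESCRIPTOR, pStreamDescriptor);
                    MFError.ThrowExceptionForHR(hr);

                    // Output node: an activation object for the default renderer,
                    // chosen by the major type of the stream.
                    hr = pStreamDescriptor.GetMediaTypeHandler(out IMFMediaTypeHandler pHandler);
                    MFError.ThrowExceptionForHR(hr);
                    hr = pHandler.GetMajorType(out Guid majorType);
                    MFError.ThrowExceptionForHR(hr);
                    SafeRelease(pHandler);

                    IMFActivate pRendererActivate;
                    if (majorType == MFMediaType.Video)
                    {
                        hr = MFExtern.MFCreateVideoRendererActivate(hVideoWindow, out pRendererActivate);
                    }
                    else
                    {
                        hr = MFExtern.MFCreateAudioRendererActivate(out pRendererActivate);
                    }
                    MFError.ThrowExceptionForHR(hr);

                    hr = MFExtern.MFCreateTopologyNode(MFTopologyType.OutputNode, out IMFTopologyNode pOutputNode);
                    MFError.ThrowExceptionForHR(hr);
                    hr = pOutputNode.SetObject(pRendererActivate);
                    MFError.ThrowExceptionForHR(hr);

                    // Add both nodes and connect source output 0 to renderer input 0.
                    hr = pTopology.AddNode(pSourceNode);
                    MFError.ThrowExceptionForHR(hr);
                    hr = pTopology.AddNode(pOutputNode);
                    MFError.ThrowExceptionForHR(hr);
                    hr = pSourceNode.ConnectOutput(0, pOutputNode, 0);
                    MFError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    SafeRelease(pStreamDescriptor);
                }
            }
        }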
Example #18
        ///////////////////////////////////////////////////////////////////////
        //  Name: CreateTopology
        //  Description:  Creates the topology.
        //
        //  Note: The first audio stream is connected to the media sink.
        //        Other streams are deselected.
        ///////////////////////////////////////////////////////////////////////
        static void CreateTopology(IMFMediaSource pSource, IMFMediaSinkAlt pSink, out IMFTopology ppTopology)
        {
            int hr;
            IMFPresentationDescriptor pPD = null;
            IMFStreamDescriptor pSD = null;

            int cStreams = 0;
            bool fConnected = false;

            hr = MFExtern.MFCreateTopology(out ppTopology);
            MFError.ThrowExceptionForHR(hr);

            hr = pSource.CreatePresentationDescriptor(out pPD);
            MFError.ThrowExceptionForHR(hr);

            try
            {
                hr = pPD.GetStreamDescriptorCount(out cStreams);
                MFError.ThrowExceptionForHR(hr);

                Guid majorType;
                bool fSelected = false;

                for (int iStream = 0; iStream < cStreams; iStream++)
                {
                    hr = pPD.GetStreamDescriptorByIndex(iStream, out fSelected, out pSD);
                    MFError.ThrowExceptionForHR(hr);

                    try
                    {
                        // If the stream is not selected by default, ignore it.
                        if (!fSelected)
                        {
                            continue;
                        }

                        // Get the major media type.
                        GetStreamMajorType(pSD, out majorType);

                        // If it's not audio, deselect it and continue.
                        if (majorType != MFMediaType.Audio)
                        {
                            // Deselect this stream
                            hr = pPD.DeselectStream(iStream);
                            MFError.ThrowExceptionForHR(hr);

                            continue;
                        }

                        // It's an audio stream, so try to create the topology branch.
                        CreateTopologyBranch(ppTopology, pSource, pPD, pSD, pSink);
                    }
                    finally
                    {
                        if (pSD != null)
                        {
                            Marshal.ReleaseComObject(pSD);
                        }
                    }

                    // Set our status flag.
                    fConnected = true;

                    // At this point we have connected the first audio stream in the
                    // source, so we can stop looking (a failure in the branch would have thrown).
                    break;
                }
            }
            finally
            {
                if (pPD != null)
                {
                    Marshal.ReleaseComObject(pPD);
                }
            }

            // Even if we succeeded, if we didn't connect any streams, it's a failure.
            // (For example, it might be a video-only source.)
            if (!fConnected)
            {
                throw new Exception("No audio streams");
            }
        }
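This example relies on two helpers that are not shown here, GetStreamMajorType and CreateTopologyBranch. A minimal sketch of GetStreamMajorType, assuming it simply queries the stream descriptor's media type handler, could look like this:

        // Assumed shape of the GetStreamMajorType helper used above: query the
        // stream descriptor's media type handler and return the major type GUID.
        static void GetStreamMajorType(IMFStreamDescriptor pSD, out Guid pguidMajorType)
        {
            int hr;
            IMFMediaTypeHandler pHandler = null;

            hr = pSD.GetMediaTypeHandler(out pHandler);
            MFError.ThrowExceptionForHR(hr);

            try
            {
                hr = pHandler.GetMajorType(out pguidMajorType);
                MFError.ThrowExceptionForHR(hr);
            }
            finally
            {
                Marshal.ReleaseComObject(pHandler);
            }
        }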