private void createSession(string sFilePath)
        {
            try
            {
                MFError throwonhr = null;

                if (m_pSession == null)
                {
                    throwonhr = MFExtern.MFCreateMediaSession(null, out m_pSession);
                }
                else
                {
                    Stop();
                }

                // Create the media source.

                CreateMediaSource(sFilePath);

                if (m_pSource == null)
                {
                    return;
                }

                IMFPresentationDescriptor lPresentationDescriptor = null;

                m_pSource.CreatePresentationDescriptor(out lPresentationDescriptor);

                if (lPresentationDescriptor == null)
                {
                    return;
                }

                lPresentationDescriptor.GetUINT64(MFAttributesClsid.MF_PD_DURATION, out mMediaDuration);

                IMFTopology pTopology = null;

                // Create a partial topology.
                CreateTopologyFromSource(out pTopology);

                // Set the topology on the media session; the MFError assignment
                // throws if SetTopology fails, matching the pattern used above.
                throwonhr = m_pSession.SetTopology(MFSessionSetTopologyFlags.Immediate, pTopology);

                StartPlayback();
            }
            catch (Exception)
            {
                // Failures during setup are swallowed here; playback simply
                // does not start if the session could not be created.
            }
        }
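        // CreateMediaSource is called above but not shown in this snippet. The
        // sketch below shows what such a helper typically looks like with
        // MF.Net: resolve the file path into an IMFMediaSource via the source
        // resolver. The method body, the synchronous resolution, and the
        // m_pSource field assignment are assumptions, not the original code.
        private void CreateMediaSource(string sFilePath)
        {
            IMFSourceResolver pSourceResolver = null;
            object pUnkSource = null;
            MFObjectType objectType = MFObjectType.Invalid;

            MFError throwonhr = MFExtern.MFCreateSourceResolver(out pSourceResolver);

            // Synchronously resolve the URL/path into a media source object.
            throwonhr = pSourceResolver.CreateObjectFromURL(
                sFilePath,
                MFResolution.MediaSource,
                null,
                out objectType,
                out pUnkSource);

            m_pSource = (IMFMediaSource)pUnkSource;

            COMBase.SafeRelease(pSourceResolver);
        }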
        protected void SetDuration()
        {
            if (m_pSource == null)
            {
                return;
            }

            long duration = 0;
            IMFPresentationDescriptor pPD = null;
            int hr = m_pSource.CreatePresentationDescriptor(out pPD);

            if (hr == 0)
            {
                pPD.GetUINT64(MFAttributesClsid.MF_PD_DURATION, out duration);
            }
            COMBase.SafeRelease(pPD);
            Duration = duration;
        }
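        // The value read from MF_PD_DURATION above is expressed in
        // 100-nanosecond units, the same resolution as .NET ticks, so the
        // Duration property can be turned into a TimeSpan directly. A small
        // illustrative helper (not part of the original code):
        protected TimeSpan GetDurationAsTimeSpan()
        {
            return TimeSpan.FromTicks(Duration);
        }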
Example #3
        void ValidateMP4OutputFile(string mp4filepath)
        {
            ulong  duration     = 0;
            uint   videoWidth   = 0;
            uint   videoHeight  = 0;
            double videoFPS     = 0.0;
            uint   videoBitrate = 0;

            try
            {
                IMFMediaSource            mediaSource            = null;
                IMFSourceReader           sourceReader           = null;
                ulong                     videoSize              = 0;
                ulong                     frameRate              = 0;
                MFHelper.IMFMediaType     mediaType              = null;
                IMFPresentationDescriptor presentationDescriptor = null;
                uint   objectType   = default(uint);
                object objectSource = null;

                API.MFStartup();

                // Create the media source using source resolver and the input URL

                IMFSourceResolver sourceResolver = null;
                API.MFCreateSourceResolver(out sourceResolver);

                // sourceResolver.CreateObjectFromURL("..\\..\\Apps\\SmartCam\\SmartRecorder\\Output\\VideoWriterTest\\CreateTestWMVFile_640x480_24fps_15s\\TestMP4File_640x480_24fps_15s.mp4", Consts.MF_RESOLUTION_MEDIASOURCE, null, out objectType, out objectSource);
                sourceResolver.CreateObjectFromURL(mp4filepath, Consts.MF_RESOLUTION_MEDIASOURCE, null, out objectType, out objectSource);

                mediaSource = (IMFMediaSource)objectSource;

                API.MFCreateSourceReaderFromMediaSource(mediaSource, null, out sourceReader);

                mediaSource.CreatePresentationDescriptor(out presentationDescriptor);

                // Get the duration
                presentationDescriptor.GetUINT64(new Guid(Consts.MF_PD_DURATION), out duration);


                // Get the video width and height
                sourceReader.GetCurrentMediaType(0, out mediaType);

                mediaType.GetUINT64(Guid.Parse(Consts.MF_MT_FRAME_SIZE), out videoSize);

                videoWidth  = (uint)(videoSize >> 32);
                videoHeight = (uint)(videoSize & 0x00000000FFFFFFFF);

                // Get the Frame Rate
                mediaType.GetUINT64(Guid.Parse(Consts.MF_MT_FRAME_RATE), out frameRate);

                if ((frameRate & 0x00000000FFFFFFFF) != 0)
                {
                    videoFPS = (double)(frameRate >> 32) / (double)(frameRate & 0x00000000FFFFFFFF);
                }

                // Get the encoding bitrate
                mediaType.GetUINT32(new Guid(Consts.MF_MT_AVG_BITRATE), out videoBitrate);

                API.MFShutdown();
            }
            catch (Exception exception)
            {
                Console.WriteLine("Exception failure: {0}", exception.ToString());
                Assert.Fail("Unexpected exception while validating the MP4 output file.");
            }

            Assert.IsTrue(Math.Abs((double)duration - (double)VIDEO_DURATION_IN_100_NS) <= (double)VIDEO_DURATION_VAR_IN_100_NS);
            Assert.IsTrue(videoWidth == VIDEO_WIDTH);
            Assert.IsTrue(videoHeight == VIDEO_HEIGHT);
            Assert.IsTrue(Math.Abs(videoFPS - VIDEO_FPS) <= VIDEO_FPS_VAR);
            Assert.IsTrue(Math.Abs((int)videoBitrate - VIDEO_ENCODE_BITRATE) <= VIDEO_ENCODE_BITRATE_VAR);
        }
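        // Media Foundation packs two 32-bit values into a single UINT64
        // attribute: MF_MT_FRAME_SIZE stores the width in the high DWORD and
        // the height in the low DWORD, and MF_MT_FRAME_RATE stores the
        // numerator high and the denominator low, which is what the shifts and
        // masks above rely on. A small illustrative helper (an assumption, not
        // part of the original test code):
        static void UnpackUInt64Attribute(ulong packed, out uint high, out uint low)
        {
            // e.g. 640x480 is stored as ((ulong)640 << 32) | 480,
            // and 24 fps as ((ulong)24 << 32) | 1.
            high = (uint)(packed >> 32);
            low  = (uint)(packed & 0xFFFFFFFF);
        }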
        /// <summary>
        ///     Starts the asynchronous encode operation
        /// </summary>
        /// <param name="inputURL">Source filename</param>
        /// <param name="outputURL">Target filename</param>
        /// <param name="audioOutput">Audio format that will be used for audio streams</param>
        /// <param name="videoOutput">Video format that will be used for video streams</param>
        /// <param name="startPosition">Starting position of the content, in 100-nanosecond units</param>
        /// <param name="endPosition">Position where the new content will end, in 100-nanosecond units; 0 means encode to the end</param>
        public void Encode(string inputURL, string outputURL, AudioFormat audioOutput, VideoFormat videoOutput, ulong startPosition, ulong endPosition)
        {
            // If busy with other operation ignore and return
            if (this.IsBusy())
            {
                return;
            }

            try
            {
                this.presentationClock = null;
                this.startPosition     = startPosition;
                this.endPosition       = endPosition;

                object objectSource = null;

                // Create the media source using source resolver and the input URL
                uint objectType = default(uint);
                this.mediaSource = null;

                // Init source resolver
                IMFSourceResolver sourceResolver = null;
                MFHelper.MFCreateSourceResolver(out sourceResolver);

                sourceResolver.CreateObjectFromURL(inputURL, Consts.MF_RESOLUTION_MEDIASOURCE, null, out objectType, out objectSource);

                this.mediaSource = (IMFMediaSource)objectSource;

                // Create the media session using a global start time so MF_TOPOLOGY_PROJECTSTOP can be used to stop the session
                this.mediaSession = null;
                IMFAttributes mediaSessionAttributes = null;

                MFHelper.MFCreateAttributes(out mediaSessionAttributes, 1);
                mediaSessionAttributes.SetUINT32(new Guid(Consts.MF_SESSION_GLOBAL_TIME), 1);

                MFHelper.MFCreateMediaSession(mediaSessionAttributes, out this.mediaSession);

                // Create the event handler
                AsyncEventHandler mediaEventHandler = new AsyncEventHandler(this.mediaSession);
                mediaEventHandler.MediaEvent += this.MediaEvent;

                // Get the stream descriptor
                IMFPresentationDescriptor presentationDescriptor = null;
                mediaSource.CreatePresentationDescriptor(out presentationDescriptor);

                // Get the duration
                presentationDescriptor.GetUINT64(new Guid(Consts.MF_PD_DURATION), out this.duration);
                IMFTranscodeProfile transcodeProfile = null;

                Guid containerType = new Guid(Consts.MFTranscodeContainerType_MPEG4);
                if (outputURL.EndsWith(".wmv", StringComparison.OrdinalIgnoreCase) || outputURL.EndsWith(".wma", StringComparison.OrdinalIgnoreCase))
                {
                    containerType = new Guid(Consts.MFTranscodeContainerType_ASF);
                }

                // Generate the transcoding profile
                transcodeProfile = SimpleFastEncode.CreateProfile(audioOutput, videoOutput, containerType);

                // Create the MF topology using the profile
                IMFTopology topology = null;
                MFHelper.MFCreateTranscodeTopology(this.mediaSource, outputURL, transcodeProfile, out topology);

                // Set the project start (0) and stop positions on the topology
                topology.SetUINT64(new Guid(Consts.MF_TOPOLOGY_PROJECTSTART), 0);
                topology.SetUINT64(new Guid(Consts.MF_TOPOLOGY_PROJECTSTOP), (endPosition == 0) ? this.duration : endPosition);

                // Set the session topology
                this.mediaSession.SetTopology((uint)Enums.MFSESSION_SETTOPOLOGY_FLAGS.None, topology);
            }
            catch (Exception ex)
            {
                this.mediaSession = null;

                // Fire the EncodeError event
                if (this.EncodeError != null)
                {
                    this.EncodeError(new Exception(ex.Message, ex));
                }
            }
        }
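        // An illustrative wrapper showing how the Encode method above might be
        // invoked. The parameterless SimpleFastEncode constructor and the
        // caller-supplied AudioFormat/VideoFormat values are assumptions;
        // positions are in 100-nanosecond units, and endPosition == 0 means
        // "encode to the end" per the MF_TOPOLOGY_PROJECTSTOP logic above.
        public static void EncodeWholeFile(string inputURL, string outputURL, AudioFormat audioOutput, VideoFormat videoOutput)
        {
            var encoder = new SimpleFastEncode();   // assumed constructor
            encoder.Encode(inputURL, outputURL, audioOutput, videoOutput, 0, 0);
        }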
Example #5
        private void Load()
        {
            MediaFoundationInterop.MFCreateSourceResolver(out IMFSourceResolver resolver);
            object unknown;

            try
            {
                resolver.CreateObjectFromURL(URL, SourceResolverFlags.MF_RESOLUTION_MEDIASOURCE | SourceResolverFlags.MF_RESOLUTION_CONTENT_DOES_NOT_HAVE_TO_MATCH_EXTENSION_OR_MIME_TYPE,
                                             null, out _, out unknown);
            }
            catch (Exception ex)
            {
                // Preserve the resolver failure as the inner exception.
                throw new ArgumentException("Unsupported type.", ex);
            }
            MediaFoundationInterop.MFCreateMediaSession(IntPtr.Zero, out m_Session);
            MediaFoundationInterop.MFCreateTopology(out IMFTopology topo);
            IMFMediaSource source = unknown as IMFMediaSource;

            source.CreatePresentationDescriptor(out m_pDescriptor);
            m_pDescriptor.GetUINT64(MediaFoundationAttributes.MF_PD_DURATION, out long dur);
            m_Duration = dur / 10000000; // MF_PD_DURATION is in 100-ns units; convert to seconds
            m_pDescriptor.GetStreamDescriptorCount(out uint sdcount);
            m_streamcount = (int)sdcount;
            for (uint i = 0; i < m_streamcount; i++)
            {
                m_pDescriptor.GetStreamDescriptorByIndex(i, out bool IsSelected, out IMFStreamDescriptor sd);
                m_DefaultStreamSelect.Add(IsSelected);
                switch (StreamSelectFlag)
                {
                case StreamSelectFlags.SelectAllStream:
                    if (!IsSelected)
                    {
                        m_pDescriptor.SelectStream(i);
                    }
                    break;

                case StreamSelectFlags.SelectNone:
                    if (IsSelected)
                    {
                        m_pDescriptor.DeselectStream(i);
                    }
                    break;
                }
                sd.GetMediaTypeHandler(out IMFMediaTypeHandler typeHandler);
                typeHandler.GetMediaTypeByIndex(0, out IMFMediaType mediaType);
                mediaType.GetMajorType(out Guid streamtype);
                IMFActivate renderer;
                if (streamtype == MediaTypes.MFMediaType_Audio)
                {
                    MediaFoundationInterop.MFCreateAudioRendererActivate(out renderer);
                    mediaType.GetUINT32(MediaFoundationAttributes.MF_MT_AUDIO_SAMPLES_PER_SECOND, out int rate);//SampleRate
                    mediaType.GetUINT32(MediaFoundationAttributes.MF_MT_AUDIO_NUM_CHANNELS, out int channelcount);
                    int samplesize;
                    try
                    {
                        mediaType.GetUINT32(MediaFoundationAttributes.MF_MT_AUDIO_BITS_PER_SAMPLE, out samplesize);
                    }
                    catch (COMException e)
                    {
                        // 0xC00D36E6 is MF_E_ATTRIBUTENOTFOUND; fall back to a
                        // default sample size when the attribute is missing.
                        if ((uint)e.HResult != 0xC00D36E6)
                        {
                            throw; // rethrow without resetting the stack trace
                        }
                        samplesize = 8;
                    }
                    m_format.Add(new WaveFormat(rate, samplesize, channelcount));
                }
                else
                {
                    continue;
                }
                MediaFoundationInterop.MFCreateTopologyNode(MF_TOPOLOGY_TYPE.MF_TOPOLOGY_SOURCESTREAM_NODE, out IMFTopologyNode sourcenode);
                sourcenode.SetUnknown(MediaFoundationAttributes.MF_TOPONODE_SOURCE, source);
                sourcenode.SetUnknown(MediaFoundationAttributes.MF_TOPONODE_PRESENTATION_DESCRIPTOR, m_pDescriptor);
                sourcenode.SetUnknown(MediaFoundationAttributes.MF_TOPONODE_STREAM_DESCRIPTOR, sd);
                topo.AddNode(sourcenode);
                MediaFoundationInterop.MFCreateTopologyNode(MF_TOPOLOGY_TYPE.MF_TOPOLOGY_OUTPUT_NODE, out IMFTopologyNode outputnode);
                outputnode.SetObject(renderer);
                topo.AddNode(outputnode);
                sourcenode.ConnectOutput(0, outputnode, 0);
            }
            m_Session.SetTopology(0, topo);
            m_Eventthread = new Thread(ProcessEvent);
            m_Eventthread.Start();
        }
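        // ProcessEvent is started on m_Eventthread above but is not part of
        // this snippet. A typical Media Foundation event loop pulls events
        // from the session with IMFMediaEventGenerator::GetEvent, starts
        // playback once the topology status becomes ready, and exits when the
        // session reports closed. The sketch below assumes this interop
        // wrapper mirrors the native method names and enums; treat every
        // member used here as an assumption rather than the original code.
        private void ProcessEvent()
        {
            while (true)
            {
                // Blocks until the session queues the next event.
                m_Session.GetEvent(0, out IMFMediaEvent mediaEvent);
                mediaEvent.GetType(out MediaEventType eventType);

                if (eventType == MediaEventType.MESessionTopologyStatus)
                {
                    // Topology resolved: this is where playback would be
                    // started (the Start call's shape depends on the wrapper).
                }
                else if (eventType == MediaEventType.MESessionClosed)
                {
                    break;
                }
            }
        }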