Example #1
        public ConversionProgress()
        {
            // Shutdown Media Foundation
            MFHelper.MFShutdown();

            InitializeComponent();
        }
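The MFHelper.MFStartup / MFHelper.MFShutdown helpers used throughout these examples are not shown; they presumably wrap the Media Foundation lifetime functions exported by mfplat.dll. A minimal sketch of such a wrapper follows (the class and method names are assumptions; the native entry points, MF_VERSION value, and the rule that every successful MFStartup must be balanced by MFShutdown are documented behavior):

using System.Runtime.InteropServices;

// Hypothetical sketch of an MFHelper-style wrapper (the real helper is not part of these examples).
internal static class MediaFoundationLifetimeSketch
{
    private const int MF_VERSION = 0x00020070;   // (MF_SDK_VERSION << 16) | MF_API_VERSION
    private const int MFSTARTUP_FULL = 0;

    [DllImport("mfplat.dll", ExactSpelling = true)]
    private static extern int MFStartup(int version, int dwFlags);

    [DllImport("mfplat.dll", ExactSpelling = true)]
    private static extern int MFShutdown();

    public static void Startup()
    {
        // Throws if the returned HRESULT indicates failure
        Marshal.ThrowExceptionForHR(MFStartup(MF_VERSION, MFSTARTUP_FULL));
    }

    public static void Shutdown()
    {
        Marshal.ThrowExceptionForHR(MFShutdown());
    }
}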
        /// <summary>
        /// Starts rendering to the target.
        /// </summary>
        /// <param name="videoPixelSize">The pixel size of the video.</param>
        protected override void StartRenderingInternal(Size2 videoPixelSize)
        {
            m_outStreamNet = base.TargetFile.OpenOutputStream();
            m_outStream    = new MF.ByteStream(m_outStreamNet);

            // Pass a dummy filename as described here:
            // https://social.msdn.microsoft.com/forums/windowsapps/en-us/49bffa74-4e84-4fd6-9d67-42e8385611b8/video-sinkwriter-in-metro-app
            m_sinkWriter = MF.MediaFactory.CreateSinkWriterFromURL(
                this.DummyFileName, m_outStream.NativePointer, null);
            m_videoPixelSize = videoPixelSize;

            CreateMediaTarget(m_sinkWriter, m_videoPixelSize, out m_streamIndex);

            // Configure input
            using (MF.MediaType mediaTypeIn = new MF.MediaType())
            {
                mediaTypeIn.Set<Guid>(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
                mediaTypeIn.Set<Guid>(MF.MediaTypeAttributeKeys.Subtype, VIDEO_INPUT_FORMAT);
                mediaTypeIn.Set<int>(MF.MediaTypeAttributeKeys.InterlaceMode, (int)MF.VideoInterlaceMode.Progressive);
                mediaTypeIn.Set<long>(MF.MediaTypeAttributeKeys.FrameSize, MFHelper.GetMFEncodedIntsByValues(videoPixelSize.Width, videoPixelSize.Height));
                mediaTypeIn.Set<long>(MF.MediaTypeAttributeKeys.FrameRate, MFHelper.GetMFEncodedIntsByValues(m_framerate, 1));
                m_sinkWriter.SetInputMediaType(m_streamIndex, mediaTypeIn, null);
            }

            // Start writing the video file
            m_sinkWriter.BeginWriting();

            // Set initial frame index
            m_frameIndex = -1;
        }
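MFHelper.GetMFEncodedIntsByValues is not defined in these examples. Media Foundation stores FrameSize and FrameRate as a single 64-bit attribute with the first value (width or numerator) in the upper 32 bits and the second value in the lower 32 bits, the same layout the native MFSetAttributeSize / MFSetAttributeRatio helpers produce. A sketch of the packing and the matching unpacking, with assumed signatures:

// Sketch of the 64-bit packing used for MF_MT_FRAME_SIZE and MF_MT_FRAME_RATE.
// Only the bit layout is documented behavior; the real MFHelper is not shown here.
internal static class MFEncodedIntsSketch
{
    // Pack two 32-bit values into one UINT64 attribute: upper 32 bits = first value, lower 32 bits = second.
    public static long GetMFEncodedIntsByValues(int valueA, int valueB)
    {
        return ((long)valueA << 32) | (uint)valueB;
    }

    // Unpack the attribute again, e.g. to recover width/height from MF_MT_FRAME_SIZE.
    public static void GetValuesByMFEncodedInts(long encoded, out int valueA, out int valueB)
    {
        valueA = (int)(encoded >> 32);
        valueB = (int)(encoded & 0xFFFFFFFF);
    }
}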
        /// <summary>
        /// Initializes a new instance of the <see cref="MediaFoundationVideoReader"/> class.
        /// </summary>
        /// <param name="captureDevice">The capture device.</param>
        public MediaFoundationVideoReader(CaptureDeviceInfo captureDevice)
        {
            captureDevice.EnsureNotNullOrDisposed(nameof(captureDevice));

            try
            {
                // Create the source reader
                using (MF.MediaAttributes mediaAttributes = new MF.MediaAttributes(1))
                {
                    // We need the 'EnableVideoProcessing' attribute because of the RGB32 format
                    // see (lowest post): http://msdn.developer-works.com/article/11388495/How+to+use+SourceReader+(for+H.264+to+RGB+conversion)%3F
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.EnableVideoProcessing, 1);
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.DisableDxva, 1);
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.DisconnectMediasourceOnShutdown, 1);

                    // Create the MediaSource object from the given capture device
                    using (MF.MediaSource mediaSource = captureDevice.CreateMediaSource())
                    {
                        // Create the source reader
                        m_sourceReader = new MF.SourceReader(mediaSource, mediaAttributes);
                    }
                }

                // Apply source configuration
                using (MF.MediaType mediaType = new MF.MediaType())
                {
                    mediaType.Set(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
                    mediaType.Set(MF.MediaTypeAttributeKeys.Subtype, MF.VideoFormatGuids.Rgb32);
                    m_sourceReader.SetCurrentMediaType(
                        MF.SourceReaderIndex.FirstVideoStream,
                        mediaType);
                    m_sourceReader.SetStreamSelection(MF.SourceReaderIndex.FirstVideoStream, new SharpDX.Mathematics.Interop.RawBool(true));
                }
                // Read some information about the source
                using (MF.MediaType mediaType = m_sourceReader.GetCurrentMediaType(MF.SourceReaderIndex.FirstVideoStream))
                {
                    long frameSizeLong = mediaType.Get(MF.MediaTypeAttributeKeys.FrameSize);
                    m_frameSize = new Size2(MFHelper.GetValuesByMFEncodedInts(frameSizeLong));
                }

                // Get additional properties
                m_durationLong    = 0;
                m_characteristics = (MediaSourceCharacteristics_Internal)m_sourceReader.GetPresentationAttribute(
                    MF.SourceReaderIndex.MediaSource, MF.SourceReaderAttributeKeys.MediaSourceCharacteristics);
            }
            catch (Exception)
            {
                this.Dispose();
                throw;
            }
        }
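For a live capture device the duration is simply set to 0 above, and the characteristics attribute tells the caller what the source supports. MediaSourceCharacteristics_Internal is project-specific and not shown; it presumably mirrors the native MFMEDIASOURCE_CHARACTERISTICS flags, roughly as in this sketch:

using System;

// Hypothetical mirror of the native MFMEDIASOURCE_CHARACTERISTICS flags
// returned by the MediaSourceCharacteristics presentation attribute.
[Flags]
internal enum MediaSourceCharacteristicsSketch
{
    None        = 0x0,
    IsLive      = 0x1,  // MFMEDIASOURCE_IS_LIVE - typical for a capture device
    CanSeek     = 0x2,  // MFMEDIASOURCE_CAN_SEEK
    CanPause    = 0x4,  // MFMEDIASOURCE_CAN_PAUSE
    HasSlowSeek = 0x8   // MFMEDIASOURCE_HAS_SLOW_SEEK
}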
Example #4
 /// <summary>
 /// Creates a media target.
 /// </summary>
 /// <param name="sinkWriter">The previously created SinkWriter.</param>
 /// <param name="videoPixelSize">The pixel size of the video.</param>
 /// <param name="streamIndex">The stream index for the new target.</param>
 protected override void CreateMediaTarget(MF.SinkWriter sinkWriter, Size2 videoPixelSize, out int streamIndex)
 {
     using (MF.MediaType mediaTypeOut = new MF.MediaType())
     {
         mediaTypeOut.Set<Guid>(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
         mediaTypeOut.Set<Guid>(MF.MediaTypeAttributeKeys.Subtype, VIDEO_ENCODING_FORMAT);
         mediaTypeOut.Set<int>(MF.MediaTypeAttributeKeys.AvgBitrate, base.Bitrate * 1000);
         mediaTypeOut.Set<int>(MF.MediaTypeAttributeKeys.InterlaceMode, (int)MF.VideoInterlaceMode.Progressive);
         mediaTypeOut.Set<long>(MF.MediaTypeAttributeKeys.FrameSize, MFHelper.GetMFEncodedIntsByValues(videoPixelSize.Width, videoPixelSize.Height));
         mediaTypeOut.Set<long>(MF.MediaTypeAttributeKeys.FrameRate, MFHelper.GetMFEncodedIntsByValues(base.Framerate, 1));
         sinkWriter.AddStream(mediaTypeOut, out streamIndex);
     }
 }
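VIDEO_ENCODING_FORMAT, Bitrate and Framerate are members of the surrounding class and are not defined in this example. Note that MF_MT_AVG_BITRATE (AvgBitrate) expects bits per second, which is why a bitrate kept in kbit/s is multiplied by 1000 above. Plausible values, purely as an assumption:

using System;
using MF = SharpDX.MediaFoundation;

// Hypothetical defaults for the members referenced by CreateMediaTarget above.
internal static class EncodingDefaultsSketch
{
    public static readonly Guid VIDEO_ENCODING_FORMAT = MF.VideoFormatGuids.H264; // assumed H.264 target
    public const int Bitrate   = 1500;   // kbit/s -> written as 1500 * 1000 = 1,500,000 bit/s
    public const int Framerate = 25;     // frames per second
}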
Example #5
        /// <summary>
        ///     Initializes a new instance of the ConversionProgress class
        /// </summary>
        /// <param name="arguments">Encode arguments</param>
        /// <param name="useSinkWriter">False to use the Transcode API, true to use Sink Writer</param>
        public ConversionProgress(Dictionary <string, object> arguments, bool useSinkWriter)
        {
            // Start Media Foundation
            MFHelper.MFStartup();

            this.InitializeComponent();

            this.startTime              = DateTime.Now;
            this.progressTimer          = new DispatcherTimer();
            this.progressTimer.Interval = TimeSpan.FromMilliseconds(500);
            this.progressTimer.Tick    += ProgressTimer_Tick;

            if (useSinkWriter)
            {
                this.encodeWorker = new SimpleSinkWriterEncode();
            }
            else
            {
                this.encodeWorker = new SimpleFastEncode();
            }

            // Start transcoding
            this.StartEncode(arguments);
        }
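The ProgressTimer_Tick handler itself is not part of this example. A minimal sketch of what a 500 ms tick could do inside the same class, using only the fields shown above (the UI element it would update is hypothetical):

// Hypothetical Tick handler for the ConversionProgress window; DispatcherTimer.Tick is a plain EventHandler.
private void ProgressTimer_Tick(object sender, EventArgs e)
{
    // Elapsed encode time, based on the startTime captured in the constructor.
    TimeSpan elapsed = DateTime.Now - this.startTime;

    // e.g. update a (hypothetical) TextBlock in the window:
    // this.elapsedTimeText.Text = elapsed.ToString(@"hh\:mm\:ss");
}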
        /// <summary>
        /// Initializes a new instance of the <see cref="MediaFoundationVideoReader"/> class.
        /// </summary>
        /// <param name="videoSource">The source video file.</param>
        public MediaFoundationVideoReader(ResourceLink videoSource)
        {
            videoSource.EnsureNotNull(nameof(videoSource));

            try
            {
                m_videoSource = videoSource;

                // Create the source reader
                using (MF.MediaAttributes mediaAttributes = new MF.MediaAttributes(1))
                {
                    // We need the 'EnableVideoProcessing' attribute because of the RGB32 format
                    // see (lowest post): http://msdn.developer-works.com/article/11388495/How+to+use+SourceReader+(for+H.264+to+RGB+conversion)%3F
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.EnableVideoProcessing, 1);
                    mediaAttributes.Set(MF.SourceReaderAttributeKeys.DisableDxva, 1);

                    // Wrap the .NET stream in an MF ByteStream
                    m_videoSourceStreamNet = m_videoSource.OpenInputStream();
                    m_videoSourceStream    = new MF.ByteStream(m_videoSourceStreamNet);
                    try
                    {
                        using (MF.MediaAttributes byteStreamAttributes = m_videoSourceStream.QueryInterface<MF.MediaAttributes>())
                        {
                            byteStreamAttributes.Set(MF.ByteStreamAttributeKeys.OriginName, "Dummy." + videoSource.FileExtension);
                        }
                    }
                    catch (SharpDXException)
                    {
                        // The interface MF.MediaAttributes is not available on some platforms
                        // (occurred during tests on Windows 7 without the Platform Update)
                    }

                    // Create the source reader via a custom native method (needed because of the ByteStream argument)
                    IntPtr         sourceReaderPointer = IntPtr.Zero;
                    SharpDX.Result sdxResult           = NativeMethods.MFCreateSourceReaderFromByteStream_Native(
                        m_videoSourceStream.NativePointer,
                        mediaAttributes.NativePointer,
                        out sourceReaderPointer);
                    sdxResult.CheckError();

                    m_sourceReader = new MF.SourceReader(sourceReaderPointer);
                }

                // Apply source configuration
                using (MF.MediaType mediaType = new MF.MediaType())
                {
                    mediaType.Set(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
                    mediaType.Set(MF.MediaTypeAttributeKeys.Subtype, MF.VideoFormatGuids.Rgb32);
                    m_sourceReader.SetCurrentMediaType(
                        MF.SourceReaderIndex.FirstVideoStream,
                        mediaType);
                    m_sourceReader.SetStreamSelection(MF.SourceReaderIndex.FirstVideoStream, new SharpDX.Mathematics.Interop.RawBool(true));
                }

                // Read some information about the source
                using (MF.MediaType mediaType = m_sourceReader.GetCurrentMediaType(MF.SourceReaderIndex.FirstVideoStream))
                {
                    long frameSizeLong = mediaType.Get(MF.MediaTypeAttributeKeys.FrameSize);
                    m_frameSize = new Size2(MFHelper.GetValuesByMFEncodedInts(frameSizeLong));
                }

                // Get additional properties
                m_durationLong = m_sourceReader.GetPresentationAttribute(
                    MF.SourceReaderIndex.MediaSource, MF.PresentationDescriptionAttributeKeys.Duration);
                m_characteristics = (MediaSourceCharacteristics_Internal)m_sourceReader.GetPresentationAttribute(
                    MF.SourceReaderIndex.MediaSource, MF.SourceReaderAttributeKeys.MediaSourceCharacteristics);
            }
            catch (Exception)
            {
                this.Dispose();
                throw;
            }
        }
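NativeMethods.MFCreateSourceReaderFromByteStream_Native is a project-specific P/Invoke; the underlying export is MFCreateSourceReaderFromByteStream in mfreadwrite.dll, which takes the IMFByteStream and IMFAttributes pointers and returns the new IMFSourceReader. A sketch of such a declaration (the wrapper class name and raw-HRESULT handling are assumptions):

using System;
using System.Runtime.InteropServices;

internal static class NativeMethodsSketch
{
    // HRESULT MFCreateSourceReaderFromByteStream(IMFByteStream*, IMFAttributes*, IMFSourceReader**)
    [DllImport("mfreadwrite.dll", EntryPoint = "MFCreateSourceReaderFromByteStream", ExactSpelling = true)]
    public static extern int MFCreateSourceReaderFromByteStream_Native(
        IntPtr byteStream,         // IMFByteStream*  (the wrapped input stream)
        IntPtr attributes,         // IMFAttributes*  (may be IntPtr.Zero)
        out IntPtr sourceReader);  // receives the IMFSourceReader*
}

The example wraps the returned HRESULT in a SharpDX.Result and checks it with CheckError(). The Duration presentation attribute read near the end is expressed in 100-nanosecond units, the same unit as a .NET TimeSpan tick, so TimeSpan.FromTicks(m_durationLong) yields the clip length directly.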