Example #1
        SinkStream AddStream(SinkWriter sinkWriter, MediaType input, MediaType encoding)
        {
            var sinkStream = sinkWriter.AddStream(encoding);

            sinkStream.InputMediaType = input;
            return sinkStream;
        }
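A hedged usage sketch of the wrapper above. The sinkWriter variable and the CreateInputType/CreateAacType helpers are hypothetical stand-ins for whatever builds the writer and the two MediaType instances; only the AddStream call itself comes from the example.

        // Hypothetical call site; the media-type factory helpers are placeholders.
        MediaType inputType   = CreateInputType();   // e.g. raw PCM description (hypothetical)
        MediaType encodedType = CreateAacType();     // e.g. AAC target description (hypothetical)

        SinkStream sinkStream = AddStream(sinkWriter, inputType, encodedType);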
Example #2
        public MfAudioWriter(string FileName,
                             Guid MediaSubtype,
                             WaveFormat Wf,
                             int AudioQuality)
        {
            _writer = MediaFactory.CreateSinkWriterFromURL(FileName, null, null);

            _audioInBytesPerSecond = Wf.SampleRate * Wf.Channels * Wf.BitsPerSample / 8;

            using (var audioTypeOut = MfWriter.GetMediaType(Wf))
            {
                audioTypeOut.Set(MediaTypeAttributeKeys.Subtype, MediaSubtype);

                if (MediaSubtype == AudioFormatGuids.Aac)
                {
                    audioTypeOut.Set(MediaTypeAttributeKeys.AudioAvgBytesPerSecond, MfWriter.GetAacBitrate(AudioQuality));
                }

                _writer.AddStream(audioTypeOut, out _);
            }

            using (var audioTypeIn = MfWriter.GetMediaType(Wf))
            {
                audioTypeIn.Set(MediaTypeAttributeKeys.Subtype, AudioFormatGuids.Pcm);
                _writer.SetInputMediaType(StreamIndex, audioTypeIn, null);
            }

            _writer.BeginWriting();
        }
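MfWriter.GetMediaType is referenced here (and again in Example #5) but not shown. A minimal sketch of such a helper, assuming SharpDX.MediaFoundation and leaving the Subtype for the caller to set, might look like this:

        // Assumed implementation, not from the source: builds a base audio MediaType
        // from a wave format; the caller sets the Subtype (PCM or AAC) afterwards.
        static MediaType GetMediaType(WaveFormat Wf)
        {
            var mediaType = new MediaType();

            mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Audio);
            mediaType.Set(MediaTypeAttributeKeys.AudioSamplesPerSecond, Wf.SampleRate);
            mediaType.Set(MediaTypeAttributeKeys.AudioNumChannels, Wf.Channels);
            mediaType.Set(MediaTypeAttributeKeys.AudioBitsPerSample, Wf.BitsPerSample);
            mediaType.Set(MediaTypeAttributeKeys.AudioBlockAlignment, Wf.Channels * Wf.BitsPerSample / 8);
            mediaType.Set(MediaTypeAttributeKeys.AudioAvgBytesPerSecond, Wf.SampleRate * Wf.Channels * Wf.BitsPerSample / 8);

            return mediaType;
        }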
Example #3
 protected override void CreateMediaTarget(SinkWriter sinkWriter, Size2 videoPixelSize, out int streamIndex)
 {
     using (MF.MediaType mediaTypeOut = new MF.MediaType())
     {
         mediaTypeOut.Set<Guid>(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
         mediaTypeOut.Set<Guid>(MF.MediaTypeAttributeKeys.Subtype, VIDEO_ENCODING_FORMAT);
         mediaTypeOut.Set<int>(MF.MediaTypeAttributeKeys.AvgBitrate, Bitrate);
         mediaTypeOut.Set<int>(MF.MediaTypeAttributeKeys.InterlaceMode, (int)MF.VideoInterlaceMode.Progressive);
         mediaTypeOut.Set<long>(MF.MediaTypeAttributeKeys.FrameSize, MFHelper.GetMFEncodedIntsByValues(videoPixelSize.Width, videoPixelSize.Height));
         mediaTypeOut.Set<long>(MF.MediaTypeAttributeKeys.FrameRate, MFHelper.GetMFEncodedIntsByValues(Framerate, 1));
         sinkWriter.AddStream(mediaTypeOut, out streamIndex);
     }
 }
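MFHelper.GetMFEncodedIntsByValues is not shown. Media Foundation stores value pairs such as frame size and frame rate in a single 64-bit attribute with the first value in the upper 32 bits (the layout used by MFSetAttributeSize/MFSetAttributeRatio), so a sketch of such a helper (the name is the example's, the body is assumed) is:

 // Assumed implementation: pack two 32-bit values into one UINT64,
 // first value in the upper 32 bits.
 public static long GetMFEncodedIntsByValues(int valueA, int valueB)
 {
     return ((long)valueA << 32) | (uint)valueB;
 }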
Example #4
        /// <summary>
        ///     Sets and initializes the target stream for the encoding process.
        /// </summary>
        /// <param name="stream">Stream which should be used as the target stream.</param>
        /// <param name="inputMediaType">Media type of the raw input data to encode.</param>
        /// <param name="targetMediaType">Media type of the encoded data.</param>
        /// <param name="containerType">Container type which should be used.</param>
        protected void SetTargetStream(Stream stream, MediaType inputMediaType, MediaType targetMediaType,
                                       Guid containerType)
        {
            MediaAttributes attributes = null;

            try
            {
                // Stream.Read may return fewer bytes than requested, so loop until
                // the whole source stream has been copied into the buffer.
                var buffer = new byte[stream.Length];
                int offset = 0, read;
                while (offset < buffer.Length &&
                       (read = stream.Read(buffer, offset, buffer.Length - offset)) > 0)
                {
                    offset += read;
                }
                _targetStream = new ByteStream(buffer);

                attributes = new MediaAttributes(2);
                attributes.Set(MediaFoundationAttributes.MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, 1);
                attributes.Set(MediaFoundationAttributes.MF_TRANSCODE_CONTAINERTYPE, containerType);

                _sinkWriter = SinkWriter.Create(_targetStream, attributes);

                _streamIndex = _sinkWriter.AddStream(targetMediaType);
                _sinkWriter.SetInputMediaType(_streamIndex, inputMediaType, null);

                _targetMediaType      = targetMediaType;
                _sourceBytesPerSecond = inputMediaType.AverageBytesPerSecond;

                //initialize the sinkwriter
                _sinkWriter.BeginWriting();
            }
            catch (Exception)
            {
                if (_sinkWriter != null)
                {
                    _sinkWriter.Dispose();
                    _sinkWriter = null;
                }
                if (_targetStream != null)
                {
                    _targetStream.Dispose();
                    _targetStream = null;
                }
                throw;
            }
            finally
            {
                if (attributes != null)
                {
                    attributes.Dispose();
                }
            }
        }
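The class this method belongs to is not shown; its shape matches CSCore's MediaFoundationEncoder. Assuming that library's public helpers (CodecFactory, CreateAACEncoder, and the file names below are assumptions, not taken from the source), a caller that exercises SetTargetStream indirectly could look like this:

            // Hedged usage sketch, assuming CSCore's public API; not part of the source.
            using (var source = CodecFactory.Instance.GetCodec("input.wav"))
            using (var target = File.Create("output.m4a"))
            using (var encoder = MediaFoundationEncoder.CreateAACEncoder(source.WaveFormat, target))
            {
                var buffer = new byte[source.WaveFormat.BytesPerSecond];
                int read;

                // Pull decoded PCM from the source and push it through the encoder,
                // which wraps the bytes in samples and forwards them to the sink writer.
                while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
                    encoder.Write(buffer, 0, read);
            }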
Example #5
        public MfWriter(VideoWriterArgs Args, Device Device)
        {
            if (Args.ImageProvider.EditorType == typeof(Direct2DEditor))
            {
                _inputFormat = VideoFormatGuids.NV12;
            }
            else
            {
                _inputFormat = VideoFormatGuids.Rgb32;
            }

            _device = Device;

            _frameDuration = TenPower7 / Args.FrameRate;

            var attr = GetSinkWriterAttributes(Device);

            _writer = MediaFactory.CreateSinkWriterFromURL(Args.FileName, null, attr);

            var w = Args.ImageProvider.Width;
            var h = Args.ImageProvider.Height;

            _bufferSize = w * h * 4;

            using (var mediaTypeOut = new MediaType())
            {
                mediaTypeOut.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                mediaTypeOut.Set(MediaTypeAttributeKeys.Subtype, _encodingFormat);
                mediaTypeOut.Set(MediaTypeAttributeKeys.AvgBitrate, BitRate);
                mediaTypeOut.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                mediaTypeOut.Set(MediaTypeAttributeKeys.FrameSize, PackLong(w, h));
                mediaTypeOut.Set(MediaTypeAttributeKeys.FrameRate, PackLong(Args.FrameRate, 1));
                mediaTypeOut.Set(MediaTypeAttributeKeys.PixelAspectRatio, PackLong(1, 1));
                _writer.AddStream(mediaTypeOut, out _);
            }

            using (var mediaTypeIn = new MediaType())
            {
                mediaTypeIn.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                mediaTypeIn.Set(MediaTypeAttributeKeys.Subtype, _inputFormat);
                mediaTypeIn.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                mediaTypeIn.Set(MediaTypeAttributeKeys.FrameSize, PackLong(w, h));
                mediaTypeIn.Set(MediaTypeAttributeKeys.FrameRate, PackLong(Args.FrameRate, 1));
                mediaTypeIn.Set(MediaTypeAttributeKeys.PixelAspectRatio, PackLong(1, 1));
                mediaTypeIn.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

                var encoderParams = new MediaAttributes(2);
                encoderParams.Set(RateControlModeKey, RateControlMode.Quality);
                encoderParams.Set(QualityKey, Args.VideoQuality);
                _writer.SetInputMediaType(VideoStreamIndex, mediaTypeIn, encoderParams);
            }

            if (Args.AudioProvider != null)
            {
                var wf = Args.AudioProvider.WaveFormat;
                _audioInBytesPerSecond = wf.SampleRate * wf.Channels * wf.BitsPerSample / 8;

                using (var audioTypeOut = GetMediaType(wf))
                {
                    audioTypeOut.Set(MediaTypeAttributeKeys.Subtype, _encodedAudioFormat);
                    audioTypeOut.Set(MediaTypeAttributeKeys.AudioAvgBytesPerSecond, GetAacBitrate(Args.AudioQuality));
                    _writer.AddStream(audioTypeOut, out _);
                }

                using (var audioTypeIn = GetMediaType(wf))
                {
                    audioTypeIn.Set(MediaTypeAttributeKeys.Subtype, AudioFormatGuids.Pcm);
                    _writer.SetInputMediaType(AudioStreamIndex, audioTypeIn, null);
                }
            }

            _writer.BeginWriting();

            _copyTexture = new Texture2D(Device, new Texture2DDescription
            {
                CpuAccessFlags    = CpuAccessFlags.Read,
                BindFlags         = BindFlags.None,
                Format            = Format.B8G8R8A8_UNorm,
                Width             = w,
                Height            = h,
                OptionFlags       = ResourceOptionFlags.None,
                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1, Quality = 0 },
                Usage             = ResourceUsage.Staging
            });

            _sample = MediaFactory.CreateVideoSampleFromSurface(null);

            // Create the media buffer from the texture
            MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, _copyTexture, 0, false, out _mediaBuffer);

            using (var buffer2D = _mediaBuffer.QueryInterface<Buffer2D>())
                _mediaBuffer.CurrentLength = buffer2D.ContiguousLength;

            // Attach the created buffer to the sample
            _sample.AddBuffer(_mediaBuffer);
        }
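The constructor only prepares the DXGI-backed sample; submitting frames happens elsewhere. A hedged sketch of what such a write path might look like with the fields set up above (the WriteFrame method itself is assumed, not shown in the source):

        // Assumed frame-submission sketch using the fields created in the constructor.
        void WriteFrame(Texture2D frame, long frameIndex)
        {
            // Copy the rendered frame into the CPU-readable staging texture
            // that backs _mediaBuffer.
            _device.ImmediateContext.CopyResource(frame, _copyTexture);

            // Sample timestamps and durations are in 100 ns units.
            _sample.SampleTime     = frameIndex * _frameDuration;
            _sample.SampleDuration = _frameDuration;

            _writer.WriteSample(VideoStreamIndex, _sample);
        }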
Example #6
        public void Setup(string fileName, MfVideoArgs Args)
        {
            logger.Debug("MfWriter::Init(...)");

            var inputFormat = VideoFormatGuids.NV12;

            //  var inputFormat = VideoFormatGuids.Rgb32; // VideoFormatGuids.NV12


            frameDuration = 10_000_000 / Args.FrameRate;

            var width   = Args.Width;
            var height  = Args.Height;
            var bufSize = width * height * 4;


            try
            {
                using (var attr = new MediaAttributes(6))
                {
                    attr.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
                    attr.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);
                    attr.Set(TranscodeAttributeKeys.TranscodeContainertype, TranscodeContainerTypeGuids.Mpeg4);
                    attr.Set(SinkWriterAttributeKeys.LowLatency, true);
                    attr.Set(SinkWriterAttributeKeys.DisableThrottling, 1);

                    using (var devMan = new DXGIDeviceManager())
                    {
                        devMan.ResetDevice(device);
                        attr.Set(SinkWriterAttributeKeys.D3DManager, devMan);
                    }

                    sinkWriter = MediaFactory.CreateSinkWriterFromURL(fileName, null, attr);
                }

                using (var outputMediaType = new MediaType())
                {
                    outputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                    outputMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.H264);
                    outputMediaType.Set(MediaTypeAttributeKeys.AvgBitrate, 8_000_000);
                    outputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                    outputMediaType.Set(MediaTypeAttributeKeys.FrameSize, MfTool.PackToLong(width, height));
                    // MF_MT_FRAME_RATE expects a packed (numerator, denominator) ratio.
                    outputMediaType.Set(MediaTypeAttributeKeys.FrameRate, MfTool.PackToLong(Args.FrameRate, 1));
                    outputMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, MfTool.PackToLong(1, 1));


                    sinkWriter.AddStream(outputMediaType, out videoStreamIndex);

                    Debug.WriteLine("mediaTypeOut " + videoStreamIndex);
                }


                using (var inputMediaType = new MediaType())
                {
                    inputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                    inputMediaType.Set(MediaTypeAttributeKeys.Subtype, inputFormat);
                    inputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                    inputMediaType.Set(MediaTypeAttributeKeys.FrameSize, MfTool.PackToLong(width, height));
                    // As above, the frame rate must be packed as a ratio.
                    inputMediaType.Set(MediaTypeAttributeKeys.FrameRate, MfTool.PackToLong(Args.FrameRate, 1));
                    inputMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, MfTool.PackToLong(1, 1));

                    inputMediaType.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

                    using (var encoderParams = new MediaAttributes(2))
                    {
                        encoderParams.Set(CodecApiPropertyKeys.AVEncCommonRateControlMode, RateControlMode.Quality);
                        encoderParams.Set(CodecApiPropertyKeys.AVEncCommonQuality, Args.Quality);

                        sinkWriter.SetInputMediaType(0, inputMediaType, encoderParams);
                    }
                }

                bufTexture = new Texture2D(device, new Texture2DDescription
                {
                    CpuAccessFlags    = CpuAccessFlags.Read,
                    BindFlags         = BindFlags.None,
                    Format            = Format.B8G8R8A8_UNorm,
                    Width             = width,
                    Height            = height,
                    OptionFlags       = ResourceOptionFlags.None,
                    MipLevels         = 1,
                    ArraySize         = 1,
                    SampleDescription = { Count = 1, Quality = 0 },
                    Usage             = ResourceUsage.Staging
                });

                videoSample = MediaFactory.CreateVideoSampleFromSurface(null);

                // Create the media buffer from the texture
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, bufTexture, 0, false, out mediaBuffer);

                using (var buffer2D = mediaBuffer.QueryInterface<Buffer2D>())
                {
                    mediaBuffer.CurrentLength = buffer2D.ContiguousLength;
                }

                // Attach the created buffer to the sample
                videoSample.AddBuffer(mediaBuffer);
            }
            catch (Exception ex)
            {
                logger.Error(ex);

                Close();
                throw;
            }
        }
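Close() is invoked in the error path above but is not shown. A minimal sketch under the assumption that it simply releases the COM objects created during Setup (a real implementation would also finalize the sink writer after successful encoding):

        // Assumed cleanup sketch; releases the objects created in Setup().
        private void Close()
        {
            videoSample?.Dispose();
            mediaBuffer?.Dispose();
            bufTexture?.Dispose();
            sinkWriter?.Dispose();

            videoSample = null;
            mediaBuffer = null;
            bufTexture = null;
            sinkWriter = null;
        }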