Example #1
        private void SetupSampleBuffer(MfVideoArgs args)
        {
            logger.Debug("SetupSampleBuffer(...)");

            int width  = args.Width;
            int height = args.Height;

            //if (width % 2 != 0)
            //{// must be even...
            //    width++;
            //}

            //if (height % 2 != 0)
            //{
            //    height++;
            //}

            Format format = MfTool.GetDXGIFormatFromVideoFormatGuid(args.Format);

            if (format == Format.Unknown)
            {
                throw new NotSupportedException("Format not suppored " + args.Format);
            }

            var _descr = new Texture2DDescription
            {
                Format            = format,
                Width             = width,
                Height            = height,
                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1 },
            };

            bufTexture = new Texture2D(device, _descr);

            MediaBuffer mediaBuffer = null;

            try
            {
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, bufTexture, 0, false, out mediaBuffer);
                bufSample = MediaFactory.CreateSample();
                bufSample.AddBuffer(mediaBuffer);
            }
            finally
            {
                mediaBuffer?.Dispose();
            }
        }
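Note that disposing mediaBuffer in the finally block only releases the local reference; the buffer stays attached to bufSample. A minimal sketch of how the pre-allocated bufTexture/bufSample pair might then be reused per frame (WriteFrame, capturedTexture and the encoder field are assumed names, not part of the example):

        // Sketch: reuse the pre-allocated bufTexture/bufSample for every captured frame.
        // 'capturedTexture' and 'encoder' are assumed to exist in the surrounding class.
        private void WriteFrame(Texture2D capturedTexture, long sampleTime, long sampleDuration)
        {
            // copy the new frame into the texture that backs bufSample
            device.ImmediateContext.CopyResource(capturedTexture, bufTexture);

            bufSample.SampleTime     = sampleTime;
            bufSample.SampleDuration = sampleDuration;

            // hand the same sample to the MFT; no per-frame allocations needed
            encoder.ProcessInput(0, bufSample, 0);
        }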
Example #2
        private void Drain()
        {
            encoder.GetOutputStreamInfo(0, out TOutputStreamInformation streamInfo);

            MftOutputStreamInformationFlags flags = (MftOutputStreamInformationFlags)streamInfo.DwFlags;
            bool createSample = !flags.HasFlag(MftOutputStreamInformationFlags.MftOutputStreamProvidesSamples);

            bool drain = true;

            do
            {
                Sample outputSample = null;
                try
                {
                    if (createSample)
                    {
                        outputSample                = MediaFactory.CreateSample();
                        outputSample.SampleTime     = 0;
                        outputSample.SampleDuration = 0;

                        using (var mediaBuffer = MediaFactory.CreateMemoryBuffer(streamInfo.CbSize))
                        {
                            outputSample.AddBuffer(mediaBuffer);
                        }
                    }

                    TOutputDataBuffer[] outputDataBuffer = new TOutputDataBuffer[1];

                    var data = new TOutputDataBuffer
                    {
                        DwStatus   = 0,
                        DwStreamID = 0,
                        PSample    = outputSample,
                        PEvents    = null,
                    };
                    outputDataBuffer[0] = data;

                    var res = encoder.TryProcessOutput(TransformProcessOutputFlags.None, outputDataBuffer, out var status);

                    drain = res.Success; //(res != SharpDX.MediaFoundation.ResultCode.TransformNeedMoreInput);
                }
                finally
                {
                    outputSample?.Dispose();
                }
            } while (drain);
        }
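The loop keeps pulling output until TryProcessOutput stops returning success. It is only meaningful after the transform has been told that input is finished; a minimal sketch of the usual call sequence, assuming the SharpDX Transform.ProcessMessage wrapper and the TMessageType member names shown (Flush is a hypothetical helper):

        // Sketch: signal end-of-stream, ask the MFT to drain, then pull the remaining samples.
        // TMessageType.NotifyEndOfStream / TMessageType.CommandDrain are assumed SharpDX names.
        private void Flush()
        {
            encoder.ProcessMessage(TMessageType.NotifyEndOfStream, IntPtr.Zero);
            encoder.ProcessMessage(TMessageType.CommandDrain, IntPtr.Zero);

            Drain();
        }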
Example #3
        /// <inheritdoc />
        /// <summary>
        ///   Encodes a video frame.
        /// </summary>
        /// <param name="data">Bitmap data.</param>
        /// <param name="time">Time in ticks.</param>
        /// <param name="stream">Output stream.</param>
        public void Encode(BitmapData data, long time, Stream stream)
        {
            // create sample
            Sample      sample = MediaFactory.CreateSample();
            MediaBuffer buffer;

            if (this.surface == null)
            {
                // create buffer
                MediaFactory.Create2DMediaBuffer(
                    data.Width,
                    data.Height,
                    new FourCC((int)Format.X8R8G8B8),
                    new RawBool(false),
                    out buffer);

                // calculate length
                buffer.CurrentLength = data.Stride * data.Height;

                // copy data
                Utilities.CopyMemory(buffer.Lock(out _, out _), data.Scan0, buffer.CurrentLength);

                // unlock bits
                buffer.Unlock();
            }
            else
            {
                // create buffer
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID,
                                                     this.surface,
                                                     0,
                                                     new RawBool(false),
                                                     out buffer);

                // set buffer length
                using (Buffer2D buffer2D = buffer.QueryInterface<Buffer2D>()) {
                    buffer.CurrentLength = buffer2D.ContiguousLength;
                }
            }

            // add buffer to sample
            sample.AddBuffer(buffer);
            sample.SampleTime = time;

            try {
                this.sinkWriter.WriteSample(this.streamIdx, sample);
            } catch (SharpDXException) {
                // write errors are swallowed; the buffer and sample are still released below
            } finally {
                buffer.Dispose();
                sample.Dispose();
            }
        }
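A minimal caller sketch for the system-memory path, assuming the BitmapData parameter is System.Drawing.Imaging.BitmapData and that videoEncoder/output are fields of the caller (hypothetical names):

        // Sketch: feed a GDI+ bitmap through Encode(); 'videoEncoder' and 'output' are assumed names.
        private void EncodeBitmap(System.Drawing.Bitmap bitmap, long timeTicks, Stream output)
        {
            var rect = new System.Drawing.Rectangle(0, 0, bitmap.Width, bitmap.Height);
            var data = bitmap.LockBits(rect,
                                       System.Drawing.Imaging.ImageLockMode.ReadOnly,
                                       System.Drawing.Imaging.PixelFormat.Format32bppRgb);
            try
            {
                videoEncoder.Encode(data, timeTicks, output);
            }
            finally
            {
                bitmap.UnlockBits(data);
            }
        }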
Example #4
        public bool ProcessSample(Sample inputSample, Action <Sample> OnSampleDecoded, Action <MediaType> OnMediaTypeChanged = null)
        {
            bool Result = false;


            if (inputSample == null)
            {
                return(false);
            }

            decoder.ProcessInput(0, inputSample, 0);

            //if (decoder.OutputStatus == (int)MftOutputStatusFlags.MftOutputStatusSampleReady)
            {
                decoder.GetOutputStreamInfo(0, out TOutputStreamInformation streamInfo);

                MftOutputStreamInformationFlags flags = (MftOutputStreamInformationFlags)streamInfo.DwFlags;
                bool createSample = !flags.HasFlag(MftOutputStreamInformationFlags.MftOutputStreamProvidesSamples);

                do
                {
                    Sample pSample = null;
                    // Create output sample
                    if (createSample)
                    {
                        pSample                = MediaFactory.CreateSample();
                        pSample.SampleTime     = inputSample.SampleTime;
                        pSample.SampleDuration = inputSample.SampleDuration;
                        pSample.SampleFlags    = inputSample.SampleFlags;
                        //logger.Debug("CreateSample: " + inputSample.SampleTime);

                        using (var mediaBuffer = MediaFactory.CreateMemoryBuffer(streamInfo.CbSize))
                        {
                            pSample.AddBuffer(mediaBuffer);
                        }
                    }

                    TOutputDataBuffer[] outputBuffers = new TOutputDataBuffer[1];

                    var outputBuffer = new TOutputDataBuffer
                    {
                        DwStatus   = 0,
                        DwStreamID = 0,
                        PSample    = pSample,
                        PEvents    = null,
                    };
                    outputBuffers[0] = outputBuffer;


                    var res = decoder.TryProcessOutput(TransformProcessOutputFlags.None, outputBuffers, out TransformProcessOutputStatus status);

                    //logger.Info("TryProcessOutput(...) " + res + " " + outputDataBuffer[0].DwStatus);

                    if (res == SharpDX.Result.Ok)
                    {
                        var buf = outputBuffers[0];

                        var outputSample = buf.PSample;

                        var pEvents = buf.PEvents;
                        if (pEvents != null)
                        {
                            var eventsCount = pEvents.ElementCount;

                            Debug.Assert(eventsCount == 0, "eventsCount == 0");

                            if (eventsCount > 0)
                            {
                                for (int i = 0; i < eventsCount; i++)
                                {
                                    var e = pEvents.GetElement(i);
                                    if (e != null)
                                    {
                                        e.Dispose();
                                        e = null;
                                    }
                                }
                            }
                        }


                        Debug.Assert(outputSample != null, "res.Success && outputSample != null");

                        OnSampleDecoded?.Invoke(outputSample);
                        Result = true;
                        //continue;
                    }
                    else if (res == SharpDX.MediaFoundation.ResultCode.TransformNeedMoreInput)
                    {
                        //logger.Info("-------------------------");

                        if (pSample != null)
                        {
                            pSample.Dispose();
                            pSample = null;
                        }

                        Result = true;

                        break;
                    }
                    else if (res == SharpDX.MediaFoundation.ResultCode.TransformStreamChange)
                    {
                        logger.Warn(res.ToString() + " TransformStreamChange");

                        MediaType newOutputType = null;
                        try
                        {
                            decoder.TryGetOutputAvailableType(outputStreamId, 0, out newOutputType);
                            decoder.SetOutputType(outputStreamId, newOutputType, 0);

                            if (OutputMediaType != null)
                            {
                                OutputMediaType.Dispose();
                                OutputMediaType = null;
                            }
                            OutputMediaType = newOutputType;

                            logger.Info("============== NEW OUTPUT TYPE==================");
                            logger.Info(MfTool.LogMediaType(OutputMediaType));


                            OnMediaTypeChanged?.Invoke(OutputMediaType);
                        }
                        finally
                        {
                            //newOutputType?.Dispose();
                            //newOutputType = null;
                        }
                    }
                    else
                    {
                        res.CheckError();

                        Result = false;
                        break;
                    }
                } while (true);
            }

            return(Result);
        }
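A minimal caller sketch: decoded samples are handed out through the callback and are typically disposed by the consumer once used, and format renegotiation is reported through the optional media-type callback (decoderWrapper, ProcessDecodedSample and ApplyNewFormat are hypothetical names):

        // Sketch: drive the decoder with one compressed sample; dispose the decoded sample after use.
        private void OnEncodedSampleArrived(Sample encodedSample)
        {
            bool ok = decoderWrapper.ProcessSample(encodedSample,
                decodedSample =>
                {
                    try
                    {
                        ProcessDecodedSample(decodedSample);
                    }
                    finally
                    {
                        decodedSample.Dispose();
                    }
                },
                newMediaType => ApplyNewFormat(newMediaType));

            if (!ok)
            {
                logger.Warn("ProcessSample(...) returned false");
            }
        }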
Example #5
        public bool EncodeSample(Sample inputSample, out Sample outputSample)
        {
            bool Result = false;

            outputSample = null;

            if (inputSample == null)
            {
                return(false);
            }

            encoder.ProcessInput(0, inputSample, 0);

            //if (processor.OutputStatus == (int)MftOutputStatusFlags.MftOutputStatusSampleReady)
            {
                encoder.GetOutputStreamInfo(0, out TOutputStreamInformation streamInfo);

                MftOutputStreamInformationFlags flags = (MftOutputStreamInformationFlags)streamInfo.DwFlags;
                bool createSample = !flags.HasFlag(MftOutputStreamInformationFlags.MftOutputStreamProvidesSamples);

                if (createSample)
                {
                    outputSample = MediaFactory.CreateSample();

                    outputSample.SampleTime     = inputSample.SampleTime;
                    outputSample.SampleDuration = inputSample.SampleDuration;
                    outputSample.SampleFlags    = inputSample.SampleFlags;

                    using (var mediaBuffer = MediaFactory.CreateMemoryBuffer(streamInfo.CbSize))
                    {
                        outputSample.AddBuffer(mediaBuffer);
                    }
                }

                TOutputDataBuffer[] outputDataBuffer = new TOutputDataBuffer[1];

                var data = new TOutputDataBuffer
                {
                    DwStatus   = 0,
                    DwStreamID = 0,
                    PSample    = outputSample,
                    PEvents    = null,
                };
                outputDataBuffer[0] = data;

                var res = encoder.TryProcessOutput(TransformProcessOutputFlags.None, outputDataBuffer, out TransformProcessOutputStatus status);
                if (res == SharpDX.Result.Ok)
                {
                    if (outputSample == null)
                    {
                        outputSample = outputDataBuffer[0].PSample;
                    }

                    Debug.Assert(outputSample != null, "res.Success && outputSample != null");

                    Result = true;
                }
                else if (res == SharpDX.MediaFoundation.ResultCode.TransformNeedMoreInput)
                {
                    logger.Warn(res.ToString() + " TransformNeedMoreInput");

                    Result = true;
                }
                else if (res == SharpDX.MediaFoundation.ResultCode.TransformStreamChange)
                {
                    logger.Warn(res.ToString() + " TransformStreamChange");

                    MediaType newOutputType = null;
                    try
                    {
                        encoder.TryGetOutputAvailableType(outputStreamId, 0, out newOutputType);
                        encoder.SetOutputType(outputStreamId, newOutputType, 0);

                        if (OutputMediaType != null)
                        {
                            OutputMediaType.Dispose();
                            OutputMediaType = null;
                        }
                        OutputMediaType = newOutputType;
                        newOutputType = null; // ownership transferred to OutputMediaType; prevent the dispose in finally

                        logger.Info("============== NEW OUTPUT TYPE==================");
                        logger.Info(MfTool.LogMediaType(OutputMediaType));
                    }
                    finally
                    {
                        newOutputType?.Dispose();
                        newOutputType = null;
                    }
                }
                else
                {
                    res.CheckError();
                }
            }

            return(Result);
        }
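A caller sketch: EncodeSample returns true both when encoded data is available and when the transform merely needs more input, so the out sample must be disposed by the caller and checked before use. The TotalLength check below is an assumption used to skip the pre-allocated-but-empty sample returned on TransformNeedMoreInput; WriteEncodedSample is a hypothetical consumer:

        // Sketch: push one raw frame and forward the encoded output, if any.
        private void EncodeFrame(Sample rawSample)
        {
            Sample encodedSample = null;
            try
            {
                bool ok = EncodeSample(rawSample, out encodedSample);
                if (ok && encodedSample != null && encodedSample.TotalLength > 0)
                {
                    WriteEncodedSample(encodedSample); // e.g. hand it to a sink writer or muxer
                }
            }
            finally
            {
                encodedSample?.Dispose();
            }
        }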
Example #6
        private void ProcessOutput()
        {
            if (stopping)
            {
                logger.Warn("ProcessOutput() stopping...");
            }

            if (closing)
            {
                logger.Warn("ProcessOutput() closing...");
                return;
            }

            encoder.GetOutputStreamInfo(0, out TOutputStreamInformation streamInfo);

            MftOutputStreamInformationFlags flags = (MftOutputStreamInformationFlags)streamInfo.DwFlags;
            bool createSample = !flags.HasFlag(MftOutputStreamInformationFlags.MftOutputStreamProvidesSamples);

            // Create output sample
            Sample outputSample = null;

            try
            {
                if (createSample)
                {
                    Debug.Assert(streamInfo.CbSize > 0, "streamInfo.CbSize > 0");

                    outputSample = MediaFactory.CreateSample();
                    using (var mediaBuffer = MediaFactory.CreateMemoryBuffer(streamInfo.CbSize))
                    {
                        outputSample.AddBuffer(mediaBuffer);
                    }
                }

                TOutputDataBuffer[] outputDataBuffer = new TOutputDataBuffer[1];

                var data = new TOutputDataBuffer
                {
                    DwStatus   = 0,
                    DwStreamID = 0,
                    PSample    = outputSample,
                    PEvents    = null,
                };
                outputDataBuffer[0] = data;

                var res = encoder.TryProcessOutput(TransformProcessOutputFlags.None, outputDataBuffer, out TransformProcessOutputStatus status);

                if (res == SharpDX.Result.Ok)
                {
                    if (outputSample == null)
                    {
                        outputSample = outputDataBuffer[0].PSample;
                    }

                    Debug.Assert(outputSample != null, "res.Success && outputSample != null");
                    FinalizeSample(outputSample);
                    // SampleReady?.Invoke(outputSample);
                }
                else if (res == SharpDX.MediaFoundation.ResultCode.TransformNeedMoreInput)
                {
                    //logger.Debug(res.ToString() + " TransformNeedMoreInput");
                    //Result = true;
                }
                else if (res == SharpDX.MediaFoundation.ResultCode.TransformStreamChange)
                {// not expected for an encoder...
                    // but it does happen with the Intel MFT !!!
                    logger.Warn(res.ToString() + " TransformStreamChange");

                    MediaType newOutputType = null;
                    try
                    {
                        encoder.TryGetOutputAvailableType(outputStreamId, 0, out newOutputType);
                        encoder.SetOutputType(outputStreamId, newOutputType, 0);

                        if (OutputMediaType != null)
                        {
                            OutputMediaType.Dispose();
                            OutputMediaType = null;
                        }
                        OutputMediaType = newOutputType;
                        newOutputType = null; // ownership transferred to OutputMediaType; prevent the dispose in finally

                        logger.Info("============== NEW OUTPUT TYPE==================");
                        logger.Info(MfTool.LogMediaType(OutputMediaType));
                    }
                    finally
                    {
                        newOutputType?.Dispose();
                        newOutputType = null;
                    }
                }
                else
                {
                    res.CheckError();
                }
            }
            finally
            {
                if (outputSample != null)
                {
                    outputSample.Dispose();
                    outputSample = null;
                }
            }
        }
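ProcessOutput disposes the sample itself after FinalizeSample, so callers only need to pair it with input delivery. A minimal sketch of the usual push/pull pattern around it (EncodeOneFrame is a hypothetical caller; frameSample is an assumed, already-populated input sample):

        // Sketch: push one input frame, then pull whatever output is ready.
        // Some callers additionally loop on encoder.OutputStatus (see the commented-out checks in the
        // examples above) before pulling.
        private void EncodeOneFrame(Sample frameSample)
        {
            encoder.ProcessInput(0, frameSample, 0);
            ProcessOutput();
        }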
Example #7
        /// <inheritdoc />
        /// <summary>
        ///   Processes and encodes a frame into the destination stream
        /// </summary>
        /// <param name="frame">
        ///   A <see cref="VideoFrame" /> instance containing information about the locked frame bitmap or texture
        /// </param>
        public override void Feed(VideoFrame frame)
        {
            // create sample
            Sample      sample = MediaFactory.CreateSample();
            MediaBuffer buffer;

            switch (frame)
            {
            case D3D11VideoFrame d3D11Frame: // Direct3D 11 texture
                // create media buffer
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID,
                                                     d3D11Frame.Texture,
                                                     0,
                                                     new RawBool(false),
                                                     out buffer);

                // set buffer length
                using (Buffer2D buffer2D = buffer.QueryInterface <Buffer2D>()) {
                    buffer.CurrentLength = buffer2D.ContiguousLength;
                }

                break;

            case BitmapVideoFrame bmpFrame: // WIC bitmap
                // create media buffer
                MediaFactory.CreateWICBitmapBuffer(typeof(Bitmap).GUID, bmpFrame.Bitmap, out buffer);

                // calculate buffer length
                buffer.CurrentLength = bmpFrame.BitmapLock.Stride * bmpFrame.Height;

                // copy pixels
                Utilities.CopyMemory(buffer.Lock(out _, out _), bmpFrame.BitmapLock.Data.DataPointer, buffer.CurrentLength);

                // unlock bits
                buffer.Unlock();

                break;

            case GdiBitmapVideoFrame gdiFrame: // GDI-compatible bitmap
                // create media buffer
                // create buffer for copying the bitmap data
                MediaFactory.Create2DMediaBuffer(frame.Width,
                                                 frame.Height,
                                                 new FourCC((int)Format.X8R8G8B8),
                                                 new RawBool(false), out buffer);

                // calculate buffer length
                buffer.CurrentLength = gdiFrame.BitmapData.Stride * frame.Height;

                // copy data
                Utilities.CopyMemory(buffer.Lock(out _, out _), gdiFrame.BitmapData.Scan0, buffer.CurrentLength);

                // unlock bits
                buffer.Unlock();
                break;

            default:
                throw new NotSupportedException("The supplied frame does not have a supported type");
            }

            // add buffer to sample
            sample.AddBuffer(buffer);

            // set up sample timing
            if (this.lastFrameTime != 0)
            {
                sample.SampleTime     = frame.PresentTime - this.firstFramePresentTime;
                sample.SampleDuration = frame.PresentTime - this.lastFrameTime;
            }
            else
            {
                // set first frame present time so that we can set the timestamp of subsequent frames relative to the
                // beginning of the video
                this.firstFramePresentTime = frame.PresentTime;
            }

            this.lastFrameTime = frame.PresentTime;

            try {
                this.sinkWriter.WriteSample(this.streamIdx, sample);
            } finally {
                buffer.Dispose();
                sample.Dispose();
            }
        }
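Note that the very first frame only records firstFramePresentTime and is written without an explicit timestamp; every later frame gets a SampleTime relative to that first frame. A caller sketch for the Direct3D 11 path, assuming D3D11VideoFrame exposes a constructor that wraps an existing texture and present time (project-specific and hypothetical here, as is videoEncoder):

        // Sketch: feed a captured Direct3D 11 texture into the pipeline above.
        private void OnFrameCaptured(Texture2D capturedTexture, long presentTimeTicks)
        {
            VideoFrame frame = new D3D11VideoFrame(capturedTexture, presentTimeTicks);
            videoEncoder.Feed(frame);
        }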
Example #8
        public void Encode(Texture2D texture)
        {
            // var device = encoder?.device;

            if (device != null)
            {
                using (var sharedRes = texture.QueryInterface <SharpDX.DXGI.Resource>())
                {
                    using (var sharedTexture = device.OpenSharedResource <Texture2D>(sharedRes.SharedHandle))
                    {
                        device.ImmediateContext.CopyResource(sharedTexture, bufTexture);
                    }
                }
            }

            Sample inputSample = null;

            try
            {
                MediaBuffer mediaBuffer = null;
                try
                {
                    MediaFactory.CreateDXGISurfaceBuffer(IID.D3D11Texture2D, bufTexture, 0, false, out mediaBuffer);
                    inputSample = MediaFactory.CreateSample();
                    inputSample.AddBuffer(mediaBuffer);

                    inputSample.SampleTime     = 0;
                    inputSample.SampleDuration = 0;
                }
                finally
                {
                    mediaBuffer?.Dispose();
                }

                if (processor != null)
                {
                    Sample processedSample = null;
                    try
                    {
                        bool result = processor.ProcessSample(inputSample, out processedSample);
                        if (result)
                        {
                            encoder.ProcessSample(processedSample);
                            //EncodeSample(processedSample);
                        }
                    }
                    finally
                    {
                        processedSample?.Dispose();
                    }
                }
                else
                {
                    encoder.ProcessSample(inputSample);
                    //EncodeSample(inputSample);
                }
            }
            finally
            {
                inputSample?.Dispose();
            }
        }
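Because the input texture is opened through its shared handle, it must have been created as a shared resource on the producing device or OpenSharedResource will fail. A minimal sketch of creating such a texture on the capture side (CreateSharedCaptureTexture, captureDevice and the bind flags are assumptions; OptionFlags is the relevant part):

        // Sketch: capture-side texture that the encoder's OpenSharedResource call above can open.
        private Texture2D CreateSharedCaptureTexture(SharpDX.Direct3D11.Device captureDevice, int width, int height)
        {
            return new Texture2D(captureDevice, new Texture2DDescription
            {
                Width             = width,
                Height            = height,
                Format            = Format.B8G8R8A8_UNorm,
                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1, Quality = 0 },
                Usage             = ResourceUsage.Default,
                BindFlags         = BindFlags.RenderTarget | BindFlags.ShaderResource,
                CpuAccessFlags    = CpuAccessFlags.None,
                OptionFlags       = ResourceOptionFlags.Shared, // required so SharedHandle can be opened on the encoder device
            });
        }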
Example #9
        public void Setup(string fileName, MfVideoArgs Args)
        {
            logger.Debug("MfWriter::Init(...)");

            var inputFormat = VideoFormatGuids.NV12;

            //  var inputFormat = VideoFormatGuids.Rgb32; // VideoFormatGuids.NV12


            frameDuration = 10_000_000 / Args.FrameRate;

            var width   = Args.Width;
            var height  = Args.Height;
            var bufSize = width * height * 4;


            try
            {
                using (var attr = new MediaAttributes(6))
                {
                    attr.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
                    attr.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);
                    attr.Set(TranscodeAttributeKeys.TranscodeContainertype, TranscodeContainerTypeGuids.Mpeg4);
                    attr.Set(SinkWriterAttributeKeys.LowLatency, true);
                    attr.Set(SinkWriterAttributeKeys.DisableThrottling, 1);

                    using (var devMan = new DXGIDeviceManager())
                    {
                        devMan.ResetDevice(device);
                        attr.Set(SinkWriterAttributeKeys.D3DManager, devMan);
                    }

                    sinkWriter = MediaFactory.CreateSinkWriterFromURL(fileName, null, attr);
                }

                using (var outputMediaType = new MediaType())
                {
                    outputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                    outputMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.H264);
                    outputMediaType.Set(MediaTypeAttributeKeys.AvgBitrate, 8_000_000);
                    outputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                    outputMediaType.Set(MediaTypeAttributeKeys.FrameSize, MfTool.PackToLong(width, height));
                    outputMediaType.Set(MediaTypeAttributeKeys.FrameRate, Args.FrameRate);
                    outputMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, MfTool.PackToLong(1, 1));


                    sinkWriter.AddStream(outputMediaType, out videoStreamIndex);

                    Debug.WriteLine("mediaTypeOut " + videoStreamIndex);
                }


                using (var inputMediaType = new MediaType())
                {
                    inputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                    inputMediaType.Set(MediaTypeAttributeKeys.Subtype, inputFormat);
                    inputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                    inputMediaType.Set(MediaTypeAttributeKeys.FrameSize, MfTool.PackToLong(width, height));
                    inputMediaType.Set(MediaTypeAttributeKeys.FrameRate, Args.FrameRate);
                    inputMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, MfTool.PackToLong(1, 1));

                    inputMediaType.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

                    using (var encoderParams = new MediaAttributes(2))
                    {
                        encoderParams.Set(CodecApiPropertyKeys.AVEncCommonRateControlMode, RateControlMode.Quality);
                        encoderParams.Set(CodecApiPropertyKeys.AVEncCommonQuality, Args.Quality);

                        sinkWriter.SetInputMediaType(0, inputMediaType, encoderParams);
                    }
                }

                bufTexture = new Texture2D(device, new Texture2DDescription
                {
                    CpuAccessFlags    = CpuAccessFlags.Read,
                    BindFlags         = BindFlags.None,
                    Format            = Format.B8G8R8A8_UNorm,
                    Width             = width,
                    Height            = height,
                    OptionFlags       = ResourceOptionFlags.None,
                    MipLevels         = 1,
                    ArraySize         = 1,
                    SampleDescription = { Count = 1, Quality = 0 },
                    Usage             = ResourceUsage.Staging
                });

                videoSample = MediaFactory.CreateVideoSampleFromSurface(null);

                // Create the media buffer from the texture
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, bufTexture, 0, false, out mediaBuffer);

                using (var buffer2D = mediaBuffer.QueryInterface <Buffer2D>())
                {
                    mediaBuffer.CurrentLength = buffer2D.ContiguousLength;
                }

                // Attach the created buffer to the sample
                videoSample.AddBuffer(mediaBuffer);
            }
            catch (Exception ex)
            {
                logger.Error(ex);

                Close();
                throw;
            }
        }
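After Setup, the writer still has to be started and then fed frame by frame. A minimal sketch of that follow-up, reusing the bufTexture/videoSample pair created above and assuming the SharpDX SinkWriter.BeginWriting wrapper (Start and WriteFrame are hypothetical names; sampleTime is in 100-ns ticks like frameDuration):

        // Sketch: start the sink writer, then per frame copy into the staging texture and write the
        // pre-built sample.
        public void Start()
        {
            sinkWriter.BeginWriting();
        }

        public void WriteFrame(Texture2D frameTexture, long sampleTime)
        {
            device.ImmediateContext.CopyResource(frameTexture, bufTexture);

            videoSample.SampleTime     = sampleTime;
            videoSample.SampleDuration = frameDuration;

            sinkWriter.WriteSample(videoStreamIndex, videoSample);
        }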