Example #1
        private void SetupSampleBuffer(MfVideoArgs args)
        {
            logger.Debug("SetupSampleBuffer(...)");

            int width  = args.Width;
            int height = args.Height;

            //if (width % 2 != 0)
            //{// must be even for chroma-subsampled formats...
            //    width++;
            //}

            //if (height % 2 != 0)
            //{
            //    height++;
            //}

            Format format = MfTool.GetDXGIFormatFromVideoFormatGuid(args.Format);

            if (format == Format.Unknown)
            {
                throw new NotSupportedException("Format not supported " + args.Format);
            }

            var _descr = new Texture2DDescription
            {
                Format            = format,
                Width             = width,
                Height            = height,
                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1 },
            };

            bufTexture = new Texture2D(device, _descr);

            MediaBuffer mediaBuffer = null;

            try
            {
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, bufTexture, 0, false, out mediaBuffer);
                bufSample = MediaFactory.CreateSample();
                bufSample.AddBuffer(mediaBuffer);
            }
            finally
            {
                mediaBuffer?.Dispose();
            }
        }
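Note on the pattern above: Sample.AddBuffer takes its own COM reference on the buffer, so disposing the MediaBuffer wrapper in the finally block is safe. A minimal sketch of the same pattern as a reusable helper, assuming SharpDX.Direct3D11 and SharpDX.MediaFoundation are referenced (CreateDxgiSample is a hypothetical name, not a SharpDX API):

        private static Sample CreateDxgiSample(Texture2D texture)
        {
            MediaBuffer mediaBuffer = null;
            try
            {
                // Wrap subresource 0 of the texture in a DXGI surface buffer.
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, texture, 0, false, out mediaBuffer);

                var sample = MediaFactory.CreateSample();
                // AddBuffer takes its own reference, so our wrapper can be released below.
                sample.AddBuffer(mediaBuffer);
                return sample;
            }
            finally
            {
                mediaBuffer?.Dispose();
            }
        }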
Example #2
        /// <inheritdoc />
        /// <summary>
        ///   Encodes a video frame.
        /// </summary>
        /// <param name="data">Bitmap data.</param>
        /// <param name="time">Time in ticks.</param>
        /// <param name="stream">Output stream.</param>
        public void Encode(BitmapData data, long time, Stream stream)
        {
            // create sample
            Sample      sample = MediaFactory.CreateSample();
            MediaBuffer buffer;

            if (this.surface == null)
            {
                // create buffer
                MediaFactory.Create2DMediaBuffer(
                    data.Width,
                    data.Height,
                    new FourCC((int)Format.X8R8G8B8),
                    new RawBool(false),
                    out buffer);

                // calculate length
                buffer.CurrentLength = data.Stride * data.Height;

                // copy data
                Utilities.CopyMemory(buffer.Lock(out _, out _), data.Scan0, buffer.CurrentLength);

                // unlock bits
                buffer.Unlock();
            }
            else
            {
                // create buffer
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID,
                                                     this.surface,
                                                     0,
                                                     new RawBool(false),
                                                     out buffer);

                // set buffer length (dispose the queried Buffer2D to avoid leaking a COM reference)
                using (Buffer2D buffer2D = buffer.QueryInterface<Buffer2D>())
                {
                    buffer.CurrentLength = buffer2D.ContiguousLength;
                }
            }

            // add buffer to sample
            sample.AddBuffer(buffer);
            sample.SampleTime = time;

            try
            {
                this.sinkWriter.WriteSample(this.streamIdx, sample);
            }
            catch (SharpDXException)
            {
                // writer errors are swallowed here; the frame is simply dropped
            }
            finally
            {
                buffer.Dispose();
                sample.Dispose();
            }
        }
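The CPU path above copies the whole bitmap in one call, which only works when data.Stride matches the media buffer's internal pitch. A hedged sketch of the safer row-by-row variant through IMF2DBuffer (Buffer2D in SharpDX), assuming a top-down surface with positive pitch; CopyRows is a hypothetical helper name:

        private static void CopyRows(MediaBuffer buffer, BitmapData data)
        {
            using (var buffer2D = buffer.QueryInterface<Buffer2D>())
            {
                // Lock2D exposes the buffer's first scanline and its pitch,
                // which may be wider than the bitmap's stride.
                buffer2D.Lock2D(out IntPtr scanline0, out int pitch);
                try
                {
                    int rowBytes = Math.Min(pitch, data.Stride);
                    for (int y = 0; y < data.Height; y++)
                    {
                        Utilities.CopyMemory(scanline0 + y * pitch,
                                             data.Scan0 + y * data.Stride,
                                             rowBytes);
                    }
                }
                finally
                {
                    buffer2D.Unlock2D();
                }
            }
        }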
Example #3
        public MfWriter(VideoWriterArgs Args, Device Device)
        {
            if (Args.ImageProvider.EditorType == typeof(Direct2DEditor))
            {
                _inputFormat = VideoFormatGuids.NV12;
            }
            else
            {
                _inputFormat = VideoFormatGuids.Rgb32;
            }

            _device = Device;

            _frameDuration = TenPower7 / Args.FrameRate;

            var attr = GetSinkWriterAttributes(Device);

            _writer = MediaFactory.CreateSinkWriterFromURL(Args.FileName, null, attr);

            var w = Args.ImageProvider.Width;
            var h = Args.ImageProvider.Height;

            _bufferSize = w * h * 4;

            using (var mediaTypeOut = new MediaType())
            {
                mediaTypeOut.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                mediaTypeOut.Set(MediaTypeAttributeKeys.Subtype, _encodingFormat);
                mediaTypeOut.Set(MediaTypeAttributeKeys.AvgBitrate, BitRate);
                mediaTypeOut.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                mediaTypeOut.Set(MediaTypeAttributeKeys.FrameSize, PackLong(w, h));
                mediaTypeOut.Set(MediaTypeAttributeKeys.FrameRate, PackLong(Args.FrameRate, 1));
                mediaTypeOut.Set(MediaTypeAttributeKeys.PixelAspectRatio, PackLong(1, 1));
                _writer.AddStream(mediaTypeOut, out _);
            }

            using (var mediaTypeIn = new MediaType())
            {
                mediaTypeIn.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                mediaTypeIn.Set(MediaTypeAttributeKeys.Subtype, _inputFormat);
                mediaTypeIn.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                mediaTypeIn.Set(MediaTypeAttributeKeys.FrameSize, PackLong(w, h));
                mediaTypeIn.Set(MediaTypeAttributeKeys.FrameRate, PackLong(Args.FrameRate, 1));
                mediaTypeIn.Set(MediaTypeAttributeKeys.PixelAspectRatio, PackLong(1, 1));
                mediaTypeIn.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

                using (var encoderParams = new MediaAttributes(2))
                {
                    encoderParams.Set(RateControlModeKey, RateControlMode.Quality);
                    encoderParams.Set(QualityKey, Args.VideoQuality);
                    _writer.SetInputMediaType(VideoStreamIndex, mediaTypeIn, encoderParams);
                }
            }

            if (Args.AudioProvider != null)
            {
                var wf = Args.AudioProvider.WaveFormat;
                _audioInBytesPerSecond = wf.SampleRate * wf.Channels * wf.BitsPerSample / 8;

                using (var audioTypeOut = GetMediaType(wf))
                {
                    audioTypeOut.Set(MediaTypeAttributeKeys.Subtype, _encodedAudioFormat);
                    audioTypeOut.Set(MediaTypeAttributeKeys.AudioAvgBytesPerSecond, GetAacBitrate(Args.AudioQuality));
                    _writer.AddStream(audioTypeOut, out _);
                }

                using (var audioTypeIn = GetMediaType(wf))
                {
                    audioTypeIn.Set(MediaTypeAttributeKeys.Subtype, AudioFormatGuids.Pcm);
                    _writer.SetInputMediaType(AudioStreamIndex, audioTypeIn, null);
                }
            }

            _writer.BeginWriting();

            _copyTexture = new Texture2D(Device, new Texture2DDescription
            {
                CpuAccessFlags    = CpuAccessFlags.Read,
                BindFlags         = BindFlags.None,
                Format            = Format.B8G8R8A8_UNorm,
                Width             = w,
                Height            = h,
                OptionFlags       = ResourceOptionFlags.None,
                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1, Quality = 0 },
                Usage             = ResourceUsage.Staging
            });

            _sample = MediaFactory.CreateVideoSampleFromSurface(null);

            // Create the media buffer from the texture
            MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, _copyTexture, 0, false, out _mediaBuffer);

            using (var buffer2D = _mediaBuffer.QueryInterface<Buffer2D>())
                _mediaBuffer.CurrentLength = buffer2D.ContiguousLength;

            // Attach the created buffer to the sample
            _sample.AddBuffer(_mediaBuffer);
        }
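PackLong here follows the Media Foundation convention used by MFSetAttributeSize and MFSetAttributeRatio: two 32-bit values packed into one 64-bit attribute, with the first value in the high half. Presumably the helper looks like this:

        public static long PackLong(int left, int right)
        {
            // high 32 bits = left (e.g. width or numerator),
            // low 32 bits  = right (e.g. height or denominator)
            return ((long)left << 32) | (uint)right;
        }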
Example #4
        public MfColorConverter(int Width, int Height, Device Device)
        {
            var transforms = MediaFactory.FindTransform(TransformCategoryGuids.VideoProcessor, TransformEnumFlag.All);

            _colorConverter = transforms[0].ActivateObject<Transform>();

            _deviceMan = new DXGIDeviceManager();
            _deviceMan.ResetDevice(Device);

            _colorConverter.ProcessMessage(TMessageType.SetD3DManager, _deviceMan.NativePointer);

            using (var mediaTypeIn = new MediaType())
            {
                mediaTypeIn.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                mediaTypeIn.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.Rgb32);
                mediaTypeIn.Set(MediaTypeAttributeKeys.FrameSize, MfWriter.PackLong(Width, Height));
                mediaTypeIn.Set(MediaTypeAttributeKeys.DefaultStride, Width * 4);
                mediaTypeIn.Set(MediaTypeAttributeKeys.FixedSizeSamples, 1);
                mediaTypeIn.Set(MediaTypeAttributeKeys.SampleSize, Width * Height * 4);

                _colorConverter.SetInputType(0, mediaTypeIn, 0);
            }

            var outputStride     = Width * 12 / 8;
            var outputSampleSize = Height * outputStride;

            using (var mediaTypeOut = new MediaType())
            {
                mediaTypeOut.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                mediaTypeOut.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.NV12);
                mediaTypeOut.Set(MediaTypeAttributeKeys.FrameSize, MfWriter.PackLong(Width, Height));
                mediaTypeOut.Set(MediaTypeAttributeKeys.DefaultStride, outputStride);
                mediaTypeOut.Set(MediaTypeAttributeKeys.FixedSizeSamples, 1);
                mediaTypeOut.Set(MediaTypeAttributeKeys.SampleSize, outputSampleSize);

                _colorConverter.SetOutputType(0, mediaTypeOut, 0);
            }

            _colorConverter.ProcessMessage(TMessageType.NotifyBeginStreaming, IntPtr.Zero);

            _copyTexture = new Texture2D(Device, new Texture2DDescription
            {
                CpuAccessFlags    = CpuAccessFlags.None,
                BindFlags         = BindFlags.RenderTarget | BindFlags.ShaderResource,
                Format            = Format.B8G8R8A8_UNorm,
                Width             = Width,
                Height            = Height,
                OptionFlags       = ResourceOptionFlags.None,
                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1, Quality = 0 },
                Usage             = ResourceUsage.Default
            });

            _inputSample = MediaFactory.CreateVideoSampleFromSurface(null);

            // Create the media buffer from the texture
            MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, _copyTexture, 0, false, out var inputBuffer);

            using (var buffer2D = inputBuffer.QueryInterface<Buffer2D>())
                inputBuffer.CurrentLength = buffer2D.ContiguousLength;

            // Attach the created buffer to the sample
            _inputSample.AddBuffer(inputBuffer);
        }
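The stride math above (Width * 12 / 8) reflects NV12's layout: a full-resolution 8-bit Y plane followed by an interleaved UV plane at half resolution in both axes, i.e. 12 bits per pixel overall. A small sketch of the same size calculation, assuming even dimensions:

        private static int Nv12FrameSize(int width, int height)
        {
            int yPlane  = width * height;     // 1 byte per pixel
            int uvPlane = width * height / 2; // 2 bytes per 2x2 pixel block
            return yPlane + uvPlane;          // equals height * (width * 12 / 8)
        }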
Example #5
            public void Start1()
            {
                var flags = DeviceCreationFlags.VideoSupport |
                            DeviceCreationFlags.BgraSupport |
                            DeviceCreationFlags.Debug;

                var device = new SharpDX.Direct3D11.Device(SharpDX.Direct3D.DriverType.Hardware, flags);

                using (var multiThread = device.QueryInterface <SharpDX.Direct3D11.Multithread>())
                {
                    multiThread.SetMultithreadProtected(true);
                }


                System.Drawing.Bitmap bmp        = new System.Drawing.Bitmap(@"D:\Temp\4.bmp");
                Texture2D             rgbTexture = DxTool.GetTexture(bmp, device);

                var bufTexture = new Texture2D(device,
                                               new Texture2DDescription
                {
                    // Format = Format.NV12,
                    Format            = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                    Width             = 1920,
                    Height            = 1080,
                    MipLevels         = 1,
                    ArraySize         = 1,
                    SampleDescription = { Count = 1 },
                });

                device.ImmediateContext.CopyResource(rgbTexture, bufTexture);

                var processor  = new MfVideoProcessor(device);
                var inProcArgs = new MfVideoArgs
                {
                    Width  = 1920,
                    Height = 1080,
                    Format = SharpDX.MediaFoundation.VideoFormatGuids.Argb32,
                };



                var outProcArgs = new MfVideoArgs
                {
                    Width  = 1920,
                    Height = 1080,
                    Format = SharpDX.MediaFoundation.VideoFormatGuids.NV12,                    //.Argb32,
                };

                processor.Setup(inProcArgs, outProcArgs);
                processor.Start();


                var rgbSample = MediaFactory.CreateVideoSampleFromSurface(null);

                // Create the media buffer from the texture
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, bufTexture, 0, false, out var mediaBuffer);

                using (var buffer2D = mediaBuffer.QueryInterface<Buffer2D>())
                {
                    mediaBuffer.CurrentLength = buffer2D.ContiguousLength;
                }

                rgbSample.AddBuffer(mediaBuffer);

                rgbSample.SampleTime     = 0;
                rgbSample.SampleDuration = 0;

                var result = processor.ProcessSample(rgbSample, out var nv12Sample);

                Task.Run(() =>
                {
                    Stopwatch sw = new Stopwatch();
                    int fps      = 60;
                    int interval = (int)(1000.0 / fps);

                    int _count = 1;

                    long globalTime = 0;


                    while (true)
                    {
                        if (result)
                        {
                            // accumulate the full duration of the previous iteration
                            // (the original also added the elapsed time at the bottom
                            // of the loop, which double-counted it)
                            globalTime += sw.ElapsedMilliseconds;
                            sw.Restart();

                            nv12Sample.SampleTime     = MfTool.SecToMfTicks(globalTime / 1000.0);
                            nv12Sample.SampleDuration = MfTool.SecToMfTicks(interval / 1000.0);

                            SampleReady?.Invoke(nv12Sample);

                            // sleep for the remainder of the frame interval
                            var msec  = sw.ElapsedMilliseconds;
                            var delay = interval - msec;
                            if (delay < 0)
                            {
                                delay = 1;
                            }

                            Thread.Sleep((int)delay);
                            _count++;
                        }
                    }
                });
            }
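The loop above stamps samples with MfTool.SecToMfTicks; Media Foundation timestamps and durations are expressed in 100-nanosecond units, which is also why the other examples derive the frame duration as 10_000_000 / FrameRate. Presumably the helper is equivalent to:

            public static long SecToMfTicks(double seconds)
            {
                // 1 second = 10,000,000 ticks of 100 ns each
                return (long)(seconds * 10_000_000);
            }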
Example #6
        /// <inheritdoc />
        /// <summary>
        ///   Processes and encodes a frame into the destination stream
        /// </summary>
        /// <param name="frame">
        ///   A <see cref="VideoFrame" /> instance containing information about the locked frame bitmap or texture
        /// </param>
        public override void Feed(VideoFrame frame)
        {
            // create sample
            Sample      sample = MediaFactory.CreateSample();
            MediaBuffer buffer;

            switch (frame)
            {
            case D3D11VideoFrame d3D11Frame: // Direct3D 11 texture
                // create media buffer
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID,
                                                     d3D11Frame.Texture,
                                                     0,
                                                     new RawBool(false),
                                                     out buffer);

                // set buffer length
                using (Buffer2D buffer2D = buffer.QueryInterface<Buffer2D>()) {
                    buffer.CurrentLength = buffer2D.ContiguousLength;
                }

                break;

            case BitmapVideoFrame bmpFrame: // WIC bitmap
                // create media buffer
                MediaFactory.CreateWICBitmapBuffer(typeof(Bitmap).GUID, bmpFrame.Bitmap, out buffer);

                // calculate buffer length
                buffer.CurrentLength = bmpFrame.BitmapLock.Stride * bmpFrame.Height;

                // copy pixels
                Utilities.CopyMemory(buffer.Lock(out _, out _), bmpFrame.BitmapLock.Data.DataPointer, buffer.CurrentLength);

                // unlock bits
                buffer.Unlock();

                break;

            case GdiBitmapVideoFrame gdiFrame: // GDI-compatible bitmap
                // create a 2D media buffer for copying the bitmap data
                MediaFactory.Create2DMediaBuffer(frame.Width,
                                                 frame.Height,
                                                 new FourCC((int)Format.X8R8G8B8),
                                                 new RawBool(false), out buffer);

                // calculate buffer length
                buffer.CurrentLength = gdiFrame.BitmapData.Stride * frame.Height;

                // copy data
                Utilities.CopyMemory(buffer.Lock(out _, out _), gdiFrame.BitmapData.Scan0, buffer.CurrentLength);

                // unlock bits
                buffer.Unlock();
                break;

            default:
                throw new NotSupportedException("The supplied frame does not have a supported type");
            }

            // add buffer to sample
            sample.AddBuffer(buffer);

            // set up sample timing
            if (this.lastFrameTime != 0)
            {
                sample.SampleTime     = frame.PresentTime - this.firstFramePresentTime;
                sample.SampleDuration = frame.PresentTime - this.lastFrameTime;
            }
            else
            {
                // set first frame present time so that we can set the timestamp of subsequent frames relative to the
                // beginning of the video
                this.firstFramePresentTime = frame.PresentTime;
            }

            this.lastFrameTime = frame.PresentTime;

            try {
                this.sinkWriter.WriteSample(this.streamIdx, sample);
            } finally {
                buffer.Dispose();
                sample.Dispose();
            }
        }
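A hedged usage sketch of the Direct3D 11 branch, assuming the frame wrapper simply carries the texture plus a presentation timestamp in 100-ns ticks (the D3D11VideoFrame constructor shape, and the encoder and capturedTexture names, are illustrative, not taken from the source):

            // illustrative only: wrap a captured texture and feed it to the encoder
            var frame = new D3D11VideoFrame(capturedTexture, presentTimeTicks); // hypothetical constructor
            encoder.Feed(frame);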
Example #7
        public void Encode(Texture2D texture)
        {
            // var device = encoder?.device;

            if (device != null)
            {
                using (var sharedRes = texture.QueryInterface<SharpDX.DXGI.Resource>())
                {
                    using (var sharedTexture = device.OpenSharedResource<Texture2D>(sharedRes.SharedHandle))
                    {
                        device.ImmediateContext.CopyResource(sharedTexture, bufTexture);
                    }
                }
            }

            Sample inputSample = null;

            try
            {
                MediaBuffer mediaBuffer = null;
                try
                {
                    MediaFactory.CreateDXGISurfaceBuffer(IID.D3D11Texture2D, bufTexture, 0, false, out mediaBuffer);
                    inputSample = MediaFactory.CreateSample();
                    inputSample.AddBuffer(mediaBuffer);

                    inputSample.SampleTime     = 0;
                    inputSample.SampleDuration = 0;
                }
                finally
                {
                    mediaBuffer?.Dispose();
                }

                if (processor != null)
                {
                    Sample processedSample = null;
                    try
                    {
                        bool result = processor.ProcessSample(inputSample, out processedSample);
                        if (result)
                        {
                            encoder.ProcessSample(processedSample);
                            //EncodeSample(processedSample);
                        }
                    }
                    finally
                    {
                        processedSample?.Dispose();
                    }
                }
                else
                {
                    encoder.ProcessSample(inputSample);
                    //EncodeSample(inputSample);
                }
            }
            finally
            {
                inputSample?.Dispose();
            }
        }
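The shared-handle step at the top only works if the source texture was created with ResourceOptionFlags.Shared; another device can then open it through its DXGI shared handle. A minimal sketch of both sides, assuming two SharpDX.Direct3D11 devices (producerDevice, consumerDevice and destinationTexture are placeholder names):

        var sharedDesc = new Texture2DDescription
        {
            Width             = 1920,
            Height            = 1080,
            Format            = Format.B8G8R8A8_UNorm,
            MipLevels         = 1,
            ArraySize         = 1,
            SampleDescription = { Count = 1 },
            OptionFlags       = ResourceOptionFlags.Shared // required for cross-device access
        };

        using (var sourceTexture = new Texture2D(producerDevice, sharedDesc))
        using (var dxgiResource  = sourceTexture.QueryInterface<SharpDX.DXGI.Resource>())
        using (var openedTexture = consumerDevice.OpenSharedResource<Texture2D>(dxgiResource.SharedHandle))
        {
            // the consumer device can now copy from the producer's texture
            consumerDevice.ImmediateContext.CopyResource(openedTexture, destinationTexture);
        }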
Example #8
        public void Setup(string fileName, MfVideoArgs Args)
        {
            logger.Debug("MfWriter::Setup(...)");

            var inputFormat = VideoFormatGuids.NV12; // alternatively VideoFormatGuids.Rgb32

            frameDuration = 10_000_000 / Args.FrameRate;

            var width   = Args.Width;
            var height  = Args.Height;
            var bufSize = width * height * 4;


            try
            {
                using (var attr = new MediaAttributes(6))
                {
                    attr.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
                    attr.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);
                    attr.Set(TranscodeAttributeKeys.TranscodeContainertype, TranscodeContainerTypeGuids.Mpeg4);
                    attr.Set(SinkWriterAttributeKeys.LowLatency, true);
                    attr.Set(SinkWriterAttributeKeys.DisableThrottling, 1);

                    using (var devMan = new DXGIDeviceManager())
                    {
                        devMan.ResetDevice(device);
                        attr.Set(SinkWriterAttributeKeys.D3DManager, devMan);
                    }

                    sinkWriter = MediaFactory.CreateSinkWriterFromURL(fileName, null, attr);
                }

                using (var outputMediaType = new MediaType())
                {
                    outputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                    outputMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.H264);
                    outputMediaType.Set(MediaTypeAttributeKeys.AvgBitrate, 8_000_000);
                    outputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                    outputMediaType.Set(MediaTypeAttributeKeys.FrameSize, MfTool.PackToLong(width, height));
                    // frame rate is a packed numerator/denominator ratio, like FrameSize
                    outputMediaType.Set(MediaTypeAttributeKeys.FrameRate, MfTool.PackToLong(Args.FrameRate, 1));
                    outputMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, MfTool.PackToLong(1, 1));


                    sinkWriter.AddStream(outputMediaType, out videoStreamIndex);

                    Debug.WriteLine("mediaTypeOut " + videoStreamIndex);
                }


                using (var inputMediaType = new MediaType())
                {
                    inputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                    inputMediaType.Set(MediaTypeAttributeKeys.Subtype, inputFormat);
                    inputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
                    inputMediaType.Set(MediaTypeAttributeKeys.FrameSize, MfTool.PackToLong(width, height));
                    inputMediaType.Set(MediaTypeAttributeKeys.FrameRate, MfTool.PackToLong(Args.FrameRate, 1));
                    inputMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, MfTool.PackToLong(1, 1));

                    inputMediaType.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

                    using (var encoderParams = new MediaAttributes(2))
                    {
                        encoderParams.Set(CodecApiPropertyKeys.AVEncCommonRateControlMode, RateControlMode.Quality);
                        encoderParams.Set(CodecApiPropertyKeys.AVEncCommonQuality, Args.Quality);

                        sinkWriter.SetInputMediaType(0, inputMediaType, encoderParams);
                    }
                }

                bufTexture = new Texture2D(device, new Texture2DDescription
                {
                    CpuAccessFlags    = CpuAccessFlags.Read,
                    BindFlags         = BindFlags.None,
                    Format            = Format.B8G8R8A8_UNorm,
                    Width             = width,
                    Height            = height,
                    OptionFlags       = ResourceOptionFlags.None,
                    MipLevels         = 1,
                    ArraySize         = 1,
                    SampleDescription = { Count = 1, Quality = 0 },
                    Usage             = ResourceUsage.Staging
                });

                videoSample = MediaFactory.CreateVideoSampleFromSurface(null);

                // Create the media buffer from the texture
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, bufTexture, 0, false, out mediaBuffer);

                using (var buffer2D = mediaBuffer.QueryInterface<Buffer2D>())
                {
                    mediaBuffer.CurrentLength = buffer2D.ContiguousLength;
                }

                // Attach the created buffer to the sample
                videoSample.AddBuffer(mediaBuffer);
            }
            catch (Exception ex)
            {
                logger.Error(ex);

                Close();
                throw;
            }
        }
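A hedged sketch of the per-frame write path that would follow this Setup: copy the rendered frame into the staging texture that backs videoSample, stamp time and duration, and hand the sample to the sink writer (sourceTexture and frameIndex are illustrative; the other names are the fields set up above):

            device.ImmediateContext.CopyResource(sourceTexture, bufTexture);

            videoSample.SampleTime     = frameIndex * frameDuration; // 100-ns ticks
            videoSample.SampleDuration = frameDuration;

            sinkWriter.WriteSample(videoStreamIndex, videoSample);
            frameIndex++;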