Example #1
        private void Media_MediaOpening(object sender, MediaOpeningRoutedEventArgs e)
        {
            if (VIDTextBox.Text.StartsWith("udp://"))
            {
                e.Options.VideoFilter = "yadif";
            }

            var videoStream = e.Options.VideoStream;

            if (videoStream != null)
            {
                // Check if the video requires deinterlacing
                var requiresDeinterlace = videoStream.FieldOrder != AVFieldOrder.AV_FIELD_PROGRESSIVE &&
                                          videoStream.FieldOrder != AVFieldOrder.AV_FIELD_UNKNOWN;

                // Hardware device priorities
                var deviceCandidates = new AVHWDeviceType[]
                {
                    AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2,
                    AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
                    AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA
                };

                // Hardware device selection
                if (videoStream.FPS <= 30)
                {
                    foreach (var deviceType in deviceCandidates)
                    {
                        var accelerator = videoStream.HardwareDevices.FirstOrDefault(d => d.DeviceType == deviceType);
                        if (accelerator != null)
                        {
                            if (Platform.GuiContext.Current.IsInDebugMode)
                            {
                                e.Options.VideoHardwareDevice = accelerator;
                            }

                            break;
                        }
                    }
                }

                var videoFilter = new StringBuilder();

                // The yadif filter deinterlaces the video; the field-order check above
                // tells us whether deinterlacing is needed automatically
                if (requiresDeinterlace)
                {
                    videoFilter.Append("yadif,");
                }

                // Scale down to maximum 1080p screen resolution.
                if (videoStream.PixelHeight > 1080)
                {
                    // e.Options.VideoHardwareDevice = null;
                    videoFilter.Append($"scale=-1:1080,");
                }

                e.Options.VideoFilter = videoFilter.ToString().TrimEnd(',');
            }
        }
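For context, a handler like the one above is typically attached to the FFME MediaElement's MediaOpening event; a minimal wiring sketch (assuming an FFME control named Media in the window):

        public MainWindow()
        {
            InitializeComponent();

            // Attach the handler so stream options can be adjusted before playback starts.
            Media.MediaOpening += Media_MediaOpening;
        }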
Example #2
        private static unsafe void DecodeAllFramesToImages(AVHWDeviceType HWDevice)
        {
            // Decode all frames from the url; note it may also be a local resource, e.g. string url = "../../sample_mpeg4.mp4";
            var url = "http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4"; // be advised this file holds 1440 frames

            using (var vsd = new VideoStreamDecoder(url, HWDevice))
            {
                Console.WriteLine($"codec name: {vsd.CodecName}");

                var info = vsd.GetContextInfo();
                info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

                var sourceSize             = vsd.FrameSize;
                var sourcePixelFormat      = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? vsd.PixelFormat : GetHWPixelFormat(HWDevice);
                var destinationSize        = sourceSize;
                var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;
                using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
                {
                    var frameNumber = 0;
                    while (vsd.TryDecodeNextFrame(out var frame))
                    {
                        var convertedFrame = vfc.Convert(frame);

                        using (var bitmap = new Bitmap(convertedFrame.width, convertedFrame.height, convertedFrame.linesize[0], PixelFormat.Format24bppRgb, (IntPtr)convertedFrame.data[0]))
                            bitmap.Save($"frame.{frameNumber:D8}.jpg", ImageFormat.Jpeg);

                        Console.WriteLine($"frame: {frameNumber}");
                        frameNumber++;
                    }
                }
            }
        }
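A typical entry point for this example might look like the sketch below; ConfigureHWDecoder is shown in Example #8 and FFmpegBinariesHelper appears in Examples #14/#15:

        public static void Main(string[] args)
        {
            // The native FFmpeg binaries must be registered before any decoding.
            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            // Optionally pick a hardware decoder (Example #8), then decode.
            ConfigureHWDecoder(out var deviceType);
            DecodeAllFramesToImages(deviceType);
        }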
Example #3
        public static HardwareVideoDecoder? ToHardwareVideoDecoder(this AVHWDeviceType hwDeviceType)
        {
            switch (hwDeviceType)
            {
            case AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA:
                return(HardwareVideoDecoder.NVDEC);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_QSV:
                return(HardwareVideoDecoder.QuickSyncVideo);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2:
                return(HardwareVideoDecoder.DXVA2);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU:
                return(HardwareVideoDecoder.VDPAU);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI:
                return(HardwareVideoDecoder.VAAPI);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_MEDIACODEC:
                return(HardwareVideoDecoder.MediaCodec);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_VIDEOTOOLBOX:
                return(HardwareVideoDecoder.VideoToolbox);

            default:
                return(null);
            }
        }
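On C# 8 or later the same mapping can be written as a switch expression; an equivalent sketch, mirroring the style of Example #17:

        public static HardwareVideoDecoder? ToHardwareVideoDecoder(this AVHWDeviceType hwDeviceType) =>
            hwDeviceType switch
            {
                AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA => HardwareVideoDecoder.NVDEC,
                AVHWDeviceType.AV_HWDEVICE_TYPE_QSV => HardwareVideoDecoder.QuickSyncVideo,
                AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2 => HardwareVideoDecoder.DXVA2,
                AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU => HardwareVideoDecoder.VDPAU,
                AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI => HardwareVideoDecoder.VAAPI,
                AVHWDeviceType.AV_HWDEVICE_TYPE_MEDIACODEC => HardwareVideoDecoder.MediaCodec,
                AVHWDeviceType.AV_HWDEVICE_TYPE_VIDEOTOOLBOX => HardwareVideoDecoder.VideoToolbox,
                _ => null
            };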
Example #4
        private static AVPixelFormat GetHWPixelFormat(AVHWDeviceType hWDevice)
        {
            switch (hWDevice)
            {
            case AVHWDeviceType.AV_HWDEVICE_TYPE_NONE: return(AVPixelFormat.AV_PIX_FMT_NONE);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU: return(AVPixelFormat.AV_PIX_FMT_VDPAU);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA: return(AVPixelFormat.AV_PIX_FMT_CUDA);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI: return(AVPixelFormat.AV_PIX_FMT_VAAPI);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2: return(AVPixelFormat.AV_PIX_FMT_NV12);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_QSV: return(AVPixelFormat.AV_PIX_FMT_QSV);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_VIDEOTOOLBOX: return(AVPixelFormat.AV_PIX_FMT_VIDEOTOOLBOX);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA: return(AVPixelFormat.AV_PIX_FMT_NV12);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_DRM: return(AVPixelFormat.AV_PIX_FMT_DRM_PRIME);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_OPENCL: return(AVPixelFormat.AV_PIX_FMT_OPENCL);

            case AVHWDeviceType.AV_HWDEVICE_TYPE_MEDIACODEC: return(AVPixelFormat.AV_PIX_FMT_MEDIACODEC);

            default: return(AVPixelFormat.AV_PIX_FMT_NONE);
            }
        }
Example #5
    public StreamDecoder(
        AVFormatContext *formatContext,
        int streamIndex,
        AVCodec *codec,
        AVHWDeviceType hwDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        : base(null)
    {
        _streamIndex   = streamIndex;
        _formatContext = formatContext;
        _stream        = formatContext->streams[streamIndex];

        _codecContext = ffmpeg.avcodec_alloc_context3(codec);
        if (hwDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            ffmpeg.av_hwdevice_ctx_create(&_codecContext->hw_device_ctx, hwDeviceType, null, null, 0)
            .ThrowExceptionIfError();
        }

        ffmpeg.avcodec_parameters_to_context(_codecContext, _stream->codecpar)
        .ThrowExceptionIfError();
        ffmpeg.avcodec_open2(_codecContext, codec, null).ThrowExceptionIfError();
        _codecContext->pkt_timebase = _stream->time_base;

        _codec = codec;

        _packet        = ffmpeg.av_packet_alloc();
        _frame         = ffmpeg.av_frame_alloc();
        _receivedFrame = ffmpeg.av_frame_alloc();
    }
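The constructor allocates a packet, two frames, and a codec context, so the decoder needs a matching Dispose; a minimal sketch using the field names above (the format context is passed in by the caller here, so it is deliberately not closed):

    public void Dispose()
    {
        // The av_*_free functions take double pointers, so copy the fields to locals first.
        var frame = _frame;
        ffmpeg.av_frame_free(&frame);

        var receivedFrame = _receivedFrame;
        ffmpeg.av_frame_free(&receivedFrame);

        var packet = _packet;
        ffmpeg.av_packet_free(&packet);

        // Freeing the codec context also releases its reference to hw_device_ctx.
        var codecContext = _codecContext;
        ffmpeg.avcodec_free_context(&codecContext);
    }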
Example #6
        public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            _pFormatContext = ffmpeg.avformat_alloc_context();
            _receivedFrame  = ffmpeg.av_frame_alloc();
            var pFormatContext = _pFormatContext;

            ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();
            AVCodec *codec = null;

            _streamIndex = ffmpeg
                           .av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0)
                           .ThrowExceptionIfError();
            _pCodecContext = ffmpeg.avcodec_alloc_context3(codec);
            if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0)
                .ThrowExceptionIfError();
            }
            ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar)
            .ThrowExceptionIfError();
            ffmpeg.avcodec_open2(_pCodecContext, codec, null).ThrowExceptionIfError();

            CodecName   = ffmpeg.avcodec_get_name(codec->id);
            FrameSize   = new Size(_pCodecContext->width, _pCodecContext->height);
            PixelFormat = _pCodecContext->pix_fmt;

            _pPacket = ffmpeg.av_packet_alloc();
            _pFrame  = ffmpeg.av_frame_alloc();
        }
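The TryDecodeNextFrame companion used by Examples #2 and #24 follows the standard send/receive decoding loop; a sketch assuming the fields above, with the hardware path transferring decoded frames from GPU to system memory:

        public bool TryDecodeNextFrame(out AVFrame frame)
        {
            ffmpeg.av_frame_unref(_pFrame);
            ffmpeg.av_frame_unref(_receivedFrame);
            int error;

            do
            {
                try
                {
                    // Keep reading packets until one belongs to our video stream.
                    do
                    {
                        ffmpeg.av_packet_unref(_pPacket);
                        error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);
                        if (error == ffmpeg.AVERROR_EOF)
                        {
                            frame = *_pFrame;
                            return false;
                        }
                        error.ThrowExceptionIfError();
                    } while (_pPacket->stream_index != _streamIndex);

                    ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket).ThrowExceptionIfError();
                }
                finally
                {
                    ffmpeg.av_packet_unref(_pPacket);
                }

                error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
            } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
            error.ThrowExceptionIfError();

            if (_pCodecContext->hw_device_ctx != null)
            {
                // Hardware frames live in GPU memory; copy to a software frame first.
                ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0).ThrowExceptionIfError();
                frame = *_receivedFrame;
            }
            else
            {
                frame = *_pFrame;
            }

            return true;
        }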
Example #7
        //public AVFormatContext* PFormatContext
        //{
        //    get
        //    {
        //        return _pFormatContext;
        //    }
        //}

        public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            _pFormatContext = ffmpeg.avformat_alloc_context();
            _receivedFrame  = ffmpeg.av_frame_alloc();
            var pFormatContext = _pFormatContext;

            ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();
            AVCodec *codec = null;

            // AV_CODEC_FLAG_LOW_DELAY is a codec flag, so passing it as av_find_best_stream's
            // flags argument has no effect (that parameter is reserved and expects 0);
            // it has to be set on the codec context before avcodec_open2 instead.
            _streamIndex = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO,
                                                      -1, -1, &codec, 0).ThrowExceptionIfError();
            _pCodecContext = ffmpeg.avcodec_alloc_context3(codec);
            _pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_LOW_DELAY;

            if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
            }
            ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar).ThrowExceptionIfError();
            ffmpeg.avcodec_open2(_pCodecContext, codec, null).ThrowExceptionIfError();

            CodecName            = ffmpeg.avcodec_get_name(codec->id);
            FrameSize            = new Size(_pCodecContext->width, _pCodecContext->height);
            PixelFormat          = _pCodecContext->pix_fmt;
            DurationMilliseconds = _pFormatContext->duration;
            Framerate            = 25;// _pFormatContext->video_codec->supported_framerates[0].num / _pFormatContext->video_codec->supported_framerates[0].den;

            _pPacket = ffmpeg.av_packet_alloc();
            _pFrame  = ffmpeg.av_frame_alloc();
        }
Example #8
        private static void ConfigureHWDecoder(out AVHWDeviceType HWtype)
        {
            HWtype = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;
            Console.WriteLine("Use hardware acceleration for decoding?[n]");
            var key = Console.ReadLine();
            var availableHWDecoders = new Dictionary <int, AVHWDeviceType>();

            if (key == "y")
            {
                Console.WriteLine("Select hardware decoder:");
                var type   = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;
                var number = 0;
                while ((type = ffmpeg.av_hwdevice_iterate_types(type)) != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
                {
                    Console.WriteLine($"{++number}. {type}");
                    availableHWDecoders.Add(number, type);
                }
                if (availableHWDecoders.Count == 0)
                {
                    Console.WriteLine("Your system have no hardware decoders.");
                    HWtype = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;
                    return;
                }
                int decoderNumber = availableHWDecoders.SingleOrDefault(t => t.Value == AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2).Key;
                if (decoderNumber == 0)
                {
                    decoderNumber = availableHWDecoders.First().Key;
                }
                Console.WriteLine($"Selected [{decoderNumber}]");
                int.TryParse(Console.ReadLine(), out var inputDecoderNumber);
                availableHWDecoders.TryGetValue(inputDecoderNumber == 0 ? decoderNumber : inputDecoderNumber, out HWtype);
            }
        }
Example #9
        private void ConfigureHWDecoder(out AVHWDeviceType HWtype)
        {
            HWtype = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;
            if (!Hw)
            {
                return;
            }
            Debug.WriteLine("Use hardware acceleration for decoding?[n]");

            var availableHWDecoders = new Dictionary <int, AVHWDeviceType>();

            Debug.WriteLine("Select hardware decoder:");
            var type   = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;
            var number = 0;

            while ((type = ffmpeg.av_hwdevice_iterate_types(type)) != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                Debug.WriteLine($"{++number}. {type}");
                availableHWDecoders.Add(number, type);
            }
            if (availableHWDecoders.Count == 0)
            {
                Debug.WriteLine("Your system have no hardware decoders.");
                HWtype = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;
                return;
            }
            int decoderNumber = availableHWDecoders.SingleOrDefault(t => t.Value == AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2).Key;

            if (decoderNumber == 0)
            {
                decoderNumber = availableHWDecoders.First().Key;
            }
            availableHWDecoders.TryGetValue(decoderNumber, out HWtype);
        }
Example #10
        public AudioStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            _pFormatContext = ffmpeg.avformat_alloc_context();
            _receivedFrame  = ffmpeg.av_frame_alloc();
            var pFormatContext = _pFormatContext;

            ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();
            AVCodec *videoCodec = null;

            _streamVideoIndex   = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &videoCodec, 0).ThrowExceptionIfError();
            _pVideoCodecContext = ffmpeg.avcodec_alloc_context3(videoCodec);
            if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                ffmpeg.av_hwdevice_ctx_create(&_pVideoCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
            }
            ffmpeg.avcodec_parameters_to_context(_pVideoCodecContext, _pFormatContext->streams[_streamVideoIndex]->codecpar).ThrowExceptionIfError();
            if (_pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.den != 0)
            {
                Fps = _pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.num / _pFormatContext->streams[_streamVideoIndex]->avg_frame_rate.den;
                Console.WriteLine("计算得到FPS");
            }
            else
            {
                Console.WriteLine("默认FPS");
                Fps = 25;
            }
            ffmpeg.avcodec_open2(_pVideoCodecContext, videoCodec, null).ThrowExceptionIfError();

            CodecName   = ffmpeg.avcodec_get_name(videoCodec->id);
            FrameSize   = new Size(_pVideoCodecContext->width, _pVideoCodecContext->height);
            PixelFormat = _pVideoCodecContext->pix_fmt;

            _pPacket = ffmpeg.av_packet_alloc();
            _pFrame  = ffmpeg.av_frame_alloc();



            AVCodec *audioCodec = null;

            _streamAudioIndex   = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &audioCodec, 0).ThrowExceptionIfError();
            _pAudioCodecContext = ffmpeg.avcodec_alloc_context3(audioCodec);
            ffmpeg.avcodec_parameters_to_context(_pAudioCodecContext, _pFormatContext->streams[_streamAudioIndex]->codecpar).ThrowExceptionIfError();
            ffmpeg.avcodec_open2(_pAudioCodecContext, audioCodec, null).ThrowExceptionIfError();
            if (_streamAudioIndex > 0)
            {
                AVStream *avs = _pFormatContext->streams[_streamAudioIndex];
                Console.WriteLine($"codec_id:{avs->codecpar->codec_id}");
                Console.WriteLine($"format:{avs->codecpar->format}");
                Console.WriteLine($"sample_rate:{avs->codecpar->sample_rate}");
                Console.WriteLine($"channels:{avs->codecpar->channels}");
                Console.WriteLine($"frame_size:{avs->codecpar->frame_size}");
                in_sample_fmt  = _pAudioCodecContext->sample_fmt;
                in_sample_rate = _pAudioCodecContext->sample_rate;    //输入的采样率
                in_ch_layout   = _pAudioCodecContext->channel_layout; //输入的声道布局
                in_channels    = _pAudioCodecContext->channels;
                in_start_time  = avs->start_time;
            }
        }
Example #11
        public VideoStreamDecoder(string url, VideoInputType inputType, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            try
            {
                ffmpeg.avdevice_register_all();

                iFormatContext = ffmpeg.avformat_alloc_context();
                receivedFrame  = ffmpeg.av_frame_alloc();

                var _iFormatContext = iFormatContext;

                AVDictionary *avDict = null; // must be definitely assigned before its address is taken
                ffmpeg.av_dict_set(&avDict, "reorder_queue_size", "1", 0);

                switch (inputType)
                {
                case VideoInputType.CAM_DEVICE:
                    AVInputFormat *iformat = ffmpeg.av_find_input_format("dshow");
                    ffmpeg.avformat_open_input(&_iFormatContext, url, iformat, null).ThrowExceptionIfError();
                    break;

                case VideoInputType.RTP_RTSP:
                    ffmpeg.avformat_open_input(&_iFormatContext, url, null, &avDict).ThrowExceptionIfError();
                    break;

                default:
                    break;
                }

                ffmpeg.avformat_find_stream_info(iFormatContext, null).ThrowExceptionIfError();

                AVCodec *codec = null; // must be definitely assigned before its address is taken

                dec_stream_index = ffmpeg.av_find_best_stream(iFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0).ThrowExceptionIfError();


                iCodecContext = ffmpeg.avcodec_alloc_context3(codec);

                if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
                {
                    ffmpeg.av_hwdevice_ctx_create(&iCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
                }

                ffmpeg.avcodec_parameters_to_context(iCodecContext, iFormatContext->streams[dec_stream_index]->codecpar).ThrowExceptionIfError();
                ffmpeg.avcodec_open2(iCodecContext, codec, null).ThrowExceptionIfError();

                CodecName   = ffmpeg.avcodec_get_name(codec->id);
                FrameSize   = new Size(iCodecContext->width, iCodecContext->height);
                PixelFormat = iCodecContext->pix_fmt;

                rawPacket    = ffmpeg.av_packet_alloc();
                decodedFrame = ffmpeg.av_frame_alloc();
            }
            catch (AccessViolationException ex)
            {
                throw new AccessViolationException("Access Violation Exception", ex);
            }
        }
Example #12
        private static AVPixelFormat FindPixelFormat(AVHWDeviceType deviceType)
        {
            if (deviceType == AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA)
            {
                return(AVPixelFormat.AV_PIX_FMT_D3D11);
            }

            throw new NotImplementedException($"Hardware device type '{deviceType}' not supported");
        }
Example #13
        public void AutoSelectHardwareDevice()
        {
            var types = MediaHelper.GetHardwareDeviceTypes();

            if (types.Length > 0)
            {
                HardwareDevice = types[0];
            }
        }
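MediaHelper.GetHardwareDeviceTypes is not shown in these examples; a plausible sketch (hypothetical helper) using ffmpeg.av_hwdevice_iterate_types, the same pattern as Example #16:

        // Hypothetical helper: enumerate every hardware device type this FFmpeg build supports.
        public static AVHWDeviceType[] GetHardwareDeviceTypes()
        {
            var types = new List<AVHWDeviceType>();
            var type  = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;

            while ((type = ffmpeg.av_hwdevice_iterate_types(type)) != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                types.Add(type);
            }

            return types.ToArray();
        }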
Example #14
        private void Form1_Load(object sender, EventArgs e)
        {
            Image screenboard = ScreenBoradImage.Image;

            status        = "정상 운행중";
            end           = "프로그램을 종료합니다.";
            start         = "프로그램을 시작합니다.";
            connected     = "연결이 되었습니다.";
            disconnected  = "연결이 끊겼습니다.";
            camerastatus1 = "전방 카메라 ";
            camerastatus2 = "후방 카메라 ";
            firstlcs      = "1차로 ";
            secondlcs     = "2차로 ";
            thirdlcs      = "3차로 ";
            alllcs        = "전차로 ";
            stop          = "진입금지";
            forward       = "진입가능";
            this.lgm      = new LogManager(start);
            enterhold     = true;
            enterhold2    = true;
            enterhold3    = true;
            camera1cnt    = 0;
            camera2cnt    = 0;
            video1cnt     = 0;
            video2cnt     = 0;
            gridcnt       = 0;
            imagemax1     = false;
            imagemax2     = false;
            camera1record = 0;
            camera2record = 0;
            this.dataGridView1.ColumnHeadersDefaultCellStyle.Font = new Font("Courier New", 13, FontStyle.Bold);
            this.dataGridView1.DefaultCellStyle.Font          = new Font("Courier New", 13);
            this.dataGridView1.RowsDefaultCellStyle.BackColor = Color.DarkBlue;

            lcsgo     = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=0&on=1&time=0&pwd=0&";
            lcsgostop = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=0&on=0&time=0&pwd=0&";

            lcsstop     = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=1&on=1&time=0&pwd=0&";
            lcsstopstop = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=1&on=0&time=0&pwd=0&";

            lcsflash     = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=2&on=1&time=0&pwd=0&";
            lcsflashstop = "http://192.168.10.105/relay_cgi.cgi?type=0&relay=2&on=0&time=0&pwd=0&";

            //SendLcsData(lcsgo);
            PlayCamera1();
            PlayCamera2();
            hwDeviceType  = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;
            hwDeviceType2 = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;    //temp

            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            //ConfigureHWDecoder(out hwDeviceType);

            isInit  = false;
            isInit2 = false;
        }
Example #15
        public EasyFFmpegManager()
        {
            hwDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;    //temp

            FFmpegBinariesHelper.RegisterFFmpegBinaries();

            //ConfigureHWDecoder(out hwDeviceType);

            isInit = false;
        }
Example #16
        private List <AVHWDeviceType> GetHWDevices()
        {
            List <AVHWDeviceType> hwDevices = new List <AVHWDeviceType>();
            AVHWDeviceType        type      = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE;

            while ((type = av_hwdevice_iterate_types(type)) != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                hwDevices.Add(type);
            }

            return(hwDevices);
        }
Example #17
 public static AVPixelFormat GetHWPixelFormat(AVHWDeviceType hWDevice)
 {
     return(hWDevice switch {
         AVHWDeviceType.AV_HWDEVICE_TYPE_NONE => AVPixelFormat.AV_PIX_FMT_NONE,
         AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU => AVPixelFormat.AV_PIX_FMT_VDPAU,
         AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA => AVPixelFormat.AV_PIX_FMT_CUDA,
         AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI => AVPixelFormat.AV_PIX_FMT_VAAPI,
         AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2 => AVPixelFormat.AV_PIX_FMT_NV12,
         AVHWDeviceType.AV_HWDEVICE_TYPE_QSV => AVPixelFormat.AV_PIX_FMT_QSV,
         AVHWDeviceType.AV_HWDEVICE_TYPE_VIDEOTOOLBOX => AVPixelFormat.AV_PIX_FMT_VIDEOTOOLBOX,
         AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA => AVPixelFormat.AV_PIX_FMT_NV12,
         AVHWDeviceType.AV_HWDEVICE_TYPE_DRM => AVPixelFormat.AV_PIX_FMT_DRM_PRIME,
         AVHWDeviceType.AV_HWDEVICE_TYPE_OPENCL => AVPixelFormat.AV_PIX_FMT_OPENCL,
         AVHWDeviceType.AV_HWDEVICE_TYPE_MEDIACODEC => AVPixelFormat.AV_PIX_FMT_MEDIACODEC,
         _ => AVPixelFormat.AV_PIX_FMT_NONE
     });
 }
Example #18
        public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            source          = url;
            _pFormatContext = ffmpeg.avformat_alloc_context();
            _receivedFrame  = ffmpeg.av_frame_alloc();
            var pFormatContext = _pFormatContext;

            ffmpeg.avformat_open_input(&pFormatContext, source, null, null).ThrowExceptionIfError();
            ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();
            AVCodec *codec = null;

            _streamIndex   = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0).ThrowExceptionIfError();
            _pCodecContext = ffmpeg.avcodec_alloc_context3(codec);
            if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
            }
            pStream = _pFormatContext->streams[_streamIndex];
            ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar).ThrowExceptionIfError();
            ffmpeg.avcodec_open2(_pCodecContext, codec, null).ThrowExceptionIfError();

            CodecName   = ffmpeg.avcodec_get_name(codec->id);
            FrameSize   = new Size(_pCodecContext->width, _pCodecContext->height);
            PixelFormat = _pCodecContext->pix_fmt;


            _pPacket = ffmpeg.av_packet_alloc();
            _pFrame  = ffmpeg.av_frame_alloc();

            PConvertContext = ffmpeg.sws_getContext(_pCodecContext->width, _pCodecContext->height, _pCodecContext->pix_fmt,
                                                    _pCodecContext->width, _pCodecContext->height, AVPixelFormat.AV_PIX_FMT_BGRA,
                                                    ffmpeg.SWS_FAST_BILINEAR, null, null, null);

            if (PConvertContext == null)
            {
                throw new ApplicationException(@"Could not initialize the conversion context.");
            }

            dstData                 = new byte_ptrArray4();
            dstLinesize             = new int_array4();
            convertedFrameBufferPtr = Marshal.AllocHGlobal(ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_BGRA, _pCodecContext->width, _pCodecContext->height, 1));
            ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize,
                                        (byte *)convertedFrameBufferPtr,
                                        AVPixelFormat.AV_PIX_FMT_BGRA, _pCodecContext->width, _pCodecContext->height, 1);
        }
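With PConvertContext and the preallocated BGRA buffer in place, converting a decoded frame is a single sws_scale call; a usage sketch (the method name is hypothetical):

        // Hypothetical helper: convert a decoded frame into the preallocated BGRA buffer.
        public unsafe void ConvertToBgra(AVFrame frame)
        {
            ffmpeg.sws_scale(PConvertContext,
                             frame.data, frame.linesize, 0, frame.height,
                             dstData, dstLinesize);

            // dstData[0] / dstLinesize[0] now describe a BGRA image of FrameSize.
        }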
Example #19
    private Texture2D DecodeFrameToTexture2D(String filename, int frameIndex = 10,
                                             AVHWDeviceType HWDevice         = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
    {
        using (var vsd = new VideoStreamDecoder(filename, HWDevice))
        {
            Debug.Log($"codec name: {vsd.CodecName}");

            var info = vsd.GetContextInfo();
            info.ToList().ForEach(x => Debug.Log($"{x.Key} = {x.Value}"));

            var sourceSize        = vsd.FrameSize;
            var sourcePixelFormat = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE
                ? vsd.PixelFormat
                : GetHWPixelFormat(HWDevice);
            var destinationSize        = sourceSize;
            var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;
            using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize,
                                                     destinationPixelFormat))
            {
                var currentFrame = 0;

                while (vsd.TryDecodeNextFrame(out var frame) && _isRunning)
                {
                    Debug.Log($"Processing frame: {currentFrame}");
                    var avframe = vfc.Convert(frame);
                    if (OnFrameRendered != null)
                    {
                        byte[] imageData;
                        vsd.AvFrameToImageByteArray(avframe, out imageData);
                        OnFrameRendered(imageData);
                    }

                    if (currentFrame == frameIndex)
                    {
                        Debug.Log($"Saving frame: {frameIndex}");
                        return(vsd.AVFrameToTexture2D(avframe));
                    }

                    currentFrame++;
                }

                return(new Texture2D(4, 4));
            }
        }
    }
Example #20
        private unsafe AVPixelFormat GetHWPixelFormat(AVHWDeviceType hwDevice, AVCodec *codec)
        {
            const int     AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX = 1;
            AVPixelFormat pixelFormat = AVPixelFormat.AV_PIX_FMT_NONE;

            for (int i = 0; ; i++)
            {
                AVCodecHWConfig *hwConfig = ffmpeg.avcodec_get_hw_config(codec, i);
                if (hwConfig == null)
                {
                    throw new Exception($"Failed to find compatible pixel format for {hwDevice}");
                }
                if ((hwConfig->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) == 0 || hwConfig->device_type != hwDevice)
                {
                    continue;
                }

                AVHWFramesConstraints *hwConstraints = ffmpeg.av_hwdevice_get_hwframe_constraints(_pCodecContext->hw_device_ctx, hwConfig);
                if (hwConstraints != null)
                {
                    for (AVPixelFormat *p = hwConstraints->valid_sw_formats; *p != AVPixelFormat.AV_PIX_FMT_NONE; p++)
                    {
                        pixelFormat = *p;
                        if (ffmpeg.sws_isSupportedInput(pixelFormat) > 0)
                        {
                            break;
                        }
                        else
                        {
                            pixelFormat = AVPixelFormat.AV_PIX_FMT_NONE;
                        }
                    }

                    ffmpeg.av_hwframe_constraints_free(&hwConstraints);
                }

                if (pixelFormat != AVPixelFormat.AV_PIX_FMT_NONE)
                {
                    return(pixelFormat);
                }
            }
        }
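An alternative to probing codec configurations is to read the software pixel format directly off the frame after it has been transferred from the hardware device; a short sketch, assuming the usual receive/transfer loop and decoder fields named as in Example #6:

            // After av_hwframe_transfer_data, the destination frame carries the actual
            // software pixel format the driver chose (e.g. NV12 for DXVA2/D3D11VA).
            ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0).ThrowExceptionIfError();
            var swFormat = (AVPixelFormat)_receivedFrame->format;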
Example #21
        public static unsafe byte[]? GetThumbnail(FfmpegSettings settings, bool extendedLogging)
        {
            try {
                if (UseNativeBinding)
                {
                    bool isGrayByte = settings.GrayScale == 1;

                    AVHWDeviceType HWDevice = HardwareAccelerationMode switch {
                        FFHardwareAccelerationMode.vdpau => AVHWDeviceType.AV_HWDEVICE_TYPE_VDPAU,
                        FFHardwareAccelerationMode.dxva2 => AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2,
                        FFHardwareAccelerationMode.vaapi => AVHWDeviceType.AV_HWDEVICE_TYPE_VAAPI,
                        FFHardwareAccelerationMode.qsv => AVHWDeviceType.AV_HWDEVICE_TYPE_QSV,
                        FFHardwareAccelerationMode.cuda => AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
                        _ => AVHWDeviceType.AV_HWDEVICE_TYPE_NONE
                    };

                    using var vsd = new VideoStreamDecoder(settings.File, HWDevice);
                    if (vsd.PixelFormat < 0 || vsd.PixelFormat >= AVPixelFormat.AV_PIX_FMT_NB)
                    {
                        throw new Exception($"Invalid source pixel format");
                    }

                    Size          sourceSize             = vsd.FrameSize;
                    Size          destinationSize        = isGrayByte ? new Size(16, 16) : new Size(100, Convert.ToInt32(sourceSize.Height * (100 / (double)sourceSize.Width)));
                    AVPixelFormat destinationPixelFormat = isGrayByte ? AVPixelFormat.AV_PIX_FMT_GRAY8 : AVPixelFormat.AV_PIX_FMT_BGRA;
                    using var vfc =
                              new VideoFrameConverter(sourceSize, vsd.PixelFormat, destinationSize, destinationPixelFormat);

                    if (!vsd.TryDecodeFrame(out var srcFrame, settings.Position))
                    {
                        throw new Exception($"Failed decoding frame at {settings.Position}");
                    }
                    AVFrame convertedFrame = vfc.Convert(srcFrame);

                    if (isGrayByte)
                    {
                        int length = ffmpeg.av_image_get_buffer_size(destinationPixelFormat, convertedFrame.width,
                                                                     convertedFrame.height, 1).ThrowExceptionIfError();
                        byte[] data = new byte[length];
                        Marshal.Copy((IntPtr)convertedFrame.data[0], data, 0, length);
                        return(data);
                    }
                    else
                    {
                        int width      = convertedFrame.width;
                        int height     = convertedFrame.height;
                        var totalBytes = width * height * 4;
                        var rgbaBytes  = new byte[totalBytes];
                        int stride     = convertedFrame.linesize[0];
                        if (stride == width * 4)
                        {
                            Marshal.Copy((IntPtr)convertedFrame.data[0], rgbaBytes, 0, totalBytes);
                        }
                        else
                        {
                            var sourceOffset = 0;
                            var destOffset   = 0;
                            var byteWidth    = width * 4;
                            for (var y = 0; y < height; y++)
                            {
                                Marshal.Copy((IntPtr)convertedFrame.data[0] + sourceOffset, rgbaBytes, destOffset, byteWidth);
                                sourceOffset += stride;
                                destOffset   += byteWidth;
                            }
                        }
                        var image = Image.LoadPixelData <SixLabors.ImageSharp.PixelFormats.Bgra32>(rgbaBytes, width, height);
                        using MemoryStream stream = new();
                        image.Save(stream, new SixLabors.ImageSharp.Formats.Jpeg.JpegEncoder());
                        return(stream.ToArray());
                    }
                }
            }
            catch (Exception e) {
                Logger.Instance.Info($"Failed using native FFmpeg binding on '{settings.File}', try switching to process mode. Exception: {e}");
            }


            //https://docs.microsoft.com/en-us/dotnet/csharp/how-to/concatenate-multiple-strings#string-literals
            string ffmpegArguments = $" -hide_banner -loglevel {(extendedLogging ? "error" : "quiet")}" +
                                     $" -y -hwaccel {HardwareAccelerationMode} -ss {settings.Position} -i \"{settings.File}\"" +
                                     $" -t 1 -f {(settings.GrayScale == 1 ? "rawvideo -pix_fmt gray" : "mjpeg")} -vframes 1" +
                                     $" {(settings.GrayScale == 1 ? "-s 16x16" : "-vf scale=100:-1")} {CustomFFArguments} \"-\"";

            using var process = new Process {
                      StartInfo = new ProcessStartInfo {
                          Arguments              = ffmpegArguments,
                          FileName               = FFmpegPath,
                          CreateNoWindow         = true,
                          RedirectStandardInput  = false,
                          RedirectStandardOutput = true,
                          WorkingDirectory       = Path.GetDirectoryName(FFmpegPath)!,
                          RedirectStandardError  = extendedLogging,
                          WindowStyle            = ProcessWindowStyle.Hidden
                      }
                  };
            string errOut = string.Empty;

            byte[]? bytes = null;
            try {
                process.EnableRaisingEvents = true;
                process.Start();
                if (extendedLogging)
                {
                    process.ErrorDataReceived += new DataReceivedEventHandler((sender, e) => {
                        if (e.Data?.Length > 0)
                        {
                            errOut += Environment.NewLine + e.Data;
                        }
                    });
                    process.BeginErrorReadLine();
                }
                using var ms = new MemoryStream();
                process.StandardOutput.BaseStream.CopyTo(ms);

                if (!process.WaitForExit(TimeoutDuration))
                {
                    throw new TimeoutException($"FFmpeg timed out on file: {settings.File}");
                }
                else if (extendedLogging)
                {
                    process.WaitForExit();                     // Because of asynchronous event handlers, see: https://github.com/dotnet/runtime/issues/18789
                }
                if (process.ExitCode != 0)
                {
                    throw new FFInvalidExitCodeException($"FFmpeg exited with: {process.ExitCode}");
                }

                bytes = ms.ToArray();
                if (bytes.Length == 0)
                {
                    bytes = null;                       // Makes subsequent checks easier
                }
                else if (settings.GrayScale == 1 && bytes.Length != 256)
                {
                    bytes   = null;
                    errOut += $"{Environment.NewLine}graybytes length != 256";
                }
            }
            catch (Exception e) {
                errOut += $"{Environment.NewLine}{e.Message}";
                try {
                    if (process.HasExited == false)
                    {
                        process.Kill();
                    }
                }
                catch { }
                bytes = null;
            }
            if (bytes == null || errOut.Length > 0)
            {
                string message = $"{((bytes == null) ? "ERROR: Failed to retrieve" : "WARNING: Problems while retrieving")} {(settings.GrayScale == 1 ? "graybytes" : "thumbnail")} from: {settings.File}";
                if (extendedLogging)
                {
                    message += $":{Environment.NewLine}{FFmpegPath} {ffmpegArguments}";
                }
                Logger.Instance.Info($"{message}{errOut}");
            }
            return(bytes);
        }
Example #22
 private static extern int av_hwdevice_ctx_create(AVBufferRef **device_ctx, AVHWDeviceType type, [MarshalAs(UnmanagedType.LPUTF8Str)] string device, AVDictionary *opts, int flags);
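On its own the extern above will not bind; it needs a DllImport attribute naming the avutil library. A sketch (the library file name is build-specific and an assumption here):

        // The exact avutil file name (here avutil-58) depends on the FFmpeg build being shipped.
        [DllImport("avutil-58", CallingConvention = CallingConvention.Cdecl)]
        private static extern int av_hwdevice_ctx_create(AVBufferRef **device_ctx, AVHWDeviceType type, [MarshalAs(UnmanagedType.LPUTF8Str)] string device, AVDictionary *opts, int flags);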
Example #23
        public VideoStreamDecoder(string url, VIDEO_INPUT_TYPE inputType = VIDEO_INPUT_TYPE.RTP_RTSP, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            ffmpeg.avdevice_register_all();
            AVFormatContext *pFormatCtx = ffmpeg.avformat_alloc_context();
            AVDictionary *   options    = null;

            ffmpeg.av_dict_set(&options, "list_devices", "true", 0);
            AVInputFormat *iformat = ffmpeg.av_find_input_format("dshow");

            Console.WriteLine("========Device Info=============\n");
            ffmpeg.avformat_open_input(&pFormatCtx, null, iformat, &options);
            Console.WriteLine("===============================\n");

            AVDeviceInfoList *device_list = null;
            int result = ffmpeg.avdevice_list_input_sources(iformat, null, options, &device_list);

            Console.WriteLine(result);

            //iFormatContext = ffmpeg.avformat_alloc_context();
            //receivedFrame = ffmpeg.av_frame_alloc();
            //var _iFormatContext = iFormatContext;

            //int i;

            //AVDictionary* avDict;
            //ffmpeg.av_dict_set(&avDict, "reorder_queue_size", "1", 0);

            //switch (inputType)
            //{
            //    case VIDEO_INPUT_TYPE.CAM_DEVICE:
            //        AVInputFormat* iformat = ffmpeg.av_find_input_format("dshow");
            //        AVDeviceInfoList* listdevice = null;
            //        ffmpeg.avdevice_list_devices(_iFormatContext, (AVDeviceInfoList**)listdevice);


            //        Console.WriteLine(listdevice->devices[0]->ToString());


            //        //ffmpeg.avformat_open_input(&_iFormatContext, url, iformat, null).ThrowExceptionIfError();
            //        break;
            //    case VIDEO_INPUT_TYPE.RTP_RTSP:
            //        ffmpeg.avformat_open_input(&_iFormatContext, @"C:\Users\admin\Desktop\result1.avi", null, null);
            //        break;
            //    default:
            //        break;
            //}

            Console.ReadLine();
            //_iFormatContext->streams[0]->time_base = new AVRational { num = 1, den = 30 };
            //_iFormatContext->streams[0]->avg_frame_rate = new AVRational { num = 30, den = 1 };
            //AVCodec* videoCodec = null;
            //AVCodec* audioCodec = null;

            //for (i = 0; i < _iFormatContext->nb_streams; i++)
            //{
            //    if (_iFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
            //    {
            //        videoIndex = i;
            //        videoCodecContext = _iFormatContext->streams[i]->codec;
            //        videoCodec = ffmpeg.avcodec_find_decoder(videoCodecContext->codec_id);
            //    }
            //    else if (_iFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
            //    {
            //        audioCodeContext = _iFormatContext->streams[i]->codec;
            //        audioCodec = ffmpeg.avcodec_find_decoder(audioCodeContext->codec_id);
            //        audioIndex = i;
            //    }
            //}

            //ffmpeg.avformat_find_stream_info(_iFormatContext, null).ThrowExceptionIfError(); // Data must be read from the media before the streams can be accessed.

            //videoStreamIndex = ffmpeg.av_find_best_stream(_iFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &videoCodec, 0).ThrowExceptionIfError();
            //audioStreamIndex = ffmpeg.av_find_best_stream(_iFormatContext, AVMediaType.AVMEDIA_TYPE_AUDIO, -1, -1, &audioCodec, 0);

            //Console.WriteLine($"VideoStreamIndex :  {videoIndex}    AudioStreamIndex :  {audioIndex}");
            //Console.WriteLine($"VideoCodec  : {videoCodec->id}    AudioCodec :  {audioCodec->id}");

            //videoCodecContext = ffmpeg.avcodec_alloc_context3(videoCodec);
            //audioCodeContext = ffmpeg.avcodec_alloc_context3(audioCodec);


            //if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            //{
            //    ffmpeg.av_hwdevice_ctx_create(&videoCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
            //}

            //ffmpeg.avcodec_parameters_to_context(videoCodecContext, _iFormatContext->streams[videoStreamIndex]->codecpar).ThrowExceptionIfError();   // Copies the stream's parameters into the context; anything missing keeps the codec's defaults. Simple codecs need no extra options, but high-end codecs have required ones, and without this copy some files fail to open. The next function opens the codec and initializes the context to match.
            //ffmpeg.avcodec_parameters_to_context(audioCodeContext, _iFormatContext->streams[audioStreamIndex]->codecpar).ThrowExceptionIfError();

            //ffmpeg.avcodec_open2(videoCodecContext, videoCodec, null).ThrowExceptionIfError();  // The third argument is an options dictionary for the codec; pass NULL if none are needed. At this point the codec and context are fully ready to decompress packets into frames. When done, free the context and related memory.
            //ffmpeg.avcodec_open2(audioCodeContext, audioCodec, null).ThrowExceptionIfError();

            //CodecName = ffmpeg.avcodec_get_name(videoCodec->id);
            //AudioCodecName = ffmpeg.avcodec_get_name(audioCodec->id);
            //swrCtx = ffmpeg.swr_alloc();
            //FrameSize = new Size(videoCodecContext->width, videoCodecContext->height);
            //PixelFormat = videoCodecContext->pix_fmt;
            ////Console.WriteLine(audioCodecName);

            //swrCtx_Audio = ffmpeg.swr_alloc();

            //AVSampleFormat in_sample_fmt = audioCodeContext->sample_fmt;
            //int in_sample_rate = audioCodeContext->sample_rate;
            //long in_ch_layout = (long)audioCodeContext->channel_layout;

            //out_sample_fmt = AVSampleFormat.AV_SAMPLE_FMT_FLTP;
            //int out_sample_rate = 44100;
            //int out_ch_layout = ffmpeg.AV_CH_LAYOUT_MONO;

            //ffmpeg.swr_alloc_set_opts(swrCtx_Audio, out_ch_layout, out_sample_fmt, out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, null);
            //ffmpeg.swr_init(swrCtx_Audio);
            ////Resampling setting options-------------------------------------------- ---------------end
            ////Get the number of output channels
            //out_channel_nb = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_ch_layout);
            ////Store pcm data
            //out_buffer_audio = (byte*)ffmpeg.av_malloc(2 * 8000);

            //rawPacket = ffmpeg.av_packet_alloc();
            //decodedFrame = ffmpeg.av_frame_alloc();
        }
Example #24
        public static IEnumerable <(PointerBitmap bitmap, VideoFrameState state)> DecodeFrames(string url, AVHWDeviceType HWDevice)
        {
            _EnsureBinariesAreSet();

            using (var vsd = new VideoStreamDecoder(url, HWDevice))
            {
                var info  = GetDecoderInfo(vsd);
                var state = new Dictionary <string, long>();

                var context = new VideoFrameState(info, state);

                var sourceSize             = vsd.FrameSize;
                var sourcePixelFormat      = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? vsd.PixelFormat : GetHWPixelFormat(HWDevice);
                var destinationSize        = sourceSize;
                var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;

                long index = 0;

                using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
                {
                    while (vsd.TryDecodeNextFrame(out var frame))
                    {
                        var convertedFrame = vfc.Convert(frame);

                        state["index"] = index;

                        state["pts"] = frame.pts;
                        // state["pkt_pts"] = frame.pkt_pts;
                        state["pkt_dts"] = frame.pkt_dts;
                        state["best_effort_timestamp"] = frame.best_effort_timestamp;

                        state["display_picture_number"] = frame.display_picture_number;
                        state["coded_picture_number"]   = frame.coded_picture_number;
                        state["decode_error_flags"]     = frame.decode_error_flags;

                        yield return(AsPointerBitmap(convertedFrame), context);

                        ++index;
                    }
                }
            }
        }
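Consuming the iterator is straightforward; a usage sketch (ProcessFrame is a hypothetical consumer):

        foreach (var (bitmap, state) in DecodeFrames("sample.mp4", AVHWDeviceType.AV_HWDEVICE_TYPE_NONE))
        {
            // The bitmap points into the converter's reused buffer and is only valid
            // for the current iteration; copy the pixels if the frame must outlive it.
            ProcessFrame(bitmap); // hypothetical consumer
        }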
Example #25
        private unsafe void DecodeAllFramesToImages(AVHWDeviceType HWDevice)
        {
            isWait = true;
            frames.Clear();
            audioFrames.Clear();
            // Decode all frames from the url; note it may also be a local resource, e.g. string url = "../../sample_mpeg4.mp4";
            //var url = "http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4"; // be advised this file holds 1440 frames
            //var url = "rtmp://58.200.131.2:1935/livetv/hunantv";
            int curGuid = currentGuid;

            //FileStream fs = new FileStream("E://tt.mp3", FileMode.OpenOrCreate);
            using (var vsd = new AudioStreamDecoder(url, HWDevice))
            {
                TempBufferSize = vsd.Fps / 2;
                StartVideoThread(vsd.Fps);
                StartAudioThread();
                Console.WriteLine($"FPS:{vsd.Fps}");
                Console.WriteLine($"codec name: {vsd.CodecName}");

                var info = vsd.GetContextInfo();
                info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

                var sourceSize        = vsd.FrameSize;
                var sourcePixelFormat = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? vsd.PixelFormat : GetHWPixelFormat(HWDevice);
                //var destinationSize = sourceSize;
                var destinationSize        = new Size(1920, 1080);
                var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;
                using (VideoFrameConverter vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
                {
                    using (AudioFrameConverter afc = new AudioFrameConverter(vsd.in_sample_fmt, vsd.in_sample_rate, vsd.in_channels))
                    {
                        naudioInit(vsd.in_sample_rate, vsd.in_channels);
                        Stopwatch stopwatch = new Stopwatch();
                        while (vsd.TryDecodeNextFrame(out var frame, out bool isVideo))
                        {
                            if (curGuid != currentGuid)
                            {
                                break;
                            }
                            if (isVideo)
                            {
                                if (!isLastMode)
                                {
                                    while (frames.Count >= TempBufferSize)
                                    {
                                        if (curGuid != currentGuid)
                                        {
                                            break;
                                        }
                                        Thread.Sleep(1);
                                    }
                                }
                                //stopwatch.Start();
                                AVFrame convertedFrame = vfc.Convert(frame);
                                int     length         = convertedFrame.height * convertedFrame.linesize[0];
                                byte[]  managedArray   = IntPrtToBytes((IntPtr)convertedFrame.data[0], length);
                                VideoSt st             = new VideoSt()
                                {
                                    data   = managedArray,
                                    width  = convertedFrame.width,
                                    height = convertedFrame.height,
                                    stride = convertedFrame.linesize[0]
                                };
                                frames.Add(st);
                                if (frames.Count >= TempBufferSize)
                                {
                                    isWait = false;
                                }
                                //stopwatch.Stop();
                                //Console.WriteLine($"解析时间:{stopwatch.ElapsedMilliseconds}毫秒");
                                //stopwatch.Reset();
                            }
                            else
                            {
                                var    convertedFrame = afc.Convert(frame);
                                int    length         = convertedFrame.pkt_size;
                                byte[] managedArray   = new byte[0];
                                if (managedArray.Length != length)
                                {
                                    managedArray = new byte[length];
                                }
                                Marshal.Copy((IntPtr)convertedFrame.data[0], managedArray, 0, managedArray.Length);
                                audioFrames.Add(new AudioSt()
                                {
                                    data = managedArray
                                });
                            }
                        }
                    }
                }
            }
            //fs.Close();
        }
Example #26
 public MediaDecoder(Uri source, AVHWDeviceType hardwareDevice)
 {
     Source         = source;
     HardwareDevice = hardwareDevice;
     Open();
 }
Example #27
        public BGBuilder(AVHWDeviceType HWDevice, string url)
        {
            var  mediaInfo       = new MediaInfoDotNet.MediaFile(url);
            var  videoInfo       = mediaInfo.Video[0];
            var  videoFrameRate  = videoInfo.frameRate; // Hz (fps)
            var  videoFrameCount = videoInfo.frameCount;
            int  bgFrameCount    = 0;
            long bgFrameInterval = 5 * (long)videoInfo.frameRate; // in frames
            long frameCount      = 0L;

            using (var vsd = new VideoStreamDecoder(url, HWDevice))
            {
                var           srcSize        = vsd.FrameSize;
                var           srcPixelFormat = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? vsd.PixelFormat : GetHWPixelFormat(HWDevice);
                var           dstSize        = srcSize;
                var           dstPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;
                var           width          = dstSize.Width;
                var           height         = dstSize.Height;
                List <Bitmap> frames         = new List <Bitmap>();

                using (var vfc = new VideoFrameConverter(srcSize, srcPixelFormat, dstSize, dstPixelFormat))
                {
                    // Every 30s, vote on most "unchanged" pixel colours (every 5 seconds = 6 images) and assemble the BG
                    // Store the voted-unchanged-colour images
                    while (vsd.TryDecodeNextFrame(out var frame))
                    {
                        if (frameCount % bgFrameInterval == 0)
                        {
                            var    convertedFrame = vfc.Convert(frame);
                            Bitmap currImage      = new Bitmap(width, height, convertedFrame.linesize[0], PixelFormat.Format24bppRgb, (IntPtr)convertedFrame.data[0]);
                            //currImage.Save($"bg{bgFrameCount:D6}.jpg", ImageFormat.Jpeg);

                            frames.Add(new Bitmap(currImage));
                            int numFrames = 20;
                            if (frames.Count == numFrames)
                            {
                                Rectangle    rect      = new Rectangle(0, 0, currImage.Width, currImage.Height);
                                Bitmap       bgDst     = new Bitmap(rect.Width, rect.Height, PixelFormat.Format24bppRgb);
                                BitmapData   bgDstData = bgDst.LockBits(rect, ImageLockMode.WriteOnly, PixelFormat.Format24bppRgb);
                                BitmapData[] bmpDatas  = new BitmapData[numFrames];
                                byte *[]     p         = new byte *[numFrames];
                                int          colourDen = 12;
                                int          colourDim = 256 / colourDen;
                                for (int i = 0; i < numFrames; i++)
                                {
                                    bmpDatas[i] = frames[i].LockBits(rect, ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
                                    p[i]        = (byte *)bmpDatas[i].Scan0.ToPointer();
                                }

                                byte *d = (byte *)bgDstData.Scan0.ToPointer();
                                for (int yi = 0; yi < rect.Height; yi++)
                                {
                                    for (int xi = 0; xi < rect.Width; xi++, d += 3)
                                    {
                                        SortedDictionary <int, List <Color> > votes = new SortedDictionary <int, List <Color> >();
                                        for (int k = 0; k < numFrames; k++)
                                        {
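                                            // Note: the buffers are 24bpp BGR, so byte 0 is blue and byte 2 is red;
                                            // the channel naming below is swapped, but it stays self-consistent
                                            // because the result is written back in the same byte order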
                                            Color c = Color.FromArgb(*(p[k] + 0), *(p[k] + 1), *(p[k] + 2));
                                            // Quantise into a 3D colour histogram of (256 / colourDen)^3 bins
                                            // (colourDen = 12, colourDim = 21, so 21^3 = 9261 bins)
                                            int colorKey = (c.R / colourDen) +
                                                           (c.G / colourDen) * colourDim +
                                                           (c.B / colourDen) * colourDim * colourDim;

                                            if (!votes.ContainsKey(colorKey))
                                            {
                                                votes[colorKey] = new List <Color>();
                                            }
                                            votes[colorKey].Add(c);

                                            p[k] += 3;
                                        }

                                        int maxVotes      = int.MinValue;
                                        int keyOfMaxVotes = -1;
                                        foreach (var key in votes.Keys)
                                        {
                                            if (votes[key].Count > maxVotes)
                                            {
                                                maxVotes      = votes[key].Count;
                                                keyOfMaxVotes = key;
                                            }
                                        }

                                        int[] sum = new int[3]; // zero-initialised by default

                                        for (int k = 0; k < votes[keyOfMaxVotes].Count; k++)
                                        {
                                            sum[0] += votes[keyOfMaxVotes][k].R;
                                            sum[1] += votes[keyOfMaxVotes][k].G;
                                            sum[2] += votes[keyOfMaxVotes][k].B;
                                        }
                                        //if (votes[keyOfMaxVotes].Count != numFrames)
                                        //    Console.WriteLine("Interesting");

                                        *(d + 0) = (byte)((float)sum[0] / (float)votes[keyOfMaxVotes].Count);
                                        *(d + 1) = (byte)((float)sum[1] / (float)votes[keyOfMaxVotes].Count);
                                        *(d + 2) = (byte)((float)sum[2] / (float)votes[keyOfMaxVotes].Count);
                                    }

                                    // Bitmap rows are padded to 4-byte boundaries; skip any stride padding
                                    d += bgDstData.Stride - rect.Width * 3;
                                    for (int k = 0; k < numFrames; k++)
                                    {
                                        p[k] += bmpDatas[k].Stride - rect.Width * 3;
                                    }
                                }

                                for (int i = 0; i < numFrames; i++)
                                {
                                    frames[i].UnlockBits(bmpDatas[i]);
                                }
                                bgDst.UnlockBits(bgDstData);
                                bgDst.Save($"bgc{frameCount:D6}.jpg", ImageFormat.Jpeg);

                                frames.Clear();
                            }

                            bgFrameCount++;
                        }

                        frameCount++;
                    }
                }
            }
        }
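
A minimal sketch of how this builder might be invoked; the file name and the software-decoding choice are assumptions for illustration:

        // Hypothetical usage: build voted background plates for a video using software decoding.
        // A frame is sampled every 5 s; after 20 samples a voted background JPEG is written.
        var builder = new BGBuilder(AVHWDeviceType.AV_HWDEVICE_TYPE_NONE, "sample.mp4");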
Ejemplo n.º 28
0
        private unsafe void ProcessFrames(AVHWDeviceType HWDevice, string url, long startFrame = 0, long endFrame = long.MaxValue)
        {
            _videoFilepath     = url;
            _processingEnabled = true;
            _perfTimer         = new FPSTimer((int)_videoFrameRate);
            long leadInFrames = 5;

            if (!_processMultithreaded && leadInFrames < startFrame) // only rewind for a lead-in when we're far enough from the start of the video
            {
                startFrame = startFrame - leadInFrames;
            }
            else
            {
                leadInFrames = 0L;
            }

            // Don't set endFrame in most cases since we want to be able to seek to end and not have this loop exit
            //if (endFrame > _videoFrameCount)
            //    endFrame = _videoFrameCount - 1;

            using (var vsd = new VideoStreamDecoder(url, HWDevice))
            {
                Console.WriteLine($"codec name: {vsd.CodecName}");

                var info = vsd.GetContextInfo();
                info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

                var sourceSize             = vsd.FrameSize;
                var sourcePixelFormat      = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? vsd.PixelFormat : GetHWPixelFormat(HWDevice);
                var destinationSize        = sourceSize;
                var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_BGR24;
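                // Movement scores are only recorded once framesSinceSeekThresh frames have been
                // decoded after a seek, so unsettled frames right after seeking are not scored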
                int framesSinceSeek        = 0;
                int framesSinceSeekThresh  = 5;

                using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
                {
                    _frameNumber = startFrame;
                    if (startFrame != 0)
                    {
                        vsd.Seek(startFrame);
                    }
                    //byte[] currFrameData = new byte[destinationSize.Width * destinationSize.Height * 3];
                    //byte[] prevFrameData = new byte[destinationSize.Width * destinationSize.Height * 3];
                    var width  = destinationSize.Width;
                    var height = destinationSize.Height;

                    Image <Bgr, byte> prevImage = new Image <Bgr, byte>(width, height); //Image Class from Emgu.CV
                    //FrameBlender[] backgroundBuilders = new FrameBlender[processSettings.backgroundFrameBlendInterval];
                    //Bitmap[] bgs = new Bitmap[processSettings.backgroundFrameBlendInterval];
                    //for (int i = 0; i < backgroundBuilders.Length; i++) {
                    //    backgroundBuilders[i] = new FrameBlender(width, height, processSettings.backgroundFrameBlendCount);
                    //    bgs[i] = new Bitmap(width, height, PixelFormat.Format32bppArgb);
                    //}
                    //FrameBlender frameSmoother = new FrameBlender(width, height, _processSettings.frameBlendCount);
                    Image <Bgra, float> background = null;
                    //var bgBuilder = new Emgu.CV.BackgroundSubtractorMOG2(500, 16, false);
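                    // KNN background subtractor: 500-frame history, squared-distance threshold 4.0,
                    // shadow detection disabled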
                    var bgBuilder = new Emgu.CV.BackgroundSubtractorKNN(500, 4.0, false);
                    Image <Gray, byte> foregroundMask = new Image <Gray, byte>(width, height);
                    var currForeground = new Image <Bgr, byte>(width, height);
                    var prevForeground = new Image <Bgr, byte>(width, height);
                    //var movement = new Image<Gray, byte>(width, height);
                    //var movementHist = new Image<Gray, byte>(width, height);

                    bool    decoderActive = true;
                    AVFrame frame;
                    while (decoderActive && (_frameNumber < endFrame) && _processingEnabled)
                    {
                        int seekFrameNum = _trackBarPos;
                        if (!_processMultithreaded)
                        {
                            if (Math.Abs(_frameNumber - seekFrameNum) > 250)
                            {
                                vsd.Seek(seekFrameNum);
                                _frameNumber    = seekFrameNum;
                                framesSinceSeek = 0;
                            }

                            var trackBarSetMI = new MethodInvoker(() => trackBar1.Value = Math.Min((int)_frameNumber, trackBar1.Maximum - 1));
                            trackBar1.Invoke(trackBarSetMI);
                        }

                        try
                        {
                            decoderActive = vsd.TryDecodeNextFrame(out frame);
                        }
                        catch
                        {
                            continue;
                        }

                        if (framesSinceSeek < framesSinceSeekThresh)
                        {
                            ++framesSinceSeek;
                        }

                        while (_processingSleep)
                        {
                            Thread.Sleep(500);
                        }

                        var convertedFrame = vfc.Convert(frame);

                        Image <Bgr, byte> currImage = new Image <Bgr, byte>(width, height, convertedFrame.linesize[0], (IntPtr)convertedFrame.data[0]);
                        // Shadow reduction: shadows are mostly low, near-horizontal features, so squash
                        // the image vertically and resize back to the original size, diluting short features.
                        // People are taller than bikes & balls, so they survive the squash
                        if (_settingsControl.EnableShadowReduction)
                        {
                            currImage = currImage.Resize(width, height / 8, Emgu.CV.CvEnum.Inter.Area).Resize(width, height, Emgu.CV.CvEnum.Inter.Area);
                        }

                        // Smooth using multiple frames:
                        // blend 30% of the current frame with 70% of the previous one for speckle-noise & flicker reduction
                        if (_settingsControl.EnableDespeckle)
                        {
                            currImage = currImage.SmoothGaussian(3);
                        }
                        currImage = (0.3 * currImage.Mat + 0.7 * prevImage.Mat).ToImage <Bgr, byte>();
                        //currImage = frameSmoother.Update(currImage.ToBitmap()).ToImage<Bgr, byte>();

                        if (!_maskSet)
                        {
                            using (Bitmap bmp = ShowEditMaskForm(currImage.ToBitmap(), _mask))
                            {
                                _maskSet = true;

                                if (bmp == null)
                                {
                                    continue;
                                }

                                _mask = GetMatFromSDImage(bmp).ToImage <Bgra, byte>()[2]; // mask is red-channel
                                // Clean-up and invert to form the correct mask
                                var whiteImg = new Image <Gray, byte>(width, height);
                                whiteImg.SetValue(255);
                                _mask = whiteImg.Copy(_mask).Not();
                            }
                        }

                        bgBuilder.Apply(currImage, foregroundMask);
                        if (background == null) // first processed frame initialises the background (startFrame may be non-zero)
                        {
                            background = currImage.Convert <Bgra, float>();
                        }
                        else
                        {
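                            // Exponential moving average: blend 3% of the current frame into the
                            // running background (time constant ≈ 1 / 0.03 ≈ 33 frames)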
                            Image <Bgra, float> newBg = currImage.Convert <Bgra, float>();
                            newBg[3].SetValue(1, foregroundMask.Not());
                            background[3].SetValue(1);
                            background = (background.Mat * .97 + newBg.Mat * .03).ToImage <Bgra, float>();

                            //var picMI = new MethodInvoker(() => pictureBox1.Image = background.ToBitmap());
                            //pictureBox1.Invoke(picMI);
                        }

                        if (leadInFrames > 4L)
                        {
                            leadInFrames--;
                            _frameNumber++;
                            continue; // during the early lead-in, only feed the background model
                        }
                        else if (leadInFrames > 0L)
                        {
                            leadInFrames--;
                        }

                        Mat foregroundMat = background.Convert <Bgr, byte>().Not().Mat + currImage.Mat;
                        currForeground = foregroundMat.ToImage <Bgr, byte>();
                        if (_settingsControl.EnableDespeckle)
                        {
                            currForeground = currForeground.SmoothGaussian(3); // remove speckle (video low-light noise, small birds, insects, etc)
                        }
                        //currForeground = currImage.Copy(foregroundMask.Not());

                        Mat moveMat = currForeground.Mat - prevForeground.Mat;
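                        // Temporally smooth the movement image (85% history, 15% new) after subtracting
                        // the noise floor; the 1.02 gain lets sustained motion build up over time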
                        _movement = (1.02 * (_movement.Mat * 0.85 + 0.15 * ((moveMat - _settingsControl.MovementNoiseFloor) * _settingsControl.MovementPixMul).ToImage <Bgr, byte>().Convert <Gray, float>().Mat)).ToImage <Gray, float>();

                        //var picMI = new MethodInvoker(() => pictureBox1.Image = _movement.ToBitmap());
                        //pictureBox1.Invoke(picMI);

                        if (_mask != null)
                        {
                            _movement = _movement.Copy(_mask);
                        }

                        currImage            = new Image <Bgr, byte>(width, height, convertedFrame.linesize[0], (IntPtr)convertedFrame.data[0]);
                        prevImage.Bytes      = currImage.Bytes;
                        prevForeground.Bytes = currForeground.Bytes;

                        if (leadInFrames == 0)
                        {
                            int    currentFps     = _perfTimer.Update();
                            double processingRate = (double)currentFps / (double)_videoFrameRate;
                            var    time           = TimeSpan.FromSeconds((double)_frameNumber / (double)_videoFrameRate);
                            // https://docs.microsoft.com/en-us/dotnet/standard/base-types/custom-timespan-format-strings
                            statusLabel.Text          = $"{time:hh\\:mm\\:ss}";
                            statusProcessingRate.Text = $"Processing@{processingRate:0.0}x";
                            //var moveScore = _movement.GetSum().Intensity * _settingsControl.MovementScoreMul;

                            // *** Do perspective correction to movement score
                            var moveScore = 0.0;
                            for (int yi = 0; yi != _movement.Height; ++yi)
                            {
                                // Assume that a foreground person is 2x taller than a background person
                                double pc = 1.0 - ((double)yi / (double)_movement.Height); // 0..1, bottom..top
                                pc  = 0.5 + 0.5 * pc;                                      // 0.5..1.0, bottom..top
                                pc *= 2.0;                                                 // foreground person is 2x the height of background person
                                pc *= pc;                                                  // squared since movement~area
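                                // e.g. top row: pc = (0.5 + 0.5*1)*2 = 2, weight 4; bottom row: pc = (0.5 + 0.5*0)*2 = 1, weight 1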
                                Rectangle           rowRect = new Rectangle(0, yi, _movement.Width, 1);
                                Image <Gray, float> row     = _movement.GetSubRect(rowRect);
                                moveScore += row.GetSum().Intensity * _settingsControl.MovementScoreMul * pc;
                            }

                            if (framesSinceSeek == framesSinceSeekThresh)
                            {
                                if (_frameNumber < _movementScores.Length)
                                {
                                    _movementScores[_frameNumber] = (float)moveScore;
                                }
                                _movementScoreMax = Math.Max(_movementScoreMax, (float)moveScore);
                                UpdateChart();
                                // decay the maximum slowly to ensure early "noise" peaks don't destroy scaling forever
                                _movementScoreMax *= 0.9999f;
                            }
                        }
                        _processedFrameCount++;
                        _frameNumber++;

                        if (!_processMultithreaded && (_frameNumber < endFrame))
                        {
                            // Off-load some processing to another thread to allow faster updates on the main processing thread
                            Task.Run(() =>
                            {
                                try
                                {
                                    UpdateProcessMainView(currImage.Mat);
                                }
                                catch { }
                            });
                        }

                        if (_frameNumber == endFrame)
                        {
                            var mi = new MethodInvoker(() => ProcessingCompleteAndExport());
                            this.Invoke(mi);
                        }
                    }
                }
            }
        }
Ejemplo n.º 29
0
    public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
    {
        _pFormatContext = ffmpeg.avformat_alloc_context();
        _receivedFrame  = ffmpeg.av_frame_alloc();

        var pFormatContext = _pFormatContext;

        ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();

        AVCodec *codec = null;

        if (HWDeviceType is AVHWDeviceType.AV_HWDEVICE_TYPE_QSV)
        {
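            // Force the Intel Quick Sync (QSV) H.264 decoder instead of the default software decoder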
            codec     = ffmpeg.avcodec_find_decoder_by_name("h264_qsv");
            codec->id = AVCodecID.AV_CODEC_ID_H264;

            for (int i = 0; i < _pFormatContext->nb_streams; i++)
            {
                AVStream *st = _pFormatContext->streams[i];

                if (st->codecpar->codec_id == AVCodecID.AV_CODEC_ID_H264 && _videoStream == null)
                {
                    _videoStream = st;
                    Console.WriteLine("Stream founded!");
                }
                else
                {
                    st->discard = AVDiscard.AVDISCARD_ALL;
                }
            }
            _streamIndex = _videoStream->index;
        }
        else
        {
            _streamIndex = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0);
            _videoStream = _pFormatContext->streams[_streamIndex]; // needed below for extradata and frame size
        }

        _pCodecContext = ffmpeg.avcodec_alloc_context3(codec);

        if (_pCodecContext == null)
        {
            throw new Exception("Codec init error");
        }

        if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
        {
            if (ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx,
                                              HWDeviceType, "auto", null, 0) < 0)
            {
                throw new Exception("HW device init ERROR!");
            }

            Console.WriteLine("Device " + HWDeviceType + " init OK");
            isHwAccelerate = true;
        }

        ffmpeg.avformat_find_stream_info(_pFormatContext, null);     // Necessary to determine stream parameters for network/live broadcasts

        if (_videoStream->codecpar->extradata != null)
        {
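            // Copy the codec extradata (e.g. H.264 SPS/PPS) into the codec context;
            // FFmpeg requires AV_INPUT_BUFFER_PADDING_SIZE bytes of zeroed padding after the payload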
            int size = (int)(_videoStream->codecpar->extradata_size);
            _pCodecContext->extradata = (byte *)ffmpeg.av_mallocz((ulong)size +
                                                                  ffmpeg.AV_INPUT_BUFFER_PADDING_SIZE);
            _pCodecContext->extradata_size = (int)size +
                                             ffmpeg.AV_INPUT_BUFFER_PADDING_SIZE;

            FFmpegHelper.memcpy((IntPtr)_pCodecContext->extradata,
                                (IntPtr)_videoStream->codecpar->extradata,
                                size);

            //Or just

            /*for (int i = 0; i < size; i++)
             *  _pCodecContext->extradata[i] = _videoStream->codecpar->extradata[i];*/
        }

        if (HWDeviceType == AVHWDeviceType.AV_HWDEVICE_TYPE_QSV)
        {
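            // Assumed: get_fmt is a callback defined elsewhere in this class that picks
            // AV_PIX_FMT_QSV from the pixel formats offered by the decoder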
            _pCodecContext->get_format = get_fmt;
        }

        ffmpeg.avcodec_parameters_to_context(_pCodecContext, _videoStream->codecpar);
        ffmpeg.avcodec_open2(_pCodecContext, codec, null);

        CodecName   = ffmpeg.avcodec_get_name(codec->id);
        FrameSize   = new Size(_videoStream->codecpar->width, _videoStream->codecpar->height);
        PixelFormat = _pCodecContext->sw_pix_fmt;     // Not populated (AV_PIX_FMT_NONE) until the first frame is decoded

        Console.WriteLine("Codec: " + CodecName.ToString());
        Console.WriteLine("Size: " + FrameSize.ToString());
        Console.WriteLine("PixelFormat: " + PixelFormat.ToString());


        _pPacket = ffmpeg.av_packet_alloc();
        _pFrame  = ffmpeg.av_frame_alloc();
    }
Ejemplo n.º 30
0
        /// <summary>
        /// Handles the MediaOpening event of the Media control.
        /// </summary>
        /// <param name="sender">The source of the event.</param>
        /// <param name="e">The <see cref="MediaOpeningEventArgs"/> instance containing the event data.</param>
        private void OnMediaOpening(object sender, MediaOpeningEventArgs e)
        {
            // You can start off by adjusting subtitles delay
            // e.Options.SubtitlesDelay = TimeSpan.FromSeconds(7); // See issue #216

            // Example of automatically side-loading SRT subs
            try
            {
                var inputUrl = e.Info.InputUrl;
                var url      = new Uri(inputUrl);
                if (url.IsFile || url.IsUnc)
                {
                    inputUrl = System.IO.Path.ChangeExtension(url.LocalPath, "srt");
                    if (System.IO.File.Exists(inputUrl))
                    {
                        e.Options.SubtitlesUrl = inputUrl;
                    }
                }
            }
            catch { }

            // You can force video FPS if necessary
            // see: https://github.com/unosquare/ffmediaelement/issues/212
            // e.Options.VideoForcedFps = 25;

            // An example of specifically selecting a subtitle stream
            var subtitleStreams       = e.Info.Streams.Where(kvp => kvp.Value.CodecType == AVMediaType.AVMEDIA_TYPE_SUBTITLE).Select(kvp => kvp.Value);
            var englishSubtitleStream = subtitleStreams.FirstOrDefault(s => s.Language != null && s.Language.ToLowerInvariant().StartsWith("en"));

            if (englishSubtitleStream != null)
            {
                e.Options.SubtitleStream = englishSubtitleStream;
            }

            // An example of specifically selecting an audio stream
            var audioStreams       = e.Info.Streams.Where(kvp => kvp.Value.CodecType == AVMediaType.AVMEDIA_TYPE_AUDIO).Select(kvp => kvp.Value);
            var englishAudioStream = audioStreams.FirstOrDefault(s => s.Language != null && s.Language.ToLowerInvariant().StartsWith("en"));

            if (englishAudioStream != null)
            {
                e.Options.AudioStream = englishAudioStream;
            }

            // Setting Advanced Video Stream Options is also possible
            var videoStream = e.Options.VideoStream;

            if (videoStream != null)
            {
                // Check if the video requires deinterlacing
                var requiresDeinterlace = videoStream.FieldOrder != AVFieldOrder.AV_FIELD_PROGRESSIVE &&
                                          videoStream.FieldOrder != AVFieldOrder.AV_FIELD_UNKNOWN;

                // Hardware device priorities
                var deviceCandidates = new AVHWDeviceType[]
                {
                    AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
                    AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA,
                    AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2
                };

                // Hardware device selection
                if (videoStream.FPS <= 30)
                {
                    foreach (var deviceType in deviceCandidates)
                    {
                        var accelerator = videoStream.HardwareDevices.FirstOrDefault(d => d.DeviceType == deviceType);
                        if (accelerator != null)
                        {
                            if (GuiContext.Current.IsInDebugMode)
                            {
                                e.Options.VideoHardwareDevice = accelerator;
                            }

                            break;
                        }
                    }
                }

                // Start building a video filter
                var videoFilter = new StringBuilder();

                // The yadif filter deinterlaces the video; we check the field order to decide
                // whether the video needs to be deinterlaced automatically
                if (requiresDeinterlace)
                {
                    videoFilter.Append("yadif,");
                }

                // Scale down to maximum 1080p screen resolution.
                if (videoStream.PixelHeight > 1080)
                {
                    // e.Options.VideoHardwareDevice = null;
                    videoFilter.Append("scale=-1:1080,");
                }

                e.Options.VideoFilter = videoFilter.ToString().TrimEnd(',');

                // Since the MediaElement control belongs to a different thread
                // we have to set properties on its UI thread.
                GuiContext.Current.EnqueueInvoke(() =>
                {
                    Media.ClosedCaptionsChannel = videoStream.HasClosedCaptions ?
                                                  CaptionsChannel.CC1 : CaptionsChannel.CCP;
                });
            }

            // e.Options.AudioFilter = "aecho=0.8:0.9:1000:0.3";
            // e.Options.AudioFilter = "chorus=0.5:0.9:50|60|40:0.4|0.32|0.3:0.25|0.4|0.3:2|2.3|1.3";
        }
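
For reference, a sketch of the filter string this handler would build for a hypothetical interlaced 4K input (both conditions true): "yadif," is appended, then "scale=-1:1080,", and the trailing comma is trimmed:

            // Hypothetical result for an interlaced 2160p stream
            // e.Options.VideoFilter == "yadif,scale=-1:1080"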