/// <summary>
 /// Initializes a new instance of the <see cref="HardwareAccelerator"/> class.
 /// </summary>
 /// <param name="component">The component this accelerator is attached to.</param>
 /// <param name="selectedConfig">The selected hardware device configuration.</param>
 public HardwareAccelerator(VideoComponent component, HardwareDeviceInfo selectedConfig)
 {
     Component         = component;
     Name              = selectedConfig.DeviceTypeName;
     DeviceType        = selectedConfig.DeviceType;
     PixelFormat       = selectedConfig.PixelFormat;
     GetFormatCallback = new AVCodecContext_get_format(GetPixelFormat);
 }
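The GetPixelFormat method wired up above is not shown in this example. A minimal sketch of such a get_format handler, assuming PixelFormat holds the accelerator's hardware surface format and the class compiles with unsafe code, could look like this:

 private unsafe AVPixelFormat GetPixelFormat(AVCodecContext *codecContext, AVPixelFormat *formats)
 {
     // Walk the formats the decoder offers and prefer the hardware surface format.
     for (AVPixelFormat *p = formats; *p != AVPixelFormat.AV_PIX_FMT_NONE; p++)
     {
         if (*p == PixelFormat)
         {
             return *p;
         }
     }

     // The hardware format was not offered; fall back to the first (software) format in the list.
     return *formats;
 }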
Example No. 2
        private void SetupHardwareDecoding(AVCodec *codec)
        {
            AVHWDeviceType hwtype;

            for (int i = 0; ; i++)
            {
                AVCodecHWConfig *config = ffmpeg.avcodec_get_hw_config(codec, i);
                if (config == null)
                {
                    LogMessage("Hardware decoder not supported for this codec.");
                    return;
                }

                if ((config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) == AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)
                {
                    _hwPixFmt = config->pix_fmt;
                    hwtype    = config->device_type;
                    break;
                }
            }

            ffmpeg.avcodec_parameters_to_context(_videoCodecContext, _videoStream->codecpar);
            _getFormatCallback             = GetPixelFormat;
            _videoCodecContext->get_format = _getFormatCallback;

            AVBufferRef *hwDeviceCtx = null;

            if (ffmpeg.av_hwdevice_ctx_create(&hwDeviceCtx, hwtype, null, null, 0) < 0)
            {
                LogMessage("Failed to create specified HW device.");
                _hwDeviceCtx = null;
                return;
            }

            _videoCodecContext->hw_device_ctx = ffmpeg.av_buffer_ref(hwDeviceCtx);
            _hwDeviceCtx = hwDeviceCtx;
            LogMessage("Using hardware decoder: " + hwtype);
        }
Example No. 3
        // ReSharper restore PrivateFieldCanBeConvertedToLocalVariable

        public FormatContext(Stream stream)
        {
            _stream        = stream;
            _readFunc      = IoReadPacket;
            _seekFunc      = IoSeek;
            _getFormatFunc = GetFormat;
            // Both the buffer and the IO context are freed by avformat_close_input.
            byte *       ioBuffer  = (byte *)ffmpeg.av_malloc(IoBufferSize);
            AVIOContext *ioContext = ffmpeg.avio_alloc_context(
                ioBuffer, IoBufferSize,
                write_flag: 0, opaque: null,
                _readFunc, null, _seekFunc
                );

            AVFormatContext *ctx = ffmpeg.avformat_alloc_context();

            ctx->pb = ioContext;
            _ctx    = ctx;

            _recvPacket = ffmpeg.av_packet_alloc();
            CheckResult(ffmpeg.avformat_open_input(&ctx, string.Empty, null, null));
            CheckResult(ffmpeg.avformat_find_stream_info(ctx, null));
        }
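The IoReadPacket and IoSeek callbacks passed to avio_alloc_context are not shown above. A rough sketch of what they might look like, assuming _readFunc and _seekFunc are FFmpeg.AutoGen's avio_alloc_context_read_packet and avio_alloc_context_seek delegates and that System.IO and System.Runtime.InteropServices are imported, is:

        private unsafe int IoReadPacket(void *opaque, byte *buf, int bufSize)
        {
            // Copy up to bufSize bytes from the managed stream into FFmpeg's native buffer.
            var temp = new byte[bufSize];
            int read = _stream.Read(temp, 0, bufSize);
            if (read <= 0)
            {
                return ffmpeg.AVERROR_EOF;
            }

            Marshal.Copy(temp, 0, (IntPtr)buf, read);
            return read;
        }

        private unsafe long IoSeek(void *opaque, long offset, int whence)
        {
            // FFmpeg probes the total size of the stream with the special AVSEEK_SIZE value.
            if (whence == ffmpeg.AVSEEK_SIZE)
            {
                return _stream.CanSeek ? _stream.Length : -1;
            }

            return _stream.Seek(offset, (SeekOrigin)whence);
        }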
Example No. 4
        public Decoder(string format, string camera, DataTransfer<MatAndBuffer> dataTransfer, bool enableHardware = false)
        {
            _dataTransfer = dataTransfer;
            var inputFormat = ffmpeg.av_find_input_format(format);

            if (inputFormat == null)
            {
                throw new ApplicationException($"Failed to find input format '{format}'");
            }

            var inputContext = ffmpeg.avformat_alloc_context();

            try
            {
                AVDictionary *options = null;
                ffmpeg.av_dict_set(&options, "video_size", "640x480", ffmpeg.AV_DICT_APPEND);
                ffmpeg.avformat_open_input(&inputContext, camera, inputFormat, &options).ThrowExceptionIfError();
                ffmpeg.av_dict_free(&options);
                options = null;

                try
                {
                    ffmpeg.avformat_find_stream_info(inputContext, null).ThrowExceptionIfError();
                    AVCodec *decoder;
                    var      videoStream = ffmpeg.av_find_best_stream(inputContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &decoder, 0);
                    if (videoStream < 0)
                    {
                        throw new ApplicationException("No video stream found");
                    }

                    AVBufferRef *hwDeviceContext = null;
                    var (hwPixelFormat, pixelFormat) = enableHardware ? SortoutHardware(decoder, out hwDeviceContext) : (AVPixelFormat.AV_PIX_FMT_NONE, AVPixelFormat.AV_PIX_FMT_NONE);

                    var decoderContext = ffmpeg.avcodec_alloc_context3(decoder);
                    var video          = inputContext->streams[videoStream];
                    video->discard = AVDiscard.AVDISCARD_NONKEY;
                    ffmpeg.avcodec_parameters_to_context(decoderContext, video->codecpar).ThrowExceptionIfError();

                    if (hwPixelFormat != AVPixelFormat.AV_PIX_FMT_NONE)
                    {
                        AVCodecContext_get_format getFormat = (_, formats) =>
                        {
                            //AVPixelFormat* pixelFormat;
                            for (var pixelFormat = formats; *pixelFormat != AVPixelFormat.AV_PIX_FMT_NONE; pixelFormat++)
                            {
                                if (*pixelFormat == hwPixelFormat)
                                {
                                    return(*pixelFormat);
                                }
                            }
                            throw new ApplicationException("Failed to get hardware pixel format");
                        };

                        decoderContext->get_format = getFormat;
                    }

                    ffmpeg.av_opt_set_int(decoderContext, "refcounted_frames", 1, 0);

                    if (hwPixelFormat != AVPixelFormat.AV_PIX_FMT_NONE)
                    {
                        decoderContext->hw_device_ctx = ffmpeg.av_buffer_ref(hwDeviceContext);
                    }
                    else
                    {
                        pixelFormat = ConvertFormat(video->codec->pix_fmt);
                    }

                    ffmpeg.avcodec_open2(decoderContext, decoder, null).ThrowExceptionIfError();

                    // Now all opened
                    _inputContext   = inputContext;
                    _decoderContext = decoderContext;
                    CodecName       = ffmpeg.avcodec_get_name(decoder->id);
                    FrameSize       = new System.Drawing.Size(video->codec->width, video->codec->height);
                    PixelFormat     = pixelFormat;
                    StreamIndex     = videoStream;
                    _hwPixelFormat  = hwPixelFormat;

                    _converter = new Converter(FrameSize, pixelFormat);

                    Console.WriteLine($"Opened stream {StreamIndex} of {CodecName} as {FrameSize.Width} x {FrameSize.Height} @ {PixelFormat}");
                }
                catch (Exception)
                {
                    ffmpeg.avformat_close_input(&inputContext);
                    throw;
                }
            }
            catch (Exception)
            {
                ffmpeg.avformat_free_context(inputContext);
                throw;
            }
        }
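The SortoutHardware helper called above is not included in this example. Based on the avcodec_get_hw_config / av_hwdevice_ctx_create pattern used in the other examples here, a hedged sketch of what it might do (the method shape and the NV12 transfer format are assumptions) is:

        private static unsafe (AVPixelFormat hwPixelFormat, AVPixelFormat pixelFormat) SortoutHardware(AVCodec *decoder, out AVBufferRef *hwDeviceContext)
        {
            const int AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX = 0x01;

            hwDeviceContext = null;
            for (int i = 0; ; i++)
            {
                AVCodecHWConfig *config = ffmpeg.avcodec_get_hw_config(decoder, i);
                if (config == null)
                {
                    // No usable hardware configuration; the caller falls back to software decoding.
                    return (AVPixelFormat.AV_PIX_FMT_NONE, AVPixelFormat.AV_PIX_FMT_NONE);
                }

                if ((config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) == 0)
                {
                    continue;
                }

                AVBufferRef *deviceContext = null;
                if (ffmpeg.av_hwdevice_ctx_create(&deviceContext, config->device_type, null, null, 0) < 0)
                {
                    continue;   // try the next hardware configuration
                }

                hwDeviceContext = deviceContext;
                // NV12 is a typical transfer format for hardware frames; the real helper may derive this differently.
                return (config->pix_fmt, AVPixelFormat.AV_PIX_FMT_NV12);
            }
        }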
Example No. 5
 /// <summary>
 /// Prevents a default instance of the <see cref="HardwareAccelerator"/> class from being created.
 /// </summary>
 private HardwareAccelerator()
 {
     // prevent instantiation outside this class
     GetFormatCallback = new AVCodecContext_get_format(GetPixelFormat);
 }
Example No. 6
        /// <summary>
        /// Initializes a new instance of the <see cref="FFmpegCodec"/> class.
        /// </summary>
        public FFmpegCodec(GraphicsDevice graphicsDevice, AVCodecContext *originalContext)
        {
            var codecId = originalContext->codec_id;
            var pCodec  = ffmpeg.avcodec_find_decoder(codecId);

            if (pCodec == null)
            {
                // TODO: log?
                throw new ApplicationException("Unsupported codec.");
            }

            int ret;
            var pCodecContext = ffmpeg.avcodec_alloc_context3(pCodec);
            var pCodecParam   = ffmpeg.avcodec_parameters_alloc();

            ret = ffmpeg.avcodec_parameters_from_context(pCodecParam, originalContext);
            if (ret < 0)
            {
                // TODO: log?
                throw new ApplicationException($"Could not retrieve codec parameters. Error code={ret.ToString("X8")}");
            }

            // Set the context get_format function
            getFormat = (context, formats) =>
            {
                AVPixelFormat *pixelFormat;

                for (pixelFormat = formats; *pixelFormat != AVPixelFormat.AV_PIX_FMT_NONE; pixelFormat++)
                {
                    if (*pixelFormat == HardwarePixelFormat)
                    {
                        return(*pixelFormat);
                    }
                }

                throw new ApplicationException("Failed to get HW surface format.");
            };
            pCodecContext->get_format = getFormat;

            ret = ffmpeg.avcodec_parameters_to_context(pCodecContext, pCodecParam);
            if (ret < 0)
            {
                // TODO: log?
                throw new ApplicationException($"Could not fill codec parameters. Error code={ret.ToString("X8")}");
            }

            // create the hardware device context.
            AVBufferRef *pHWDeviceContextLocal;

            if (ffmpeg.av_hwdevice_ctx_create(&pHWDeviceContextLocal, HardwareDeviceType, null, null, 0) >= 0)
            {
                IsHardwareAccelerated        = true;
                pHWDeviceContext             = pHWDeviceContextLocal;
                pCodecContext->hw_device_ctx = ffmpeg.av_buffer_ref(pHWDeviceContext);
            }

            // Setup hardware acceleration context
            //if (IsHardwareAccelerated)
            //    CreateHarwareAccelerationContext(graphicsDevice, pCodecContext, pCodec);

            if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
            {
                pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
            }

            if (ffmpeg.avcodec_is_open(pCodecContext) == 0)
            {
                ret = ffmpeg.avcodec_open2(pCodecContext, pCodec, null);
                if (ret < 0)
                {
                    // TODO: log?
                    throw new ApplicationException($"Could not open codec. Error code={ret.ToString("X8")}");
                }
            }
            ffmpeg.avcodec_parameters_free(&pCodecParam);

            pAVCodecContext = pCodecContext;
        }
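The constructor above leaves pAVCodecContext and pHWDeviceContext alive. The matching teardown is not part of this example, but a minimal sketch using the fields set above would be:

        public unsafe void Dispose()
        {
            if (pAVCodecContext != null)
            {
                // avcodec_free_context also closes the codec if it was opened.
                var codecContext = pAVCodecContext;
                ffmpeg.avcodec_free_context(&codecContext);
                pAVCodecContext = null;
            }

            if (pHWDeviceContext != null)
            {
                // Release our reference; the codec context holds its own via av_buffer_ref.
                var deviceContext = pHWDeviceContext;
                ffmpeg.av_buffer_unref(&deviceContext);
                pHWDeviceContext = null;
            }
        }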
Example No. 7
        /// <summary>
        /// Decoder function for the worker thread. Everything is contained in one method to simplify memory management of the unmanaged resources.
        /// </summary>
        private unsafe void Decoder()
        {
            int retErr = 0;

            //Open the input context for the media
            AVFormatContext *pInputContext = ffmpeg.avformat_alloc_context();

            if (ffmpeg.avformat_open_input(&pInputContext, uri.OriginalString, null, null) != 0)
            {
                throw new ApplicationException(@"Could not open file");
            }

            if (ffmpeg.avformat_find_stream_info(pInputContext, null) != 0)
            {
                throw new ApplicationException(@"Could not find stream info");
            }

            //Loop through the streams to find the video stream
            AVStream *pStream = null;

            for (var i = 0; i < pInputContext->nb_streams; i++)
            {
                if (pInputContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                {
                    pStream = pInputContext->streams[i];
                    break;
                }
            }
            if (pStream == null)
            {
                throw new ApplicationException(@"Could not found video stream");
            }

            WriteLine($"Codec name: {ffmpeg.avcodec_get_name(pStream->codec->codec_id)} CodecID: {pStream->codec->codec_id}");

            //Get Codec name from input stream
            string codecname = ffmpeg.avcodec_get_name(pStream->codec->codec_id);

            AVCodec *pCodec = null;

            //Open the codec; cuvid has separate decoders to make hwaccel work
            if (CurrentHWDecode == HWDecode.CUDA)
            {
                pCodec = ffmpeg.avcodec_find_decoder_by_name(codecname + "_cuvid");
                if (pCodec == null)
                {
                    //use the software codec if the hardware variant doesn't exist
                    WriteLine("Falling back to software from: " + codecname);
                    CurrentHWDecode = HWDecode.SW;
                    pCodec          = ffmpeg.avcodec_find_decoder_by_name(codecname);
                }
            }
            else
            {
                pCodec = ffmpeg.avcodec_find_decoder_by_name(codecname);
            }

            if (pCodec == null)
            {
                throw new ApplicationException(@"Unsupported codec");
            }

            //create a new decoder context
            AVCodecContext *pCodecContext = ffmpeg.avcodec_alloc_context3(pCodec);

            ffmpeg.avcodec_parameters_to_context(pCodecContext, pStream->codecpar);

            //read size and fps from input stream
            int width  = pStream->codec->width;
            int height = pStream->codec->height;

            SwsContext *pConvertContext = null;

            if (CurrentHWDecode != HWDecode.D3D11)
            {
                pConvertContext = ffmpeg.sws_getContext(width, height, pCodecContext->pix_fmt, width, height, AVPixelFormat.AV_PIX_FMT_BGRA, ffmpeg.SWS_FAST_BILINEAR, null, null, null);

                if (pConvertContext == null)
                {
                    throw new ApplicationException(@"Could not initialize the conversion context");
                }
            }

            // HWaccel Decoder Setup
            if (CurrentHWDecode != HWDecode.SW)
            {
                WriteLine("Initilizing Hwaccel: " + CurrentHWDecode.ToString());
                switch (CurrentHWDecode)
                {
                case HWDecode.CUDA:
                    retErr = ffmpeg.av_hwdevice_ctx_create(&pCodecContext->hw_device_ctx, AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA, "auto", null, 0);
                    pCodecContext->get_format = new AVCodecContext_get_format(Get_Format_CUDA);
                    break;

                case HWDecode.D3D11:
                    retErr = ffmpeg.av_hwdevice_ctx_create(&pCodecContext->hw_device_ctx, AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA, "auto", null, 0);
                    pCodecContext->get_format = new AVCodecContext_get_format(Get_Format_D3D11);
                    break;

                case HWDecode.DXVA2:
                    retErr = ffmpeg.av_hwdevice_ctx_create(&pCodecContext->hw_device_ctx, AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2, "auto", null, 0);
                    pCodecContext->get_format = new AVCodecContext_get_format(Get_Format_DXVA);
                    break;
                }

                if (retErr < 0)
                {
                    WriteLine("Error creating a HWAccel device");
                    return;
                }
            }

            lock (FFLockObject) {
                if (ffmpeg.avcodec_open2(pCodecContext, pCodec, null) < 0)
                {
                    throw new ApplicationException(@"Could not open codec");
                }
            }

            //Create Packet Object for reading from file
            AVPacket  packet  = new AVPacket();
            AVPacket *pPacket = &packet;

            //Init Packet Memory
            ffmpeg.av_init_packet(pPacket);

            //Create Frames for decoder
            AVFrame *pDecodedFrame    = null; // frame from decoder
            AVFrame *pHWTransferFrame = null; //frame to transfer from hwaccel
            AVFrame *pConvertedFrame  = null; //frame for output for software

            switch (CurrentHWDecode)
            {
            case HWDecode.CUDA:
            case HWDecode.DXVA2:
            case HWDecode.SW:
                pDecodedFrame    = ffmpeg.av_frame_alloc();
                pHWTransferFrame = ffmpeg.av_frame_alloc();
                pConvertedFrame  = ffmpeg.av_frame_alloc();
                break;

            case HWDecode.D3D11:
                pDecodedFrame = ffmpeg.av_frame_alloc();
                break;
            }

            //Sizes for SW Decode Bitmap
            int convertedFrameBufferSize = 0;

            byte[] convertedFrameBufferArray = null;
            var    dstData     = new byte_ptrArray4();
            var    dstLinesize = new int_array4();

            if (CurrentHWDecode == HWDecode.SW)
            {
                convertedFrameBufferSize  = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_RGBA, width, height, 1);
                convertedFrameBufferArray = new byte[convertedFrameBufferSize];
            }

            //D3D11 DXGI resource and surface index
            uint *resource      = null;
            uint *resourceIndex = null;


            //Playback Loop Variables
            Stopwatch sw = new Stopwatch();
            int       FrameRateLoopTime = Convert.ToInt32(1000f * pStream->codec->framerate.den / pStream->codec->framerate.num); //ms per frame; avoid integer division of num/den
            int       frameNumber       = 0;
            int       readFrameResult   = 0;
            bool      doLoop            = false;
            bool      emptyPacket;

            fixed(byte *convertedFrameBuffer = convertedFrameBufferArray)
            {
                if (CurrentHWDecode == HWDecode.SW)
                {
                    retErr = ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, convertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_RGBA, width, height, 1);
                }

                while ((readFrameResult != EndOfFileErrorCode || doLoop) && MediaStopping == false)
                {
                    if (MediaPause)
                    {
                        Thread.Sleep(FrameRateLoopTime);
                    }
                    else
                    {
                        try {
                            //Read packet from file
                            readFrameResult = ffmpeg.av_read_frame(pInputContext, pPacket);

                            if (readFrameResult == 0)
                            {
                                doLoop = false;
                            }
                            else if (readFrameResult == EndOfFileErrorCode)
                            {
                                doLoop      = true;
                                frameNumber = 0;
                                //Rewind the clip
                                ffmpeg.av_seek_frame(pInputContext, -1, 0, ffmpeg.AVSEEK_FLAG_BACKWARD);
                            }
                            else if (readFrameResult < 0)
                            {
                                break;
                            }

                            emptyPacket = readFrameResult == EndOfFileErrorCode;

                            if (pPacket->stream_index != pStream->index)
                            {
                                continue;
                            }

                            if (readFrameResult == SuccessCode)
                            {
                                //submit packet to decoder
                                int sendResult = ffmpeg.avcodec_send_packet(pCodecContext, pPacket);
                                if (sendResult < 0)
                                {
                                    break;
                                }
                                else
                                {
                                    //receive a decoded frame from the decoder
                                    retErr = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);

                                    if (retErr == 0)
                                    {
                                        //got a decoded frame
                                        switch (CurrentHWDecode)
                                        {
                                        case HWDecode.CUDA:
                                        case HWDecode.DXVA2:
                                            //copy from GPU to CPU
                                            retErr = ffmpeg.av_hwframe_transfer_data(pHWTransferFrame, pDecodedFrame, 0);
                                            if (retErr == 0)
                                            {
                                                //Convert from NV12 to RGBA
                                                ffmpeg.sws_scale(pConvertContext, pHWTransferFrame->data, pHWTransferFrame->linesize, 0, height, dstData, dstLinesize);
                                            }
                                            break;

                                        case HWDecode.D3D11:
                                            //get handle to Texture2D and texture index
                                            resource      = (uint *)pDecodedFrame->data[0];
                                            resourceIndex = (uint *)pDecodedFrame->data[1];
                                            break;

                                        case HWDecode.SW:
                                            //Convert from NV12 to RGBA
                                            ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);
                                            break;
                                        }
                                    }
                                    else if (retErr == -11 /* AVERROR(EAGAIN) */ || retErr == EndOfFileErrorCode)
                                    {
                                        //decoder needs more input or is draining; skip this frame.
                                    }
                                    else if (retErr < 0)
                                    {
                                        string msg = GetFFmpegErrorMessage(retErr);
                                        throw new ApplicationException($"Error while receiving frame {frameNumber}\nMessage: {msg}");
                                    }
                                }
                            }
                        }
                        finally {
                            ffmpeg.av_packet_unref(pPacket);
                            ffmpeg.av_frame_unref(pDecodedFrame);
                            ffmpeg.av_frame_unref(pHWTransferFrame);
                        }

                        //wait for synchronisation from GenLock
                        GenLock.GenLockEvent.WaitOne();

                        //Update Image in WPF
                        if (CurrentHWDecode == HWDecode.D3D11)
                        {
                            UpdateImage((IntPtr)resource, (uint)resourceIndex);
                        }
                        else
                        {
                            UpdateImage(width, height, (IntPtr)convertedFrameBuffer, convertedFrameBufferSize, dstLinesize[0]);
                        }
                    }
                }
            }

            convertedFrameBufferArray = null;

            //flush decoder
            packet.data = null;
            packet.size = 0;
            ffmpeg.avcodec_send_packet(pCodecContext, pPacket);
            ffmpeg.av_packet_unref(pPacket);

            //free frames
            ffmpeg.av_frame_free(&pDecodedFrame);
            ffmpeg.av_frame_free(&pHWTransferFrame);
            ffmpeg.av_frame_free(&pConvertedFrame);

            ffmpeg.sws_freeContext(pConvertContext);

            //close input
            WriteLine("close input");
            ffmpeg.avformat_close_input(&pInputContext);

            WriteLine("close codec");
            ffmpeg.avcodec_close(pCodecContext);

            WriteLine("Free D3D");
            ShutdownD3D();

            WriteLine("Close HW Frames");
            ffmpeg.av_buffer_unref(&pCodecContext->hw_frames_ctx);

            WriteLine("free codec");
            ffmpeg.avcodec_free_context(&pCodecContext);

            MediaStopped  = true;
            MediaStopping = true;
        }
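The Get_Format_* callbacks installed above are not part of this example. The CUDA variant might look roughly like the following sketch; the D3D11VA and DXVA2 versions would only differ in the pixel format they accept (AV_PIX_FMT_D3D11 and AV_PIX_FMT_DXVA2_VLD respectively):

        private unsafe AVPixelFormat Get_Format_CUDA(AVCodecContext *context, AVPixelFormat *formats)
        {
            // Pick the CUDA hardware surface format if the decoder offers it.
            for (AVPixelFormat *p = formats; *p != AVPixelFormat.AV_PIX_FMT_NONE; p++)
            {
                if (*p == AVPixelFormat.AV_PIX_FMT_CUDA)
                {
                    return *p;
                }
            }

            // CUDA was not offered; fall back to the first format in the decoder's list.
            WriteLine("CUDA surface format not offered by the decoder.");
            return formats[0];
        }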