Example #1
        /// <summary>
        /// Attaches a hardware accelerator to this video component.
        /// </summary>
        /// <param name="selectedConfig">The selected configuration.</param>
        /// <returns>
        /// True if the hardware accelerator was successfully attached; otherwise, false.
        /// </returns>
        public bool AttachHardwareDevice(HardwareDeviceInfo selectedConfig)
        {
            // Check for no device selection
            if (selectedConfig == null)
            {
                return false;
            }

            try
            {
                var accelerator = new HardwareAccelerator(this, selectedConfig);

                AVBufferRef* devContextRef = null;
                var initResultCode = ffmpeg.av_hwdevice_ctx_create(&devContextRef, accelerator.DeviceType, null, null, 0);
                if (initResultCode < 0)
                {
                    throw new MediaContainerException($"Unable to initialize hardware context for device {accelerator.Name}");
                }

                // Keep references to the device context and accelerator, and wire them into the
                // codec context so decoding and pixel format negotiation go through the accelerator.
                HardwareDeviceContext = devContextRef;
                HardwareAccelerator = accelerator;
                CodecContext->hw_device_ctx = ffmpeg.av_buffer_ref(HardwareDeviceContext);
                CodecContext->get_format = accelerator.GetFormatCallback;

                return true;
            }
            catch (Exception ex)
            {
                this.LogError(Aspects.Component, "Could not attach hardware decoder.", ex);
                return false;
            }
        }
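The method above only attaches a configuration the caller has already picked. Below is a minimal caller-side sketch, assuming an illustrative EnumerateCompatibleDevices helper and a videoComponent instance; neither is part of the example above.

        // Hypothetical usage sketch; 'EnumerateCompatibleDevices' and 'videoComponent' are
        // illustrative placeholders, not part of the example above.
        IReadOnlyList<HardwareDeviceInfo> candidates = EnumerateCompatibleDevices(videoComponent);
        var selectedDevice = candidates.Count > 0 ? candidates[0] : null;

        // When no device is available or attachment fails, the component keeps decoding in software.
        var isHardwareDecoding = selectedDevice != null && videoComponent.AttachHardwareDevice(selectedDevice);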
Example #2
        /// <inheritdoc />
        protected override MediaFrame CreateFrameSource(IntPtr framePointer)
        {
            // Validate the video frame
            var frame = (AVFrame*)framePointer;

            if (framePointer == IntPtr.Zero || frame->width <= 0 || frame->height <= 0)
            {
                return null;
            }

            // Move the frame from hardware (GPU) memory to RAM (CPU)
            if (HardwareAccelerator != null)
            {
                frame = HardwareAccelerator.ExchangeFrame(CodecContext, frame, out var isHardwareFrame);
                IsUsingHardwareDecoding = isHardwareFrame;
            }

            // Init the filter graph for the frame
            InitializeFilterGraph(frame);

            AVFrame* outputFrame;

            // Changes in the filter graph can be applied by calling the ChangeMedia command
            if (FilterGraph != null)
            {
                // Clone the input frame to serve as the filter graph's working output frame
                outputFrame = MediaFrame.CloneAVFrame(frame);

                // Push the cloned frame into the source filter and drain the sink;
                // each successful pull overwrites outputFrame with the filtered data.
                var result = ffmpeg.av_buffersrc_add_frame(SourceFilter, outputFrame);
                while (result >= 0)
                {
                    result = ffmpeg.av_buffersink_get_frame_flags(SinkFilter, outputFrame, 0);
                }

                if (outputFrame->width <= 0 || outputFrame->height <= 0)
                {
                    // If we don't have a valid output frame simply release it and
                    // return the original input frame
                    MediaFrame.ReleaseAVFrame(outputFrame);
                    outputFrame = frame;
                }
                else
                {
                    // The output frame now holds the valid (filtered) data,
                    // so release the original input frame.
                    MediaFrame.ReleaseAVFrame(frame);
                }
            }
            else
            {
                outputFrame = frame;
            }

            // Check if the output frame is valid
            if (outputFrame->width <= 0 || outputFrame->height <= 0)
            {
                return null;
            }

            // Create the frame holder object and return it.
            return new VideoFrame(outputFrame, this);
        }
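For context, the GPU-to-CPU copy that HardwareAccelerator.ExchangeFrame performs in Example #2 can be sketched with FFmpeg's frame transfer call. This is a simplified assumption of that behavior (pixel format checks and detailed error reporting omitted), not the accelerator's actual implementation.

        // Simplified sketch of a hardware-to-software frame exchange (assumed behavior, for illustration only).
        private static unsafe AVFrame* TransferFrameToSystemMemory(AVFrame* input, out bool isHardwareFrame)
        {
            // A decoded frame lives in GPU memory when it carries a hardware frames context.
            isHardwareFrame = input->hw_frames_ctx != null;
            if (!isHardwareFrame)
            {
                return input;
            }

            // Allocate a software frame and copy the pixel data out of GPU memory.
            var output = ffmpeg.av_frame_alloc();
            if (ffmpeg.av_hwframe_transfer_data(output, input, 0) < 0)
            {
                // The transfer failed: release the software frame and keep decoding with the original.
                ffmpeg.av_frame_free(&output);
                return input;
            }

            // Preserve timestamps and metadata, then release the hardware frame.
            ffmpeg.av_frame_copy_props(output, input);
            ffmpeg.av_frame_free(&input);
            return output;
        }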