Example 1
        /// <summary>
        /// Initializes static members of the <see cref="HardwareAccelerator"/> class.
        /// </summary>
        static HardwareAccelerator()
        {
            Dxva2 = new HardwareAccelerator
            {
                Name             = "DXVA2",
                DeviceType       = AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2,
                PixelFormat      = AVPixelFormat.AV_PIX_FMT_DXVA2_VLD,
                RequiresTransfer = true,
            };

            Cuda = new HardwareAccelerator
            {
                Name             = "CUVID",
                DeviceType       = AVHWDeviceType.AV_HWDEVICE_TYPE_CUDA,
                PixelFormat      = AVPixelFormat.AV_PIX_FMT_CUDA,
                RequiresTransfer = false,
            };

            All = new ReadOnlyDictionary <AVPixelFormat, HardwareAccelerator>(
                new Dictionary <AVPixelFormat, HardwareAccelerator>()
            {
                { Dxva2.PixelFormat, Dxva2 },
                { Cuda.PixelFormat, Cuda }
            });
        }
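
A brief usage sketch (not part of the original example): the All map is keyed by pixel format, so a caller that knows the decoder's pixel format can look up the matching accelerator in constant time. The selectedFormat local is hypothetical; All, Name and RequiresTransfer are the members initialized above.

        // Hedged sketch; selectedFormat is a hypothetical local.
        var selectedFormat = AVPixelFormat.AV_PIX_FMT_DXVA2_VLD;
        if (HardwareAccelerator.All.TryGetValue(selectedFormat, out var accelerator))
        {
            Console.WriteLine($"Accelerator: {accelerator.Name}; requires transfer: {accelerator.RequiresTransfer}");
        }
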
        /// <summary>
        /// Attaches a hardware accelerator to this video component.
        /// </summary>
        /// <param name="selectedConfig">The selected configuration.</param>
        /// <returns>
        /// Whether or not the hardware accelerator was attached
        /// </returns>
        public bool AttachHardwareDevice(HardwareDeviceInfo selectedConfig)
        {
            // Check for no device selection
            if (selectedConfig == null)
            {
                return false;
            }

            try
            {
                var accelerator = new HardwareAccelerator(this, selectedConfig);

                AVBufferRef *devContextRef  = null;
                var          initResultCode = ffmpeg.av_hwdevice_ctx_create(&devContextRef, accelerator.DeviceType, null, null, 0);
                if (initResultCode < 0)
                {
                    throw new MediaContainerException($"Unable to initialize hardware context for device {accelerator.Name}");
                }

                HardwareDeviceContext       = devContextRef;
                HardwareAccelerator         = accelerator;
                CodecContext->hw_device_ctx = ffmpeg.av_buffer_ref(HardwareDeviceContext);
                CodecContext->get_format    = accelerator.GetFormatCallback;

                return true;
            }
            catch (Exception ex)
            {
                this.LogError(Aspects.Component, "Could not attach hardware decoder.", ex);
                return false;
            }
        }
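
A minimal calling sketch, not taken from the source: the caller passes the hardware device selected in the media options and falls back to software decoding when attachment fails. The videoComponent and mediaOptions locals are hypothetical stand-ins for the video component and its container options.

        // Hedged usage sketch; videoComponent and mediaOptions are hypothetical.
        var attached = videoComponent.AttachHardwareDevice(mediaOptions.VideoHardwareDevice);
        if (attached == false)
        {
            // AttachHardwareDevice has already logged the failure; software decoding remains in effect.
        }
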
Example 3
        /// <summary>
        /// Initializes static members of the <see cref="HardwareAccelerator"/> class.
        /// </summary>
        static HardwareAccelerator()
        {
            Dxva2 = new HardwareAccelerator
            {
                Name        = "DXVA2",
                DeviceType  = AVHWDeviceType.AV_HWDEVICE_TYPE_DXVA2,
                PixelFormat = AVPixelFormat.AV_PIX_FMT_DXVA2_VLD,
            };

            All = new ReadOnlyDictionary <AVPixelFormat, HardwareAccelerator>(
                new Dictionary <AVPixelFormat, HardwareAccelerator>()
            {
                { Dxva2.PixelFormat, Dxva2 }
            });
        }
Example 4
        /// <summary>
        /// Attaches a hardware accelerator to the specified component.
        /// </summary>
        /// <param name="component">The component.</param>
        /// <param name="selectedConfig">The selected configuration.</param>
        /// <returns>
        /// Whether or not the hardware accelerator was attached
        /// </returns>
        public static bool Attach(VideoComponent component, HardwareDeviceInfo selectedConfig)
        {
            try
            {
                var result = new HardwareAccelerator
                {
                    Component   = component,
                    Name        = selectedConfig.DeviceTypeName,
                    DeviceType  = selectedConfig.DeviceType,
                    PixelFormat = selectedConfig.PixelFormat,
                };

                result.InitializeHardwareContext();
                return true;
            }
            catch (Exception ex)
            {
                component.Container.Parent?.Log(MediaLogMessageType.Error, $"Could not attach hardware decoder. {ex.Message}");
                return false;
            }
        }
Example 5
        /// <summary>
        /// Initializes a new instance of the <see cref="MediaComponent"/> class.
        /// </summary>
        /// <param name="container">The container.</param>
        /// <param name="streamIndex">Index of the stream.</param>
        /// <exception cref="ArgumentNullException">container</exception>
        /// <exception cref="MediaContainerException">The container exception.</exception>
        protected MediaComponent(MediaContainer container, int streamIndex)
        {
            // Ported from: https://github.com/FFmpeg/FFmpeg/blob/master/fftools/ffplay.c#L2559
            // avctx = avcodec_alloc_context3(NULL);
            Container    = container ?? throw new ArgumentNullException(nameof(container));
            CodecContext = ffmpeg.avcodec_alloc_context3(null);
            RC.Current.Add(CodecContext, $"134: {nameof(MediaComponent)}[{MediaType}].ctor()");
            StreamIndex = streamIndex;
            Stream      = container.InputContext->streams[StreamIndex];
            StreamInfo  = container.MediaInfo.Streams[StreamIndex];

            // Set default codec context options from probed stream
            var setCodecParamsResult = ffmpeg.avcodec_parameters_to_context(CodecContext, Stream->codecpar);

            if (setCodecParamsResult < 0)
            {
                Container.Parent?.Log(MediaLogMessageType.Warning, $"Could not set codec parameters. Error code: {setCodecParamsResult}");
            }

            // We set the packet timebase to the same timebase as the stream, as opposed to the typical AV_TIME_BASE
            if (this is VideoComponent && Container.MediaOptions.VideoForcedFps > 0)
            {
                var fpsRational = ffmpeg.av_d2q(Container.MediaOptions.VideoForcedFps, 1000000);
                Stream->r_frame_rate       = fpsRational;
                CodecContext->pkt_timebase = new AVRational {
                    num = fpsRational.den, den = fpsRational.num
                };
            }
            else
            {
                CodecContext->pkt_timebase = Stream->time_base;
            }

            // Find the default decoder codec from the stream and set it.
            var      defaultCodec = ffmpeg.avcodec_find_decoder(Stream->codec->codec_id);
            AVCodec *forcedCodec  = null;

            // If set, change the codec to the forced codec.
            if (Container.MediaOptions.DecoderCodec.ContainsKey(StreamIndex) &&
                string.IsNullOrWhiteSpace(Container.MediaOptions.DecoderCodec[StreamIndex]) == false)
            {
                var forcedCodecName = Container.MediaOptions.DecoderCodec[StreamIndex];
                forcedCodec = ffmpeg.avcodec_find_decoder_by_name(forcedCodecName);
                if (forcedCodec == null)
                {
                    Container.Parent?.Log(MediaLogMessageType.Warning,
                                          $"COMP {MediaType.ToString().ToUpperInvariant()}: Unable to set decoder codec to '{forcedCodecName}' on stream index {StreamIndex}");
                }
            }

            // Check we have a valid codec to open and process the stream.
            if (defaultCodec == null && forcedCodec == null)
            {
                var errorMessage = $"Fatal error. Unable to find suitable decoder for {Stream->codec->codec_id.ToString()}";
                CloseComponent();
                throw new MediaContainerException(errorMessage);
            }

            var      codecCandidates = new AVCodec *[] { forcedCodec, defaultCodec };
            AVCodec *selectedCodec   = null;
            var      codecOpenResult = 0;

            foreach (var codec in codecCandidates)
            {
                if (codec == null)
                {
                    continue;
                }

                // Pass default codec settings to the codec context
                CodecContext->codec_id = codec->id;
                if ((codec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) != 0)
                {
                    CodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
                }
                if ((codec->capabilities & ffmpeg.AV_CODEC_FLAG2_CHUNKS) != 0)
                {
                    CodecContext->flags2 |= ffmpeg.AV_CODEC_FLAG2_CHUNKS;
                }

                // Process the decoder options
                {
                    var decoderOptions = Container.MediaOptions.DecoderParams;

                    // Configure the codec context flags
                    if (decoderOptions.EnableFastDecoding)
                    {
                        CodecContext->flags2 |= ffmpeg.AV_CODEC_FLAG2_FAST;
                    }
                    if (decoderOptions.EnableLowDelay)
                    {
                        CodecContext->flags |= ffmpeg.AV_CODEC_FLAG_LOW_DELAY;
                    }

                    // process the low res option
                    if (decoderOptions.EnableLowRes && codec->max_lowres > 0)
                    {
                        decoderOptions.LowResIndex = codec->max_lowres.ToString(CultureInfo.InvariantCulture);
                    }

                    // Ensure ref counted frames for audio and video decoding
                    if (CodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO || CodecContext->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
                    {
                        decoderOptions.RefCountedFrames = "1";
                    }
                }

                // Set up additional settings. The most important one is Threads: with it set to 1,
                // decoding is very slow; with it set to auto, decoding is very fast in most scenarios.
                var codecOptions = Container.MediaOptions.DecoderParams.GetStreamCodecOptions(Stream->index);

                // Enable Hardware acceleration if requested
                if (this is VideoComponent && container.MediaOptions.VideoHardwareDevice != null)
                {
                    HardwareAccelerator.Attach(this as VideoComponent, container.MediaOptions.VideoHardwareDevice);
                }

                // Open the CodecContext. This requires exclusive FFmpeg access
                lock (CodecOpenLock)
                {
                    fixed (AVDictionary** codecOptionsRef = &codecOptions.Pointer)
                    {
                        codecOpenResult = ffmpeg.avcodec_open2(CodecContext, codec, codecOptionsRef);
                    }
                }

                // Check if the codec opened successfully
                if (codecOpenResult < 0)
                {
                    Container.Parent?.Log(MediaLogMessageType.Warning,
                                          $"Unable to open codec '{FFInterop.PtrToStringUTF8(codec->name)}' on stream {streamIndex}");

                    continue;
                }

                // If any codec options are left over after opening the codec, it means they were not consumed
                var currentEntry = codecOptions.First();
                while (currentEntry != null && currentEntry.Key != null)
                {
                    Container.Parent?.Log(MediaLogMessageType.Warning,
                                          $"Invalid codec option: '{currentEntry.Key}' for codec '{FFInterop.PtrToStringUTF8(codec->name)}', stream {streamIndex}");
                    currentEntry = codecOptions.Next(currentEntry);
                }

                selectedCodec = codec;
                break;
            }

            if (selectedCodec == null)
            {
                CloseComponent();
                throw new MediaContainerException($"Unable to find suitable decoder codec for stream {streamIndex}. Error code {codecOpenResult}");
            }

            // Startup done. Set some options.
            Stream->discard = AVDiscard.AVDISCARD_DEFAULT;
            MediaType       = (MediaType)CodecContext->codec_type;

            // Compute the start time
            if (Stream->start_time == ffmpeg.AV_NOPTS_VALUE)
            {
                StartTimeOffset = Container.MediaStartTimeOffset;
            }
            else
            {
                StartTimeOffset = Stream->start_time.ToTimeSpan(Stream->time_base);
            }

            // compute the duration
            if (Stream->duration == ffmpeg.AV_NOPTS_VALUE || Stream->duration == 0)
            {
                Duration = Container.InputContext->duration.ToTimeSpan();
            }
            else
            {
                Duration = Stream->duration.ToTimeSpan(Stream->time_base);
            }

            CodecId   = Stream->codec->codec_id;
            CodecName = FFInterop.PtrToStringUTF8(selectedCodec->name);
            Bitrate   = Stream->codec->bit_rate < 0 ? 0 : Convert.ToUInt64(Stream->codec->bit_rate);
            Container.Parent?.Log(MediaLogMessageType.Debug,
                                  $"COMP {MediaType.ToString().ToUpperInvariant()}: Start Offset: {StartTimeOffset.Format()}; Duration: {Duration.Format()}");
        }
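
A short, assumed sketch (not from the source) of the forced-FPS timebase math used in the constructor above: av_d2q converts the double frame rate into an AVRational, and the packet timebase is its reciprocal.

        // Hedged sketch: a forced 25 fps becomes an AVRational of 25/1,
        // and the packet timebase is its reciprocal, 1/25.
        var forcedFps = ffmpeg.av_d2q(25.0, 1000000);
        var packetTimeBase = new AVRational { num = forcedFps.den, den = forcedFps.num };
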
        /// <inheritdoc />
        protected override MediaFrame CreateFrameSource(IntPtr framePointer)
        {
            // Validate the video frame
            var frame = (AVFrame *)framePointer;

            if (framePointer == IntPtr.Zero || frame->width <= 0 || frame->height <= 0)
            {
                return null;
            }

            // Move the frame from hardware (GPU) memory to RAM (CPU)
            if (HardwareAccelerator != null)
            {
                frame = HardwareAccelerator.ExchangeFrame(CodecContext, frame, out var isHardwareFrame);
                IsUsingHardwareDecoding = isHardwareFrame;
            }

            // Init the filter graph for the frame
            if (string.IsNullOrWhiteSpace(FilterString) == false)
            {
                InitializeFilterGraph(frame);
            }

            AVFrame *outputFrame;

            // Changes in the filter graph can be applied by calling the ChangeMedia command
            if (FilterGraph != null)
            {
                // Allocate the output frame
                outputFrame = MediaFrame.CloneAVFrame(frame);

                var result = ffmpeg.av_buffersrc_add_frame(SourceFilter, outputFrame);
                while (result >= 0)
                {
                    result = ffmpeg.av_buffersink_get_frame_flags(SinkFilter, outputFrame, 0);
                }

                if (outputFrame->width <= 0 || outputFrame->height <= 0)
                {
                    // If we don't have a valid output frame simply release it and
                    // return the original input frame
                    MediaFrame.ReleaseAVFrame(outputFrame);
                    outputFrame = frame;
                }
                else
                {
                    // The output frame is the new valid frame, so we need to
                    // release the original input frame.
                    MediaFrame.ReleaseAVFrame(frame);
                }
            }
            else
            {
                outputFrame = frame;
            }

            // Check if the output frame is valid
            if (outputFrame->width <= 0 || outputFrame->height <= 0)
            {
                return null;
            }

            // Create the frame holder object and return it.
            return new VideoFrame(outputFrame, this);
        }
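
For reference, a hedged sketch (not from the source) of the buffersrc/buffersink push-pull pattern that the filtering step above relies on. The sourceFilter, sinkFilter and decodedFrame locals are hypothetical, already-configured filter contexts and input frame.

        // Hedged sketch; sourceFilter, sinkFilter and decodedFrame are hypothetical.
        var pushResult = ffmpeg.av_buffersrc_add_frame(sourceFilter, decodedFrame);
        if (pushResult >= 0)
        {
            var filteredFrame = ffmpeg.av_frame_alloc();
            if (ffmpeg.av_buffersink_get_frame(sinkFilter, filteredFrame) < 0)
            {
                // No filtered output was produced (e.g. EAGAIN); release the allocation and keep the input frame.
                ffmpeg.av_frame_free(&filteredFrame);
            }
        }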