/// <summary>
/// Scans the devices compatible with H.264 decoding and remembers the
/// last one whose type is D3D11VA; clears the selection when none is found.
/// </summary>
public void UseHW()
{
    _haInfo = null;

    var candidates = HardwareAccelerator.GetCompatibleDevices(AVCodecID.AV_CODEC_ID_H264);
    foreach (var candidate in candidates)
    {
        if (candidate.DeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA)
            continue;

        // Keep overwriting so the last matching device wins, exactly as before.
        _haInfo = candidate;
    }
}
/// <summary>
/// Extracts the stream infos from the input.
/// </summary>
/// <param name="inputContext">The input context.</param>
/// <returns>The list of stream infos; empty when the input has no streams.</returns>
private static List<StreamInfo> ExtractStreams(AVFormatContext* inputContext)
{
    var result = new List<StreamInfo>(32);
    if (inputContext->streams == null)
    {
        return result;
    }

    for (var i = 0; i < inputContext->nb_streams; i++)
    {
        var s = inputContext->streams[i];

        // Build a throw-away codec context populated from the stream's
        // codec parameters; it is freed at the bottom of the loop.
        var codecContext = ffmpeg.avcodec_alloc_context3(null);
        ffmpeg.avcodec_parameters_to_context(codecContext, s->codecpar);

        // Fields which are missing from AVCodecParameters need to be taken
        // from the stream's AVCodecContext
        codecContext->properties = s->codec->properties;
        codecContext->codec = s->codec->codec;
        codecContext->qmin = s->codec->qmin;
        codecContext->qmax = s->codec->qmax;

        // BUGFIX: the original assignments were transposed
        // (coded_width <- coded_height and vice versa), corrupting both
        // coded dimensions for every stream.
        codecContext->coded_width = s->codec->coded_width;
        codecContext->coded_height = s->codec->coded_height;

        // Only uncompressed-style audio codecs report bits per sample;
        // av_get_bits_per_sample returns 0 otherwise.
        var bitsPerSample = codecContext->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO ?
            ffmpeg.av_get_bits_per_sample(codecContext->codec_id) : 0;

        var dar = s->display_aspect_ratio;
        var sar = s->sample_aspect_ratio;
        var codecSar = s->codecpar->sample_aspect_ratio;

        // When the container-level SAR disagrees with the codec-level SAR,
        // recompute the display aspect ratio from the pixel dimensions and
        // the container SAR (same approach as ffmpeg's dump.c).
        if (sar.num != 0 && (sar.num != codecSar.num || sar.den != codecSar.den))
        {
            ffmpeg.av_reduce(
                &dar.num,
                &dar.den,
                s->codecpar->width * sar.num,
                s->codecpar->height * sar.den,
                1024 * 1024);
        }

        var stream = new StreamInfo
        {
            StreamId = s->id,
            StreamIndex = s->index,
            Metadata = FFDictionary.ToDictionary(s->metadata),
            CodecType = codecContext->codec_type,
            CodecTypeName = ffmpeg.av_get_media_type_string(codecContext->codec_type),
            Codec = codecContext->codec_id,
            CodecName = ffmpeg.avcodec_get_name(codecContext->codec_id),
            CodecProfile = ffmpeg.avcodec_profile_name(codecContext->codec_id, codecContext->profile),
            ReferenceFrameCount = codecContext->refs,
            CodecTag = codecContext->codec_tag,
            PixelFormat = codecContext->pix_fmt,
            FieldOrder = codecContext->field_order,
            IsInterlaced = codecContext->field_order != AVFieldOrder.AV_FIELD_PROGRESSIVE
                && codecContext->field_order != AVFieldOrder.AV_FIELD_UNKNOWN,
            ColorRange = codecContext->color_range,
            PixelWidth = codecContext->width,
            PixelHeight = codecContext->height,
            HasClosedCaptions = (codecContext->properties & ffmpeg.FF_CODEC_PROPERTY_CLOSED_CAPTIONS) != 0,
            IsLossless = (codecContext->properties & ffmpeg.FF_CODEC_PROPERTY_LOSSLESS) != 0,
            // For fixed-bits-per-sample audio the true bit rate is derived
            // from the sample layout; otherwise trust the context's value.
            BitRate = bitsPerSample > 0 ?
                bitsPerSample * codecContext->channels * codecContext->sample_rate :
                codecContext->bit_rate,
            MaxBitRate = codecContext->rc_max_rate,
            InfoFrameCount = s->codec_info_nb_frames,
            TimeBase = s->time_base,
            SampleFormat = codecContext->sample_fmt,
            SampleRate = codecContext->sample_rate,
            DisplayAspectRatio = dar,
            SampleAspectRatio = sar,
            Disposition = s->disposition,
            StartTime = s->start_time.ToTimeSpan(s->time_base),
            Duration = s->duration.ToTimeSpan(s->time_base),
            FPS = s->avg_frame_rate.ToDouble(),
            TBR = s->r_frame_rate.ToDouble(),
            TBN = 1d / s->time_base.ToDouble(),
            TBC = 1d / s->codec->time_base.ToDouble()
        };

        // Extract valid hardware configurations
        stream.HardwareDevices = HardwareAccelerator.GetCompatibleDevices(stream.Codec);
        stream.HardwareDecoders = GetHardwareDecoders(stream.Codec);

        // TODO: I chose not to include Side data but I could easily do so
        // https://ffmpeg.org/doxygen/3.2/dump_8c_source.html
        // See function: dump_sidedata
        ffmpeg.avcodec_free_context(&codecContext);

        result.Add(stream);
    }

    return result;
}