/// <summary>
/// Initializes a new instance of the <see cref="MediaInfo"/> class.
/// </summary>
/// <param name="container">The input container context.</param>
internal unsafe MediaInfo(AVFormatContext* container)
{
    FilePath = new IntPtr(container->url).Utf8ToString();
    ContainerFormat = new IntPtr(container->iformat->name).Utf8ToString();
    Metadata = new ContainerMetadata(container->metadata);
    Bitrate = container->bit_rate > 0 ? container->bit_rate : 0;

    var timeBase = new AVRational { num = 1, den = ffmpeg.AV_TIME_BASE };
    Duration = container->duration != ffmpeg.AV_NOPTS_VALUE
        ? container->duration.ToTimeSpan(timeBase)
        : TimeSpan.Zero;
    StartTime = container->start_time != ffmpeg.AV_NOPTS_VALUE
        ? container->start_time.ToTimeSpan(timeBase)
        : TimeSpan.Zero;

    Chapters = new ReadOnlyCollection<MediaChapter>(ParseChapters(container));

    fileInfo = new Lazy<FileInfo>(() =>
    {
        try
        {
            return new FileInfo(FilePath);
        }
        catch (Exception)
        {
            return null;
        }
    });
}
public static extern System.Int64 av_add_stable(
    AVRational ts_tb,
    [MarshalAs(UnmanagedType.I8)] System.Int64 ts,
    AVRational inc_tb,
    [MarshalAs(UnmanagedType.I8)] System.Int64 inc);
public static extern System.Int32 av_compare_ts(
    [MarshalAs(UnmanagedType.I8)] System.Int64 ts_a,
    AVRational tb_a,
    [MarshalAs(UnmanagedType.I8)] System.Int64 ts_b,
    AVRational tb_b);
// Code from rational.h
/**
 * Compare two rationals.
 * @param a first rational
 * @param b second rational
 * @return 0 if a==b, 1 if a>b and -1 if a<b.
 */
public static int av_cmp_q(AVRational a, AVRational b)
{
    long tmp = a.num * (long)b.den - b.num * (long)a.den;

    // For any non-zero difference, (tmp >> 63) | 1 yields -1 for a negative
    // difference and +1 for a positive one. The original `tmp > 0` test
    // wrongly returned 0 when a < b.
    if (tmp != 0)
        return (int)((tmp >> 63) | 1);
    else
        return 0;
}
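// A quick sanity check for av_cmp_q above (hypothetical values, not from the
// source; assumes System.Diagnostics is imported for Debug.Assert):
// 1/2 < 2/3 yields -1, and 1/2 == 2/4 yields 0.
Debug.Assert(av_cmp_q(new AVRational { num = 1, den = 2 }, new AVRational { num = 2, den = 3 }) == -1);
Debug.Assert(av_cmp_q(new AVRational { num = 1, den = 2 }, new AVRational { num = 2, den = 4 }) == 0);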
public unsafe Remuxing(string inputFile)
{
    string outputFile = Path.GetFileNameWithoutExtension(inputFile) + "_remuxing" + Path.GetExtension(inputFile);

    using (MediaReader reader = new MediaReader(inputFile))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        // Add an output stream for each input stream, reusing the reader's codec ids.
        for (int i = 0; i < reader.Count; i++)
        {
            writer.AddStream(reader[i], writer.Format.Flags);
        }
        writer.Initialize();

        // Read packets, rescale their timestamps into the output time base, and write them.
        foreach (var packet in reader.ReadPacket())
        {
            int index = packet.StreamIndex;
            AVRounding rounding = AVRounding.AV_ROUND_NEAR_INF | AVRounding.AV_ROUND_PASS_MINMAX;
            AVRational inTimeBase = reader[index].TimeBase;
            AVRational outTimeBase = writer[index].TimeBase;
            packet.Pts = ffmpeg.av_rescale_q_rnd(packet.Pts, inTimeBase, outTimeBase, rounding);
            packet.Dts = ffmpeg.av_rescale_q_rnd(packet.Dts, inTimeBase, outTimeBase, rounding);
            packet.Duration = ffmpeg.av_rescale_q(packet.Duration, inTimeBase, outTimeBase);
            packet.Pos = -1;
            writer.WritePacket(packet);
        }
        writer.FlushMuxer();
    }
}
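// The AV_ROUND_PASS_MINMAX flag above matters: it passes AV_NOPTS_VALUE (INT64_MIN)
// through av_rescale_q_rnd unchanged instead of rescaling it into a bogus timestamp.
// Usage sketch (the file name is hypothetical):
// new Remuxing("input.mp4"); // writes input_remuxing.mp4 next to the source file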
/// <summary>
/// Converts the given <paramref name="timestamp"/> in the <paramref name="timeBase"/> units to a <see cref="TimeSpan"/> object.
/// </summary>
/// <param name="timestamp">The timestamp.</param>
/// <param name="timeBase">The time base unit.</param>
/// <returns>The converted <see cref="TimeSpan"/>.</returns>
public static TimeSpan ToTimeSpan(this long timestamp, AVRational timeBase)
{
    var ts = Convert.ToDouble(timestamp);
    var tb = timeBase.ToDouble();
    return TimeSpan.FromMilliseconds(ts * tb * 1000);
}
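// Worked example for ToTimeSpan (hypothetical values): a timestamp of 90000 in a
// 1/90000 time base is exactly one second.
var oneSecond = 90000L.ToTimeSpan(new AVRational { num = 1, den = 90000 });
// oneSecond == TimeSpan.FromSeconds(1)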
public static extern System.Int32 av_opt_set_video_rate(
    IntPtr /* void* */ obj,
    [MarshalAs(UnmanagedType.LPStr)] string name,
    AVRational val,
    [MarshalAs(UnmanagedType.I4)] System.Int32 search_flags);
/// <summary> /// Initializes a new instance of the <see cref="VideoComponent"/> class. /// </summary> /// <param name="container">The container.</param> /// <param name="streamIndex">Index of the stream.</param> internal VideoComponent(MediaContainer container, int streamIndex) : base(container, streamIndex) { BaseFrameRateQ = ffmpeg.av_guess_frame_rate(container.InputContext, Stream, null); FilterString = container.MediaOptions.VideoFilter; if (double.IsNaN(BaseFrameRate)) { BaseFrameRateQ = Stream->r_frame_rate; } CurrentFrameRate = BaseFrameRate; if (double.IsNaN(CurrentFrameRate)) { CurrentFrameRate = Stream->avg_frame_rate.ToDouble(); } FrameWidth = Stream->codec->width; FrameHeight = Stream->codec->height; // Retrieve Matrix Rotation var displayMatrixRef = ffmpeg.av_stream_get_side_data(Stream, AVPacketSideDataType.AV_PKT_DATA_DISPLAYMATRIX, null); DisplayRotation = ComputeRotation(displayMatrixRef); }
public H264VideoStreamEncoder(Stream stream, int fps, Size frameSize)
{
    _stream = stream;
    _frameSize = frameSize;

    var codecId = AVCodecID.AV_CODEC_ID_H264;
    _pCodec = ffmpeg.avcodec_find_encoder(codecId);
    if (_pCodec == null)
    {
        throw new InvalidOperationException("Codec not found.");
    }

    _pCodecContext = ffmpeg.avcodec_alloc_context3(_pCodec);
    _pCodecContext->width = frameSize.Width;
    _pCodecContext->height = frameSize.Height;
    _pCodecContext->time_base = new AVRational { num = 1, den = fps };
    _pCodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
    ffmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "veryslow", 0);

    ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null).ThrowExceptionIfError();

    // Plane strides and sizes for YUV420P: the chroma planes are half the
    // luma plane's width and height.
    _linesizeY = frameSize.Width;
    _linesizeU = frameSize.Width / 2;
    _linesizeV = frameSize.Width / 2;
    _ySize = _linesizeY * frameSize.Height;
    _uSize = _linesizeU * frameSize.Height / 2;
}
/// <summary>
/// Creates and initializes a video encoder.
/// </summary>
/// <param name="videoCodec">The video codec id.</param>
/// <param name="flags"><see cref="MediaFormat.Flags"/></param>
/// <param name="width">Width in pixels; must be greater than 0.</param>
/// <param name="height">Height in pixels; must be greater than 0.</param>
/// <param name="fps">Frame rate; must be greater than 0.</param>
/// <param name="bitRate">Bit rate; 0 selects an automatic bit rate, otherwise must be non-negative.</param>
/// <param name="format">Pixel format; defaults to the first format the codec supports.</param>
/// <returns>The configured encoder.</returns>
public static MediaEncoder CreateVideoEncode(AVCodecID videoCodec, int flags, int width, int height, int fps, long bitRate = 0, AVPixelFormat format = AVPixelFormat.AV_PIX_FMT_NONE)
{
    return CreateEncode(videoCodec, flags, encoder =>
    {
        AVCodecContext* pCodecContext = encoder;
        if (width <= 0 || height <= 0 || fps <= 0 || bitRate < 0)
            throw new FFmpegException(FFmpegException.NonNegative);
        if (!encoder.SupportedPixelFmts.Any())
            throw new FFmpegException(FFmpegException.NotSupportCodecId);

        if (format == AVPixelFormat.AV_PIX_FMT_NONE)
            format = encoder.SupportedPixelFmts[0];
        else if (!encoder.SupportedPixelFmts.Contains(format))
            throw new FFmpegException(FFmpegException.NotSupportFormat);

        pCodecContext->width = width;
        pCodecContext->height = height;
        pCodecContext->time_base = new AVRational { num = 1, den = fps };
        pCodecContext->pix_fmt = format;
        pCodecContext->bit_rate = bitRate;
    });
}
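// Usage sketch for CreateVideoEncode (hypothetical parameters, assuming the
// static method lives on MediaEncoder as its return type suggests): an H.264
// encoder at 1280x720 and 30 fps, letting the helper pick the first supported
// pixel format.
// var encoder = MediaEncoder.CreateVideoEncode(AVCodecID.AV_CODEC_ID_H264, writer.Format.Flags, 1280, 720, 30);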
public EncodingPipeline(string codecName, int srcWidth, int srcHeight, AVPixelFormat srcPxfmt, long? bitrate, AVRational framerate)
{
    var codec = Codec.GetByName(codecName);

    AVPixelFormat dstPixelFormat;
    if (codec.isSupported(srcPxfmt))
    {
        dstPixelFormat = srcPxfmt;
    }
    else
    {
        dstPixelFormat = codec.GetBestPixFmt(srcPxfmt);
        _log.Info($"[{srcPxfmt}] Pixel format is not supported. The best target format, supported by codec, is '{dstPixelFormat}'.");
        _converter = new FrameConverter(srcWidth, srcHeight, srcPxfmt, dstPixelFormat);
    }

    try
    {
        _encoder = new VideoEncoder(srcWidth, srcHeight, dstPixelFormat, bitrate, framerate, codec);
    }
    catch
    {
        _converter?.Dispose();
        throw;
    }
}
public virtual int decode(int[] input, int inputOffset, int inputLength)
{
    packet.data_base = input;
    packet.data_offset = inputOffset;
    packet.size = inputLength;

    int consumedLength;
    try
    {
        consumedLength = context.avcodec_decode_video2(picture, gotPicture, packet);
    }
    catch (System.IndexOutOfRangeException e)
    {
        // The original call passed `e` as an unused format argument, so the
        // exception was silently dropped; include it in the message instead.
        Console.WriteLine("H264Decoder.decode: " + e);
        return -1;
    }

    if (consumedLength < 0)
    {
        Console.WriteLine(string.Format("H264 decode error 0x{0:X8}", consumedLength));
        gotPicture[0] = 0;
        return consumedLength;
    }

    if (hasImage())
    {
        context.priv_data.displayPicture.copyTo(picture);
        aspectRatio = context.sample_aspect_ratio;
    }

    return consumedLength;
}
/// <summary>
/// Converts a frame index to a timestamp in the <paramref name="timeBase"/> units.
/// </summary>
/// <param name="frameNumber">The frame number.</param>
/// <param name="fps">The stream frame rate.</param>
/// <param name="timeBase">The stream time base.</param>
/// <returns>The timestamp.</returns>
public static long ToTimestamp(this int frameNumber, AVRational fps, AVRational timeBase)
{
    long num = frameNumber * fps.den * timeBase.den;
    long den = fps.num * timeBase.num;
    return Convert.ToInt64(num / (double)den);
}
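// Worked example for ToTimestamp (hypothetical values): frame 75 at 25 fps sits
// at 3 seconds, which in a 1/90000 time base is 270000 ticks.
var ts = 75.ToTimestamp(new AVRational { num = 25, den = 1 }, new AVRational { num = 1, den = 90000 });
// ts == 270000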
public void Encode(IntPtr memory, AVPixelFormat pixelFormat, string file)
{
    var codec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_PNG);
    if (codec == null)
    {
        throw new ArgumentException("Failed to find codec", nameof(file));
    }

    var encoderContext = ffmpeg.avcodec_alloc_context3(codec);
    if (encoderContext == null)
    {
        throw new InvalidOperationException("Failed to allocate encoder context");
    }

    try
    {
        encoderContext->pix_fmt = PixelFormat;
        encoderContext->height = Height;
        encoderContext->width = Width;
        encoderContext->time_base = new AVRational { den = 1, num = 1 };
        ffmpeg.avcodec_open2(encoderContext, codec, options: null).ThrowExceptionIfError(nameof(ffmpeg.avcodec_open2));

        var packet = ffmpeg.av_packet_alloc();
        var frame = ffmpeg.av_frame_alloc();
        try
        {
            ffmpeg.av_packet_unref(packet);
            ffmpeg.av_frame_unref(frame);

            // Wrap the caller-provided pixel buffer; the stride assumes 3 bytes per pixel.
            frame->data[0] = (byte*)memory;
            frame->width = Width;
            frame->height = Height;
            frame->linesize[0] = Width * 3;
            frame->format = (int)pixelFormat;

            ffmpeg.avcodec_send_frame(encoderContext, frame).ThrowExceptionIfError(nameof(ffmpeg.avcodec_send_frame));
            ffmpeg.avcodec_receive_packet(encoderContext, packet).ThrowExceptionIfError(nameof(ffmpeg.avcodec_receive_packet));

            using var output = File.Create(file);
            var data = new ReadOnlySpan<byte>(packet->data, packet->size);
            output.Write(data);
        }
        finally
        {
            ffmpeg.av_frame_free(&frame);
            ffmpeg.av_packet_free(&packet);
        }
    }
    finally
    {
        ffmpeg.avcodec_free_context(&encoderContext);
    }
}
/// <summary>Create an AVRational.</summary>
public static AVRational av_make_q(int @num, int @den)
{
    var r = new AVRational { @num = num, @den = den };
    return r;
}
public static double ToDouble(this AVRational rational)
{
    if (rational.den == 0)
    {
        return 0; // guard against division by zero.
    }

    return Convert.ToDouble(rational.num) / Convert.ToDouble(rational.den);
}
/// <summary>Invert a rational.</summary>
/// <param name="q">value</param>
/// <returns>1 / q</returns>
public static AVRational av_inv_q(AVRational @q)
{
    var r = new AVRational { @num = q.den, @den = q.num };
    return r;
}
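// Combining the small helpers above (a sketch; the values are hypothetical):
var fps = av_make_q(30000, 1001);        // NTSC frame rate, ~29.97 fps
var frameDuration = av_inv_q(fps);       // 1001/30000 of a second per frame
var seconds = frameDuration.ToDouble();  // ~0.0334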
public void Initialize(int width, int height, int frames_per_second)
{
    Reset();
    ffmpeg.avformat_network_init();

    fixed (AVFormatContext** c = &formatContext)
    {
        if (ffmpeg.avformat_alloc_output_context2(c, null, "flv", null) < 0)
        {
            throw new Exception("Could not allocate output format context!");
        }
    }

    codec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
    if (codec == null)
    {
        throw new Exception("codec not found!");
    }

    context = ffmpeg.avcodec_alloc_context3(codec);
    if (context == null)
    {
        throw new Exception("alloc context fail");
    }

    context->codec_id = codec->id;
    context->codec_type = AVMediaType.AVMEDIA_TYPE_VIDEO;
    context->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
    context->bit_rate = 400000;
    context->width = width;
    context->height = height;
    context->time_base = new AVRational { num = 1, den = frames_per_second };
    context->framerate = new AVRational { num = frames_per_second, den = 1 };
    context->gop_size = 50;
    context->max_b_frames = 1;
    context->qmin = 10;
    context->qmax = 50;
    context->level = 41;
    context->refs = 1;
    // context->max_b_frames = 0; // drop B-frames, keeping only I- and P-frames

    this.frames_per_second = frames_per_second;

    if (codec->id == AVCodecID.AV_CODEC_ID_H264)
    {
        ffmpeg.av_opt_set(context->priv_data, "preset", "slow", 0);
        // ffmpeg.av_opt_set(context->priv_data, "tune", "zerolatency", 0); // zero latency
    }

    if ((formatContext->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
    {
        context->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
    }
}
public H264Encoder(string outputPath, Size frameSize)
{
    ffmpeg.RootPath = Path.Join(TestData.SolutionDir, "ffmpeg", "bin");
    Console.WriteLine("FFMPEG version: " + ffmpeg.av_version_info());

    _frameSize = frameSize;

    var codecId = AVCodecID.AV_CODEC_ID_H264;
    _pCodec = ffmpeg.avcodec_find_encoder(codecId);
    if (_pCodec == null)
    {
        throw new InvalidOperationException("Codec not found.");
    }

    _pCodecContext = ffmpeg.avcodec_alloc_context3(_pCodec);
    _pCodecContext->width = frameSize.Width;
    _pCodecContext->height = frameSize.Height;
    _pCodecContext->time_base = new AVRational { num = 1, den = 1000 };
    _pCodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
    ffmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "superfast", 0);

    ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null).ThrowExceptionIfError();

    _linesizeY = frameSize.Width;
    _linesizeU = frameSize.Width / 2;
    _linesizeV = frameSize.Width / 2;
    _ySize = _linesizeY * frameSize.Height;
    _uSize = _linesizeU * frameSize.Height / 2;
    _swsContext = null;

    // Allocate a frame
    _frame = ffmpeg.av_frame_alloc();
    _frame->width = _pCodecContext->width;
    _frame->height = _pCodecContext->height;
    _frame->format = (int)_pCodecContext->pix_fmt;
    ffmpeg.av_frame_get_buffer(_frame, 32);

    // Create output context for mp4
    AVFormatContext* outputContext;
    ffmpeg.avformat_alloc_output_context2(&outputContext, null, "mp4", null).ThrowExceptionIfError();
    _outputContext = outputContext;
    ffmpeg.avio_open2(&_outputContext->pb, outputPath, ffmpeg.AVIO_FLAG_WRITE, null, null).ThrowExceptionIfError();

    // Create video stream in mp4 container
    _stream = ffmpeg.avformat_new_stream(_outputContext, _pCodec);
    ffmpeg.avcodec_parameters_from_context(_stream->codecpar, _pCodecContext).ThrowExceptionIfError();
    _stream->sample_aspect_ratio = _pCodecContext->sample_aspect_ratio;
    _stream->time_base = _pCodecContext->time_base;
    ffmpeg.avformat_write_header(_outputContext, null);
}
private double CalculateFrameRate(AVRational framerate)
{
    // AVRational does not convert to double implicitly; go through av_q2d
    // (declared elsewhere in these bindings).
    if (framerate.den > 0 && framerate.num > 0)
    {
        return ffmpeg.av_q2d(framerate);
    }

    // Fall back to the inverse of the codec time base.
    return 1 / ffmpeg.av_q2d(m_avCodecCtx.time_base);
}
/// <summary>
/// A red chromakey filter example for a .png image.
/// <para>
/// Equivalent to: ffmpeg -i <paramref name="input"/> -vf chromakey=red:0.1:0.0 <paramref name="output"/>
/// </para>
/// </summary>
/// <param name="input">The input image file.</param>
/// <param name="output">The output image file.</param>
public unsafe PngChromekeyFilter(string input, string output)
{
    using (MediaReader reader = new MediaReader(input))
    using (MediaWriter writer = new MediaWriter(output))
    {
        var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

        // Initialize the filter graph: buffer source -> chromakey -> buffer sink.
        int height = reader[videoIndex].Codec.AVCodecContext.height;
        int width = reader[videoIndex].Codec.AVCodecContext.width;
        int format = (int)reader[videoIndex].Codec.AVCodecContext.pix_fmt;
        AVRational time_base = reader[videoIndex].TimeBase;
        AVRational sample_aspect_ratio = reader[videoIndex].Codec.AVCodecContext.sample_aspect_ratio;

        MediaFilterGraph filterGraph = new MediaFilterGraph();
        filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width, height, (AVPixelFormat)format, time_base, sample_aspect_ratio)
            .LinkTo(0, filterGraph.AddFilter(new MediaFilter("chromakey"), "red:0.1:0.0"))
            .LinkTo(0, filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink)));
        filterGraph.Initialize();

        // Add a stream based on the reader's stream and initialize the writer.
        writer.AddStream(reader[videoIndex]);
        writer.Initialize();

        // Initialize the pixel format converter from the destination codec.
        PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

        foreach (var srcPacket in reader.ReadPacket())
        {
            foreach (var srcFrame in reader[videoIndex].ReadFrame(srcPacket))
            {
                filterGraph.Inputs.First().WriteFrame(srcFrame);
                foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                {
                    // filterFrame.ToMat() (EmguFFmpeg.EmguCV) could save the output
                    // image directly, without a writer:
                    //using (var mat = filterFrame.ToMat())
                    //{
                    //    mat.Save(output);
                    //}
                    foreach (var dstFrame in pixelConverter.Convert(filterFrame))
                    {
                        foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                        {
                            writer.WritePacket(dstPacket);
                        }
                    }
                }
            }
        }

        // Flush the muxer's cached packets.
        writer.FlushMuxer();
    }
}
public static extern System.Int64 av_rescale_delta(
    AVRational in_tb,
    [MarshalAs(UnmanagedType.I8)] System.Int64 in_ts,
    AVRational fs_tb,
    [MarshalAs(UnmanagedType.I4)] System.Int32 duration,
    IntPtr /* System.Int64* */ last,
    AVRational out_tb);
private unsafe EncodingPipeline createPipeline(AVFrame* frame, AVRational framerate)
{
    return new EncodingPipeline(
        "libx264",
        frame->width,
        frame->height,
        (AVPixelFormat)frame->format,
        _bitrate,
        framerate);
}
/// <summary> /// Initializes a new instance of the <see cref="VideoSeekIndexEntry" /> class. /// </summary> /// <param name="streamIndex">Index of the stream.</param> /// <param name="timeBaseNum">The time base numerator.</param> /// <param name="timeBaseDen">The time base deonominator.</param> /// <param name="startTimeTicks">The start time ticks.</param> /// <param name="presentationTime">The presentation time.</param> /// <param name="decodingTime">The decoding time.</param> internal VideoSeekIndexEntry(int streamIndex, int timeBaseNum, int timeBaseDen, long startTimeTicks, long presentationTime, long decodingTime) { StreamIndex = streamIndex; StartTime = TimeSpan.FromTicks(startTimeTicks); PresentationTime = presentationTime; DecodingTime = decodingTime; StreamTimeBase = new AVRational { num = timeBaseNum, den = timeBaseDen }; }
public bool TryDecodeFrame(out AVFrame frame, TimeSpan position)
{
    ffmpeg.av_frame_unref(_pFrame);
    ffmpeg.av_frame_unref(_receivedFrame);
    int error;

    // Convert the requested position into stream time-base units
    // (den/num is the number of time-base ticks per second). The original
    // local was misleadingly named AV_TIME_BASE.
    AVRational timebase = _pFormatContext->streams[_streamIndex]->time_base;
    float ticksPerSecond = (float)timebase.den / timebase.num;
    long tc = Convert.ToInt64(position.TotalSeconds * ticksPerSecond);

    if (ffmpeg.av_seek_frame(_pFormatContext, _streamIndex, tc, ffmpeg.AVSEEK_FLAG_BACKWARD) < 0)
    {
        ffmpeg.av_seek_frame(_pFormatContext, _streamIndex, tc, ffmpeg.AVSEEK_FLAG_ANY).ThrowExceptionIfError();
    }

    do
    {
        try
        {
            do
            {
                ffmpeg.av_packet_unref(_pPacket);
                error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);
                if (error == ffmpeg.AVERROR_EOF)
                {
                    frame = *_pFrame;
                    return false;
                }

                error.ThrowExceptionIfError();
            } while (_pPacket->stream_index != _streamIndex);

            ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket).ThrowExceptionIfError();
        }
        finally
        {
            ffmpeg.av_packet_unref(_pPacket);
        }

        error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
    } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));

    error.ThrowExceptionIfError();

    if (_pCodecContext->hw_device_ctx != null)
    {
        // Download the frame from GPU memory when hardware decoding is active.
        ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0).ThrowExceptionIfError();
        frame = *_receivedFrame;
    }
    else
    {
        frame = *_pFrame;
    }

    return true;
}
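// Usage sketch (the decoder wrapper name is hypothetical; TryDecodeFrame is the
// method above): seek to 10 s and grab the first decodable frame at or after it.
// if (decoder.TryDecodeFrame(out AVFrame frame, TimeSpan.FromSeconds(10)))
// {
//     // frame.width / frame.height / frame.data are valid until the next decode call
// }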
public void Numerator_ReturnsNativeNumerator()
{
    var nativeRational = new NativeAVRational { num = 4 };
    var rational = new AVRational(&nativeRational);

    Assert.Equal(4, rational.Numerator);
}
public void Denominator_ReturnsNativeDenominator()
{
    var nativeRational = new NativeAVRational { den = 100 };
    var rational = new AVRational(&nativeRational);

    Assert.Equal(100, rational.Denominator);
}
public static TimeSpan ToTimeSpan(this double pts, AVRational timeBase)
{
    if (double.IsNaN(pts) || Math.Abs(pts - ffmpeg.AV_NOPTS_VALUE) <= double.Epsilon)
    {
        return TimeSpan.MinValue;
    }

    return TimeSpan.FromTicks(timeBase.den == 0
        ? Convert.ToInt64(TimeSpan.TicksPerMillisecond * 1000 * pts / ffmpeg.AV_TIME_BASE)
        : Convert.ToInt64(TimeSpan.TicksPerMillisecond * 1000 * pts * timeBase.num / timeBase.den));
}
public AVRational[] ToArray()
{
    fixed (AVRational* p0 = &_0)
    {
        var a = new AVRational[Size];
        for (uint i = 0; i < Size; i++)
        {
            a[i] = *(p0 + i);
        }

        return a;
    }
}
/// <summary>
/// Gets a timespan given a timestamp and a timebase.
/// </summary>
/// <param name="pts">The PTS.</param>
/// <param name="timeBase">The time base.</param>
/// <returns>The equivalent <see cref="TimeSpan"/>.</returns>
public static TimeSpan ToTimeSpan(this double pts, AVRational timeBase)
{
    if (double.IsNaN(pts) || pts == Constants.AV_NOPTS)
    {
        return TimeSpan.MinValue;
    }

    if (timeBase.den == 0)
    {
        // No valid time base: treat the PTS as AV_TIME_BASE units.
        return TimeSpan.FromTicks((long)(TimeSpan.TicksPerMillisecond * 1000 * pts / ffmpeg.AV_TIME_BASE));
    }

    return TimeSpan.FromTicks((long)(TimeSpan.TicksPerMillisecond * 1000 * pts * timeBase.num / timeBase.den));
}
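// Worked example (hypothetical values): a PTS of 450000 in a 1/90000 time base
// converts to five seconds.
var fiveSeconds = 450000d.ToTimeSpan(new AVRational { num = 1, den = 90000 });
// fiveSeconds == TimeSpan.FromSeconds(5)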
public static extern AVOption* av_set_q(void* obj, string name, AVRational n);
public static extern AVRational av_sub_q(AVRational b, AVRational c);
/**
 * Rational to double conversion.
 * @param a rational to convert
 * @return (double) a
 */
public static double av_q2d(AVRational a)
{
    return a.num / (double)a.den;
}
public static extern System.Int64 av_rescale_q_rnd(
    [MarshalAs(UnmanagedType.I8)] System.Int64 a,
    AVRational bq,
    AVRational cq,
    AVRounding rnd);
public static extern System.Int32 av_nearer_q(AVRational q, AVRational q1, AVRational q2);
public static extern System.Int32 av_cmp_q(AVRational a, AVRational b);
public static extern System.Int32 av_find_nearest_q_idx(AVRational q, IntPtr/* AVRational* */ q_list);
public static extern void av_stream_set_r_frame_rate(IntPtr/* AVStream* */ s, AVRational r);
public static extern System.Int64 av_rescale_q(
    [MarshalAs(UnmanagedType.I8)] System.Int64 a,
    AVRational bq,
    AVRational cq);
public static extern AVRational av_inv_q(AVRational q);
public static extern AVRational av_add_q(AVRational b, AVRational c);
public static extern void av_codec_set_pkt_timebase(IntPtr/* AVCodecContext* */ avctx, AVRational val);
public static double av_q2d(AVRational a) { return a.num / (double)a.den; }
public static extern AVRational av_div_q(AVRational b, AVRational c);
public static extern IntPtr/* AVOption* */ av_set_q(
    IntPtr/* void* */ obj,
    [MarshalAs(UnmanagedType.LPStr)] string name,
    AVRational n);
public static extern AVRational av_mul_q(AVRational b, AVRational c);
public static extern int av_parse_video_frame_rate(ref AVRational frame_rate, string str);
public static extern long av_rescale_q(long a, AVRational bq, AVRational cq);
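// Typical av_rescale_q use (a sketch with hypothetical values): convert one
// frame's duration from a 1/25 codec time base into the 1/90000 MPEG-TS time
// base, i.e. a * bq / cq = 1 * (1/25) / (1/90000) = 3600.
// long duration = av_rescale_q(1, new AVRational { num = 1, den = 25 }, new AVRational { num = 1, den = 90000 });
// duration == 3600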
public static extern System.Double av_q2d(AVRational a);