// sets up libavformat state: creates the AVFormatContext, the frames, etc. to start decoding, but does not actually start the decodingLoop
private void prepareDecoding()
{
    const int context_buffer_size = 4096;

    readPacketCallback = readPacket;
    seekCallback = streamSeekCallbacks;

    // we shouldn't keep a reference to this buffer as it can be freed and replaced by the native libs themselves.
    // https://ffmpeg.org/doxygen/4.1/aviobuf_8c.html#a853f5149136a27ffba3207d8520172a5
    byte* contextBuffer = (byte*)ffmpeg.av_malloc(context_buffer_size);

    ioContext = ffmpeg.avio_alloc_context(contextBuffer, context_buffer_size, 0, (void*)handle.Handle, readPacketCallback, null, seekCallback);

    var fcPtr = ffmpeg.avformat_alloc_context();
    formatContext = fcPtr;
    formatContext->pb = ioContext;
    formatContext->flags |= AGffmpeg.AVFMT_FLAG_GENPTS; // required for most HW decoders as they only read `pts`

    int openInputResult = ffmpeg.avformat_open_input(&fcPtr, "dummy", null, null);
    inputOpened = openInputResult >= 0;

    if (!inputOpened)
    {
        throw new InvalidOperationException($"Error opening file or stream: {getErrorMessage(openInputResult)}");
    }

    int findStreamInfoResult = ffmpeg.avformat_find_stream_info(formatContext, null);

    if (findStreamInfoResult < 0)
    {
        throw new InvalidOperationException($"Error finding stream info: {getErrorMessage(findStreamInfoResult)}");
    }

    int streamIndex = ffmpeg.av_find_best_stream(formatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, null, 0);

    if (streamIndex < 0)
    {
        throw new InvalidOperationException($"Couldn't find video stream: {getErrorMessage(streamIndex)}");
    }

    stream = formatContext->streams[streamIndex];
    timeBaseInSeconds = stream->time_base.GetValue();

    if (stream->duration > 0)
    {
        Duration = stream->duration * timeBaseInSeconds * 1000;
    }
    else
    {
        // cast to double to avoid integer division truncating sub-second precision
        Duration = formatContext->duration / (double)AGffmpeg.AV_TIME_BASE * 1000;
    }
}
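
// For context: the readPacket / streamSeekCallbacks delegates assigned above are the AVIO bridge
// between libavformat and the managed stream, and getErrorMessage turns the libav error codes used
// in the exceptions above into readable text. The sketches below are illustrative assumptions, not
// the actual implementations: they assume `handle` is a GCHandle wrapping the enclosing decoder
// (called VideoDecoder here purely for illustration), that the decoder exposes a seekable
// `videoStream` Stream field, that AGffmpeg aliases the FFmpeg.AutoGen constants
// (AVERROR_EOF, AVSEEK_SIZE), that the `ffmpeg` wrapper exposes av_strerror, and that the runtime
// provides Stream.Read(Span<byte>). They rely on System, System.IO, System.Runtime.InteropServices
// and match the FFmpeg.AutoGen delegate types avio_alloc_context_read_packet / avio_alloc_context_seek.

private static int readPacket(void* opaque, byte* buffer, int bufferSize)
{
    // recover the managed decoder from the opaque pointer passed to avio_alloc_context
    if (!(GCHandle.FromIntPtr((IntPtr)opaque).Target is VideoDecoder decoder))
        return AGffmpeg.AVERROR_EOF;

    int read = decoder.videoStream.Read(new Span<byte>(buffer, bufferSize));

    // libavformat expects AVERROR_EOF rather than 0 at end of stream
    return read > 0 ? read : AGffmpeg.AVERROR_EOF;
}

private static long streamSeekCallbacks(void* opaque, long offset, int whence)
{
    if (!(GCHandle.FromIntPtr((IntPtr)opaque).Target is VideoDecoder decoder))
        return -1;

    // AVSEEK_SIZE asks for the total stream length instead of performing an actual seek
    if (whence == AGffmpeg.AVSEEK_SIZE)
        return decoder.videoStream.Length;

    // SEEK_SET / SEEK_CUR / SEEK_END (0 / 1 / 2) map directly onto SeekOrigin
    return decoder.videoStream.Seek(offset, (SeekOrigin)whence);
}

private static string getErrorMessage(int errorCode)
{
    const int buffer_size = 256;
    byte* buffer = stackalloc byte[buffer_size];

    // av_strerror writes a null-terminated description of the error code into the buffer
    if (ffmpeg.av_strerror(errorCode, buffer, buffer_size) < 0)
        return $"unknown error code {errorCode}";

    return Marshal.PtrToStringAnsi((IntPtr)buffer) ?? string.Empty;
}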