Example #1
0
        private void readDecodedFrames(AVFrame *receiveFrame)
        {
            // Drains every frame currently available from the decoder, converting each one
            // into a texture-backed DecodedFrame on the output queue. Returns once the
            // decoder reports EAGAIN (needs more input), EOF, or an error.
            while (true)
            {
                int recvResult = ffmpeg.avcodec_receive_frame(codecContext, receiveFrame);

                if (recvResult < 0)
                {
                    // EAGAIN and EOF are expected stopping points; only log genuine errors.
                    bool isExpectedStop = recvResult == -AGffmpeg.EAGAIN || recvResult == AGffmpeg.AVERROR_EOF;

                    if (!isExpectedStop)
                    {
                        Logger.Log($"Failed to receive frame from avcodec: {getErrorMessage(recvResult)}");
                    }

                    break;
                }

                // prefer `best_effort_timestamp` since it can be more accurate when the source
                // file's pts values are broken, but some HW codecs leave it unset — fall back to `pts`.
                long timestamp = receiveFrame->best_effort_timestamp == AGffmpeg.AV_NOPTS_VALUE
                    ? receiveFrame->pts
                    : receiveFrame->best_effort_timestamp;

                double frameTime = (timestamp - stream->start_time) * timeBaseInSeconds * 1000;

                // drop frames that fall before the requested skip point (e.g. after a seek).
                if (frameTime < skipOutputUntilTime)
                {
                    continue;
                }

                // the frame that will actually be handed on for display.
                FFmpegFrame frame;

                if (((AVPixelFormat)receiveFrame->format).IsHardwarePixelFormat())
                {
                    // decoded data lives on the HW device; bring it into RAM, reusing a
                    // pooled transfer frame when one is available.
                    if (!hwTransferFrames.TryDequeue(out var hwFrame))
                    {
                        hwFrame = new FFmpegFrame(ffmpeg, returnHwTransferFrame);
                    }

                    // WARNING: frames produced by `av_hwframe_transfer_data` carry AV_NOPTS_VALUE
                    // timestamps instead of real values — read timestamps from `receiveFrame` if needed.
                    int transferError = ffmpeg.av_hwframe_transfer_data(hwFrame.Pointer, receiveFrame, 0);

                    if (transferError < 0)
                    {
                        Logger.Log($"Failed to transfer frame from HW decoder: {getErrorMessage(transferError)}");

                        // dispose rather than returning to the pool, in case the frame's own
                        // configuration was the cause of the failure.
                        hwFrame.Dispose();
                        continue;
                    }

                    frame = hwFrame;
                }
                else
                {
                    // move the reference into a fresh AVFrame so `receiveFrame` can be reused
                    // on the next loop iteration.
                    frame = new FFmpegFrame(ffmpeg);
                    ffmpeg.av_frame_move_ref(frame.Pointer, receiveFrame);
                }

                lastDecodedFrameTime = (float)frameTime;

                // Note: `VideoTexture` internally expects frames in this pixel format.
                frame = ensureFramePixelFormat(frame, AVPixelFormat.AV_PIX_FMT_YUV420P);

                if (frame == null)
                {
                    continue;
                }

                // reuse a pooled texture when possible; otherwise create one sized to the frame.
                if (!availableTextures.TryDequeue(out var texture))
                {
                    texture = new Texture(new VideoTexture(frame.Pointer->width, frame.Pointer->height));
                }

                texture.SetData(new VideoTextureUpload(frame));

                decodedFrames.Enqueue(new DecodedFrame {
                    Time = frameTime, Texture = texture
                });
            }
        }