Example #1
0
        /// <summary>
        /// Drains every frame currently available from the decoder into <see cref="decodedFrames"/>,
        /// converting hardware-decoded frames to RAM and uploading each frame to a (pooled) texture.
        /// Returns when the decoder has no more frames ready (EAGAIN) or signals end-of-stream.
        /// </summary>
        /// <param name="receiveFrame">Scratch frame reused for every avcodec_receive_frame call.</param>
        private void readDecodedFrames(AVFrame *receiveFrame)
        {
            while (true)
            {
                int receiveFrameResult = ffmpeg.avcodec_receive_frame(codecContext, receiveFrame);

                if (receiveFrameResult < 0)
                {
                    // EAGAIN (decoder needs more input) and EOF are expected terminations, not errors.
                    if (receiveFrameResult != -AGffmpeg.EAGAIN && receiveFrameResult != AGffmpeg.AVERROR_EOF)
                    {
                        Logger.Log($"Failed to receive frame from avcodec: {getErrorMessage(receiveFrameResult)}");
                    }

                    break;
                }

                // use `best_effort_timestamp` as it can be more accurate if timestamps from the source file (pts) are broken.
                // but some HW codecs don't set it in which case fallback to `pts`
                long frameTimestamp = receiveFrame->best_effort_timestamp != AGffmpeg.AV_NOPTS_VALUE ? receiveFrame->best_effort_timestamp : receiveFrame->pts;

                // convert from stream time base to milliseconds, relative to the stream's start.
                double frameTime = (frameTimestamp - stream->start_time) * timeBaseInSeconds * 1000;

                // drop frames that precede the requested output time (e.g. after a seek).
                if (skipOutputUntilTime > frameTime)
                {
                    continue;
                }

                // get final frame.
                FFmpegFrame frame;

                if (((AVPixelFormat)receiveFrame->format).IsHardwarePixelFormat())
                {
                    // transfer data from HW decoder to RAM, reusing a pooled transfer frame when available.
                    if (!hwTransferFrames.TryDequeue(out var hwTransferFrame))
                    {
                        hwTransferFrame = new FFmpegFrame(ffmpeg, returnHwTransferFrame);
                    }

                    // WARNING: frames from `av_hwframe_transfer_data` have their timestamps set to AV_NOPTS_VALUE instead of real values.
                    // if you need to use them later, take them from `receiveFrame`.
                    int transferResult = ffmpeg.av_hwframe_transfer_data(hwTransferFrame.Pointer, receiveFrame, 0);

                    if (transferResult < 0)
                    {
                        Logger.Log($"Failed to transfer frame from HW decoder: {getErrorMessage(transferResult)}");

                        // dispose of the frame instead of enqueueing it in case that the failure was caused by its configuration.
                        hwTransferFrame.Dispose();
                        continue;
                    }

                    frame = hwTransferFrame;
                }
                else
                {
                    // move the data reference into a new AVFrame so that `receiveFrame` can be reused.
                    // (av_frame_move_ref transfers ownership of the buffers; no pixel data is copied.)
                    frame = new FFmpegFrame(ffmpeg);
                    ffmpeg.av_frame_move_ref(frame.Pointer, receiveFrame);
                }

                lastDecodedFrameTime = (float)frameTime;

                // Note: this is the pixel format that `VideoTexture` expects internally
                frame = ensureFramePixelFormat(frame, AVPixelFormat.AV_PIX_FMT_YUV420P);
                if (frame == null)
                {
                    // conversion failed; skip this frame rather than enqueueing bad data.
                    continue;
                }

                // reuse a pooled texture when one is available, otherwise allocate a new one.
                if (!availableTextures.TryDequeue(out var tex))
                {
                    tex = new Texture(new VideoTexture(frame.Pointer->width, frame.Pointer->height));
                }

                var upload = new VideoTextureUpload(frame);

                tex.SetData(upload);
                decodedFrames.Enqueue(new DecodedFrame {
                    Time = frameTime, Texture = tex
                });
            }
        }
Example #2
0
        /// <summary>
        /// Reads the next packet from the container and, if it belongs to the video stream,
        /// decodes it into a frame which is uploaded to a (pooled) texture and enqueued in
        /// <see cref="decodedFrames"/>. Updates <see cref="State"/> based on the read result
        /// (Running, EndOfStream, or Ready while waiting for more data).
        /// </summary>
        /// <param name="packet">Pre-allocated packet reused across calls; unreferenced before returning on the success path.</param>
        /// <exception cref="InvalidOperationException">Thrown when the output frame's buffer cannot be allocated.</exception>
        private void decodeNextFrame(AVPacket *packet)
        {
            int readFrameResult = ffmpeg.av_read_frame(formatContext, packet);

            if (readFrameResult >= 0)
            {
                State = DecoderState.Running;

                if (packet->stream_index == stream->index)
                {
                    int sendPacketResult = ffmpeg.avcodec_send_packet(stream->codec, packet);

                    if (sendPacketResult == 0)
                    {
                        AVFrame *frame    = ffmpeg.av_frame_alloc();
                        AVFrame *outFrame = null;

                        var result = ffmpeg.avcodec_receive_frame(stream->codec, frame);

                        if (result == 0)
                        {
                            // convert from stream time base to milliseconds, relative to the stream's start.
                            var frameTime = (frame->best_effort_timestamp - stream->start_time) * timeBaseInSeconds * 1000;

                            if (!skipOutputUntilTime.HasValue || skipOutputUntilTime.Value < frameTime)
                            {
                                skipOutputUntilTime = null;

                                if (convert)
                                {
                                    // allocate a destination frame in the pixel format `VideoTexture` expects.
                                    outFrame         = ffmpeg.av_frame_alloc();
                                    outFrame->format = (int)AVPixelFormat.AV_PIX_FMT_YUV420P;
                                    outFrame->width  = stream->codec->width;
                                    outFrame->height = stream->codec->height;

                                    var ret = ffmpeg.av_frame_get_buffer(outFrame, 32);
                                    if (ret < 0)
                                    {
                                        // free both frames and release the packet before throwing, otherwise
                                        // they leak: the cleanup at the bottom of this method is skipped on throw.
                                        ffmpeg.av_frame_free(&outFrame);
                                        ffmpeg.av_frame_free(&frame);
                                        ffmpeg.av_packet_unref(packet);
                                        throw new InvalidOperationException($"Error allocating video frame: {getErrorMessage(ret)}");
                                    }

                                    ffmpeg.sws_scale(convCtx, frame->data, frame->linesize, 0, stream->codec->height,
                                                     outFrame->data, outFrame->linesize);
                                }
                                else
                                {
                                    // no conversion required; hand the decoded frame straight to the upload.
                                    outFrame = frame;
                                }

                                // reuse a pooled texture when one is available, otherwise allocate a new one.
                                if (!availableTextures.TryDequeue(out var tex))
                                {
                                    tex = new Texture(new VideoTexture(codecParams.width, codecParams.height));
                                }

                                // the upload takes ownership of `outFrame` and frees it when consumed.
                                var upload = new VideoTextureUpload(outFrame, ffmpeg.av_frame_free);

                                tex.SetData(upload);
                                decodedFrames.Enqueue(new DecodedFrame {
                                    Time = frameTime, Texture = tex
                                });
                            }

                            lastDecodedFrameTime = (float)frameTime;
                        }

                        // There are two cases: outFrame could be null in which case the above decode hasn't run, or the outFrame doesn't match the input frame,
                        // in which case it won't be automatically freed by the texture upload. In both cases we need to free the input frame.
                        if (outFrame != frame)
                        {
                            ffmpeg.av_frame_free(&frame);
                        }
                    }
                    else
                    {
                        Logger.Log($"Error {sendPacketResult} sending packet in VideoDecoder");
                    }
                }

                ffmpeg.av_packet_unref(packet);
            }
            else if (readFrameResult == AGffmpeg.AVERROR_EOF)
            {
                if (Looping)
                {
                    Seek(0);
                }
                else
                {
                    // This marks the video stream as no longer relevant (until a future potential Seek operation).
                    State = DecoderState.EndOfStream;
                }
            }
            else
            {
                // no data available right now (e.g. network stream); back off briefly.
                State = DecoderState.Ready;
                Thread.Sleep(1);
            }
        }
        /// <summary>
        /// Main decoder thread loop: decodes packets while fewer than <c>max_pending_frames</c>
        /// decoded frames are pending consumption, and services queued decoder commands,
        /// until <paramref name="cancellationToken"/> is cancelled. On exit the packet is
        /// freed and <see cref="State"/> is set to Stopped (or Faulted on unhandled exception).
        /// </summary>
        /// <param name="cancellationToken">Token used to stop the loop.</param>
        private void decodingLoop(CancellationToken cancellationToken)
        {
            var packet = ffmpeg.av_packet_alloc();

            // upper bound on decoded-but-unconsumed frames; beyond this the loop idles.
            const int max_pending_frames = 3;

            try
            {
                while (true)
                {
                    if (cancellationToken.IsCancellationRequested)
                    {
                        return;
                    }

                    if (decodedFrames.Count < max_pending_frames)
                    {
                        // delegate to the shared implementation instead of duplicating the
                        // read/send/receive logic inline (it was previously copy-pasted here).
                        decodeNextFrame(packet);
                    }
                    else
                    {
                        // wait until existing buffers are consumed.
                        State = DecoderState.Ready;
                        Thread.Sleep(1);
                    }

                    // run any pending decoder commands (e.g. seeks) before the next iteration.
                    while (!decoderCommands.IsEmpty)
                    {
                        if (cancellationToken.IsCancellationRequested)
                        {
                            return;
                        }

                        if (decoderCommands.TryDequeue(out var cmd))
                        {
                            cmd();
                        }
                    }
                }
            }
            catch (Exception e)
            {
                Logger.Log($"VideoDecoder faulted: {e}");
                State = DecoderState.Faulted;
            }
            finally
            {
                ffmpeg.av_packet_free(&packet);

                if (State != DecoderState.Faulted)
                {
                    State = DecoderState.Stopped;
                }
            }
        }