Example #1
0
        /// <summary>
        /// Submits <paramref name="packet"/> to the decoder and drains any frames it has ready.
        /// </summary>
        /// <param name="receiveFrame">Scratch frame handed to <c>readDecodedFrames</c> for receiving decoded output.</param>
        /// <param name="packet">The compressed packet to send to <c>codecContext</c>.</param>
        /// <returns>The raw result of <c>avcodec_send_packet</c> (0 on success, negative AVERROR otherwise).</returns>
        private int sendPacket(AVFrame *receiveFrame, AVPacket *packet)
        {
            // send the packet for decoding.
            int sendPacketResult = ffmpeg.avcodec_send_packet(codecContext, packet);

            // Any failure other than EAGAIN is a genuine error: report it and see whether
            // falling back from hardware decoding can recover the situation.
            if (sendPacketResult != 0 && sendPacketResult != -AGffmpeg.EAGAIN)
            {
                Logger.Log($"Failed to send avcodec packet: {getErrorMessage(sendPacketResult)}");
                tryDisableHwDecoding(sendPacketResult);
                return sendPacketResult;
            }

            // Note: EAGAIN can be returned if there's too many pending frames, which we have to read,
            // otherwise we would get stuck in an infinite loop.
            readDecodedFrames(receiveFrame);

            return sendPacketResult;
        }
Example #2
0
        /// <summary>
        /// Reads the next packet from the container and, if it belongs to the video stream,
        /// decodes (at most) one frame from it, uploading the result to a texture queued on <c>decodedFrames</c>.
        /// </summary>
        /// <param name="packet">Pre-allocated scratch packet; unreferenced after a successful read.</param>
        private void decodeNextFrame(AVPacket *packet)
        {
            int readFrameResult = ffmpeg.av_read_frame(formatContext, packet);

            if (readFrameResult >= 0)
            {
                State = DecoderState.Running;

                // Packets for other streams (e.g. audio) are ignored but still unreferenced below.
                if (packet->stream_index == stream->index)
                {
                    int sendPacketResult = ffmpeg.avcodec_send_packet(stream->codec, packet);

                    if (sendPacketResult == 0)
                    {
                        AVFrame *frame    = ffmpeg.av_frame_alloc();
                        AVFrame *outFrame = null;

                        // NOTE(review): only a single avcodec_receive_frame is attempted per sent packet;
                        // if the codec buffers additional frames they are not drained here — TODO confirm
                        // this is acceptable for the codecs in use.
                        var result = ffmpeg.avcodec_receive_frame(stream->codec, frame);

                        if (result == 0)
                        {
                            // Convert the stream-relative timestamp into milliseconds.
                            var frameTime = (frame->best_effort_timestamp - stream->start_time) * timeBaseInSeconds * 1000;

                            // Frames before a requested seek target are decoded but dropped (no texture upload).
                            if (!skipOutputUntilTime.HasValue || skipOutputUntilTime.Value < frameTime)
                            {
                                skipOutputUntilTime = null;

                                if (convert)
                                {
                                    // The decoded frame is not in a directly usable pixel format;
                                    // allocate a YUV420P frame and scale/convert into it.
                                    outFrame         = ffmpeg.av_frame_alloc();
                                    outFrame->format = (int)AVPixelFormat.AV_PIX_FMT_YUV420P;
                                    outFrame->width  = stream->codec->width;
                                    outFrame->height = stream->codec->height;

                                    // 32-byte alignment for the frame buffer.
                                    var ret = ffmpeg.av_frame_get_buffer(outFrame, 32);
                                    if (ret < 0)
                                    {
                                        throw new InvalidOperationException($"Error allocating video frame: {getErrorMessage(ret)}");
                                    }

                                    ffmpeg.sws_scale(convCtx, frame->data, frame->linesize, 0, stream->codec->height,
                                                     outFrame->data, outFrame->linesize);
                                }
                                else
                                {
                                    // Frame is already usable as-is; upload it directly.
                                    outFrame = frame;
                                }

                                // Reuse a previously returned texture where possible to avoid re-allocation.
                                if (!availableTextures.TryDequeue(out var tex))
                                {
                                    tex = new Texture(new VideoTexture(codecParams.width, codecParams.height));
                                }

                                // The upload takes ownership of outFrame and frees it via av_frame_free.
                                var upload = new VideoTextureUpload(outFrame, ffmpeg.av_frame_free);

                                tex.SetData(upload);
                                decodedFrames.Enqueue(new DecodedFrame {
                                    Time = frameTime, Texture = tex
                                });
                            }

                            lastDecodedFrameTime = (float)frameTime;
                        }

                        // There are two cases: outFrame could be null in which case the above decode hasn't run, or the outFrame doesn't match the input frame,
                        // in which case it won't be automatically freed by the texture upload. In both cases we need to free the input frame.
                        if (outFrame != frame)
                        {
                            ffmpeg.av_frame_free(&frame);
                        }
                    }
                    else
                    {
                        // NOTE(review): unlike the EAGAIN-tolerant send paths elsewhere, a failed send here
                        // drops the packet entirely — TODO confirm EAGAIN cannot occur with this usage pattern.
                        Logger.Log($"Error {sendPacketResult} sending packet in VideoDecoder");
                    }
                }

                // Release the packet's buffer so it can be reused by the next av_read_frame.
                ffmpeg.av_packet_unref(packet);
            }
            else if (readFrameResult == AGffmpeg.AVERROR_EOF)
            {
                if (Looping)
                {
                    Seek(0);
                }
                else
                {
                    // This marks the video stream as no longer relevant (until a future potential Seek operation).
                    State = DecoderState.EndOfStream;
                }
            }
            else
            {
                // Transient read failure: stay ready and back off briefly instead of busy-looping.
                State = DecoderState.Ready;
                Thread.Sleep(1);
            }
        }
Example #3
0
        /// <summary>
        /// Reads the next packet from the container (only if the scratch packet is empty),
        /// sends it to the decoder and drains decoded frames via <c>readDecodedFrames</c>.
        /// </summary>
        /// <param name="packet">Reusable packet. If it still holds data from a previous call
        /// (<c>buf</c> non-null after an EAGAIN), the pending data is re-sent instead of reading anew.</param>
        /// <param name="receiveFrame">Reusable frame passed through to <c>readDecodedFrames</c>.</param>
        private void decodeNextFrame(AVPacket *packet, AVFrame *receiveFrame)
        {
            // read data from input into AVPacket.
            // only read if the packet is empty, otherwise we would overwrite what's already there which can lead to visual glitches.
            int readFrameResult = 0;

            if (packet->buf == null)
            {
                readFrameResult = ffmpeg.av_read_frame(formatContext, packet);
            }

            if (readFrameResult >= 0)
            {
                State = DecoderState.Running;

                // Tracks whether the packet's buffer should be released at the end of this call.
                bool unrefPacket = true;

                if (packet->stream_index == stream->index)
                {
                    // send the packet for decoding.
                    int sendPacketResult = ffmpeg.avcodec_send_packet(codecContext, packet);

                    // Note: EAGAIN can be returned if there's too many pending frames, which we have to read,
                    // otherwise we would get stuck in an infinite loop.
                    if (sendPacketResult == 0 || sendPacketResult == -AGffmpeg.EAGAIN)
                    {
                        readDecodedFrames(receiveFrame);

                        // keep the packet data for next frame if we didn't send it successfully.
                        if (sendPacketResult != 0)
                        {
                            unrefPacket = false;
                        }
                    }
                    else
                    {
                        // NOTE(review): unlike sendPacket, this path does not attempt
                        // tryDisableHwDecoding on failure — TODO confirm intended.
                        Logger.Log($"Failed to send avcodec packet: {getErrorMessage(sendPacketResult)}");
                    }
                }

                if (unrefPacket)
                {
                    ffmpeg.av_packet_unref(packet);
                }
            }
            else if (readFrameResult == AGffmpeg.AVERROR_EOF)
            {
                if (Looping)
                {
                    Seek(0);
                }
                else
                {
                    // This marks the video stream as no longer relevant (until a future potential Seek operation).
                    State = DecoderState.EndOfStream;
                }
            }
            else if (readFrameResult == -AGffmpeg.EAGAIN)
            {
                // Demuxer has no data available right now; stay ready and retry shortly.
                State = DecoderState.Ready;
                Thread.Sleep(1);
            }
            else
            {
                // Any other read error: report it and back off briefly rather than busy-looping.
                Logger.Log($"Failed to read data into avcodec packet: {getErrorMessage(readFrameResult)}");
                Thread.Sleep(1);
            }
        }
        /// <summary>
        /// Main decoder thread body: repeatedly reads/decodes frames while keeping at most
        /// <c>max_pending_frames</c> decoded frames queued, and services <c>decoderCommands</c>
        /// between iterations. Runs until cancellation, fault, or external stop.
        /// </summary>
        /// <param name="cancellationToken">Signals the loop to exit.</param>
        private void decodingLoop(CancellationToken cancellationToken)
        {
            // Single packet reused for every av_read_frame call; freed in the finally block.
            var packet = ffmpeg.av_packet_alloc();

            // Back-pressure limit: don't decode ahead of the consumer by more than this many frames.
            const int max_pending_frames = 3;

            try
            {
                while (true)
                {
                    if (cancellationToken.IsCancellationRequested)
                    {
                        return;
                    }

                    if (decodedFrames.Count < max_pending_frames)
                    {
                        int readFrameResult = ffmpeg.av_read_frame(formatContext, packet);

                        if (readFrameResult >= 0)
                        {
                            State = DecoderState.Running;

                            // Ignore packets for non-video streams (still unreferenced below).
                            if (packet->stream_index == stream->index)
                            {
                                int sendPacketResult = ffmpeg.avcodec_send_packet(stream->codec, packet);

                                if (sendPacketResult == 0)
                                {
                                    AVFrame *frame    = ffmpeg.av_frame_alloc();
                                    AVFrame *outFrame = null;

                                    // NOTE(review): only one receive per send; frames the codec
                                    // buffers beyond the first are not drained here — TODO confirm.
                                    var result = ffmpeg.avcodec_receive_frame(stream->codec, frame);

                                    if (result == 0)
                                    {
                                        // Stream-relative timestamp converted to milliseconds.
                                        var frameTime = (frame->best_effort_timestamp - stream->start_time) * timeBaseInSeconds * 1000;

                                        // Frames before a seek target are decoded but not output.
                                        if (!skipOutputUntilTime.HasValue || skipOutputUntilTime.Value < frameTime)
                                        {
                                            skipOutputUntilTime = null;

                                            if (convert)
                                            {
                                                // Pixel-format conversion required: scale into a fresh YUV420P frame.
                                                outFrame         = ffmpeg.av_frame_alloc();
                                                outFrame->format = (int)AVPixelFormat.AV_PIX_FMT_YUV420P;
                                                outFrame->width  = stream->codec->width;
                                                outFrame->height = stream->codec->height;

                                                // 32-byte buffer alignment.
                                                var ret = ffmpeg.av_frame_get_buffer(outFrame, 32);
                                                if (ret < 0)
                                                {
                                                    throw new InvalidOperationException($"Error allocating video frame: {getErrorMessage(ret)}");
                                                }

                                                ffmpeg.sws_scale(convCtx, frame->data, frame->linesize, 0, stream->codec->height,
                                                                 outFrame->data, outFrame->linesize);
                                            }
                                            else
                                            {
                                                // Decoded frame is directly usable.
                                                outFrame = frame;
                                            }

                                            // Reuse a returned texture where possible to avoid re-allocation.
                                            if (!availableTextures.TryDequeue(out var tex))
                                            {
                                                tex = new Texture(new VideoTexture(codecParams.width, codecParams.height));
                                            }

                                            // The upload takes ownership of outFrame and frees it via av_frame_free.
                                            var upload = new VideoTextureUpload(outFrame, ffmpeg.av_frame_free);

                                            tex.SetData(upload);
                                            decodedFrames.Enqueue(new DecodedFrame {
                                                Time = frameTime, Texture = tex
                                            });
                                        }

                                        lastDecodedFrameTime = (float)frameTime;
                                    }

                                    // There are two cases: outFrame could be null in which case the above decode hasn't run, or the outFrame doesn't match the input frame,
                                    // in which case it won't be automatically freed by the texture upload. In both cases we need to free the input frame.
                                    if (outFrame != frame)
                                    {
                                        ffmpeg.av_frame_free(&frame);
                                    }
                                }
                                else
                                {
                                    Logger.Log($"Error {sendPacketResult} sending packet in VideoDecoder");
                                }
                            }

                            // Release the packet's buffer for reuse on the next read.
                            ffmpeg.av_packet_unref(packet);
                        }
                        else if (readFrameResult == AGffmpeg.AVERROR_EOF)
                        {
                            if (Looping)
                            {
                                Seek(0);
                            }
                            else
                            {
                                // Stream exhausted; stays EndOfStream until a future Seek.
                                State = DecoderState.EndOfStream;
                            }
                        }
                        else
                        {
                            // Transient read failure: back off briefly.
                            State = DecoderState.Ready;
                            Thread.Sleep(1);
                        }
                    }
                    else
                    {
                        // wait until existing buffers are consumed.
                        State = DecoderState.Ready;
                        Thread.Sleep(1);
                    }

                    // Service queued commands (e.g. seeks) between decode iterations.
                    while (!decoderCommands.IsEmpty)
                    {
                        if (cancellationToken.IsCancellationRequested)
                        {
                            return;
                        }

                        if (decoderCommands.TryDequeue(out var cmd))
                        {
                            cmd();
                        }
                    }
                }
            }
            catch (Exception e)
            {
                // Any unhandled exception permanently faults the decoder.
                Logger.Log($"VideoDecoder faulted: {e}");
                State = DecoderState.Faulted;
            }
            finally
            {
                // Always release the scratch packet, regardless of exit path.
                ffmpeg.av_packet_free(&packet);

                // Preserve the Faulted state; otherwise record a clean stop.
                if (State != DecoderState.Faulted)
                {
                    State = DecoderState.Stopped;
                }
            }
        }