Example #1
 internal VideoTextureUpload(FFmpegFrame ffmpegFrame)
 {
     this.ffmpegFrame = ffmpegFrame;
 }
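The upload takes ownership of the decoded frame for the lifetime of the GPU copy. A minimal sketch of how its disposal might return the frame to the decoder's pool (the `Dispose` body is an assumption; `Return()` is the pooling call used in Examples #2 and #3):

 public void Dispose()
 {
     // hand the frame back to the decoder's pool so it can be reused (assumed behaviour).
     ffmpegFrame.Return();
 }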
Example #2
        private FFmpegFrame ensureFramePixelFormat(FFmpegFrame frame, AVPixelFormat targetPixelFormat)
        {
            if (frame.PixelFormat == targetPixelFormat)
            {
                return frame;
            }

            int width  = frame.Pointer->width;
            int height = frame.Pointer->height;

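            // reuse (or lazily create) a cached scaler context; flags = 1 selects SWS_FAST_BILINEAR.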
            swsContext = ffmpeg.sws_getCachedContext(
                swsContext,
                width, height, frame.PixelFormat,
                width, height, targetPixelFormat,
                1, null, null, null);

            if (!scalerFrames.TryDequeue(out var scalerFrame))
            {
                scalerFrame = new FFmpegFrame(ffmpeg, returnScalerFrame);
            }

            // (re)initialize the scaler frame if needed.
            if (scalerFrame.PixelFormat != targetPixelFormat || scalerFrame.Pointer->width != width || scalerFrame.Pointer->height != height)
            {
                ffmpeg.av_frame_unref(scalerFrame.Pointer);

                // Note: this field determines the scaler's output pix format.
                scalerFrame.PixelFormat     = targetPixelFormat;
                scalerFrame.Pointer->width  = width;
                scalerFrame.Pointer->height = height;

                int getBufferResult = ffmpeg.av_frame_get_buffer(scalerFrame.Pointer, 0);

                if (getBufferResult < 0)
                {
                    Logger.Log($"Failed to allocate SWS frame buffer: {getErrorMessage(getBufferResult)}");

                    scalerFrame.Dispose();
                    frame.Return();
                    return null;
                }
            }

            int scalerResult = ffmpeg.sws_scale(
                swsContext,
                frame.Pointer->data, frame.Pointer->linesize, 0, height,
                scalerFrame.Pointer->data, scalerFrame.Pointer->linesize);

            // return the original frame regardless of the scaler result.
            frame.Return();

            if (scalerResult < 0)
            {
                Logger.Log($"Failed to scale frame: {getErrorMessage(scalerResult)}");

                scalerFrame.Dispose();
                return null;
            }

            return scalerFrame;
        }
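Both examples above report failures through a `getErrorMessage` helper that is not shown here. A plausible sketch (an assumption, not necessarily this project's code) wraps FFmpeg's `av_strerror`, which writes a human-readable description of an AVERROR code into a caller-supplied buffer:

        // requires: using System.Runtime.InteropServices; (and an unsafe context, as above)
        private string getErrorMessage(int errorCode)
        {
            const int buffer_size = 256;
            byte* buffer = stackalloc byte[buffer_size];

            // av_strerror returns a negative value if no description is known for the code.
            if (ffmpeg.av_strerror(errorCode, buffer, buffer_size) < 0)
                return $"{errorCode} (unknown error)";

            return Marshal.PtrToStringAnsi((IntPtr)buffer) ?? errorCode.ToString();
        }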
Example #3
        private void readDecodedFrames(AVFrame* receiveFrame)
        {
            while (true)
            {
                int receiveFrameResult = ffmpeg.avcodec_receive_frame(codecContext, receiveFrame);

                if (receiveFrameResult < 0)
                {
                    if (receiveFrameResult != -AGffmpeg.EAGAIN && receiveFrameResult != AGffmpeg.AVERROR_EOF)
                    {
                        Logger.Log($"Failed to receive frame from avcodec: {getErrorMessage(receiveFrameResult)}");
                    }

                    break;
                }

                // use `best_effort_timestamp` as it can be more accurate if timestamps from the source file (pts) are broken.
                // but some HW codecs don't set it, in which case we fall back to `pts`.
                long frameTimestamp = receiveFrame->best_effort_timestamp != AGffmpeg.AV_NOPTS_VALUE ? receiveFrame->best_effort_timestamp : receiveFrame->pts;

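                // convert from stream time base units to milliseconds, relative to the stream's start time.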
                double frameTime = (frameTimestamp - stream->start_time) * timeBaseInSeconds * 1000;

                if (skipOutputUntilTime > frameTime)
                {
                    continue;
                }

                // get final frame.
                FFmpegFrame frame;

                if (((AVPixelFormat)receiveFrame->format).IsHardwarePixelFormat())
                {
                    // transfer data from HW decoder to RAM.
                    if (!hwTransferFrames.TryDequeue(out var hwTransferFrame))
                    {
                        hwTransferFrame = new FFmpegFrame(ffmpeg, returnHwTransferFrame);
                    }

                    // WARNING: frames from `av_hwframe_transfer_data` have their timestamps set to AV_NOPTS_VALUE instead of real values.
                    // if you need to use them later, take them from `receiveFrame`.
                    int transferResult = ffmpeg.av_hwframe_transfer_data(hwTransferFrame.Pointer, receiveFrame, 0);

                    if (transferResult < 0)
                    {
                        Logger.Log($"Failed to transfer frame from HW decoder: {getErrorMessage(transferResult)}");

                        // dispose of the frame instead of enqueueing it in case the failure was caused by its configuration.
                        hwTransferFrame.Dispose();
                        continue;
                    }

                    frame = hwTransferFrame;
                }
                else
                {
                    // move the data reference into a new AVFrame so that `receiveFrame` can be reused.
                    frame = new FFmpegFrame(ffmpeg);
                    ffmpeg.av_frame_move_ref(frame.Pointer, receiveFrame);
                }

                lastDecodedFrameTime = (float)frameTime;

                // Note: this is the pixel format that `VideoTexture` expects internally
                frame = ensureFramePixelFormat(frame, AVPixelFormat.AV_PIX_FMT_YUV420P);
                if (frame == null)
                {
                    continue;
                }

                if (!availableTextures.TryDequeue(out var tex))
                {
                    tex = new Texture(new VideoTexture(frame.Pointer->width, frame.Pointer->height));
                }

                var upload = new VideoTextureUpload(frame);

                tex.SetData(upload);
                decodedFrames.Enqueue(new DecodedFrame { Time = frameTime, Texture = tex });
            }
        }
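Example #3 is the drain half of FFmpeg's decoupled send/receive decoding API. A sketch of the sending side that would typically drive it (the `packet` and `receiveFrame` locals are assumptions for illustration; the FFmpeg calls are real):

        int sendPacketResult = ffmpeg.avcodec_send_packet(codecContext, packet);

        if (sendPacketResult == 0)
        {
            // a single packet can yield zero, one, or several decoded frames.
            readDecodedFrames(receiveFrame);
        }
        else
        {
            Logger.Log($"Failed to send packet to avcodec: {getErrorMessage(sendPacketResult)}");
        }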
Example #4
 private void returnScalerFrame(FFmpegFrame frame) => scalerFrames.Enqueue(frame);
Example #5
 private void returnHwTransferFrame(FFmpegFrame frame) => hwTransferFrames.Enqueue(frame);
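Examples #4 and #5 return frames to per-purpose pools. A minimal sketch of the backing fields, assuming a thread-safe queue to match the `TryDequeue` calls in Examples #2 and #3 (the exact collection type is an assumption):

 // requires: using System.Collections.Concurrent;
 private readonly ConcurrentQueue<FFmpegFrame> scalerFrames = new ConcurrentQueue<FFmpegFrame>();
 private readonly ConcurrentQueue<FFmpegFrame> hwTransferFrames = new ConcurrentQueue<FFmpegFrame>();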