    /**
     * Returns the camera feed's chroma (UV) plane as a Texture2D, with U stored
     * in the R channel and V in the G channel. The texture is created on the
     * first call and cached; subsequent calls return the same instance.
     */
    public Texture2D GetRealityUVTexture()
    {
        // Return the cached texture if it has already been created.
        if (realityUVTexture != null)
        {
            return realityUVTexture;
        }

        // Ask the native plugin for the camera's UV texture, if it manages one.
        var    appEnvironment = bridge.GetXRAppEnvironment();
        var    envTex         = appEnvironment.getManagedCameraTextures().getUvTexture();
        IntPtr envTexPtr      = (IntPtr)envTex.getPtr();

        if (envTexPtr != IntPtr.Zero)
        {
            // Wrap the plugin-managed native texture so Unity can sample it
            // without copying. RG16 packs U and V as 8 bits per channel.
            realityUVTexture = Texture2D.CreateExternalTexture(
                envTex.getWidth(),
                envTex.getHeight(),
                TextureFormat.RG16,
                false,   // no mip chain
                false,   // not linear
                envTexPtr);
        }
        else
        {
            // The plugin doesn't manage a UV texture, so create one ourselves and
            // fill it with neutral chroma: 128 maps to 0.5 in each 8-bit channel.
            int w = envTex.getWidth();
            int h = envTex.getHeight();
            realityUVTexture = new Texture2D(w, h, TextureFormat.RG16, false);
            byte[] grayBytes = new byte[w * h * 2];
            for (int i = 0; i < grayBytes.Length; i++)
            {
                grayBytes[i] = 128;
            }
            realityUVTexture.LoadRawTextureData(grayBytes);
            // Upload the CPU-side data to the GPU.
            realityUVTexture.Apply();
        }
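        // Assumption about the plugin's lifetime model: if the plugin ever
        // recreates its native texture, the existing wrapper can be rebound with
        // realityUVTexture.UpdateExternalTexture(newPtr) rather than recreated.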
        // Point filtering so individual pixels are clearly visible when debugging.
        // No Apply() is needed at this point: the fallback path already uploaded
        // its data, and external textures are not CPU-readable.
        realityUVTexture.filterMode = FilterMode.Point;

        if (envTexPtr == IntPtr.Zero)
        {
            // The plugin doesn't manage a texture of its own, so hand it the
            // native pointer of the one created above to write camera UV data into.
            bridge.SetManagedCameraUVTexture(
                realityUVTexture.GetNativeTexturePtr(),
                realityUVTexture.width,
                realityUVTexture.height,
                GetRenderingSystem());
        }

        return realityUVTexture;
    }
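
    // Usage sketch (an assumption, not part of the original API): bind the UV
    // texture, together with a matching luma (Y) texture, to a YUV->RGB
    // conversion material for rendering the camera background. The names
    // "GetRealityYTexture", "_YTex", "_UVTex" and "cameraBackgroundMaterial"
    // are hypothetical and used only for illustration.
    public void BindCameraTextures(Material cameraBackgroundMaterial)
    {
        cameraBackgroundMaterial.SetTexture("_UVTex", GetRealityUVTexture());

        // A full NV12-style pipeline would bind the luma plane the same way:
        // cameraBackgroundMaterial.SetTexture("_YTex", GetRealityYTexture());
    }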