    /// <summary>
    /// Returns the device camera's luminance (Y) plane as a texture, with the Y
    /// value stored in the R channel.
    /// </summary>
    public Texture2D GetRealityYTexture()
    {
        if (realityYTexture != null)
        {
            return realityYTexture;
        }

        // Fetch the camera's Y-plane texture from the native bridge.
        var    appEnvironment = bridge.GetXRAppEnvironment();
        var    envTex         = appEnvironment.getManagedCameraTextures().getYTexture();
        IntPtr envTexPtr      = (IntPtr)envTex.getPtr();

        if (envTexPtr != IntPtr.Zero)
        {
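            // The plugin already owns a GPU camera texture: wrap the native handle
            // so Unity can sample it directly. CreateExternalTexture does not copy
            // or take ownership of the underlying resource.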
            realityYTexture = Texture2D.CreateExternalTexture(
                envTex.getWidth(),
                envTex.getHeight(),
                TextureFormat.R8,
                false,
                false,
                envTexPtr);
        }
        else
        {
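            // No native texture is available yet: create a black R8 placeholder.
            // Its native pointer is handed to the plugin below, presumably so the
            // native side can write camera frames into it.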
            int w = envTex.getWidth();
            int h = envTex.getHeight();
            realityYTexture = new Texture2D(w, h, TextureFormat.R8, false);
            // new byte[] is zero-initialized in C#, so this is already all black.
            byte[] blackBytes = new byte[w * h];
            realityYTexture.LoadRawTextureData(blackBytes);
            realityYTexture.Apply();
        }

        captureAspect = (float)envTex.getWidth() / envTex.getHeight();

        // Point filtering so individual camera pixels stay distinct when inspected.
        // filterMode takes effect immediately; Apply() is only needed to upload
        // CPU-side pixel changes. The fallback texture was already applied above,
        // and the external texture has no CPU-side data to upload.
        realityYTexture.filterMode = FilterMode.Point;

        if (envTexPtr == IntPtr.Zero)
        {
            // Pass texture pointer to the plugin only if it doesn't manage one itself.
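            // GetNativeTexturePtr returns the underlying graphics-API handle
            // (e.g. a GL texture name or D3D resource pointer) for the plugin to use.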
            bridge.SetManagedCameraYTexture(
                realityYTexture.GetNativeTexturePtr(),
                realityYTexture.width,
                realityYTexture.height,
                GetRenderingSystem());
        }

        return realityYTexture;
    }
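
    /// <summary>
    /// Usage sketch (illustrative, not part of the plugin API): binds the Y texture
    /// to a material so a shader can read luminance from the R channel. The material
    /// parameter and the "_CameraYTex" property name are assumptions for this example.
    /// </summary>
    public void BindRealityYTexture(Material backgroundMaterial)
    {
        backgroundMaterial.SetTexture("_CameraYTex", GetRealityYTexture());
    }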