/// <summary>
        /// Gets the state of localization against the global Earth map.
        /// </summary>
        /// <param name="cameraManager">The ARCameraManager instance.</param>
        /// <returns>The Earth localization state against the global Earth map.
        /// Returns <c>EarthLocalizationState.NotLocalized</c> (with a logged warning)
        /// when the latest camera frame is not yet available or not ready.</returns>
        public static EarthLocalizationState GetEarthLocalizationState(
            this ARCameraManager cameraManager)
        {
            EarthLocalizationState state = EarthLocalizationState.NotLocalized;

            // Cache the Camera component once instead of performing two separate
            // GetComponent lookups for the near/far clip planes.
            Camera camera = cameraManager.GetComponent<Camera>();
            var cameraParams = new XRCameraParams
            {
                zNear = camera.nearClipPlane,
                zFar = camera.farClipPlane,
                screenWidth = Screen.width,
                screenHeight = Screen.height,
                screenOrientation = Screen.orientation
            };

            if (!cameraManager.subsystem.TryGetLatestFrame(cameraParams, out XRCameraFrame frame))
            {
                Debug.LogWarning("Unable to determine the current EarthLocalizationState, " +
                                 "the current XRCameraFrame is not available, try again later.");
                return state;
            }

            // A zero timestamp or a null frame handle means ARCore has not yet
            // produced a usable frame for this session.
            if (frame.timestampNs == 0 || frame.FrameHandle() == IntPtr.Zero)
            {
                Debug.LogWarning("Unable to determine the current EarthLocalizationState, " +
                                 "the current frame is not ready, try again later.");
                return state;
            }

            return FrameApi.GetEarthLocalizationState(
                ARCoreExtensions._instance.currentARCoreSessionHandle, frame.FrameHandle());
        }
        /// <summary>
        /// Attempts to get the latest person mask data that corresponds to the current
        /// frame for CPU access. Each pixel represents the confidence of the segmentation
        /// result: 255 represents high confidence that the pixel is part of a person, 0
        /// represents high confidence that the pixel is not part of a person. If the data
        /// is available, it is copied into the output image buffer.
        /// </summary>
        /// <param name="occlusionManager">The AROcclusionManager instance.</param>
        /// <param name="outputBuffer">
        /// The output image buffer to be filled with raw image data.</param>
        /// <returns>If available, returns a Vector2Int which represents the size of the image.
        /// Otherwise, returns <c>Vector2Int.zero</c> and logs the failure reason, e.g.
        /// Segmentation Mode is Disabled, or the current camera configuration is incompatible
        /// with <c><see cref="SegmentationMode"/></c>.<c>People</c>.</returns>
        public static Vector2Int TryAcquirePersonMaskRawData(
            this AROcclusionManager occlusionManager, ref byte[] outputBuffer)
        {
            // Person mask data only exists when people segmentation is enabled.
            if (ARCoreExtensions._instance.ARCoreExtensionsConfig.SegmentationMode !=
                SegmentationMode.People)
            {
                Debug.LogWarning(
                    "Person mask data is not available when SegmentationMode is not People.");
                return Vector2Int.zero;
            }

            if (!TryGetLastFrameFromExtensions(out XRCameraFrame frame))
            {
                return Vector2Int.zero;
            }

            IntPtr sessionHandle = ARCoreExtensions._instance.currentARCoreSessionHandle;
            IntPtr maskImageHandle = FrameApi.AcquirePersonMaskImage(
                sessionHandle, frame.FrameHandle());
            if (maskImageHandle == IntPtr.Zero)
            {
                return Vector2Int.zero;
            }

            // Copy the acquired image into the caller's buffer, then release the
            // native image handle.
            Vector2Int maskSize = ImageApi.UpdateRawData(
                sessionHandle, maskImageHandle, ref outputBuffer);
            ImageApi.Release(maskImageHandle);
            return maskSize;
        }
        /// <summary>
        /// Gets the 4x4 image display matrix for the camera frame. This is used by the
        /// texture populated based on CPU images to calculate the display coordinates.
        /// </summary>
        /// <param name="frame">The XRCameraFrame instance.</param>
        /// <returns>The 4x4 image display matrix.</returns>
        public static Matrix4x4 GetImageDisplayMatrix(this XRCameraFrame frame)
        {
            // Unity Screen Coordinate:      Android Screen Coordinate (flipped Y-Axis):
            // (0, 1)      (1, 1)            (0, 0)      (1, 0)
            // |----------------|            |----------------|
            // |                |            |                |
            // |                |            |                |
            // |----------------|            |----------------|
            // (0, 0)      (1, 0)            (0, 1)      (1, 1)
            IntPtr sessionHandle = ARCoreExtensions._instance.currentARCoreSessionHandle;
            IntPtr frameHandle = frame.FrameHandle();

            // Local helper: maps one normalized view-space point into normalized
            // image-space via ARCore's coordinate transform.
            Vector2 ToImageSpace(Vector2 viewPoint)
            {
                return FrameApi.TransformCoordinates2d(
                    sessionHandle, frameHandle, ApiCoordinates2dType.ViewNormalized,
                    ApiCoordinates2dType.ImageNormalized, ref viewPoint);
            }

            // X-Axis (1, 0) in Unity view maps to (1, 1) on the Android screen;
            // Y-Axis (0, 1) maps to (0, 0); the origin (0, 0) maps to (0, 1).
            Vector2 transformedX = ToImageSpace(new Vector2(1.0f, 1.0f));
            Vector2 transformedY = ToImageSpace(new Vector2(0.0f, 0.0f));
            Vector2 transformedOrigin = ToImageSpace(new Vector2(0.0f, 1.0f));

            // Pack the affine transform (basis vectors relative to the transformed
            // origin, plus the translation) into a 4x4 matrix.
            Matrix4x4 displayMatrix = Matrix4x4.identity;
            displayMatrix[0, 0] = transformedX.x - transformedOrigin.x;
            displayMatrix[0, 1] = transformedX.y - transformedOrigin.y;
            displayMatrix[1, 0] = transformedY.x - transformedOrigin.x;
            displayMatrix[1, 1] = transformedY.y - transformedOrigin.y;
            displayMatrix[2, 0] = transformedOrigin.x;
            displayMatrix[2, 1] = transformedOrigin.y;

            return displayMatrix;
        }
        /// <summary>
        /// A texture representing the confidence for each pixel in the raw depth for
        /// the current frame. See the <a href="https://developers.google.com/ar/eap/raw-depth">
        /// developer guide</a> for more information about raw depth.
        /// </summary>
        /// <param name="occlusionManager">The AROcclusionManager instance.</param>
        /// <returns>
        /// The environment raw depth confidence texture, if any. Otherwise, null.
        /// </returns>
        public static Texture2D GetEnvironmentRawDepthConfidenceTexture(
            this AROcclusionManager occlusionManager)
        {
            // Confidence data only exists when environment depth is enabled.
            if (occlusionManager.currentEnvironmentDepthMode ==
                EnvironmentDepthMode.Disabled)
            {
                Debug.LogWarning(
                    "Environment raw depth confidence texture is not available" +
                    " when EnvironmentDepthMode is Disabled.");
                return null;
            }

            if (!TryGetLastFrameFromExtensions(out XRCameraFrame frame))
            {
                return null;
            }

            // Serve the cached texture when its timestamp matches the current frame.
            // Note the short-circuiting: the timestamp lookup only runs after the
            // texture lookup succeeds.
            if (CachedData.TryGetCachedData(
                    CachedData.RawDepthConfidenceTexture, out Texture2D texture) &&
                CachedData.TryGetCachedData(
                    CachedData.RawDepthConfidenceTimestamp, out long timestamp) &&
                texture != null && timestamp == frame.timestampNs)
            {
                return texture;
            }

            IntPtr sessionHandle = ARCoreExtensions._instance.currentARCoreSessionHandle;
            IntPtr confidenceImageHandle = FrameApi.AcquireRawDepthConfidenceImage(
                sessionHandle, frame.FrameHandle());
            if (confidenceImageHandle == IntPtr.Zero)
            {
                return null;
            }

            // Upload the native image into the (possibly stale) cached texture,
            // release the native handle, and refresh the cache entries.
            ImageApi.UpdateTexture(
                sessionHandle, confidenceImageHandle, TextureFormat.Alpha8, ref texture);
            ImageApi.Release(confidenceImageHandle);
            CachedData.SetCachedData(CachedData.RawDepthConfidenceTexture, texture);
            CachedData.SetCachedData(CachedData.RawDepthConfidenceTimestamp, frame.timestampNs);

            return texture;
        }
        /// <summary>
        /// Get the latest person mask texture that corresponds to the current frame in
        /// <a href="https://docs.unity3d.com/ScriptReference/TextureFormat.Alpha8.html">
        /// TextureFormat.Alpha8</a> format where each pixel represents the confidence of
        /// the segmentation result: 255 represents high confidence that the pixel is part of
        /// a person, 0 represents high confidence that the pixel is not part of a person.
        /// </summary>
        /// <param name="occlusionManager">The AROcclusionManager instance.</param>
        /// <returns>If available, the texture containing the person mask.
        /// Otherwise, returns null and logs the failure reason,
        /// e.g. Segmentation Mode is Disabled, or current camera configuration
        /// is incompatible with <c><see cref="SegmentationMode"/></c>.<c>People</c>.</returns>
        public static Texture2D GetPersonMaskTexture(this AROcclusionManager occlusionManager)
        {
            // Person mask data only exists when people segmentation is enabled.
            if (ARCoreExtensions._instance.ARCoreExtensionsConfig.SegmentationMode !=
                SegmentationMode.People)
            {
                Debug.LogWarning(
                    "Person mask texture is not available when SegmentationMode is not People.");
                return null;
            }

            if (!TryGetLastFrameFromExtensions(out XRCameraFrame frame))
            {
                return null;
            }

            // Serve the cached texture when its timestamp matches the current frame.
            // Note the short-circuiting: the timestamp lookup only runs after the
            // texture lookup succeeds.
            if (CachedData.TryGetCachedData(
                    CachedData.PersonMaskTexture, out Texture2D texture) &&
                CachedData.TryGetCachedData(
                    CachedData.PersonMaskTextureTimestamp, out long timestamp) &&
                texture != null && timestamp == frame.timestampNs)
            {
                return texture;
            }

            IntPtr sessionHandle = ARCoreExtensions._instance.currentARCoreSessionHandle;
            IntPtr maskImageHandle = FrameApi.AcquirePersonMaskImage(
                sessionHandle, frame.FrameHandle());
            if (maskImageHandle == IntPtr.Zero)
            {
                // Acquisition failed: fall through to the cached texture value
                // (possibly null or stale), matching the original behavior.
                return texture;
            }

            // Upload the native image into the (possibly stale) cached texture,
            // release the native handle, and refresh the cache entries.
            ImageApi.UpdateTexture(
                sessionHandle, maskImageHandle, TextureFormat.Alpha8, ref texture);
            ImageApi.Release(maskImageHandle);
            CachedData.SetCachedData(CachedData.PersonMaskTexture, texture);
            CachedData.SetCachedData(
                CachedData.PersonMaskTextureTimestamp, frame.timestampNs);

            return texture;
        }
        /// <summary>
        /// Gets the set of data recorded to the given track available during playback on this
        /// frame.
        /// Note, currently playback continues internally while the session is paused. Therefore, on
        /// pause/resume, track data discovered internally will be discarded to prevent stale track
        /// data from flowing through when the session resumed.
        /// Note, if the app's frame rate is higher than ARCore's frame rate, subsequent
        /// <c><cref="XRCameraFrame"/></c> objects may reference the same underlying ARCore Frame,
        /// which would mean the list of <c><see cref="TrackData"/></c> returned could be the same.
        /// One can differentiate by examining <c><see cref="TrackData.FrameTimestamp"/></c>.
        /// </summary>
        /// <param name="trackId">The ID of the track being queried.</param>
        /// <returns>Returns a list of <see cref="TrackData"/>. Will be empty if none are available.
        /// </returns>
        public List<TrackData> GetUpdatedTrackData(Guid trackId)
        {
            // The session handle may lag behind the subsystem's native session
            // briefly after startup; warn and bail out until it becomes available.
            // NOTE: the original code compared nativePtr against null, which for an
            // IntPtr struct is a lifted comparison that is always true (CS0472);
            // IntPtr.Zero is the intended "no native session" sentinel.
            if (ARCoreExtensions._instance.currentARCoreSessionHandle == IntPtr.Zero &&
                ARCoreExtensions._instance.Session.subsystem != null &&
                ARCoreExtensions._instance.Session.subsystem.nativePtr != IntPtr.Zero)
            {
                Debug.LogWarning("Failed to fetch track data. The Session is not yet available. " +
                                 "Try again later.");
                return new List<TrackData>();
            }

            ARCameraManager cameraManager = ARCoreExtensions._instance.CameraManager;

            // Cache the Camera component once instead of performing two separate
            // GetComponent lookups for the near/far clip planes.
            Camera camera = cameraManager.GetComponent<Camera>();
            var cameraParams = new XRCameraParams
            {
                zNear = camera.nearClipPlane,
                zFar = camera.farClipPlane,
                screenWidth = Screen.width,
                screenHeight = Screen.height,
                screenOrientation = Screen.orientation
            };

            if (!cameraManager.subsystem.TryGetLatestFrame(cameraParams, out XRCameraFrame frame))
            {
                Debug.LogWarning("Failed to fetch track data. The current XRCameraFrame is not " +
                                 "available. Try again later.");
                return new List<TrackData>();
            }

            // A zero timestamp or a null native pointer means ARCore has not yet
            // produced a usable frame for this session.
            if (frame.timestampNs == 0 || frame.nativePtr == IntPtr.Zero)
            {
                Debug.LogWarning("Failed to fetch track data. The current XRCameraFrame is not " +
                                 "ready. Try again later.");
                return new List<TrackData>();
            }

            return FrameApi.GetUpdatedTrackData(
                ARCoreExtensions._instance.currentARCoreSessionHandle, frame.FrameHandle(),
                trackId);
        }