Code example #1
        /// <summary>
        /// Sends the latest ARFoundation frame to Azure Spatial Anchors
        /// </summary>
        private void ProcessLatestFrame()
        {
            if (!isSessionStarted)
            {
                return;
            }

            var cameraParams = new XRCameraParams
            {
                zNear             = mainCamera.nearClipPlane,
                zFar              = mainCamera.farClipPlane,
                screenWidth       = Screen.width,
                screenHeight      = Screen.height,
                screenOrientation = Screen.orientation
            };

            XRCameraFrame xRCameraFrame;

            if (arCameraManager.subsystem.TryGetLatestFrame(cameraParams, out xRCameraFrame))
            {
                long latestFrameTimeStamp = xRCameraFrame.timestampNs;

                bool newFrameToProcess = latestFrameTimeStamp > lastFrameProcessedTimeStamp;

                if (newFrameToProcess)
                {
                    session.ProcessFrame(xRCameraFrame.nativePtr.GetPlatformPointer());
                    lastFrameProcessedTimeStamp = latestFrameTimeStamp;
                }
            }
        }
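For context, a minimal sketch (not part of the original snippet) of the MonoBehaviour state that ProcessLatestFrame above appears to rely on. The class name SpatialAnchorFrameFeeder, the Update-driven call, and the CloudSpatialAnchorSession field are assumptions, inferred from the Azure Spatial Anchors usage shown in code example #12.

using Microsoft.Azure.SpatialAnchors;   // assumed ASA SDK namespace
using UnityEngine;
using UnityEngine.XR.ARFoundation;

public class SpatialAnchorFrameFeeder : MonoBehaviour   // hypothetical host class
{
    [SerializeField] private Camera mainCamera;                // supplies near/far clip planes
    [SerializeField] private ARCameraManager arCameraManager;  // source of the XR camera subsystem

    private CloudSpatialAnchorSession session;                 // assumed Azure Spatial Anchors session
    private bool isSessionStarted;
    private long lastFrameProcessedTimeStamp;

    private void Update()
    {
        // Forward each new AR frame to the cloud session.
        ProcessLatestFrame();
    }

    // Body shown in code example #1 above.
    private void ProcessLatestFrame()
    {
        // ... see code example #1 ...
    }
}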
Code example #2
        void Update()
        {
            if (subsystem == null)
            {
                return;
            }

            var cameraParams = new XRCameraParams
            {
                zNear             = m_Camera.nearClipPlane,
                zFar              = m_Camera.farClipPlane,
                screenWidth       = Screen.width,
                screenHeight      = Screen.height,
                screenOrientation = Screen.orientation
            };

            XRCameraFrame frame;

            if (subsystem.TryGetLatestFrame(cameraParams, out frame))
            {
                UpdateTexturesInfos(frame);

                if (frameReceived != null)
                {
                    InvokeFrameReceivedEvent(frame);
                }
            }
        }
Code example #3
        /// <summary>
        /// Gets the state of localization against the global Earth map.
        /// </summary>
        /// <param name="cameraManager">The ARCameraManager instance.</param>
        /// <returns>The Earth localization state against the global Earth map.</returns>
        public static EarthLocalizationState GetEarthLocalizationState(
            this ARCameraManager cameraManager)
        {
            EarthLocalizationState state = EarthLocalizationState.NotLocalized;
            var cameraParams             = new XRCameraParams
            {
                zNear             = cameraManager.GetComponent <Camera>().nearClipPlane,
                zFar              = cameraManager.GetComponent <Camera>().farClipPlane,
                screenWidth       = Screen.width,
                screenHeight      = Screen.height,
                screenOrientation = Screen.orientation
            };

            if (!cameraManager.subsystem.TryGetLatestFrame(cameraParams, out XRCameraFrame frame))
            {
                Debug.LogWarning("Unable to determine the current EarthLocalizationState, " +
                                 "the current XRCameraFrame is not available, try again later.");
                return(state);
            }

            if (frame.timestampNs == 0 || frame.FrameHandle() == IntPtr.Zero)
            {
                Debug.LogWarning("Unable to determine the current EarthLocalizationState, " +
                                 "the current frame is not ready, try again later.");
                return(state);
            }

            return(FrameApi.GetEarthLocalizationState(
                       ARCoreExtensions._instance.currentARCoreSessionHandle, frame.FrameHandle()));
        }
Code example #4
 private void ResetState()
 {
     this.cameraParams      = default;
     this.timestamp         = null;
     this.screenSize        = null;
     this.screenOrientation = null;
 }
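For reference, a hypothetical set of field declarations matching the state cleared above; the exact types are assumptions, inferred from the caching logic in the provider shown in code example #16.

 private XRCameraParams     cameraParams;       // last XRCameraParams passed to TryGetFrame
 private long?              timestamp;          // last CameraApi.timestamp observed
 private Vector2?           screenSize;         // last CameraApi.screenSize observed
 private ScreenOrientation? screenOrientation;  // last CameraApi.screenOrientation observed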
Code example #5
        private static bool TryGetLastFrameFromExtensions(out XRCameraFrame frame)
        {
            ARCoreExtensions extensions    = ARCoreExtensions._instance;
            ARCameraManager  cameraManager = extensions.CameraManager;
            var cameraParams = new XRCameraParams
            {
                zNear             = cameraManager.GetComponent <Camera>().nearClipPlane,
                zFar              = cameraManager.GetComponent <Camera>().farClipPlane,
                screenWidth       = Screen.width,
                screenHeight      = Screen.height,
                screenOrientation = Screen.orientation
            };

            if (!cameraManager.subsystem.TryGetLatestFrame(
                    cameraParams, out frame))
            {
                Debug.LogWarning(
                    "The current XRCameraFrame is not available, try again later.");
                return(false);
            }

            if (frame.timestampNs == 0 || frame.FrameHandle() == IntPtr.Zero)
            {
                Debug.LogWarning(
                    "The current XRCameraFrame is not ready, try again later.");
                return(false);
            }

            return(true);
        }
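A short, hypothetical caller sketch showing how TryGetLastFrameFromExtensions might be consumed; the follow-up native call is indicated only by a comment.

        if (TryGetLastFrameFromExtensions(out XRCameraFrame frame))
        {
            // The frame is ready: frame.timestampNs and frame.FrameHandle() can now
            // be passed on, e.g. to a native ARCore Extensions API call.
            long timestampNs = frame.timestampNs;
        }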
Code example #6
        private void OnFrameUpdate(ARCameraFrameEventArgs frameEventArgs)
        {
            if (!ShouldUpdateARCoreSession())
            {
                return;
            }

            if (_sessionHandle == IntPtr.Zero)
            {
                return;
            }

            if (_frameHandle != IntPtr.Zero)
            {
                SessionApi.ReleaseFrame(_frameHandle);
                _frameHandle = IntPtr.Zero;
            }

            if (_arKitSession != null && _cameraManager != null && _arKitSession.enabled)
            {
                var cameraParams = new XRCameraParams
                {
                    zNear             = _cameraManager.GetComponent <Camera>().nearClipPlane,
                    zFar              = _cameraManager.GetComponent <Camera>().farClipPlane,
                    screenWidth       = Screen.width,
                    screenHeight      = Screen.height,
                    screenOrientation = Screen.orientation
                };

                if (!_cameraManager.subsystem.TryGetLatestFrame(
                        cameraParams, out XRCameraFrame frame))
                {
                    Debug.LogWarning("XRCamera's latest frame is not available now.");
                    return;
                }

                if (frame.timestampNs == 0 || frame.FrameHandle() == IntPtr.Zero)
                {
                    Debug.LogWarning("ARKit Plugin Frame is not ready.");
                    return;
                }

                var status = ExternApi.ArSession_updateAndAcquireArFrame(
                    _sessionHandle, frame.FrameHandle(), ref _frameHandle);
                if (status != ApiArStatus.Success)
                {
                    Debug.LogErrorFormat("Failed to update and acquire ARFrame with error: " +
                                         "{0}", status);
                    return;
                }
            }
        }
Code example #7
    private void Update()
    {
        if (FritzPoseManager.Processing())
        {
            return;
        }

#if UNITY_ANDROID && !UNITY_EDITOR
        XRCameraImage image;
        if (!m_CameraManager.TryGetLatestImage(out image))
        {
            // No image was acquired, so there is nothing to dispose.
            return;
        }

        FritzPoseManager.ProcessPoseFromImageAsync(image);

        // You must dispose the CameraImage to avoid resource leaks.
        image.Dispose();
#elif UNITY_IOS && !UNITY_EDITOR
        var cameraParams = new XRCameraParams
        {
            zNear             = m_Cam.nearClipPlane,
            zFar              = m_Cam.farClipPlane,
            screenWidth       = Screen.width,
            screenHeight      = Screen.height,
            screenOrientation = Screen.orientation
        };

        XRCameraFrame frame;
        if (!m_CameraManager.subsystem.TryGetLatestFrame(cameraParams, out frame))
        {
            return;
        }
        FritzPoseManager.ProcessPoseFromFrameAsync(frame);
#else
        var randomPosition = debugPoint;
        randomPosition.x = randomPosition.x * UnityEngine.Random.Range(-0.5f, 0.5f);
        randomPosition.y = randomPosition.y * UnityEngine.Random.Range(-0.5f, 0.5f);

        Debug.LogFormat("{0}", bird);
        MoveBirdToPoint(bird, randomPosition);
        if (trackedObject != null)
        {
            trackedObject.transform.position = randomPosition;
        }
#endif
    }
Code example #8
            public override bool TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame)
            {
                var remote = ARKitReceiver.Instance;

                if (remote == null)
                {
                    cameraFrame = default(XRCameraFrame);
                    return(false);
                }

                var remoteFrame = ARKitReceiver.Instance.CameraFrame;

                if (remoteFrame.timestampNs == default(long))
                {
                    cameraFrame = default(XRCameraFrame);
                    return(false);
                }

                const XRCameraFrameProperties properties =
                    XRCameraFrameProperties.Timestamp
                    | XRCameraFrameProperties.ProjectionMatrix
                    | XRCameraFrameProperties.DisplayMatrix;

                cameraFrame = new CameraFrame()
                {
                    timestampNs             = remoteFrame.timestampNs,
                    averageBrightness       = 0,
                    averageColorTemperature = 0,
                    colorCorrection         = default(Color),
                    projectionMatrix        = remoteFrame.projectionMatrix,
                    displayMatrix           = remoteFrame.displayMatrix,
                    trackingState           = TrackingState.Tracking,
                    nativePtr  = new IntPtr(0),
                    properties = properties,
                    averageIntensityInLumens = 0,
                    exposureDuration         = 0,
                    exposureOffset           = 0
                };

                // Debug.Log(cameraFrame);
                return(true);
            }
Code example #9
            public override bool TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame)
            {
                if (!Application.isPlaying || !mockCamera.isPrepared)
                {
                    cameraFrame = default(XRCameraFrame);
                    return(false);
                }

                const XRCameraFrameProperties properties =
                    XRCameraFrameProperties.Timestamp
                    // | XRCameraFrameProperties.ProjectionMatrix
                    | XRCameraFrameProperties.DisplayMatrix;

                Matrix4x4 displayMatrix = GetDisplayTransform(
                    (float)mockCamera.texture.width / mockCamera.texture.height,
                    (float)Screen.width / Screen.height
                    );

                cameraFrame = (XRCameraFrame) new CameraFrame()
                {
                    timestampNs             = DateTime.Now.Ticks,
                    averageBrightness       = 0,
                    averageColorTemperature = 0,
                    colorCorrection         = default(Color),
                    projectionMatrix        = Matrix4x4.identity,
                    displayMatrix           = displayMatrix,
                    trackingState           = TrackingState.Tracking,
                    nativePtr  = new IntPtr(0),
                    properties = properties,
                    averageIntensityInLumens  = 0,
                    exposureDuration          = 0,
                    exposureOffset            = 0,
                    mainLightIntensityLumens  = 0,
                    mainLightColor            = default(Color),
                    ambientSphericalHarmonics = default(SphericalHarmonicsL2),
                    cameraGrain    = default(XRTextureDescriptor),
                    noiseIntensity = 0,
                };

                // Debug.Log(cameraFrame);
                return(true);
            }
Code example #10
        /// <summary>
        /// Gets the set of data recorded to the given track available during playback on this
        /// frame.
        /// Note, currently playback continues internally while the session is paused. Therefore, on
        /// pause/resume, track data discovered internally will be discarded to prevent stale track
        /// data from flowing through when the session is resumed.
        /// Note, if the app's frame rate is higher than ARCore's frame rate, subsequent
        /// <c><see cref="XRCameraFrame"/></c> objects may reference the same underlying ARCore Frame,
        /// which would mean the list of <c><see cref="TrackData"/></c> returned could be the same.
        /// One can differentiate by examining <c><see cref="TrackData.FrameTimestamp"/></c>.
        /// </summary>
        /// <param name="trackId">The ID of the track being queried.</param>
        /// <returns>Returns a list of <see cref="TrackData"/>. Will be empty if none are available.
        /// </returns>
        public List <TrackData> GetUpdatedTrackData(Guid trackId)
        {
            if (ARCoreExtensions._instance.currentARCoreSessionHandle == IntPtr.Zero &&
                ARCoreExtensions._instance.Session.subsystem != null &&
                ARCoreExtensions._instance.Session.subsystem.nativePtr != IntPtr.Zero)
            {
                Debug.LogWarning("Failed to fetch track data. The Session is not yet available. " +
                                 "Try again later.");
                return(new List <TrackData>());
            }

            ARCameraManager cameraManager = ARCoreExtensions._instance.CameraManager;

            var cameraParams = new XRCameraParams
            {
                zNear             = cameraManager.GetComponent <Camera>().nearClipPlane,
                zFar              = cameraManager.GetComponent <Camera>().farClipPlane,
                screenWidth       = Screen.width,
                screenHeight      = Screen.height,
                screenOrientation = Screen.orientation
            };

            if (!cameraManager.subsystem.TryGetLatestFrame(cameraParams, out XRCameraFrame frame))
            {
                Debug.LogWarning("Failed to fetch track data. The current XRCameraFrame is not " +
                                 "available. Try again later.");
                return(new List <TrackData>());
            }

            if (frame.timestampNs == 0 || frame.nativePtr == IntPtr.Zero)
            {
                Debug.LogWarning("Failed to fetch track data. The current XRCameraFrame is not " +
                                 "ready. Try again later.");
                return(new List <TrackData>());
            }

            return(FrameApi.GetUpdatedTrackData(
                       ARCoreExtensions._instance.currentARCoreSessionHandle, frame.FrameHandle(),
                       trackId));
        }
Code example #11
 /// <summary>
 /// Get the current camera frame for the subsystem.
 /// </summary>
 /// <param name="cameraParams">The current Unity <c>Camera</c> parameters.</param>
 /// <param name="cameraFrame">The current camera frame returned by the method.</param>
 /// <returns>
 /// <c>true</c> if the method successfully got a frame. Otherwise, <c>false</c>.
 /// </returns>
 public override bool TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame)
 {
     return(NativeApi.UnityARKit_Camera_TryGetFrame(cameraParams, out cameraFrame));
 }
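For context, the provider override above is what ultimately services application-level calls such as the sketch below, which builds XRCameraParams the same way the other examples in this list do; the cameraManager reference is an assumed ARCameraManager.

 var cameraParams = new XRCameraParams
 {
     zNear             = Camera.main.nearClipPlane,
     zFar              = Camera.main.farClipPlane,
     screenWidth       = Screen.width,
     screenHeight      = Screen.height,
     screenOrientation = Screen.orientation
 };

 if (cameraManager.subsystem.TryGetLatestFrame(cameraParams, out XRCameraFrame frame))
 {
     // On success, the frame's timestamp (and any properties it advertises) are valid.
     Debug.Log("Latest frame timestamp (ns): " + frame.timestampNs);
 }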
Code example #12
    private async Task createSessionAsync()
    {
        if (cloudSession != null)
        {
            //session already created, need to exit
            return;
        }

        cloudSession = new CloudSpatialAnchorSession();
        cloudSession.Configuration.AccountId   = "87f0e62e-2379-4bc4-bd1e-38dc699e8d6b";
        cloudSession.Configuration.AccessToken = "kF/SejjecfM8CatFa74V94iAsgApvPiaT1mY//qgK44=";
        cloudSession.Session = aRSession.subsystem.nativePtr.GetPlatformPointer();

#if UNITY_ANDROID // Android Only
        // We should only run the Java initialization once
        if (!javaInitialized)
        {
            // Create a TaskCompletionSource that we can use to know when
            // the Java plugin has completed initialization on the Android
            // thread.
            TaskCompletionSource <bool> pluginInit = new TaskCompletionSource <bool>();

            // Make sure ARCore is running. This code must be executed
            // on a Java thread provided by Android.
            AndroidHelper.Instance.DispatchUiThread(unityActivity =>
            {
                // Create the plugin
                using (AndroidJavaClass cloudServices = new AndroidJavaClass("com.microsoft.CloudServices"))
                {
                    // Initialize the plugin
                    cloudServices.CallStatic("initialize", unityActivity);

                    // Update static variable to say that the plugin has been initialized
                    javaInitialized = true;

                    // Set the task completion source so the CreateSession method can
                    // continue back on the Unity thread.
                    pluginInit.SetResult(true);
                }
            });

            // Wait for the plugin to complete initialization on the
            // Java thread.
            await pluginInit.Task;
        }
#endif

#if UNITY_ANDROID || UNITY_IOS
        var cameraParams = new XRCameraParams
        {
            zNear             = Camera.main.nearClipPlane,
            zFar              = Camera.main.farClipPlane,
            screenWidth       = Screen.width,
            screenHeight      = Screen.height,
            screenOrientation = Screen.orientation
        };

        XRCameraFrame xRCameraFrame;
        if (aRCameraManager.subsystem.TryGetLatestFrame(cameraParams, out xRCameraFrame))
        {
            long latestFrameTimeStamp = xRCameraFrame.timestampNs;

            bool newFrameToProcess = latestFrameTimeStamp > lastFrameProcessedTimeStamp;

            if (newFrameToProcess)
            {
                cloudSession.ProcessFrame(xRCameraFrame.nativePtr.GetPlatformPointer());
                lastFrameProcessedTimeStamp = latestFrameTimeStamp;
            }
        }
#endif
    }
Code example #13
 public override bool TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame)
 {
     cameraFrame = default(XRCameraFrame);
     return(false);
 }
Code example #14
        private void OnFrameUpdate(ARCameraFrameEventArgs frameEventArgs)
        {
            if (!_isEnabled)
            {
                return;
            }

            if (_sessionHandle == IntPtr.Zero)
            {
                return;
            }

            if (_frameHandle != IntPtr.Zero)
            {
                FrameApi.ReleaseFrame(_frameHandle);
                _frameHandle = IntPtr.Zero;
            }

            if (_arKitSession != null && _cameraManager != null && _arKitSession.enabled)
            {
                var cameraParams = new XRCameraParams
                {
                    zNear             = _cameraManager.GetComponent <Camera>().nearClipPlane,
                    zFar              = _cameraManager.GetComponent <Camera>().farClipPlane,
                    screenWidth       = Screen.width,
                    screenHeight      = Screen.height,
                    screenOrientation = Screen.orientation
                };

                if (!_cameraManager.subsystem.TryGetLatestFrame(
                        cameraParams, out XRCameraFrame frame))
                {
                    Debug.LogWarning("XRCamera's latest frame is not available now.");
                    return;
                }

                if (frame.timestampNs == 0 || frame.FrameHandle() == IntPtr.Zero)
                {
                    Debug.LogWarning("ARKit Plugin Frame is not ready.");
                    return;
                }

                var status = ExternApi.ArSession_updateAndAcquireArFrame(
                    _sessionHandle, frame.FrameHandle(), ref _frameHandle);
                if (status != ApiArStatus.Success)
                {
                    Debug.LogErrorFormat("Failed to update and acquire ARFrame with error: " +
                                         "{0}", status);
                    return;
                }

                // Update session configuration.
                if (ARCoreExtensions._instance.ARCoreExtensionsConfig != null &&
                    !ARCoreExtensions._instance.ARCoreExtensionsConfig.Equals(_cachedConfig))
                {
                    _cachedConfig = ScriptableObject.CreateInstance <ARCoreExtensionsConfig>();
                    _cachedConfig.CopyFrom(ARCoreExtensions._instance.ARCoreExtensionsConfig);
                    ConfigApi.ConfigureSession(_sessionHandle, _cachedConfig);
                }
            }
        }
Code example #15
            public override bool TryGetFrame(XRCameraParams cameraParams, out UnityEngine.XR.ARSubsystems.XRCameraFrame cameraFrame)
            {
                var frame = new XRCameraFrame();
                XRCameraFrameProperties properties = 0;

                if (m_LastLightEstimation.m_AmbientBrightness.HasValue)
                {
                    frame.AverageBrightness = m_LastLightEstimation.m_AmbientBrightness.Value;
                    properties |= XRCameraFrameProperties.AverageBrightness;
                }

                if (m_LastLightEstimation.m_AmbientColorTemperature.HasValue)
                {
                    frame.AverageColorTemperature = m_LastLightEstimation.m_AmbientColorTemperature.Value;
                    properties |= XRCameraFrameProperties.AverageColorTemperature;
                }

                if (m_LastLightEstimation.m_ColorCorrection.HasValue)
                {
                    frame.ColorCorrection = m_LastLightEstimation.m_ColorCorrection.Value;
                    properties           |= XRCameraFrameProperties.ColorCorrection;
                }

#if ARSUBSYSTEMS_3_OR_NEWER
                if (m_LastLightEstimation.m_AmbientIntensityInLumens.HasValue)
                {
                    frame.AverageIntensityInLumens = m_LastLightEstimation.m_AmbientIntensityInLumens.Value;
                    properties |= XRCameraFrameProperties.AverageIntensityInLumens;
                }
#endif

#if ARSUBSYSTEMS_4_OR_NEWER
                if (m_LastLightEstimation.m_MainLightColor.HasValue)
                {
                    frame.MainLightColor = m_LastLightEstimation.m_MainLightColor.Value;
                    properties          |= XRCameraFrameProperties.MainLightColor;
                }

                if (m_LastLightEstimation.m_MainLightDirection.HasValue)
                {
                    frame.MainLightDirection = m_LastLightEstimation.m_MainLightDirection.Value;
                    properties |= XRCameraFrameProperties.MainLightDirection;
                }

                if (m_LastLightEstimation.m_MainLightIntensityLumens.HasValue)
                {
                    frame.MainLightIntensityLumens = m_LastLightEstimation.m_MainLightIntensityLumens.Value;
                    properties |= XRCameraFrameProperties.MainLightIntensityLumens;
                }

                if (m_LastLightEstimation.m_SphericalHarmonics.HasValue)
                {
                    frame.AmbientSphericalHarmonics = m_LastLightEstimation.m_SphericalHarmonics.Value;
                    properties |= XRCameraFrameProperties.AmbientSphericalHarmonics;
                }
#endif

                frame.Properties = properties;

                var union = new XRCameraFrameUnion {
                    m_OurXRCameraFrame = frame
                };
                cameraFrame = union.m_TheirXRCameraFrame;
                return(true);
            }
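The XRCameraFrameUnion type itself is not shown in this snippet. A plausible sketch of it, assuming the project defines its own writable XRCameraFrame struct laid out to match UnityEngine.XR.ARSubsystems.XRCameraFrame byte for byte:

            using System.Runtime.InteropServices;

            [StructLayout(LayoutKind.Explicit)]
            struct XRCameraFrameUnion
            {
                // Both fields share offset 0, so writing the project's own (settable)
                // XRCameraFrame and then reading Unity's read-only XRCameraFrame
                // reinterprets the same bytes, C-union style.
                [FieldOffset(0)] public XRCameraFrame m_OurXRCameraFrame;
                [FieldOffset(0)] public UnityEngine.XR.ARSubsystems.XRCameraFrame m_TheirXRCameraFrame;
            }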
Code example #16
            public override bool TryGetFrame(XRCameraParams cameraParams, out XRCameraFrame cameraFrame)
            {
                var timestamp = CameraApi.timestamp;

                if (this.cameraParams != cameraParams ||
                    this.timestamp != timestamp ||
                    this.screenSize != CameraApi.screenSize ||
                    this.screenOrientation != CameraApi.screenOrientation)
                {
                    try
                    {
                        var result = new XRCameraFrameMock();

                        if (CameraApi.timestamp.HasValue)
                        {
                            result.m_TimestampNs = CameraApi.timestamp.Value;
                            result.m_Properties  = result.m_Properties | XRCameraFrameProperties.Timestamp;
                        }

                        if (CameraApi.averageBrightness.HasValue)
                        {
                            result.m_AverageBrightness = CameraApi.averageBrightness.Value;
                            result.m_Properties = result.m_Properties | XRCameraFrameProperties.AverageBrightness;
                        }

                        if (CameraApi.averageColorTemperature.HasValue)
                        {
                            result.m_AverageColorTemperature = CameraApi.averageColorTemperature.Value;
                            result.m_Properties = result.m_Properties | XRCameraFrameProperties.AverageColorTemperature;
                        }

                        if (CameraApi.colorCorrection.HasValue)
                        {
                            result.m_ColorCorrection = CameraApi.colorCorrection.Value;
                            result.m_Properties      = result.m_Properties | XRCameraFrameProperties.ColorCorrection;
                        }

                        if (CameraApi.projectionMatrix.HasValue)
                        {
                            Matrix4x4 screenMatrix = Matrix4x4.identity;
                            if (CameraApi.screenSize.HasValue)
                            {
                                var sourceScreenSize = CameraApi.screenSize.Value;
                                var sourceAspect     = sourceScreenSize.x / sourceScreenSize.y;
                                var screenAspect     = cameraParams.screenWidth / cameraParams.screenHeight;
                                if (sourceAspect < screenAspect)
                                {
                                    screenMatrix.m00 = sourceAspect / screenAspect;
                                }
                                else
                                {
                                    screenMatrix.m11 = screenAspect / sourceAspect;
                                }
                            }

                            result.m_ProjectionMatrix = screenMatrix * CameraApi.projectionMatrix.Value;
                            result.m_Properties       = result.m_Properties | XRCameraFrameProperties.ProjectionMatrix;
                        }

                        if (CameraApi.displayMatrix.HasValue)
                        {
                            result.m_DisplayMatrix = CameraApi.displayMatrix.Value;
                            result.m_Properties    = result.m_Properties | XRCameraFrameProperties.DisplayMatrix;
                        }

                        result.m_TrackingState = TrackingState.Tracking;
                        result.m_NativePtr     = IntPtr.Zero;

                        result.Convert(out cameraFrame);
                        return(true);
                    }
                    finally
                    {
                        this.timestamp         = timestamp;
                        this.cameraParams      = cameraParams;
                        this.screenSize        = CameraApi.screenSize;
                        this.screenOrientation = CameraApi.screenOrientation;
                    }
                }

                cameraFrame = default;
                return(false);
            }