/// <summary>
/// Update is called once per frame. Polls all NRSDK trackables of the configured type and
/// raises FoundEvent (with the trackable's center pose) or LostEvnet for the trackable whose
/// database index matches m_TrackableBehaviour.
/// </summary>
void Update()
{
    if (TargetType == TrackableType.TrackableImage)
    {
        NRFrame.GetTrackables<NRTrackableImage>(m_TempTrackingImages, NRTrackableQueryFilter.All);
        foreach (var image in m_TempTrackingImages)
        {
            // Only the image bound to this behaviour is of interest.
            if (image.GetDataBaseIndex() != m_TrackableBehaviour.DatabaseIndex)
            {
                continue;
            }
            if (image.GetTrackingState() == TrackingState.Tracking)
            {
                if (FoundEvent != null)
                {
                    var centerPose = image.GetCenterPose();
                    FoundEvent(centerPose.position, centerPose.rotation);
                }
            }
            else if (LostEvnet != null)
            {
                LostEvnet();
            }
            break; // at most one trackable matches; stop scanning.
        }
    }
    else if (TargetType == TrackableType.TrackablePlane)
    {
        NRFrame.GetTrackables<NRTrackablePlane>(m_TempTrackingPlane, NRTrackableQueryFilter.All);
        foreach (var plane in m_TempTrackingPlane)
        {
            // Bind to the first plane reported if no database index has been assigned yet.
            if (m_TrackableBehaviour.DatabaseIndex == -1)
            {
                m_TrackableBehaviour.DatabaseIndex = plane.GetDataBaseIndex();
            }
            if (plane.GetDataBaseIndex() != m_TrackableBehaviour.DatabaseIndex)
            {
                continue;
            }
            if (plane.GetTrackingState() == TrackingState.Tracking)
            {
                if (FoundEvent != null)
                {
                    var centerPose = plane.GetCenterPose();
                    FoundEvent(centerPose.position, centerPose.rotation);
                }
            }
            else if (LostEvnet != null)
            {
                LostEvnet();
            }
            break; // at most one trackable matches; stop scanning.
        }
    }
}
/// <summary> Updates the head pose by timestamp described by timestamp. </summary>
/// <param name="timestamp"> The timestamp.</param>
private void UpdateHeadPoseByTimestamp(UInt64 timestamp)
{
    var headPose = Pose.identity;
    // Only move the rig when NRSDK can resolve a pose for the requested time.
    if (NRFrame.GetHeadPoseByTime(ref headPose, timestamp, m_PredictTime))
    {
        var rig = RGBCameraRig.transform;
        rig.localPosition = headPose.position;
        rig.localRotation = headPose.rotation;
    }
}
/// <summary>
/// Spawns a visualization object for every plane first detected this frame.
/// </summary>
public void Update()
{
    NRFrame.GetTrackables<NRTrackablePlane>(m_NewPlanes, NRTrackableQueryFilter.New);
    foreach (var newPlane in m_NewPlanes)
    {
        // The prefab is spawned at the origin with identity rotation because its mesh is
        // updated in Unity world coordinates by the trackable behaviour.
        var planeObject = Instantiate(DetectedPlanePrefab, Vector3.zero, Quaternion.identity, transform);
        planeObject.GetComponent<NRTrackableBehaviour>().Initialize(newPlane);
    }
}
/// <summary>
/// Spawns an anchored visualization object for every image marker first detected this frame.
/// </summary>
public void Update()
{
    NRFrame.GetTrackables<NRTrackableImage>(m_NewMarkers, NRTrackableQueryFilter.New);
    foreach (var marker in m_NewMarkers)
    {
        Debug.Log("[MarkerDetecter] Get New TrackableImages!! " + marker.TrackableNativeHandle);
        // Anchor the visualization so NRSDK keeps tracking this marker.
        NRAnchor anchor = marker.CreateAnchor();
        var markerObject = Instantiate(DetectedMarkerPrefab, Vector3.zero, Quaternion.identity, anchor.transform);
        markerObject.GetComponent<TrackableImageBehaviour>().Initialize(marker);
    }
}
/// <summary>
/// One-shot setup: applies the per-eye projection matrix and head-relative local pose to the
/// attached camera. On device the data comes from NRFrame; in the editor it falls back to the
/// serialized <c>matrix</c> field. No-op once initialized.
/// </summary>
private void Initialize()
{
    if (m_IsInitialized)
    {
        return;
    }
#if !UNITY_EDITOR
    bool result;
    m_TargetCamera = gameObject.GetComponent<Camera>();
    var matrix_data = NRFrame.GetEyeProjectMatrix(out result, m_TargetCamera.nearClipPlane, m_TargetCamera.farClipPlane);
    if (result)
    {
        var eyeposFromHead = NRFrame.EyePosFromHead;
        // Per-eye assignment; logging is shared below instead of duplicated per case.
        bool matched = true;
        switch (EyeType)
        {
            case NativeEye.LEFT:
                m_TargetCamera.projectionMatrix = matrix_data.LEyeMatrix;
                transform.localPosition = eyeposFromHead.LEyePose.position;
                transform.localRotation = eyeposFromHead.LEyePose.rotation;
                break;
            case NativeEye.RIGHT:
                m_TargetCamera.projectionMatrix = matrix_data.REyeMatrix;
                transform.localPosition = eyeposFromHead.REyePose.position;
                transform.localRotation = eyeposFromHead.REyePose.rotation;
                break;
            case NativeEye.RGB:
                m_TargetCamera.projectionMatrix = matrix_data.RGBEyeMatrix;
                transform.localPosition = eyeposFromHead.RGBEyePos.position;
                transform.localRotation = eyeposFromHead.RGBEyePos.rotation;
                break;
            default:
                // Unknown eye type: leave camera and pose untouched (matches previous behavior).
                matched = false;
                break;
        }
        if (matched)
        {
            Debug.Log("[Matrix] RGB Camera Project Matrix :" + m_TargetCamera.projectionMatrix.ToString());
            Debug.LogFormat("RGB Camera pos:{0} rotation:{1}", transform.localPosition.ToString(), transform.localRotation.ToString());
        }
        // Initialization only completes once valid matrix data was obtained.
        m_IsInitialized = true;
    }
#else
    // No device in the editor; use the serialized fallback matrix.
    m_TargetCamera = gameObject.GetComponent<Camera>();
    m_TargetCamera.projectionMatrix = matrix;
    m_IsInitialized = true;
#endif
}
/// <summary>
/// Coroutine: polls NRFrame once per frame until eye projection data becomes valid, then applies
/// the per-eye projection matrix and head-relative local pose to the target camera.
/// </summary>
private IEnumerator Initialize()
{
    bool result;
    EyeProjectMatrixData matrix_data = NRFrame.GetEyeProjectMatrix(out result, m_TargetCamera.nearClipPlane, m_TargetCamera.farClipPlane);
    // Projection data is unavailable until the native session is ready; retry each frame.
    while (!result)
    {
        Debug.Log("Waiting to initialize camera param.");
        yield return(new WaitForEndOfFrame());
        matrix_data = NRFrame.GetEyeProjectMatrix(out result, m_TargetCamera.nearClipPlane, m_TargetCamera.farClipPlane);
    }
    var eyeposFromHead = NRFrame.EyePosFromHead;
    // Per-eye assignment; logging is shared below instead of duplicated per case.
    bool matched = true;
    switch (EyeType)
    {
        case NativeEye.LEFT:
            m_TargetCamera.projectionMatrix = matrix_data.LEyeMatrix;
            transform.localPosition = eyeposFromHead.LEyePose.position;
            transform.localRotation = eyeposFromHead.LEyePose.rotation;
            break;
        case NativeEye.RIGHT:
            m_TargetCamera.projectionMatrix = matrix_data.REyeMatrix;
            transform.localPosition = eyeposFromHead.REyePose.position;
            transform.localRotation = eyeposFromHead.REyePose.rotation;
            break;
        case NativeEye.RGB:
            m_TargetCamera.projectionMatrix = matrix_data.RGBEyeMatrix;
            transform.localPosition = eyeposFromHead.RGBEyePos.position;
            transform.localRotation = eyeposFromHead.RGBEyePos.rotation;
            break;
        default:
            // Unknown eye type: leave camera and pose untouched (matches previous behavior).
            matched = false;
            break;
    }
    if (matched)
    {
        NRDebugger.Log("[Matrix] RGB Camera Project Matrix :" + m_TargetCamera.projectionMatrix.ToString());
        NRDebugger.LogFormat("RGB Camera pos:{0} rotation:{1}", transform.localPosition.ToString(), transform.localRotation.ToString());
    }
}
/// <summary>
/// Re-positions the single DetectedMarkerPrefab onto the first tracked image marker each frame.
/// </summary>
public void Update()
{
    NRFrame.GetTrackables<NRTrackableImage>(m_NewMarkers, NRTrackableQueryFilter.All);
    for (int i = 0; i < m_NewMarkers.Count; i++)
    {
        var marker = m_NewMarkers[i];
        if (marker == null || marker.GetTrackingState() != TrackingState.Tracking)
        {
            continue;
        }
        Vector2 size = marker.Size;
        // NOTE(review): z scale reuses size.x — presumably depth mirrors marker width; confirm.
        DetectedMarkerPrefab.transform.localScale = new Vector3(size.x, size.y, size.x);
        var centerPose = marker.GetCenterPose();
        // Raise the prefab by half the marker width along the marker's up axis, plus a fixed offset.
        var upOffset = centerPose.up * size.x * 0.5f;
        DetectedMarkerPrefab.transform.position = centerPose.position + upOffset + OffSet;
        DetectedMarkerPrefab.transform.rotation = centerPose.rotation;
    }
}
/// <summary>
/// Queries all tracked images this frame and forwards every actively tracked one to
/// DetectedUpdateAnchor.
/// </summary>
private void CheckTrackingDetection()
{
    NRFrame.GetTrackables(_tempTrackingImages, NRTrackableQueryFilter.All);
    // Nothing reported this frame.
    if (_tempTrackingImages.Count == 0)
    {
        return;
    }
    for (int i = 0; i < _tempTrackingImages.Count; i++)
    {
        var image = _tempTrackingImages[i];
        if (image.GetTrackingState() == TrackingState.Tracking)
        {
            DetectedUpdateAnchor(image);
        }
    }
}
/// <summary>
/// The Unity Update method. Spawns a DetectedPlaneVisualizer for every plane first detected
/// this frame, once motion tracking is established.
/// </summary>
public void Update()
{
    // Skip visualization until motion tracking is running.
    if (NRFrame.SessionStatus != SessionState.Tracking)
    {
        return;
    }
    NRFrame.GetTrackables<NRTrackablePlane>(m_NewPlanes, NRTrackableQueryFilter.New);
    foreach (var newPlane in m_NewPlanes)
    {
        // The prefab is spawned at the origin with identity rotation because its mesh is
        // maintained in Unity world coordinates by the visualizer.
        var planeObject = Instantiate(DetectedPlanePrefab, Vector3.zero, Quaternion.identity, transform);
        planeObject.GetComponent<DetectedPlaneVisualizer>().Initialize(newPlane);
    }
}
/// <summary>
/// Creates a TrackingImageVisualizer for each newly tracked image (keyed by database index),
/// removes visualizers for stopped images, and hides the fit-to-scan overlay once any new image
/// has been reported.
/// </summary>
public void Update()
{
#if !UNITY_EDITOR
    // Check that motion tracking is tracking.
    if (NRFrame.SessionStatus != SessionState.Tracking)
    {
        return;
    }
#endif
    // Get updated augmented images for this frame.
    NRFrame.GetTrackables<NRTrackableImage>(m_TempTrackingImages, NRTrackableQueryFilter.New);
    // Create visualizers and anchors for updated augmented images that are tracking and do not
    // previously have a visualizer. Remove visualizers for stopped images.
    foreach (var image in m_TempTrackingImages)
    {
        TrackingImageVisualizer visualizer = null;
        m_Visualizers.TryGetValue(image.GetDataBaseIndex(), out visualizer);
        if (image.GetTrackingState() == TrackingState.Tracking && visualizer == null)
        {
            NRDebugger.Log("Create new TrackingImageVisualizer!");
            // Create an anchor to ensure that NRSDK keeps tracking this augmented image.
            visualizer = (TrackingImageVisualizer)Instantiate(TrackingImageVisualizerPrefab, image.GetCenterPose().position, image.GetCenterPose().rotation);
            visualizer.Image = image;
            visualizer.transform.parent = transform;
            m_Visualizers.Add(image.GetDataBaseIndex(), visualizer);
        }
        else if (image.GetTrackingState() == TrackingState.Stopped && visualizer != null)
        {
            m_Visualizers.Remove(image.GetDataBaseIndex());
            Destroy(visualizer.gameObject);
        }
    }
    // Hoisted loop-invariant call: previously SetActive(false) ran once per image inside the
    // loop; calling it once when any image was reported is behaviorally identical.
    if (m_TempTrackingImages.Count > 0)
    {
        FitToScanOverlay.SetActive(false);
    }
}
/// <summary>
/// Update is called once per frame. Maps each newly tracked image's database index to its page
/// prefab (0 → flower1, 1 → plane2), spawns a Visualizer for tracked images without one, and
/// removes visualizers for stopped images. Other indices are ignored.
/// </summary>
void Update()
{
#if !UNITY_EDITOR
    // Check that motion tracking is tracking.
    if (NRFrame.SessionStatus != SessionState.Tracking)
    {
        return;
    }
#endif
    // Get updated augmented images for this frame.
    NRFrame.GetTrackables<NRTrackableImage>(m_TempTrackingImages, NRTrackableQueryFilter.New);
    foreach (var image in m_TempTrackingImages)
    {
        int index = image.GetDataBaseIndex();
        // Deduplicated: the two switch cases previously duplicated the whole create/remove
        // logic, differing only in prefab and log message.
        Visualizer prefab = null;
        string foundMessage = null;
        switch (index)
        {
            case 0:
                prefab = flower1;
                foundMessage = "Found flower1.jpg!";
                break;
            case 1:
                prefab = plane2;
                foundMessage = "Found plane2.jpg!";
                break;
            default:
                // Unrecognized index: ignore (matches previous behavior).
                break;
        }
        if (prefab != null)
        {
            Visualizer visualizer = null;
            m_Visualizers.TryGetValue(index, out visualizer);
            if (image.GetTrackingState() == TrackingState.Tracking && visualizer == null)
            {
                // NOTE(review): image-tracking persistence is unverified — the prefab may vanish
                // if the page leaves view; confirm against NRSDK tracking behavior.
                visualizer = (Visualizer)Instantiate(prefab, image.GetCenterPose().position, image.GetCenterPose().rotation);
                visualizer.pageFlower = image;
                visualizer.transform.parent = transform;
                m_Visualizers.Add(index, visualizer);
                NRDebugger.Log(foundMessage);
            }
            else if (image.GetTrackingState() == TrackingState.Stopped && visualizer != null)
            {
                m_Visualizers.Remove(index);
                Destroy(visualizer.gameObject);
            }
        }
        // Hide the scan hint whenever any new image is reported (as before, runs per image).
        FitToScanOverlay.SetActive(false);
    }
}
/// <summary>
/// Image-tracking driver. In the editor it spawns one virtual visualizer at a fixed offset and
/// destroys this component (no real tracking available). On device it creates a visualizer for
/// the first newly tracked image, re-parents it to the scene root, and then destroys this
/// component so tracking stops after the first hit.
/// </summary>
public void Update()
{
#if UNITY_EDITOR
    if (virtualImageTrackingEnabled)
    {
        return;
    }
    else
    {
        // Editor fallback: no device tracking, so fake a single detection in front of the origin.
        TrackingImageVisualizer visualizer = null;
        NRDebugger.Log("Create new TrackingImageVisualizer!");
        visualizer = (TrackingImageVisualizer)Instantiate(
            TrackingImageVisualizerPrefab,
            new Vector3(0f, 0f, 0f),
            Quaternion.identity);
        // visualizer.transform.parent = transform;
        visualizer.transform.parent = null; // add to root of scene
        visualizer.transform.localPosition = new Vector3(0, 0, 5f);
        virtualImageTrackingEnabled = true; // set flag
        Destroy(gameObject); // STOP image tracking
        return;
    }
#endif
#if !UNITY_EDITOR
    // Check that motion tracking is tracking.
    // NOTE(review): sibling scripts compare against SessionState.Tracking here; this one uses
    // SessionState.Running — confirm which state is intended.
    if (NRFrame.SessionStatus != SessionState.Running)
    {
        return;
    }
#endif
    // Get updated augmented images for this frame.
    NRFrame.GetTrackables <NRTrackableImage>(m_TempTrackingImages, NRTrackableQueryFilter.New);
    // Create visualizers and anchors for updated augmented images that are tracking and do not
    // previously have a visualizer. Remove visualizers for stopped images.
    foreach (var image in m_TempTrackingImages)
    {
        TrackingImageVisualizer visualizer = null;
        m_Visualizers.TryGetValue(image.GetDataBaseIndex(), out visualizer);
        if (image.GetTrackingState() == TrackingState.Tracking && visualizer == null)
        {
            NRDebugger.Log("Create new TrackingImageVisualizer!");
            // Create an anchor to ensure that NRSDK keeps tracking this augmented image.
            visualizer = (TrackingImageVisualizer)Instantiate(TrackingImageVisualizerPrefab, image.GetCenterPose().position, image.GetCenterPose().rotation);
            visualizer.Image = image;
            // visualizer.transform.parent = transform;
            visualizer.transform.parent = null; // add to root of scene
            m_Visualizers.Add(image.GetDataBaseIndex(), visualizer);
            // Unity defers actual destruction to end of frame, so the loop finishes safely.
            Destroy(gameObject); // STOP image tracking
        }
        else if (image.GetTrackingState() == TrackingState.Stopped && visualizer != null)
        {
            m_Visualizers.Remove(image.GetDataBaseIndex());
            Destroy(visualizer.gameObject);
        }
        //FitToScanOverlay.SetActive(false);
    }
}