/// <summary>
/// Per-frame sync of tracked-image visualizers: spawns a visualizer for every
/// newly tracking image and tears down the visualizer of every stopped image.
/// </summary>
public void Update()
{
#if !UNITY_EDITOR
    // Skip the frame entirely unless motion tracking is up.
    if (NRFrame.SessionStatus != SessionState.Tracking)
    {
        return;
    }
#endif
    // Collect the augmented images that changed this frame.
    NRFrame.GetTrackables<NRTrackableImage>(m_TempTrackingImages, NRTrackableQueryFilter.New);

    foreach (var trackedImage in m_TempTrackingImages)
    {
        int databaseIndex = trackedImage.GetDataBaseIndex();
        TrackingImageVisualizer viz = null;
        m_Visualizers.TryGetValue(databaseIndex, out viz);

        var state = trackedImage.GetTrackingState();
        if (state == TrackingState.Tracking && viz == null)
        {
            NRDebugger.Log("Create new TrackingImageVisualizer!");
            // Anchor a visualizer at the image pose so NRSDK keeps tracking it.
            viz = (TrackingImageVisualizer)Instantiate(
                TrackingImageVisualizerPrefab,
                trackedImage.GetCenterPose().position,
                trackedImage.GetCenterPose().rotation);
            viz.Image = trackedImage;
            viz.transform.parent = transform;
            m_Visualizers.Add(databaseIndex, viz);
        }
        else if (state == TrackingState.Stopped && viz != null)
        {
            m_Visualizers.Remove(databaseIndex);
            Destroy(viz.gameObject);
        }

        // Any reported image hides the fit-to-scan hint overlay.
        FitToScanOverlay.SetActive(false);
    }
}
/// <summary> Callback for the network connection closing: records the
/// disconnected state and logs the event. </summary>
private void OnNetClosed()
{
    NetState = ConnectState.DisConnected;
    NRDebugger.Log("OnNetClosed");
}
/// <summary> Callback for a network connect failure: records the error
/// state and logs the event at error level. </summary>
private void OnNetConnectError()
{
    NetState = ConnectState.Error;
    NRDebugger.LogError("OnNetConnectError");
}
/// <summary>
/// Drains the queue of asynchronous GPU readback requests each frame. A failed
/// request commits null to its task; a completed request is copied into a reusable
/// temp texture, rescaled if the task asked for a different size, and committed.
/// Requests and tasks are paired FIFO queues and are always dequeued together.
/// </summary>
private void Update()
{
    while (m_Requests.Count > 0)
    {
        var req = m_Requests.Peek();
        var task = m_Tasks.Peek();

        if (req.hasError)
        {
            // Readback failed: still commit (with null) so the caller is not left waiting.
            NRDebugger.Log("GPU readback error detected");
            m_Requests.Dequeue();
            CommitResult(null, task);
            m_Tasks.Dequeue();
        }
        else if (req.done)
        {
            var buffer = req.GetData<Color32>();

            // Recreate the temp texture when the camera resolution changed.
            // BUGFIX: was '&&', which only recreated the texture when BOTH
            // dimensions differed — a stale, wrongly-sized texture survived
            // if only one dimension changed.
            if (m_EncodeTempTex != null &&
                (m_EncodeTempTex.width != m_CameraParameters.cameraResolutionWidth ||
                 m_EncodeTempTex.height != m_CameraParameters.cameraResolutionHeight))
            {
                GameObject.Destroy(m_EncodeTempTex);
                m_EncodeTempTex = null;
            }
            if (m_EncodeTempTex == null)
            {
                m_EncodeTempTex = new Texture2D(
                    m_CameraParameters.cameraResolutionWidth,
                    m_CameraParameters.cameraResolutionHeight,
                    TextureFormat.RGB24,
                    false
                );
            }
            m_EncodeTempTex.SetPixels32(buffer.ToArray());
            m_EncodeTempTex.Apply();

            if (task.OnReceive != null)
            {
                if (m_EncodeTempTex.width != task.Width || m_EncodeTempTex.height != task.Height)
                {
                    Texture2D scaledtexture;
                    NRDebugger.LogFormat("[BlendCamera] need to scale the texture which origin width:{0} and out put width:{1}",
                        m_EncodeTempTex.width, task.Width);
                    scaledtexture = ImageEncoder.ScaleTexture(m_EncodeTempTex, task.Width, task.Height);
                    CommitResult(scaledtexture, task);
                    // Destroy the scale temp texture.
                    // NOTE(review): assumes CommitResult consumes the pixels
                    // synchronously before this Destroy — confirm.
                    GameObject.Destroy(scaledtexture);
                }
                else
                {
                    CommitResult(m_EncodeTempTex, task);
                }
            }
            m_Requests.Dequeue();
            m_Tasks.Dequeue();
        }
        else
        {
            // Head request still in flight; later requests cannot be done out of order.
            break;
        }
    }
}
/// <summary> Stores the recorder's encode configuration and logs it. </summary>
/// <param name="config"> The native encoder configuration to use.</param>
public void Config(NativeEncodeConfig config)
{
    EncodeConfig = config;
    NRDebugger.Log("Encode record Config:" + config.ToString());
}
/// <summary> Executes the 'hmd lost tracking' action: logs the reason
/// reported by NRFrame and shows the lost-tracking tip. </summary>
private void OnHMDLostTracking()
{
    NRDebugger.Info("[NRHMDPoseTracker] OnHMDLostTracking:" + NRFrame.LostTrackingReason);
    ShowTips(TipType.LostTracking);
}
/// <summary>
/// Provides a COM pointer to the native IVideoDeviceController.
/// </summary>
/// <returns> A native COM pointer to the IVideoDeviceController; always
/// <c>IntPtr.Zero</c> here because the interface is not supported.</returns>
public IntPtr GetUnsafePointerToVideoDeviceController()
{
    NRDebugger.LogWarning("[NRPhotoCapture] Interface not supported...");
    return (IntPtr.Zero);
}
// Update is called once per frame.
/// <summary>
/// Tracks the database images of a book and spawns the prefab matching each
/// detected page (index 0 -> flower1, index 1 -> plane2); removes a page's
/// visualizer when its tracking stops.
/// </summary>
void Update()
{
#if !UNITY_EDITOR
    // Check that motion tracking is tracking.
    if (NRFrame.SessionStatus != SessionState.Tracking)
    {
        return;
    }
#endif
    // Get updated augmented images for this frame.
    NRFrame.GetTrackables<NRTrackableImage>(m_TempTrackingImages, NRTrackableQueryFilter.New);

    // Create visualizers and anchors for updated augmented images that are tracking
    // and do not previously have a visualizer. Remove visualizers for stopped images.
    foreach (var image in m_TempTrackingImages)
    {
        // Pick the prefab for this database entry. The original per-index switch
        // bodies were identical copy-paste apart from the prefab and the log
        // name, so the shared spawn/remove logic is factored out below.
        Visualizer prefab = null;
        string imageName = null;
        switch (image.GetDataBaseIndex())
        {
            case 0:
                prefab = flower1;
                imageName = "flower1.jpg";
                break;
            case 1:
                prefab = plane2;
                imageName = "plane2.jpg";
                break;
            default:
                // Unknown database index: nothing to spawn.
                break;
        }

        if (prefab != null)
        {
            Visualizer visualizer = null;
            m_Visualizers.TryGetValue(image.GetDataBaseIndex(), out visualizer);
            if (image.GetTrackingState() == TrackingState.Tracking && visualizer == null)
            {
                // Create an anchor to ensure that NRSDK keeps tracking this augmented image.
                // NOTE(review): original author's open question — is image tracking
                // persistent? Will the prefab vanish if the book cannot be seen?
                visualizer = (Visualizer)Instantiate(prefab, image.GetCenterPose().position, image.GetCenterPose().rotation);
                visualizer.pageFlower = image;
                visualizer.transform.parent = transform;
                m_Visualizers.Add(image.GetDataBaseIndex(), visualizer);
                NRDebugger.Log("Found " + imageName + "!");
            }
            else if (image.GetTrackingState() == TrackingState.Stopped && visualizer != null)
            {
                m_Visualizers.Remove(image.GetDataBaseIndex());
                Destroy(visualizer.gameObject);
            }
        }

        // Hide the fit-to-scan overlay whenever any image is reported
        // (matches the original: this ran for every index, including unknown ones).
        FitToScanOverlay.SetActive(false);
    }
}
/// <summary> Executes the 'hmd pose ready' action: logs recovery and
/// clears any on-screen tip. </summary>
private void OnHMDPoseReady()
{
    NRDebugger.Info("[NRHMDPoseTracker] OnHMDPoseReady");
    ShowTips(TipType.None);
}
/// <summary> Serializes the encode configuration to JSON and pushes it to the
/// native hardware encoder, logging whether the native call returned 0 (success).
/// NOTE(review): "Configration" (sic) mirrors the native API name; renaming
/// would break callers. </summary>
/// <param name="config"> The encoder configuration to apply.</param>
public void SetConfigration(NativeEncodeConfig config)
{
    var result = NativeApi.HWEncoderSetConfigration(EncodeHandle, LitJson.JsonMapper.ToJson(config));
    NRDebugger.Log("[Encode] SetConfigration :" + (result == 0).ToString());
}
/// <summary> Hands a GPU texture to the native hardware encoder for the given
/// timestamp, logging whether the native call returned 0 (success). </summary>
/// <param name="texture_id"> Native handle of the texture to encode.</param>
/// <param name="time_stamp"> Timestamp associated with this frame.
/// NOTE(review): units (ns/ms) not visible here — defined by the native API.</param>
public void UpdateSurface(IntPtr texture_id, UInt64 time_stamp)
{
    var result = NativeApi.HWEncoderUpdateSurface(EncodeHandle, texture_id, time_stamp);
    NRDebugger.Log("[Encode] UpdateSurface :" + (result == 0).ToString());
}
/// <summary>
/// One-shot image tracker: spawns a single TrackingImageVisualizer the first
/// time a database image is tracked, detaches it to the scene root, then
/// destroys this component's GameObject to stop further tracking. In the
/// editor it instead spawns the visualizer at a fixed pose so the scene can
/// be previewed without a physical image target.
/// </summary>
public void Update()
{
#if UNITY_EDITOR
    // Editor-only simulation path; the flag makes it run at most once
    // (and the Destroy below removes this component anyway).
    if (virtualImageTrackingEnabled)
    {
        return;
    }
    else
    {
        TrackingImageVisualizer visualizer = null;
        NRDebugger.Log("Create new TrackingImageVisualizer!");
        visualizer = (TrackingImageVisualizer)Instantiate(
            TrackingImageVisualizerPrefab,
            new Vector3(0f, 0f, 0f),
            Quaternion.identity);
        // visualizer.transform.parent = transform;
        visualizer.transform.parent = null; // add to root of scene
        visualizer.transform.localPosition = new Vector3(0, 0, 5f);
        virtualImageTrackingEnabled = true; // set flag
        Destroy(gameObject); // STOP image tracking
        return;
    }
#endif
#if !UNITY_EDITOR
    // Check that motion tracking is tracking.
    // NOTE(review): this checks SessionState.Running while the other image
    // tracker in this project checks SessionState.Tracking — confirm which
    // is intended.
    if (NRFrame.SessionStatus != SessionState.Running)
    {
        return;
    }
#endif
    // Get updated augmented images for this frame.
    NRFrame.GetTrackables<NRTrackableImage>(m_TempTrackingImages, NRTrackableQueryFilter.New);

    // Create visualizers and anchors for updated augmented images that are tracking and do not previously
    // have a visualizer. Remove visualizers for stopped images.
    foreach (var image in m_TempTrackingImages)
    {
        TrackingImageVisualizer visualizer = null;
        m_Visualizers.TryGetValue(image.GetDataBaseIndex(), out visualizer);
        if (image.GetTrackingState() == TrackingState.Tracking && visualizer == null)
        {
            NRDebugger.Log("Create new TrackingImageVisualizer!");
            // Create an anchor to ensure that NRSDK keeps tracking this augmented image.
            visualizer = (TrackingImageVisualizer)Instantiate(TrackingImageVisualizerPrefab,
                image.GetCenterPose().position,
                image.GetCenterPose().rotation);
            visualizer.Image = image;
            // visualizer.transform.parent = transform;
            visualizer.transform.parent = null; // add to root of scene
            m_Visualizers.Add(image.GetDataBaseIndex(), visualizer);
            // Destroying this GameObject mid-iteration ends tracking after the
            // first spawned visualizer; the loop still finishes this frame.
            Destroy(gameObject); // STOP image tracking
        }
        else if (image.GetTrackingState() == TrackingState.Stopped && visualizer != null)
        {
            m_Visualizers.Remove(image.GetDataBaseIndex());
            Destroy(visualizer.gameObject);
        }
        //FitToScanOverlay.SetActive(false);
    }
}
/// <summary> Executes the 'stopped video capture mode' action (logs only). </summary>
/// <param name="result"> The result of stopping video capture mode.</param>
void OnStoppedVideoCaptureMode(NRVideoCapture.VideoCaptureResult result)
{
    NRDebugger.Info("Stopped Video Capture Mode!");
}
/// <summary> Executes the 'stopped recording video' action: logs, then
/// asynchronously leaves video capture mode. </summary>
/// <param name="result"> The result of stopping the recording.</param>
void OnStoppedRecordingVideo(NRVideoCapture.VideoCaptureResult result)
{
    NRDebugger.Info("Stopped Recording Video!");
    // Recording is done; tear down capture mode next.
    m_VideoCapture.StopVideoModeAsync(OnStoppedVideoCaptureMode);
}
/// <summary> Executes the 'started recording video' action (logs only). </summary>
/// <param name="result"> The result of starting the recording.</param>
void OnStartedRecordingVideo(NRVideoCapture.VideoCaptureResult result)
{
    NRDebugger.Info("Started Recording Video!");
}
/// <summary> Executes the 'started video capture mode' action: logs, then
/// asynchronously starts recording to <c>VideoSavePath</c>. </summary>
/// <param name="result"> The result of entering video capture mode.</param>
void OnStartedVideoCaptureMode(NRVideoCapture.VideoCaptureResult result)
{
    NRDebugger.Info("Started Video Capture Mode!");
    // Capture mode is up; kick off the actual recording.
    m_VideoCapture.StartRecordingAsync(VideoSavePath, OnStartedRecordingVideo);
}