public void Commit(RenderTexture rt, UInt64 timestamp)
        {
            // Push one rendered frame into the native encoder.
            // Frames arriving before Start() (or after Stop()) are silently dropped.
            if (m_IsStarted)
            {
#if !UNITY_EDITOR
                // The native encoder consumes the GPU texture directly via its native pointer.
                NativeEncoder.UpdateSurface(rt.GetNativeTexturePtr(), timestamp);
#endif
                NRDebugger.Log("Encode record Commit...");
            }
        }
        private void OnNetConnected()
        {
            // Connection established: record the new state, announce the local
            // player (if one exists) to the server, then begin the keep-alive loop.
            NetState = ConnectState.Connected;
            NRDebugger.Log("OnNetConnected");

            var localPlayer = Player;
            if (localPlayer != null)
            {
                CreateNetObjRequest(localPlayer);
            }

            StartCoroutine(HeartBeatFunc());
        }
        public void Stop()
        {
            // End the recording session. Calling Stop() while not recording is a no-op.
            if (m_IsStarted)
            {
#if !UNITY_EDITOR
                NativeEncoder.Stop();
#endif
                NRDebugger.Log("Encode record Stop");
                m_IsStarted = false;
            }
        }
        public void Start()
        {
            // Begin a recording session; redundant calls while already recording are ignored.
            if (!m_IsStarted)
            {
#if !UNITY_EDITOR
                // Push the encode settings down to the native layer, then trigger the
                // start event on Unity's render thread via the plugin-event callback.
                NativeEncoder.SetConfigration(EncodeConfig);
                GL.IssuePluginEvent(RenderThreadHandlePtr, STARTENCODEEVENT);
#endif
                NRDebugger.Log("Encode record Start");
                m_IsStarted = true;
            }
        }
// ---- Example #5 ----
        /// <summary>
        /// Creates a new texture of the requested size by bilinearly sampling
        /// <paramref name="source"/>.
        /// </summary>
        /// <param name="source">Texture to sample; must have CPU-readable pixel data.</param>
        /// <param name="targetWidth">Width of the result in pixels.</param>
        /// <param name="targetHeight">Height of the result in pixels.</param>
        /// <returns>A newly allocated Texture2D that the caller owns (Destroy when done).</returns>
        public static Texture2D ScaleTexture(Texture2D source, int targetWidth, int targetHeight)
        {
            NRDebugger.Log("[ImageEncoder] ScaleTexture..");
            Texture2D result = new Texture2D(targetWidth, targetHeight, source.format, false);

            // Sample once per destination pixel into a buffer and upload with a single
            // SetPixels call: per-pixel SetPixel is dramatically slower because every
            // call touches the texture's pixel data individually. The buffer layout
            // (row-major, bottom row first, index i * width + j) matches SetPixel(j, i).
            Color[] pixels = new Color[targetWidth * targetHeight];
            for (int i = 0; i < targetHeight; ++i)
            {
                for (int j = 0; j < targetWidth; ++j)
                {
                    pixels[i * targetWidth + j] = source.GetPixelBilinear((float)j / (float)targetWidth, (float)i / (float)targetHeight);
                }
            }
            result.SetPixels(pixels);

            result.Apply();
            return(result);
        }
// ---- Example #6 ----
        /// <summary>
        /// Waits until NRSDK can provide valid eye projection data, then applies
        /// the projection matrix and local pose matching this component's EyeType
        /// to the target camera and its transform.
        /// </summary>
        private IEnumerator Initialize()
        {
            bool result;

            EyeProjectMatrixData matrix_data = NRFrame.GetEyeProjectMatrix(out result, m_TargetCamera.nearClipPlane, m_TargetCamera.farClipPlane);

            // The projection data is produced asynchronously by the SDK; poll once
            // per frame until it becomes available.
            // Fix: corrected "Waitting" typo in the log message.
            while (!result)
            {
                Debug.Log("Waiting to initialize camera param.");
                yield return(new WaitForEndOfFrame());

                matrix_data = NRFrame.GetEyeProjectMatrix(out result, m_TargetCamera.nearClipPlane, m_TargetCamera.farClipPlane);
            }

            var eyeposFromHead = NRFrame.EyePosFromHead;

            // Apply the matrix/pose pair for this eye; unknown eye types are left untouched.
            switch (EyeType)
            {
            case NativeEye.LEFT:
                m_TargetCamera.projectionMatrix = matrix_data.LEyeMatrix;
                NRDebugger.Log("[Matrix] RGB Camera Project Matrix :" + m_TargetCamera.projectionMatrix.ToString());
                transform.localPosition = eyeposFromHead.LEyePose.position;
                transform.localRotation = eyeposFromHead.LEyePose.rotation;
                NRDebugger.LogFormat("RGB Camera pos:{0} rotation:{1}", transform.localPosition.ToString(), transform.localRotation.ToString());
                break;

            case NativeEye.RIGHT:
                m_TargetCamera.projectionMatrix = matrix_data.REyeMatrix;
                NRDebugger.Log("[Matrix] RGB Camera Project Matrix :" + m_TargetCamera.projectionMatrix.ToString());
                transform.localPosition = eyeposFromHead.REyePose.position;
                transform.localRotation = eyeposFromHead.REyePose.rotation;
                NRDebugger.LogFormat("RGB Camera pos:{0} rotation:{1}", transform.localPosition.ToString(), transform.localRotation.ToString());
                break;

            case NativeEye.RGB:
                m_TargetCamera.projectionMatrix = matrix_data.RGBEyeMatrix;
                NRDebugger.Log("[Matrix] RGB Camera Project Matrix :" + m_TargetCamera.projectionMatrix.ToString());
                transform.localPosition = eyeposFromHead.RGBEyePos.position;
                transform.localRotation = eyeposFromHead.RGBEyePos.rotation;
                NRDebugger.LogFormat("RGB Camera pos:{0} rotation:{1}", transform.localPosition.ToString(), transform.localRotation.ToString());
                break;

            default:
                break;
            }
        }
// ---- Example #7 ----
        public void Update()
        {
#if !UNITY_EDITOR
            // Skip the frame entirely while motion tracking is not running.
            if (NRFrame.SessionStatus != SessionState.Tracking)
            {
                return;
            }
#endif
            // Pull the image trackables that changed this frame.
            NRFrame.GetTrackables <NRTrackableImage>(m_TempTrackingImages, NRTrackableQueryFilter.New);

            // Spawn a visualizer for each newly tracked image; tear one down when
            // its image stops being tracked.
            foreach (var image in m_TempTrackingImages)
            {
                TrackingImageVisualizer visualizer;
                m_Visualizers.TryGetValue(image.GetDataBaseIndex(), out visualizer);

                if (image.GetTrackingState() == TrackingState.Tracking && visualizer == null)
                {
                    // First time we see this image: anchor a visualizer at its pose.
                    NRDebugger.Log("Create new TrackingImageVisualizer!");
                    var centerPose = image.GetCenterPose();
                    visualizer = (TrackingImageVisualizer)Instantiate(TrackingImageVisualizerPrefab, centerPose.position, centerPose.rotation);
                    visualizer.Image = image;
                    visualizer.transform.parent = transform;
                    m_Visualizers.Add(image.GetDataBaseIndex(), visualizer);
                }
                else if (image.GetTrackingState() == TrackingState.Stopped && visualizer != null)
                {
                    m_Visualizers.Remove(image.GetDataBaseIndex());
                    Destroy(visualizer.gameObject);
                }

                // NOTE(review): hidden once per updated image and never re-shown here.
                FitToScanOverlay.SetActive(false);
            }
        }
 private void OnNetClosed()
 {
     // The connection went down; flag the session as disconnected.
     NRDebugger.Log("OnNetClosed");
     NetState = ConnectState.DisConnected;
 }
 /// <summary>
 /// Stores the encode settings to be used by the recorder.
 /// </summary>
 /// <param name="config">Native encoder configuration to apply.</param>
 public void Config(NativeEncodeConfig config)
 {
     EncodeConfig = config;
     // Fix: the log separator was a mis-encoded (mojibake) full-width colon.
     NRDebugger.Log("Encode record Config:" + config.ToString());
 }
// ---- Example #10 ----
        /// <summary>
        /// Drains every completed (or failed) GPU readback request, keeping the
        /// request and task queues in lock-step. Stops at the first request that
        /// is still in flight, since later requests cannot be done before it.
        /// </summary>
        private void Update()
        {
            while (m_Requests.Count > 0)
            {
                var req  = m_Requests.Peek();
                var task = m_Tasks.Peek();

                if (req.hasError)
                {
                    // Readback failed: report a null result so the caller is not left hanging.
                    NRDebugger.Log("GPU readback error detected");
                    m_Requests.Dequeue();

                    CommitResult(null, task);
                    m_Tasks.Dequeue();
                }
                else if (req.done)
                {
                    var buffer = req.GetData <Color32>();
                    // Recreate the staging texture when the camera resolution changed.
                    // Fix: the original condition used '&&', so the texture was only
                    // rebuilt when BOTH dimensions differed; a single-dimension change
                    // kept a stale texture whose size no longer matched the buffer.
                    if (m_EncodeTempTex != null &&
                        (m_EncodeTempTex.width != m_CameraParameters.cameraResolutionWidth ||
                         m_EncodeTempTex.height != m_CameraParameters.cameraResolutionHeight))
                    {
                        GameObject.Destroy(m_EncodeTempTex);
                        m_EncodeTempTex = null;
                    }
                    if (m_EncodeTempTex == null)
                    {
                        m_EncodeTempTex = new Texture2D(
                            m_CameraParameters.cameraResolutionWidth,
                            m_CameraParameters.cameraResolutionHeight,
                            TextureFormat.RGB24,
                            false
                            );
                    }
                    m_EncodeTempTex.SetPixels32(buffer.ToArray());
                    m_EncodeTempTex.Apply();

                    if (task.OnReceive != null)
                    {
                        if (m_EncodeTempTex.width != task.Width || m_EncodeTempTex.height != task.Height)
                        {
                            Texture2D scaledtexture;
                            NRDebugger.LogFormat("[BlendCamera] need to scale the texture which origin width:{0} and out put width:{1}",
                                                 m_EncodeTempTex.width, task.Width);
                            scaledtexture = ImageEncoder.ScaleTexture(m_EncodeTempTex, task.Width, task.Height);
                            CommitResult(scaledtexture, task);
                            //Destroy the scale temp texture.
                            GameObject.Destroy(scaledtexture);
                        }
                        else
                        {
                            CommitResult(m_EncodeTempTex, task);
                        }
                    }
                    m_Requests.Dequeue();
                    m_Tasks.Dequeue();
                }
                else
                {
                    // Oldest request still pending; nothing more to drain this frame.
                    break;
                }
            }
        }
// ---- Example #11 ----
        // Update is called once per frame
        /// <summary>
        /// Per-frame image-tracking driver: spawns the prefab matching each newly
        /// tracked database image and removes visualizers for stopped images.
        /// </summary>
        // Update is called once per frame
        void Update()
        {
#if !UNITY_EDITOR
            // Check that motion tracking is tracking.
            if (NRFrame.SessionStatus != SessionState.Tracking)
            {
                return;
            }
#endif
            // Get updated augmented images for this frame.
            NRFrame.GetTrackables <NRTrackableImage>(m_TempTrackingImages, NRTrackableQueryFilter.New);

            // The two handled database entries only differed in which prefab was
            // spawned and which message was logged, so the per-index copy-paste
            // has been collapsed into one code path.
            foreach (var image in m_TempTrackingImages)
            {
                // Select the prefab and log text for this database entry;
                // other indices are not handled (original default case).
                Object prefab;
                string foundLog;
                switch (image.GetDataBaseIndex())
                {
                case 0:
                    prefab   = flower1;
                    foundLog = "Found flower1.jpg!";
                    break;

                case 1:
                    prefab   = plane2;
                    foundLog = "Found plane2.jpg!";
                    break;

                default:
                    // Unhandled image: preserve the original behavior of still
                    // hiding the fit-to-scan overlay for every updated image.
                    FitToScanOverlay.SetActive(false);
                    continue;
                }

                Visualizer visualizer = null;
                m_Visualizers.TryGetValue(image.GetDataBaseIndex(), out visualizer);
                if (image.GetTrackingState() == TrackingState.Tracking && visualizer == null)
                {
                    // Create an anchor to ensure that NRSDK keeps tracking this augmented image.
                    visualizer                  = (Visualizer)Instantiate(prefab, image.GetCenterPose().position, image.GetCenterPose().rotation);
                    visualizer.pageFlower       = image;
                    visualizer.transform.parent = transform;
                    m_Visualizers.Add(image.GetDataBaseIndex(), visualizer);

                    NRDebugger.Log(foundLog);
                }
                else if (image.GetTrackingState() == TrackingState.Stopped && visualizer != null)
                {
                    m_Visualizers.Remove(image.GetDataBaseIndex());
                    Destroy(visualizer.gameObject);
                }
                FitToScanOverlay.SetActive(false);
            }
        }
// ---- Example #12 ----
        public void UpdateSurface(IntPtr texture_id, UInt64 time_stamp)
        {
            // Feed one frame's native texture into the hardware encoder and log
            // whether the native call reported success (0 means OK).
            var result = NativeApi.HWEncoderUpdateSurface(EncodeHandle, texture_id, time_stamp);
            bool succeeded = (result == 0);
            NRDebugger.Log("[Encode] UpdateSurface :" + succeeded.ToString());
        }
// ---- Example #13 ----
        // Serializes the encode settings to JSON and pushes them to the native encoder.
        // NOTE(review): the public name "SetConfigration" is misspelled, but existing
        // callers depend on it, so it must not be renamed here.
        public void SetConfigration(NativeEncodeConfig config)
        {
            string json = LitJson.JsonMapper.ToJson(config);
            var result = NativeApi.HWEncoderSetConfigration(EncodeHandle, json);
            NRDebugger.Log("[Encode] SetConfigration :" + (result == 0).ToString());
        }
 private void OnHMDLostTracking()
 {
     // Tracking dropped: log the SDK-provided reason, then show the lost-tracking tip.
     var reason = NRFrame.LostTrackingReason;
     NRDebugger.Log("[NRHMDPoseTracker] OnHMDLostTracking:" + reason);
     ShowTips(TipType.LostTracking);
 }
 private void OnHMDPoseReady()
 {
     // Pose recovered: clear whatever tracking tip is currently shown.
     ShowTips(TipType.None);
     NRDebugger.Log("[NRHMDPoseTracker] OnHMDPoseReady");
 }
// ---- Example #16 ----
        /// <summary>
        /// Per-frame image-tracking driver. In the Editor it fakes a single
        /// detection (first call only) and then destroys this component; on
        /// device it spawns a visualizer for each newly tracked image, removes
        /// visualizers for stopped images, and destroys this component after the
        /// first spawn to stop further tracking.
        /// </summary>
        public void Update()
        {
#if UNITY_EDITOR
            // Editor-only simulation: there is no real image tracking in the
            // Editor, so spawn one visualizer at a fixed pose exactly once.
            // Both branches return, so the device code below never runs here.
            if (virtualImageTrackingEnabled)
            {
                return;
            }
            else
            {
                TrackingImageVisualizer visualizer = null;
                NRDebugger.Log("Create new TrackingImageVisualizer!");
                visualizer = (TrackingImageVisualizer)Instantiate(
                    TrackingImageVisualizerPrefab,
                    new Vector3(0f, 0f, 0f),
                    Quaternion.identity);
                // visualizer.transform.parent = transform;
                visualizer.transform.parent        = null; // add to root of scene
                visualizer.transform.localPosition = new Vector3(0, 0, 5f);

                virtualImageTrackingEnabled = true; // set flag

                // Destroying this component's GameObject halts further Update calls.
                Destroy(gameObject);                // STOP image tracking

                return;
            }
#endif

#if !UNITY_EDITOR
            // Check that motion tracking is tracking.
            // NOTE(review): sibling Update() methods in this file compare against
            // SessionState.Tracking; this one uses Running — confirm intentional.
            if (NRFrame.SessionStatus != SessionState.Running)
            {
                return;
            }
#endif
            // Get updated augmented images for this frame.
            NRFrame.GetTrackables <NRTrackableImage>(m_TempTrackingImages, NRTrackableQueryFilter.New);

            // Create visualizers and anchors for updated augmented images that are tracking and do not previously
            // have a visualizer. Remove visualizers for stopped images.
            foreach (var image in m_TempTrackingImages)
            {
                TrackingImageVisualizer visualizer = null;
                m_Visualizers.TryGetValue(image.GetDataBaseIndex(), out visualizer);
                if (image.GetTrackingState() == TrackingState.Tracking && visualizer == null)
                {
                    NRDebugger.Log("Create new TrackingImageVisualizer!");
                    // Create an anchor to ensure that NRSDK keeps tracking this augmented image.
                    visualizer       = (TrackingImageVisualizer)Instantiate(TrackingImageVisualizerPrefab, image.GetCenterPose().position, image.GetCenterPose().rotation);
                    visualizer.Image = image;
                    // visualizer.transform.parent = transform;
                    visualizer.transform.parent = null; // add to root of scene
                    m_Visualizers.Add(image.GetDataBaseIndex(), visualizer);
                    // One-shot behavior: stop tracking after the first visualizer spawns.
                    Destroy(gameObject);                // STOP image tracking
                }
                else if (image.GetTrackingState() == TrackingState.Stopped && visualizer != null)
                {
                    m_Visualizers.Remove(image.GetDataBaseIndex());
                    Destroy(visualizer.gameObject);
                }

                //FitToScanOverlay.SetActive(false);
            }
        }