/// <summary>
/// Delayed entry point: starts the NR session and then applies the current
/// session configuration.
/// </summary>
void Start()
{
    // A dirty behaviour must not start a session.
    if (isDirty)
    {
        return;
    }

    NRDebugger.Log("[SessionBehaviour DelayStart: StartSession]");
    var sessionManager = NRSessionManager.Instance;
    sessionManager.StartSession();
    sessionManager.SetConfiguration(SessionConfig);
}
        /// <summary>
        /// Builds tracking-image data for the currently selected project images:
        /// warns about unsupported formats, then runs the CLI tool for each image
        /// on a background job, writing into a fresh GUID-named directory under
        /// MarkerDataOutPath.
        /// </summary>
        private static void AddAssetsToNewTrackingImageDatabase()
        {
            bool unsupportedImagesSelected;
            List<string> selectedImagePaths = GetSelectedImagePaths(out unsupportedImagesSelected);

            if (unsupportedImagesSelected)
            {
                var message = string.Format("Some selected images could not be added to the TrackingImageDatabase because " +
                                            "they are not in a supported format.  Supported image formats are {0}.",
                                            k_SupportedImageFormatListMessage);
                // The message is already formatted; LogWarningFormat would re-parse
                // it as a format string and throw on any stray braces.
                Debug.LogWarning(message);
                EditorUtility.DisplayDialog("Unsupported Images Selected", message, "Ok");
            }

            EnsureDirectoryExists(MarkerDataOutPath);
            AssetDatabase.Refresh();

            // Each run gets a unique output directory so repeated runs never collide.
            string out_path = MarkerDataOutPath + System.Guid.NewGuid().ToString();
            EnsureDirectoryExists(out_path);

            string binary_path;
            if (!NRTrackingImageDatabase.FindCliBinaryPath(out binary_path))
            {
                // Without the CLI binary there is nothing to run.
                return;
            }

            foreach (var item in selectedImagePaths)
            {
                // Application.dataPath ends with "Assets" (6 chars); stripping it
                // leaves the project root (with trailing separator) so the
                // asset-relative path can be appended directly.
                string image_path = Application.dataPath.Substring(0, Application.dataPath.Length - 6) + item;
                s_QualityBackgroundExecutor.PushJob(() =>
                {
                    string param = string.Format("-image_path={0} -save_dir={1} -width={2}",
                                                 image_path, out_path, 400).Trim();
                    string result = string.Empty;
                    string error  = string.Empty;

                    ShellHelper.RunCommand(binary_path, param, out result, out error);
                    Debug.Log(string.Format("result : {0} error : {1}", result, error));
                });
            }

            AssetDatabase.Refresh();
        }

        /// <summary>Creates the given directory if it does not exist yet.</summary>
        private static void EnsureDirectoryExists(string path)
        {
            if (!Directory.Exists(path))
            {
                NRDebugger.Log("Directory is not exist, create a new one...");
                Directory.CreateDirectory(path);
            }
        }
Beispiel #3
0
        /// <summary>
        /// Release the rgb camera.
        /// </summary>
        public static void Release()
        {
            // Nothing to release if the camera was never created.
            if (m_NativeCamera == null)
            {
                return;
            }

            NRDebugger.Log("[NRRgbCamera] Start to Release");
#if !UNITY_EDITOR
            m_NativeCamera.Release();
#endif
            isRGBCamStart = false;
        }
Beispiel #4
0
        /// <summary>
        /// Allocates one render texture per eye buffer and caches each texture's
        /// native pointer in m_RTDict.
        /// </summary>
        private void CreateRenderTextures()
        {
            // Only the left eye is queried; both buffers use the same resolution.
            var resolution = NRDevice.Instance.NativeHMD.GetEyeResolution(NativeEye.LEFT);
            NRDebugger.Log("[CreateRenderTextures]  resolution :" + resolution.ToString());

            for (int index = 0; index < EyeTextureCount; ++index)
            {
                var texture = GenRenderTexture(resolution.width, resolution.height);
                eyeTextures[index] = texture;
                m_RTDict.Add(texture, texture.GetNativeTexturePtr());
            }
        }
Beispiel #5
0
        /// <summary>
        /// Stop the rgb camera.
        /// </summary>
        public static void Stop()
        {
            // Stopping an already-stopped camera is a no-op.
            if (isRGBCamStart)
            {
                NRDebugger.Log("[NRRgbCamera] Start to Stop");
#if !UNITY_EDITOR
                m_NativeCamera.StopCapture();
#endif
                isRGBCamStart = false;
            }
        }
Beispiel #6
0
        /// <summary>
        /// Switches the capture format of the rgb camera, initializing the camera
        /// on demand first.
        /// </summary>
        /// <param name="format">The image format to capture in.</param>
        private static void SetImageFormat(CameraImageFormat format)
        {
            // Lazy init: the native camera must exist before a format can be set.
            if (!isInitiate)
            {
                Initialize();
            }
#if !UNITY_EDITOR
            m_NativeCamera.SetImageFormat(format);
#endif
            ImageFormat = format;
            NRDebugger.Log("[NRRgbCamera] SetImageFormat : " + format);
        }
Beispiel #7
0
        /// <summary>
        /// Queries the native head tracker for the per-eye poses.
        /// </summary>
        /// <param name="outLeftEyePose">Receives the left-eye pose on success.</param>
        /// <param name="outRightEyePose">Receives the right-eye pose on success.</param>
        /// <returns>The native result code; the out poses are written only on Success.</returns>
        public NativeResult GetEyePose(ref Pose outLeftEyePose, ref Pose outRightEyePose)
        {
            var leftMat  = new NativeMat4f(Matrix4x4.identity);
            var rightMat = new NativeMat4f(Matrix4x4.identity);
            var result   = NativeApi.NRHeadTrackingGetEyePose(m_NativeInterface.TrackingHandle, headTrackingHandle, ref leftMat, ref rightMat);

            if (result == NativeResult.Success)
            {
                ConversionUtility.ApiPoseToUnityPose(leftMat, out outLeftEyePose);
                ConversionUtility.ApiPoseToUnityPose(rightMat, out outRightEyePose);
            }
            NRDebugger.Log("[NativeHeadTracking] GetEyePose :" + result);
            return result;
        }
Beispiel #8
0
        /// <summary>
        /// Creates the NR session as soon as this behaviour wakes up.
        /// </summary>
        new void Awake()
        {
            base.Awake();

            // A dirty behaviour must not create a session.
            if (isDirty)
            {
                return;
            }
#if !UNITY_EDITOR
            // On device, mirror Unity's build flavor for SDK logging.
            NRDebugger.EnableLog = Debug.isDebugBuild;
#endif
            NRDebugger.Log("[SessionBehaviour Awake: CreateSession]");
            NRSessionManager.Instance.CreateSession(this);
        }
        /// <summary>
        /// Prepares the virtual displayer; playback stays off until Init completes.
        /// </summary>
        new void Awake()
        {
            base.Awake();
            if (isDirty)
            {
                return;
            }

            NRDebugger.Log("[NRVirtualDisplayer] Awake");
            IsPlaying = false;
#if UNITY_EDITOR
            // In the editor emulator the UI camera is driven manually.
            m_UICamera.enabled = false;
#endif
            Init();
        }
Beispiel #10
0
        /// <summary>
        /// Release the rgb camera.
        /// </summary>
        public static void Release()
        {
            // Nothing to release if the camera was never created.
            if (m_NativeCamera == null)
            {
                return;
            }

            NRDebugger.Log("[NRRgbCamera] Start to Release");
#if !UNITY_EDITOR
            m_NativeCamera.Release();
            m_NativeCamera = null;
#endif
            // Drop subscribers and reset state so the camera can be re-initialized.
            OnError       = null;
            OnImageUpdate = null;
            isInitiate    = false;
            isRGBCamStart = false;
        }
 /// <summary>
 /// Creates and initializes the native controller backend; on failure the
 /// handle is dropped and an error is logged.
 /// </summary>
 private void InitNativeController()
 {
     m_NativeController = new NativeController();
     bool succeed = m_NativeController.Init();
     if (succeed)
     {
         Inited = true;
         NRDebugger.Log("NRControllerProvider Init Succeed");
     }
     else
     {
         m_NativeController = null;
         NRDebugger.LogError("NRControllerProvider Init Failed !!");
     }
     m_NeedInit = false;
 }
Beispiel #12
0
        /// <summary>
        /// Pushes a session config to the native layer; a null config is ignored.
        /// </summary>
        /// <param name="config">The configuration to apply.</param>
        public void SetConfiguration(NRSessionConfig config)
        {
            if (config is null)
            {
                return;
            }
#if !UNITY_EDITOR
            // The update runs through the async executer to stay off the main thread.
            AsyncTaskExecuter.Instance.RunAction(() =>
            {
                NRDebugger.Log("AsyncTaskExecuter: UpdateConfig");
                NativeAPI.Configration.UpdateConfig(config);
            });
#endif
        }
        /// <summary>
        /// Ensures the Android manifest declares the camera permission.
        /// </summary>
        internal void SetCameraPermission()
        {
            var manifest = SelectSingleNode("/manifest");

            if (manifest.InnerXml.Contains("android.permission.CAMERA"))
            {
                NRDebugger.Log("Already has the camera permission.");
                return;
            }

            XmlElement permission = CreateElement("uses-permission");
            manifest.AppendChild(permission);
            permission.Attributes.Append(CreateAndroidAttribute("name", "android.permission.CAMERA"));
        }
Beispiel #14
0
        /// <summary>
        /// Start to play rgb camera.
        /// </summary>
        public static void Play()
        {
            // Lazy init: make sure the native camera exists before capture.
            if (!m_IsInitialized)
            {
                Initialize();
            }
            // Already playing: nothing to do.
            if (m_IsPlaying)
            {
                return;
            }

            NRDebugger.Log("[NRRgbCamera] Start to play");
#if !UNITY_EDITOR
            m_NativeCamera.StartCapture();
#endif
            m_IsPlaying = true;
        }
Beispiel #15
0
        /// <summary>
        /// Start to play rgb camera.
        /// </summary>
        public static void Play()
        {
            // Lazy init: make sure the native camera exists before capture.
            if (!isInitiate)
            {
                Initialize();
            }
            // Already playing: nothing to do.
            if (isRGBCamStart)
            {
                return;
            }

            NRDebugger.Log("[NRRgbCamera] Start to play");
#if !UNITY_EDITOR
            m_NativeCamera.StartCapture();
#endif
            isRGBCamStart = true;
        }
        /// <summary>
        /// Creates and initializes the native controller backend. On OSX editors
        /// the native Init is skipped entirely.
        /// </summary>
        private void InitNativeController()
        {
            m_NativeController = new NativeController();
#if !UNITY_EDITOR_OSX
            if (m_NativeController.Init())
            {
                Inited = true;
                NRDebugger.Log("NRControllerProvider Init Succeed");
            }
            else
            {
                m_NativeController = null;
                // NRDebugger (not Unity's Debug) for consistency with the success
                // path and the other InitNativeController overloads.
                NRDebugger.LogError("NRControllerProvider Init Failed !!");
            }
#endif
            m_NeedInit = false;
        }
Beispiel #17
0
        /// <summary>
        /// Stop the rgb camera.
        /// </summary>
        public static void Stop()
        {
            if (!m_IsPlaying)
            {
                return;
            }
            NRDebugger.Log("[NRRgbCamera] Start to Stop");

            // Keep capturing while any texture still consumes frames; otherwise
            // pause and release the camera resource.
            if (m_ActiveTextures.Count != 0)
            {
                return;
            }

            m_IsPlaying = false;
#if !UNITY_EDITOR
            m_NativeCamera.StopCapture();
#endif
            Release();
        }
Beispiel #18
0
        /// <summary>
        /// Unzips the tracking-image database raw data to its configured output
        /// path; does nothing when no config or database is present.
        /// </summary>
        private void DeployData()
        {
            var config = NRSessionBehaviour.SessionConfig;
            if (config == null)
            {
                return;
            }

            var database = config.TrackingImageDatabase;
            if (database == null)
            {
                NRDebugger.Log("augmented image data base is null!");
                return;
            }

            string deploy_path = database.TrackingImageDataOutPutPath;
            NRDebugger.Log("[TrackingImageDatabase] DeployData to path :" + deploy_path);
            ZipUtility.UnzipFile(database.RawData, deploy_path, NativeConstants.ZipKey);
        }
Beispiel #19
0
        /// <summary>
        /// Stop the rgb camera.
        /// </summary>
        public static void Stop()
        {
            // Stopping an already-stopped camera is a no-op.
            if (isRGBCamStart)
            {
                NRDebugger.Log("[NRRgbCamera] Start to Stop");
#if !UNITY_EDITOR
                m_NativeCamera.StopCapture();
#endif
                isRGBCamStart = false;

                // No consumer left: free the camera resource as well.
                if (m_ActiveTextures.Count == 0)
                {
                    Release();
                }
            }
        }
Beispiel #20
0
        /// <summary>
        /// Release the rgb camera.
        /// </summary>
        public static void Release()
        {
            // Nothing to release if the camera was never created.
            if (m_NativeCamera != null)
            {
                NRDebugger.Log("[NRRgbCamera] Start to Release");
#if !UNITY_EDITOR
                m_NativeCamera.Release();
                m_NativeCamera = null;
#endif
                // Reset frame data, subscribers and state flags so the camera can
                // be initialized again later.
                m_CurrentFrame.data = null;
                OnError             = null;
                OnImageUpdate       = null;
                m_IsInitialized     = false;
                m_IsPlaying         = false;
            }
        }
        /// <summary>
        /// Ensures the Android manifest declares the bluetooth permissions
        /// (BLUETOOTH and BLUETOOTH_ADMIN).
        /// </summary>
        internal void SetBlueToothPermission()
        {
            var manifest = SelectSingleNode("/manifest");

            // Adds one <uses-permission android:name="..."/> element under <manifest>.
            void AddPermission(string permission)
            {
                XmlElement child = CreateElement("uses-permission");
                manifest.AppendChild(child);
                child.Attributes.Append(CreateAndroidAttribute("name", permission));
            }

            if (!manifest.InnerXml.Contains("android.permission.BLUETOOTH"))
            {
                // Each permission needs its own element: appending a second
                // android:name attribute to the same element would replace the
                // first (XmlAttributeCollection.Append removes an existing
                // attribute with the same name), leaving only BLUETOOTH_ADMIN.
                AddPermission("android.permission.BLUETOOTH");
                AddPermission("android.permission.BLUETOOTH_ADMIN");
            }
            else
            {
                NRDebugger.Log("Already has the bluetooth permission.");
            }
        }
        /// <summary>
        /// Creates and initializes the native controller backend, then logs the
        /// native version when running on device.
        /// </summary>
        private void InitNativeController()
        {
            m_NativeController = new NativeController();
            bool succeed = m_NativeController.Init();
            if (succeed)
            {
                Inited = true;
                NRDebugger.Log("NRControllerProvider Init Succeed");
            }
            else
            {
                m_NativeController = null;
                NRDebugger.LogError("NRControllerProvider Init Failed !!");
            }

#if !UNITY_EDITOR
            NRDebugger.Log("[NRInput] version:" + GetVersion(0));
#endif
            m_NeedInit = false;
        }
        /// <summary>
        /// One-time setup of the multi-screen controller: device init, UI camera,
        /// offscreen render target and the native multi-display pipeline.
        /// </summary>
        private void Init()
        {
            if (m_IsInit)
            {
                return;
            }
            NRDebugger.Log("[MultiScreenController] Init.");
            NRDevice.Instance.Init();
            // Match the UI camera to the phone screen's aspect ratio.
            m_ScreenResolution          = NRPhoneScreen.Resolution;
            transform.position          = m_StartPos;
            m_UICamera.aspect           = m_ScreenResolution.x / m_ScreenResolution.y;
            m_UICamera.orthographicSize = 6;

            // The UI camera renders into an offscreen texture (24-bit depth) that
            // feeds the phone-side display.
            m_ControllerScreen       = new RenderTexture((int)m_ScreenResolution.x, (int)m_ScreenResolution.y, 24);
            m_UICamera.targetTexture = m_ControllerScreen;
            NRSessionManager.Instance.VirtualDisplayer = this;

#if UNITY_ANDROID && !UNITY_EDITOR
            // On device, hand the native texture pointer to the native
            // multi-display and kick it off on the render thread.
            m_RenderTexturePtr = m_ControllerScreen.GetNativeTexturePtr();
            NativeMultiDisplay = new NativeMultiDisplay();
            NativeMultiDisplay.Create();
            // Creat multiview controller..
            GL.IssuePluginEvent(RenderThreadHandlePtr, 0);
#elif UNITY_EDITOR
            InitEmulator();
#endif
            if (m_VirtualController)
            {
                m_VirtualController.Init();
            }
            NRDebugger.Log("[MultiScreenController] Init successed.");
            m_IsInit = true;
            // Notify listeners before the render coroutine starts.
            if (OnMultiDisplayInited != null)
            {
                OnMultiDisplayInited();
            }

            StartCoroutine(VirtualDisplayRender());

            m_IsPlay = true;
        }
Beispiel #24
0
        /// <summary>
        /// Starts session tracking. No-op when the session is already running,
        /// was never initialized, or has been destroyed.
        /// </summary>
        public void StartSession()
        {
            if (SessionState == SessionState.Running ||
                SessionState == SessionState.UnInitialized ||
                SessionState == SessionState.Destroyed)
            {
                return;
            }
            var config = NRSessionBehaviour.SessionConfig;

            if (config != null)
            {
                SetAppSettings(config.OptimizedRendering);
#if !UNITY_EDITOR
                // Optimized rendering needs an NRRenderer component on device;
                // add and initialize it once.
                if (config.OptimizedRendering)
                {
                    if (NRSessionBehaviour.gameObject.GetComponent <NRRenderer>() == null)
                    {
                        NRRenderer = NRSessionBehaviour.gameObject.AddComponent <NRRenderer>();
                        NRRenderer.Initialize(NRHMDPoseTracker.leftCamera, NRHMDPoseTracker.rightCamera);
                    }
                }
#endif
            }
            else
            {
                // No config available: fall back to non-optimized settings.
                SetAppSettings(false);
            }

            // Tracking startup runs off the main thread; SessionState flips to
            // Running only after native tracking is up.
            AsyncTaskExecuter.Instance.RunAction(() =>
            {
                NRDebugger.Log("AsyncTaskExecuter: start tracking");
                NativeAPI.NativeTracking.Start();
                NativeAPI.NativeHeadTracking.Create();
                SessionState = SessionState.Running;
            });

#if UNITY_EDITOR
            InitEmulator();
#endif
        }
Beispiel #25
0
        /// @endcond

        /// <summary>
        /// One-time setup of the rgb camera: creates the native camera, the frame
        /// pool/queue, and applies the default RGB_888 format.
        /// </summary>
        public static void Initialize()
        {
            if (isInitiate)
            {
                return;
            }
            NRDebugger.Log("[NRRgbCamera] Initialize");
            m_NativeCamera = new NativeCamera();
#if !UNITY_EDITOR
            m_NativeCamera.Create();
            m_NativeCamera.SetCaptureCallback(Capture);
#endif
            // Pooled frames with a bounded queue — presumably caps pending frames
            // at 5; confirm against FixedSizedQueue's overflow behavior.
            FramePool           = new ObjectPool();
            FramePool.InitCount = 10;
            m_RGBFrames         = new FixedSizedQueue(FramePool);
            m_RGBFrames.Limit   = 5;

            isInitiate = true;

            // SetImageFormat relies on isInitiate being true to avoid re-entry.
            SetImageFormat(CameraImageFormat.RGB_888);
        }
        /// <summary>
        /// One-time setup of the virtual displayer: device init, display
        /// resolution, and the native multi-display pipeline (emulator in editor).
        /// </summary>
        private void Init()
        {
            if (m_IsInit)
            {
                return;
            }

            try
            {
                NRDevice.Instance.Init();
            }
            catch (Exception e)
            {
                // Device init failure is fatal for the displayer: log and rethrow.
                NRDebugger.LogError("[NRVirtualDisplayer] NRDevice init error:" + e.ToString());
                throw;
            }

            transform.position = m_StartPos;
            this.SetVirtualDisplayResolution();
            NRSessionManager.Instance.VirtualDisplayer = this;
            NRDebugger.Log("[NRVirtualDisplayer] Init");

#if !UNITY_EDITOR
            //m_RenderTexturePtr = m_ControllerScreen.GetNativeTexturePtr();
            NativeMultiDisplay = new NativeMultiDisplay();
            NativeMultiDisplay.Create();
            //NativeMultiDisplay.InitColorSpace();
            // React to phone-screen resolution changes reported by the native side.
            NativeMultiDisplay.ListenMainScrResolutionChanged(OnDisplayResolutionChanged);
            NativeMultiDisplay.Start();
            // Creat multiview controller..
            //GL.IssuePluginEvent(RenderThreadHandlePtr, 0);
            //LoadPhoneScreen();
#elif UNITY_EDITOR
            InitEmulator();
#endif
            m_VirtualController?.Init();
            m_IsInit = true;
            OnMultiDisplayInited?.Invoke();
            // IsPlaying gates consumers (e.g. NRRenderer's StartUp) waiting on us.
            IsPlaying = true;
        }
Beispiel #27
0
        /// <summary>
        /// Applies the image-tracking part of the session config on a worker
        /// thread: enables (deploy + create + load database) or disables
        /// trackable-image finding.
        /// </summary>
        /// <param name="config">Session config carrying the tracking mode and database.</param>
        /// <returns>The task performing the update.</returns>
        private Task UpdateImageTrackingConfig(NRSessionConfig config)
        {
            return(Task.Run(() =>
            {
                switch (config.ImageTrackingMode)
                {
                // Disabling: hand the native side a null (0) database and, if it
                // accepted, drop all cached database handles.
                case TrackableImageFindingMode.DISABLE:
                    var result = SetTrackableImageDataBase(m_ConfigHandle, 0);
                    if (result)
                    {
                        m_TrackableImageDatabaseDict.Clear();
                    }
                    NRDebugger.Log("[NativeConfigration] Disable trackable image result : " + result);
                    break;

                case TrackableImageFindingMode.ENABLE:
                    if (config.TrackingImageDatabase == null)
                    {
                        return;
                    }

                    // Deploy + create the native database only once per GUID;
                    // the handle is cached for later reloads.
                    if (!m_TrackableImageDatabaseDict.TryGetValue(config.TrackingImageDatabase.GUID, out m_DatabaseHandle))
                    {
                        DeployData(config.TrackingImageDatabase);
                        m_DatabaseHandle = m_NativeTrackableImage.CreateDataBase();
                        m_TrackableImageDatabaseDict.Add(config.TrackingImageDatabase.GUID, m_DatabaseHandle);
                    }
                    // NOTE: 'result' is the switch-scoped variable declared in the
                    // DISABLE case above — C# switch sections share one scope.
                    result = m_NativeTrackableImage.LoadDataBase(m_DatabaseHandle, config.TrackingImageDatabase.TrackingImageDataPath);
                    NRDebugger.LogFormat("[NativeConfigration] LoadDataBase path:{0} result:{1} ", config.TrackingImageDatabase.TrackingImageDataPath, result);
                    result = SetTrackableImageDataBase(m_ConfigHandle, m_DatabaseHandle);
                    NRDebugger.Log("[NativeConfigration] SetTrackableImageDataBase result : " + result);
                    break;

                default:
                    break;
                }
            }));
        }
Beispiel #28
0
        /// <summary>
        /// Render startup coroutine: waits until an NRVirtualDisplayer is present
        /// and playing, creates the eye textures, then brings up the native
        /// renderer and flips the state to Running.
        /// </summary>
        private IEnumerator StartUp()
        {
            var virtualDisplay = GameObject.FindObjectOfType <NRVirtualDisplayer>();

            // Poll every frame until a playing virtual displayer exists; it may be
            // spawned after this coroutine starts, hence the re-find.
            while (virtualDisplay == null || !virtualDisplay.IsPlaying)
            {
                NRDebugger.Log("[NRRender] Wait virtual display ready...");
                yield return(new WaitForEndOfFrame());

                if (virtualDisplay == null)
                {
                    virtualDisplay = GameObject.FindObjectOfType <NRVirtualDisplayer>();
                }
            }

            // Three extra frames of settling time before native startup —
            // presumably to let the displayer finish its first frames; TODO confirm.
            yield return(new WaitForEndOfFrame());

            yield return(new WaitForEndOfFrame());

            yield return(new WaitForEndOfFrame());

            NRDebugger.Log("[NRRender] StartUp");
            CreateRenderTextures();

            leftCamera.enabled  = true;
            rightCamera.enabled = true;
#if !UNITY_EDITOR
            m_NativeRenderring = new NativeRenderring();
            m_NativeRenderring.Create();
#if !UNITY_STANDALONE_WIN
            m_NativeRenderring.InitColorSpace();
#endif
            StartCoroutine(RenderCoroutine());
#endif
            m_CurrentState = RendererState.Running;
            // Kick the native render event on the render thread.
            GL.IssuePluginEvent(RenderThreadHandlePtr, STARTNATIVERENDEREVENT);
        }
Beispiel #29
0
        /// <summary>
        /// Initialize the render pipeline.
        /// </summary>
        /// <param name="leftcamera">Left Eye.</param>
        /// <param name="rightcamera">Right Eye.</param>
        public void Initialize(Camera leftcamera, Camera rightcamera)
        {
            NRDebugger.Log("[NRRender] Initialize");
            // Only a single initialization is allowed.
            if (m_CurrentState != RendererState.UnInitialized)
            {
                return;
            }

            NRSessionManager.SetAppSettings(true);

            leftCamera  = leftcamera;
            rightCamera = rightcamera;

#if !UNITY_EDITOR
            // On device the cameras are disabled here; StartUp() re-enables them
            // once the native renderer is ready.
            leftCamera.depthTextureMode  = DepthTextureMode.Depth;
            rightCamera.depthTextureMode = DepthTextureMode.Depth;
            leftCamera.rect     = new Rect(0, 0, 1, 1);
            rightCamera.rect    = new Rect(0, 0, 1, 1);
            leftCamera.enabled  = false;
            rightCamera.enabled = false;
            m_CurrentState      = RendererState.Initialized;
            StartCoroutine(StartUp());
#endif
        }
        /// <summary>
        /// Re-centers tracking via the native API; only worked at 3dof mode.
        /// </summary>
        public void Recenter()
        {
            NativeResult result = NativeApi.NRTrackingRecenter(m_TrackingHandle);
            NRDebugger.Log("[NativeSession Recenter] :" + result.ToString());
        }