Example #1
    private void Awake()
    {
        //singleton implementation

        //Check if instance already exists
        if (instance == null)
        {
            //if not, set instance to this
            instance = this;

            //init the VR app

#if !UNITY_EDITOR
            //Init the HMD. Without this, the WaveVR system doesn't get data from its sensors.
            WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.HMD_INITIAILZED);
#endif

            //init the app
            Screen.sleepTimeout         = SleepTimeout.NeverSleep;
            Application.targetFrameRate = 90; //Focus is actually 75, but with 90 things seem a little better to me :)
        }
        //If instance already exists and it's not this:
        else if (instance != this)
        {
            //Then destroy this. This enforces our singleton pattern, meaning there can only ever be one instance of an InstantPreviewer.
            Destroy(gameObject);
        }
    }
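The Awake() above relies on a static instance field that is not shown in the snippet. A minimal sketch of the assumed class skeleton (the class name comes from the comment in the code; the rest is an assumption, not the original file):

    using UnityEngine;

    //Hypothetical skeleton hosting the Awake() of Example #1: the static field below is
    //what the singleton check reads and assigns, so only one previewer survives in the scene
    public class InstantPreviewer : MonoBehaviour
    {
        //the single living instance of the previewer
        public static InstantPreviewer instance;
    }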
Example #2
    private void OnEnable()
    {
#if !UNITY_EDITOR
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_ENABLE);
#endif
        StartCoroutine("MainLoop");
    }
Example #3
    /// <summary>
    /// Main loop: here we get the roto-translational data of the device (which will be streamed to Unity)
    /// and show the preview on the device screen (streamed from Unity)
    /// </summary>
    /// <returns></returns>
    private IEnumerator MainLoop()
    {
#if !UNITY_EDITOR
        yield return(1);

        //we're about to render the first frame. This is necessary to make the app wake up correctly if you remove the headset and put it on again
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.FIRST_FRAME);

        //loop forever
        while (true)
        {
            //Update the position of the device
            WaveVR.Instance.UpdatePoses(WVR_PoseOriginModel.WVR_PoseOriginModel_OriginOnGround);

            //wait for the end of the frame, so we can play a bit with the textures
            yield return(new WaitForEndOfFrame());

            //for each eye (0 = left, 1 = right)
            for (int i = 0; i < 2; i++)
            {
                //notify WaveVR that we want to show the content of the render texture associated with one of the two cameras of the scene.
                //Each camera in the scene has in front of it a big quad, as big as its near plane, showing half of the texture of the Game Area sent by Unity.
                //This means that the left camera will frame the left part of the screen sent by Unity, and the right camera the right part.
                //Each camera renders this onto a RenderTexture that we now send to the Vive Wave system, which will draw it onto the screen.
                //Basically we take the screen sent by Unity, split it in half and render it onto the screen of the Vive Focus device
                WaveVR_Utils.SetRenderTexture(currentRt[i].GetNativeTexturePtr());
                WaveVR_Utils.SendRenderEventNative(i == 0 ? WaveVR_Utils.k_nRenderEventID_SubmitL : WaveVR_Utils.k_nRenderEventID_SubmitR);
                WaveVR_Utils.SendRenderEventNative(i == 0 ? WaveVR_Utils.k_nRenderEventID_RenderEyeEndL : WaveVR_Utils.k_nRenderEventID_RenderEyeEndR);
            }
        }
#else
        yield break;
#endif
    }
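MainLoop() submits currentRt[i], which is not declared in the snippet. A minimal sketch of how those per-eye render textures could be wired up (the field is inferred from the calls above; the helper method and camera parameters are hypothetical names, not part of the original class):

    //Assumed declaration: one RenderTexture per eye, rendered by the two scene cameras
    private RenderTexture[] currentRt = new RenderTexture[2];

    //Hypothetical helper: give each eye camera its own texture, so MainLoop() can submit
    //the left and right halves of the streamed screen to the WaveVR compositor
    private void SetupEyeTextures(Camera leftCamera, Camera rightCamera, int width, int height)
    {
        currentRt[0] = new RenderTexture(width, height, 24); //left eye
        currentRt[1] = new RenderTexture(width, height, 24); //right eye
        leftCamera.targetTexture  = currentRt[0];
        rightCamera.targetTexture = currentRt[1];
    }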
Example #4
    private void OnDisable()
    {
#if !UNITY_EDITOR
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_DISABLE);
#endif
        StopCoroutine("MainLoop");
    }
Example #5
    void OnDisable()
    {
        Log.d(LOG_TAG, "OnDisable()+");
        enableRenderLoop(false);
#if UNITY_EDITOR
        if (!Application.isEditor)
#endif
        {
            WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_DISABLE);
            if (synchronizer != null)
            {
                synchronizer.sync();
            }
        }
        WaveVR_Utils.Event.Remove("IpdChanged", OnIpdChanged);
        setLoadingCanvas(false);

        if (lefteye != null)
        {
            lefteye.getCamera().targetTexture = null;
        }
        if (righteye != null)
        {
            righteye.getCamera().targetTexture = null;
        }
        if (textureManager != null)
        {
            textureManager.ReleaseTexturePools();
        }

        Log.d(LOG_TAG, "OnDisable()-");
    }
Example #6
    void Awake()
    {
        Log.d(LOG_TAG, "Awake()+");
        if (instance == null)
        {
            instance = this;
        }
        else
        {
            Log.w(LOG_TAG, "Render already Awaked");
        }
        synchronizer = new RenderThreadSynchronizer();

        if (globalOrigin >= 0 && globalOrigin <= 3)
        {
            _origin = (WVR_PoseOriginModel)globalOrigin;
            Log.d(LOG_TAG, "Has global tracking space " + _origin);
        }

#if UNITY_EDITOR
        if (!Application.isEditor)
#endif
        {
            if (WaveVR_Init.Instance == null || WaveVR.Instance == null)
            {
                Log.e(LOG_TAG, "Fail to initialize");
            }

            // This call makes sure the native render code is initialized in the render thread.
            InitializeGraphic(synchronizer);

            // Setup render values
            uint w = 0, h = 0;
            Interop.WVR_GetRenderTargetSize(ref w, ref h);
            sceneWidth  = (float)w;
            sceneHeight = (float)h;

            projRawL = new float[4] {
                0.0f, 0.0f, 0.0f, 0.0f
            };
            projRawR = new float[4] {
                0.0f, 0.0f, 0.0f, 0.0f
            };

            OnIpdChanged(null);
        }

        // May call eglMakeCurrent inside TextureManager()
        if (textureManager == null)
        {
            textureManager = new TextureManager();
        }

        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.HMD_INITIAILZED);

        Screen.sleepTimeout         = SleepTimeout.NeverSleep;
        Application.targetFrameRate = targetFPS;
        Log.d(LOG_TAG, "Awake()-");
    }
Example #7
 void OnDestroy()
 {
     Log.d(LOG_TAG, "OnDestroy()+");
     textureManager = null;
     instance       = null;
     WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_DESTROY);
     Log.d(LOG_TAG, "OnDestroy()-");
 }
Example #8
 void OnApplicationQuit()
 {
     WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_QUIT);
     if (synchronizer != null)
     {
         synchronizer.sync();
     }
 }
Example #9
 void OnEnable()
 {
     Log.d(LOG_TAG, "OnEnable()+");
     WaveVR_Utils.Event.Listen("IpdChanged", OnIpdChanged);
     enableRenderLoop(true);
     setLoadingCanvas(true);
     WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_ENABLE);
     Log.d(LOG_TAG, "OnEnable()-");
 }
Example #10
 public void ResumeUnity()
 {
     WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_RESUME);
     if (synchronizer != null)
     {
         synchronizer.sync();
     }
     setLoadingCanvas(true);
     enableRenderLoop(true);
 }
Example #11
    void OnApplicationQuit()
    {
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_QUIT);
        if (synchronizer != null)
        {
            synchronizer.sync();
        }
#if UNITY_EDITOR
        WaveVR.EndSimulator();
#endif
    }
Example #12
 private void OnApplicationPause(bool pauseStatus)
 {
     if (pauseStatus)
     {
         WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_PAUSE);
         StopCoroutine("MainLoop");
     }
     else
     {
         WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_RESUME);
         StartCoroutine("MainLoop");
     }
 }
Example #13
    public void PauseUnity()
    {
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_PAUSE);
        if (synchronizer != null)
        {
            synchronizer.sync();
        }
        lefteye.getCamera().targetTexture  = null;
        righteye.getCamera().targetTexture = null;
        textureManager.ReleaseTexturePools();

        setLoadingCanvas(true);
        enableRenderLoop(false);
    }
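PauseUnity() (Example #13) and ResumeUnity() (Example #10) are presumably driven from a lifecycle callback. A hedged sketch of that wiring (an assumption; the actual caller inside the WaveVR plugin may differ, and Example #16 shows a variant that handles the pause inline):

    //Hypothetical wiring: forward Unity's pause callback to the two methods above
    private void OnApplicationPause(bool pauseStatus)
    {
        if (pauseStatus)
        {
            PauseUnity();
        }
        else
        {
            ResumeUnity();
        }
    }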
Example #14
    void OnDisable()
    {
        Log.d(LOG_TAG, "OnDisable()+");
        enableRenderLoop(false);
#if UNITY_EDITOR
        if (!Application.isEditor)
#endif
        {
            WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_DISABLE);
        }
        WaveVR_Utils.Event.Remove("IpdChanged", OnIpdChanged);
        setLoadingCanvas(false);
        Log.d(LOG_TAG, "OnDisable()-");
    }
Example #15
    void OnApplicationPause(bool pauseStatus)
    {
        Log.d(LOG_TAG, "Pause(" + pauseStatus + ")");

        if (pauseStatus)
        {
            WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_PAUSE);
        }
        else
        {
            WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_RESUME);
        }

        setLoadingCanvas(true);
        enableRenderLoop(!pauseStatus);
    }
Example #16
    void OnApplicationPause(bool pauseStatus)
    {
        Log.d(LOG_TAG, "Pause(" + pauseStatus + ")");

        if (pauseStatus)
        {
            WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_PAUSE);
            if (synchronizer != null)
            {
                synchronizer.sync();
            }
            lefteye.getCamera().targetTexture  = null;
            righteye.getCamera().targetTexture = null;
            textureManager.ReleaseTexturePools();
        }
        else
        {
            WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_RESUME);
        }

        setLoadingCanvas(true);
        enableRenderLoop(!pauseStatus);
    }
Example #17
    void Awake()
    {
        Log.d(LOG_TAG, "Awake()+");
        Log.d(LOG_TAG, "Version of the runtime: " + Application.unityVersion);
        if (instance == null)
        {
            instance = this;
        }
        else
        {
            Log.w(LOG_TAG, "Render already Awaked");
        }

        QualitySettings.SetQualityLevel(QualitySettings.GetQualityLevel(), true);
        synchronizer = new RenderThreadSynchronizer();

        if (globalOrigin >= 0 && globalOrigin <= 3)
        {
            _origin = (WVR_PoseOriginModel)globalOrigin;
            Log.d(LOG_TAG, "Has global tracking space " + _origin);
        }

        if (WaveVR_Init.Instance == null || WaveVR.Instance == null)
        {
            Log.e(LOG_TAG, "Fail to initialize");
        }
#if UNITY_EDITOR
        if (!Application.isEditor)
#endif
        {
            // This call makes sure the native render code is initialized in the render thread.
            qSColorSpace = QualitySettings.activeColorSpace;
            // InitializeGraphic(synchronizer);

            // Setup render values
            uint w = 0, h = 0;
            Interop.WVR_GetRenderTargetSize(ref w, ref h);
            sceneWidth  = (float)w;
            sceneHeight = (float)h;

            projRawL = new float[4] {
                0.0f, 0.0f, 0.0f, 0.0f
            };
            projRawR = new float[4] {
                0.0f, 0.0f, 0.0f, 0.0f
            };

            WVR_RenderProps_t props = new WVR_RenderProps_t();
            Interop.WVR_GetRenderProps(ref props);
            targetFPS = (int)props.refreshRate;

            OnIpdChanged(null);
        }

        // May call eglMakeCurrent inside TextureManager()
        // The first time, we delay InitializeGraphic and the TextureManager construction to the render loop and skip 16 frames on the Unity 2018 version.
        // Otherwise, keep the original flow.
        if (isInitializeGraphic && textureManager == null)
        {
            textureManager = new TextureManager();
        }

        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.HMD_INITIAILZED);

        Screen.sleepTimeout         = SleepTimeout.NeverSleep;
        Application.targetFrameRate = targetFPS;
        Log.d(LOG_TAG, "Awake()-");
    }
Example #18
    private IEnumerator RenderLoop()
    {
        if (cachedWaitForEndOfFrame == null)
        {
            cachedWaitForEndOfFrame = new WaitForEndOfFrame();
        }
        yield return(cachedWaitForEndOfFrame);

        yield return(cachedWaitForEndOfFrame);

        if (isInitializeGraphic == false)
        {
            InitializeGraphic();
            // Sync here to wait for InitializeGraphic to finish, because InitializeGraphic has been moved to the render thread.
            synchronizer.sync();
            textureManager      = new TextureManager();
            isInitializeGraphic = true;
        }

        Log.d(LOG_TAG, "RenderLoop() is started");
        var tim = Time.realtimeSinceStartup;

#if UNITY_EDITOR
        if (!Application.isEditor)
#endif
        {
            WaveVR_Utils.WVR_SetPerformanceLevels(cpuPerfLevel, gpuPerfLevel);
            Log.i(LOG_TAG, "SetPerformanceLevels cpuPerfLevel " + cpuPerfLevel + " gpuPerfLevel " + gpuPerfLevel);
            // Restart ATW thread before rendering.
            while (!WaveVR_Utils.WVR_IsATWActive())
            {
                yield return(cachedWaitForEndOfFrame);

                if (surfaceChanged && isNeedTimeout == false)
                {
                    break;
                }
                if (Time.realtimeSinceStartup - tim > 1.0f)
                {
                    Log.w(LOG_TAG, "Waiting for surface change is timeout.");
                    break;
                }
            }
            // Reset isNeedTimeout flag
            isNeedTimeout = false;

            if (textureManager != null)
            {
                if (!textureManager.validate())
                {
                    textureManager.reset();
                }
            }
        }
        Log.d(LOG_TAG, "First frame");
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.FIRST_FRAME);

        setLoadingCanvas(false);
        Log.d(LOG_TAG, "RenderLoop() is running");

        while (true)
        {
            Log.gpl.d(LOG_TAG, "RenderLoop() is still running");
            WaveVR_Utils.Trace.BeginSection("RenderLoop", false);
#if UNITY_EDITOR
            if (Application.isEditor)
            {
                if (WaveVR.Instance.isSimulatorOn)
                {
                    WaveVR.Instance.UpdatePoses(origin, true);
                }
                else
                {
                    WaveVR_Utils.Event.Send(WaveVR_Utils.Event.NEW_POSES, new WVR_DevicePosePair_t[0], new WaveVR_Utils.RigidTransform[0]);
                    WaveVR_Utils.Event.Send(WaveVR_Utils.Event.AFTER_NEW_POSES);
                }
                if (textureManager != null)
                {
                    textureManager.Next();
                }
            }
            else
#endif
            {
                WaveVR.Instance.UpdatePoses(origin);
                // Set next texture before running any graphic command.
                if (textureManager != null)
                {
                    textureManager.Next();
                }
            }

            if (configurationChanged)
            {
                WaveVR_Render.Expand(this);
                configurationChanged = false;
            }

            RenderEye(lefteye.getCamera(), WVR_Eye.WVR_Eye_Left);
            RenderEye(righteye.getCamera(), WVR_Eye.WVR_Eye_Right);
            WaveVR_Utils.Trace.EndSection(false);

            // Placed here to control the timing of the next frame.
            TimeControl();

            Log.gpl.d(LOG_TAG, "End of frame");
            yield return(cachedWaitForEndOfFrame);
        }
    }
Example #19
    private IEnumerator RenderLoop()
    {
        var wait = new WaitForEndOfFrame();

        yield return(wait);

        Log.d(LOG_TAG, "RenderLoop() is started");
        var tim = Time.time;

#if UNITY_EDITOR
        if (!Application.isEditor)
#endif
        {
            // Restart ATW thread before rendering.
            while (!WaveVR_Utils.WVR_IsATWActive())
            {
                yield return(wait);

                if (surfaceChanged && isNeedTimeout == false)
                {
                    break;
                }
                if (Time.time - tim > 1.0f)
                {
                    Log.w(LOG_TAG, "Waiting for surface change is timeout.");
                    break;
                }
            }
            // Reset isNeedTimeout flag
            isNeedTimeout = false;

            if (textureManager != null)
            {
                if (!textureManager.validate())
                {
                    textureManager.reset();
                }
            }
        }
        Log.d(LOG_TAG, "First frame");
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.FIRST_FRAME);

        setLoadingCanvas(false);
        Log.d(LOG_TAG, "RenderLoop() is running");

        while (true)
        {
            Log.gpl.d(LOG_TAG, "RenderLoop() is still running");
            WaveVR_Utils.Trace.BeginSection("RenderLoop", false);
#if UNITY_EDITOR
            if (Application.isEditor)
            {
                WaveVR_Utils.Event.Send(WaveVR_Utils.Event.NEW_POSES, new WVR_DevicePosePair_t[0], new WaveVR_Utils.RigidTransform[0]);
                WaveVR_Utils.Event.Send(WaveVR_Utils.Event.AFTER_NEW_POSES);
                textureManager.Next();
            }
            else
#endif
            {
                WaveVR.Instance.UpdatePoses(origin);
                // Set next texture before running any graphic command.
                textureManager.Next();
            }

            if (configurationChanged)
            {
                WaveVR_Render.Expand(this);
                configurationChanged = false;
            }

            RenderEye(lefteye.getCamera(), WVR_Eye.WVR_Eye_Left);
            RenderEye(righteye.getCamera(), WVR_Eye.WVR_Eye_Right);
            WaveVR_Utils.Trace.EndSection(false);

            // Placed here to control the timing of the next frame.
            TimeControl();

            Log.gpl.d(LOG_TAG, "End of frame");
            yield return(wait);
        }
    }
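Both RenderLoop() variants (Examples #18 and #19) are started and stopped through enableRenderLoop(), which the snippets call but do not define. A minimal sketch, assuming that method simply manages the coroutine (the real WaveVR_Render implementation may keep additional state):

    //Assumed helper: start or stop the RenderLoop() coroutine shown above
    private Coroutine renderLoopCoroutine;

    private void enableRenderLoop(bool start)
    {
        if (start && renderLoopCoroutine == null)
        {
            renderLoopCoroutine = StartCoroutine(RenderLoop());
        }
        else if (!start && renderLoopCoroutine != null)
        {
            StopCoroutine(renderLoopCoroutine);
            renderLoopCoroutine = null;
        }
    }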
Example #20
 void OnApplicationQuit()
 {
     WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.UNITY_APPLICATION_QUIT);
 }