Example No. 1
    /// <summary>
    /// Main loop: gets the rototranslational (pose) data of the device (which will be streamed to Unity)
    /// and shows the preview on the device screen (streamed from Unity).
    /// </summary>
    /// <returns></returns>
    private IEnumerator MainLoop()
    {
#if !UNITY_EDITOR
        yield return null;

        //notify the engine that we're about to render the first frame. This is necessary to make the app wake up correctly if you remove the headset and put it on again
        WaveVR_Utils.IssueEngineEvent(WaveVR_Utils.EngineEventID.FIRST_FRAME);

        //loop forever
        while (true)
        {
            //update the pose (position and rotation) of the device, with the origin on the ground
            WaveVR.Instance.UpdatePoses(WVR_PoseOriginModel.WVR_PoseOriginModel_OriginOnGround);

            //wait for the end of the frame, so we can safely work with the render textures
            yield return new WaitForEndOfFrame();

            //for each eye (0 = left, 1 = right)
            for (int i = 0; i < 2; i++)
            {
                //notify WaveVR that we want to show the content of the render texture associated with one of the two cameras of the scene.
                //Each camera in the scene has a big quad in front of it, as big as its near plane, showing half of the texture of the Game Area sent by Unity.
                //This means that the left camera frames the left part of the screen sent by Unity, and the right camera the right part.
                //Each camera renders this onto a RenderTexture that we now send to the ViveWave system, which draws it onto the screen.
                //Basically we take the screen sent by Unity, split it in half and render it onto the screen of the Vive Focus device
                WaveVR_Utils.SetRenderTexture(currentRt[i].GetNativeTexturePtr());
                WaveVR_Utils.SendRenderEventNative(i == 0 ? WaveVR_Utils.k_nRenderEventID_SubmitL : WaveVR_Utils.k_nRenderEventID_SubmitR);
                WaveVR_Utils.SendRenderEventNative(i == 0 ? WaveVR_Utils.k_nRenderEventID_RenderEyeEndL : WaveVR_Utils.k_nRenderEventID_RenderEyeEndR);
            }
        }
#else
        yield break;
#endif
    }
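
For context, the two entries of currentRt[] used above have to exist before the loop starts. Below is a minimal sketch of how they might be created and bound to the two eye cameras; the class and field names (EyeTextureSetup, eyeCameras, eyeTextureSize) and the texture size are illustrative assumptions, not part of the original project.

    using UnityEngine;

    public class EyeTextureSetup : MonoBehaviour
    {
        //left (index 0) and right (index 1) eye cameras, each framing one half of the image streamed by Unity
        [SerializeField] private Camera[] eyeCameras = new Camera[2];
        [SerializeField] private int eyeTextureSize = 1024;

        //the textures whose native pointers MainLoop() submits to WaveVR every frame
        public RenderTexture[] currentRt { get; private set; } = new RenderTexture[2];

        private void Awake()
        {
            for (int i = 0; i < 2; i++)
            {
                //one RenderTexture per eye; each camera renders its half-screen quad into it
                currentRt[i] = new RenderTexture(eyeTextureSize, eyeTextureSize, 24, RenderTextureFormat.ARGB32);
                currentRt[i].Create();
                eyeCameras[i].targetTexture = currentRt[i];
            }
        }

        private void OnDestroy()
        {
            //release the GPU resources when the component is destroyed
            for (int i = 0; i < 2; i++)
                if (currentRt[i] != null)
                    currentRt[i].Release();
        }
    }
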
Example No. 2
    private void RenderEye(Camera camera, WVR_Eye eye)
    {
        WaveVR_Utils.Trace.BeginSection("Render_" + eye);
        Log.gpl.d(LOG_TAG, "Render_" + eye);

        bool isleft = eye == WVR_Eye.WVR_Eye_Left;

        //outside of the editor, tell the native WaveVR renderer that rendering of this eye is starting
#if UNITY_EDITOR
        if (!Application.isEditor)
#endif
        WaveVR_Utils.SendRenderEventNative(isleft ?
                                           WaveVR_Utils.k_nRenderEventID_RenderEyeL :
                                           WaveVR_Utils.k_nRenderEventID_RenderEyeR);
        WaveVR_CanvasEye.changeEye(camera);
        //render the scene from this eye's camera into its RenderTexture
        camera.enabled = true;
        RenderTexture rt = textureManager.GetRenderTexture(isleft);
        camera.targetTexture = rt;
        camera.Render();
        camera.enabled = false;
#if UNITY_EDITOR
        //in the editor there is no native runtime: show the eye texture through the distortion preview instead
        if (Application.isEditor)
        {
            distortion.RenderEye(eye, rt);
            return;
        }
#endif
        // Do submit: pass the native pointer of this eye's texture to the runtime, then signal that rendering of this eye has ended
        WaveVR_Utils.SetRenderTexture(isleft ?
                                      textureManager.left.currentPtr :
                                      textureManager.right.currentPtr);

        WaveVR_Utils.SendRenderEventNative(isleft ?
                                           WaveVR_Utils.k_nRenderEventID_SubmitL :
                                           WaveVR_Utils.k_nRenderEventID_SubmitR);

        WaveVR_Utils.SendRenderEventNative(isleft ?
                                           WaveVR_Utils.k_nRenderEventID_RenderEyeEndL :
                                           WaveVR_Utils.k_nRenderEventID_RenderEyeEndR);
        WaveVR_Utils.Trace.EndSection();
    }
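
RenderEye() only handles a single eye, so something else has to call it twice per frame. Below is a minimal sketch of such a driver, assuming it lives in the same class that declares RenderEye() above; leftEyeCamera and rightEyeCamera are placeholder field names, not part of the original source.

    //illustrative placeholder cameras, one per eye
    private Camera leftEyeCamera;
    private Camera rightEyeCamera;

    private IEnumerator RenderLoop()
    {
        WaitForEndOfFrame endOfFrame = new WaitForEndOfFrame();

        //loop forever, once per rendered frame
        while (true)
        {
            //wait until Unity has finished its own rendering for this frame
            yield return endOfFrame;

            //render and submit the left eye, then the right eye
            RenderEye(leftEyeCamera, WVR_Eye.WVR_Eye_Left);
            RenderEye(rightEyeCamera, WVR_Eye.WVR_Eye_Right);
        }
    }
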