public TexturePool(TextureConfig cfg, int size, WVR_Eye eye) : base(eye)
        {
            // Creates 'size' render textures, indexes each by its native pointer,
            // and registers the pointer array with the native texture queue.
            using (var ee = Log.ee("WVR_TexMngr", "TexturePool+", "TexturePool-"))
            {
                isReleased = false;

#if UNITY_EDITOR
                // Editor doesn't need the texture queue.
                size = 1;
#endif

                // NOTE(review): removed "this.isLeft = isLeft;" — it was a no-op
                // self-assignment (compiler warning CS1717); isLeft is already
                // initialized by the base(eye) constructor.
                this.size = size;

                // Create each texture and record its color/depth native pointers.
                for (int i = 0; i < size; i++)
                {
                    currentRt       = CreateTexture(cfg);
                    currentPtr      = GetNativePtr(currentRt);
                    currentDepthPtr = GetNativeDepthBufferPtr(currentRt);

                    textures.Add(currentPtr, currentRt);
                    depthes.Add(currentPtr, currentDepthPtr);

                    Log.d("WVR_TexMngr", "Gen rt" + currentPtr + " dp" + currentDepthPtr);
                }
                keyArray = new IntPtr[textures.Count];
                textures.Keys.CopyTo(keyArray, 0);

                // Hand the native pointers to the runtime's texture queue.
                // (Editor play mode skips the native call.)
#if UNITY_EDITOR
                if (!Application.isEditor)
#endif
                queue = WaveVR_Utils.WVR_StoreRenderTextures(keyArray, size, isBoth || isLeft);
            }
        }
// --- Exemplo n.º 2 ---
        public TexturePool(TextureConfig cfg, int size, WVR_Eye eye) : base(eye)
        {
            // Creates the texture pool for one eye (or the both-eye array target),
            // registers the native pointers with the runtime's texture queue.
#if !UNITY_STANDALONE
            // Keep the config so textures can be re-created with the same settings.
            this.cfg = cfg;
            using (var ee = Log.ee(TextureManager.TAG, "TexturePool+", "TexturePool-"))
            {
                isReleased = false;

#if UNITY_EDITOR
                // Editor doesn't need the texture queue.
                size = 1;
#endif

                // NOTE(review): removed "this.isLeft = isLeft;" — it was a no-op
                // self-assignment (compiler warning CS1717); isLeft is already
                // initialized by the base(eye) constructor.
                this.size = size;

                // It will always get an error internally due to our EGL hacking.  Close the callstack dump for speed.
                var origin = Application.GetStackTraceLogType(LogType.Error);
                Application.SetStackTraceLogType(LogType.Error, StackTraceLogType.None);

                for (int i = 0; i < size; i++)
                {
                    rts.Add(CreateTexture(cfg));
                }

                // Call GetNativePtr once after all texture are created.  Try not to block render thread too long.
                for (int i = 0; i < size; i++)
                {
                    T rt = rts[i];
                    currentPtr      = GetNativePtr(rt);
                    currentDepthPtr = GetNativeDepthBufferPtr(rt);

                    textures.Add(currentPtr, rt);
                    depthes.Add(currentPtr, currentDepthPtr);
                    cfgs.Add(currentPtr, cfg);

                    Log.i(TextureManager.TAG, "Gen rt" + currentPtr + " dp" + currentDepthPtr);
                }

                // Intentionally logged as an error so it is visible next to the
                // EGL errors it explains; stack traces are restored right after.
                Log.e(TextureManager.TAG, "Don't worry about the libEGL and Unity error showing above.  They are safe and will not crash your game.");
                Application.SetStackTraceLogType(LogType.Error, origin);

                keyArray = new Int32[textures.Count];
                textures.Keys.CopyTo(keyArray, 0);

                // Both-eye (single-pass) rendering uses a 2D-array texture target;
                // per-eye pools use plain 2D.  (Editor play mode skips the call.)
#if UNITY_EDITOR && UNITY_ANDROID
                if (!Application.isEditor)
#endif
                if (eye == WVR_Eye.WVR_Eye_Both)
                {
                    queue = WaveVR_Utils.WVR_StoreRenderTextures(keyArray, size, isBoth || isLeft, WVR_TextureTarget.WVR_TextureTarget_2D_ARRAY);
                }
                else
                {
                    queue = WaveVR_Utils.WVR_StoreRenderTextures(keyArray, size, isBoth || isLeft, WVR_TextureTarget.WVR_TextureTarget_2D);
                }
            }
#endif
        }
// --- Exemplo n.º 3 ---
 public Data()
 {
     // Reset every member to a known empty/default state.
     mesh      = null;
     cmdBuf    = null;
     wvrCamera = null;
     camera    = null;
     eye       = WVR_Eye.WVR_Eye_Left;
 }
// --- Exemplo n.º 4 ---
    // Builds a projection matrix for editor preview from the attached camera's
    // field of view and the screen dimensions.
    // NOTE(review): the 'eye' parameter is unused — the same matrix is returned
    // for both eyes; confirm whether that is intended for editor preview.
    private Matrix4x4 GetEditorProjection(WVR_Eye eye)
    {
        var       camera = GetComponent <Camera>();
        Matrix4x4 proj = Matrix4x4.identity;
        // Tangent of half the camera FOV (fieldOfView is in degrees).
        float     w = Mathf.Tan(camera.fieldOfView / 2 * Mathf.Deg2Rad);
        // Screen.width / 2: each eye presumably gets half the screen — verify.
        float     h = w / (Screen.width / 2) * Screen.height;
        // NOTE(review): w and h are halved again here even though w is already a
        // half-angle tangent — confirm against MakeProjection's expected inputs.
        float     l = -w / 2, r = w / 2, t = h / 2, b = -h / 2;

        proj = MakeProjection(l, r, t, b, camera.nearClipPlane, camera.farClipPlane);
        return(proj);
    }
// --- Exemplo n.º 5 ---
    // Point the canvas at whichever per-eye camera is about to render.
    // The aggregate "both eyes" callback carries no usable camera, so skip it.
    void MyRenderEye(WaveVR_Render render, WVR_Eye eye, WaveVR_Camera wvrCamera)
    {
        if (eye != WVR_Eye.WVR_Eye_Both)
        {
            canvas.renderMode  = RenderMode.ScreenSpaceCamera;
            canvas.worldCamera = wvrCamera.GetCamera();
        }
    }
// --- Exemplo n.º 6 ---
    // Renders the given eye texture through this eye's distortion mesh.
    public void RenderEye(WVR_Eye eye, RenderTexture texture)
    {
        reset();
        bool leftEye = eye == WVR_Eye.WVR_Eye_Left;
        WaveVR_Utils.Trace.BeginSection(leftEye ? "Distortion_WVR_Eye_Left" : "Distortion_WVR_Eye_Right");

        // Select this eye's distortion target and bind the eye buffer to it.
        current = leftEye ? Cleft : Cright;
        current.material.mainTexture = texture;

        // Render once manually, restricted to this eye's viewport rect.
        cam.rect    = current.rect;
        cam.enabled = true;
        cam.Render();
        cam.enabled = false;

        WaveVR_Utils.Trace.EndSection();
    }
 // Derives the isLeft / isRight / isBoth flags from the eye enum.
 // Note: any value other than Left/Both (including None) counts as right.
 public EyeConfig(WVR_Eye eye)
 {
     isBoth = eye == WVR_Eye.WVR_Eye_Both;
     if (isBoth)
     {
         isLeft  = false;
         isRight = false;
     }
     else
     {
         isLeft  = eye == WVR_Eye.WVR_Eye_Left;
         isRight = !isLeft;
     }
 }
// --- Exemplo n.º 8 ---
    // Creates (or reuses) the child camera GameObject for one eye and configures
    // its local position and projection.  Editor play mode uses a fixed IPD
    // offset and an editor-computed projection instead of device-reported values.
    private WaveVR_Camera CreateEye(WVR_Eye eye)
    {
        Log.d(LOG_TAG, "CreateEye(" + eye + ")+");

        bool          isleft   = eye == WVR_Eye.WVR_Eye_Left;
        WaveVR_Camera vrcamera = isleft ? lefteye : righteye;
        Camera        camera;

        if (vrcamera == null)
        {
            // First call for this eye: build the child object and its components.
            string     eyename = isleft ? OBJ_NAME_LEFT_EYE : OBJ_NAME_RIGHT_EYE;
            GameObject go      = new GameObject(eyename);
            go.transform.SetParent(transform, false);
            camera = go.AddComponent <Camera>();
            // Copy clip planes from the Camera on this (head) GameObject.
            camera.nearClipPlane = GetComponent <Camera>().nearClipPlane;
            camera.farClipPlane  = GetComponent <Camera>().farClipPlane;
            go.AddComponent <FlareLayer>();
            go.AddComponent <GUILayer>();
            vrcamera = go.AddComponent <WaveVR_Camera>();
        }
        else
        {
            camera = vrcamera.GetComponent <Camera>();
        }

        if (Application.isEditor)
        {
            // Editor: offset each eye by half the configured IPD.
            // NOTE(review): the 0.15f z offset looks like an editor-preview
            // tweak — confirm its intent before changing.
            camera.transform.localPosition = new Vector3(isleft ? -ipd / 2 : ipd / 2, 0, 0.15f);
        }
        else
        {
            // Device: use the eye offset supplied in the 'eyes' array.
            camera.transform.localPosition = eyes[isleft ? 0 : 1].pos;
        }

        vrcamera.eye   = eye;
        // Disabled here; the camera is driven manually via Camera.Render().
        camera.enabled = false;

        if (Application.isEditor)
        {
            camera.projectionMatrix = GetEditorProjection(eye);
        }
        else
        {
            camera.projectionMatrix = GetProjection(eye);
        }

        Log.d(LOG_TAG, "CreateEye(" + eye + ")-");
        return(vrcamera);
    }
// --- Exemplo n.º 9 ---
        // Validates the per-eye foveation arguments before forwarding them to
        // WaveVR_Render.SetFoveatedRenderingParameter.
        private static void SetFoveatedRenderingParameterCheck(WVR_Eye eye, float ndcFocalPointX, float ndcFocalPointY, float clearVisionFOV, WVR_PeripheralQuality quality)
        {
            // Foveation is configured per eye; reject the aggregate values.
            if (eye == WVR_Eye.WVR_Eye_None || eye == WVR_Eye.WVR_Eye_Both)
            {
                throw new System.ArgumentException("Invalid argument: eye (" + eye + ") should be WVR_Eye_Left or WVR_Eye_Right.");
            }

            // Quality must fall inside the enum's defined Low..High range.
            if (!(quality >= WVR_PeripheralQuality.Low && quality <= WVR_PeripheralQuality.High))
            {
                throw new System.ArgumentException("Invalid argument: level (" + quality + ") should be in WVR_PeripheralQuality range.");
            }

            WaveVR_Render.SetFoveatedRenderingParameter(eye, ndcFocalPointX, ndcFocalPointY, clearVisionFOV, quality);
        }
// --- Exemplo n.º 10 ---
        // Returns the current render texture of the requested eye's pool,
        // or null for any unrecognized eye value.
        public Texture GetRenderTexture(WVR_Eye eye)
        {
            if (eye == WVR_Eye.WVR_Eye_Both)
            {
                return both.currentRt;
            }
            if (eye == WVR_Eye.WVR_Eye_Left)
            {
                return left.currentRt;
            }
            if (eye == WVR_Eye.WVR_Eye_Right)
            {
                return right.currentRt;
            }
            return null;
        }
// --- Exemplo n.º 11 ---
        // Returns the native pointer of the requested eye's current texture,
        // or 0 for any unrecognized eye value.
        public Int32 GetNativePtr(WVR_Eye eye)
        {
            if (eye == WVR_Eye.WVR_Eye_Both) return both.currentPtr;
            if (eye == WVR_Eye.WVR_Eye_Left) return left.currentPtr;
            if (eye == WVR_Eye.WVR_Eye_Right) return right.currentPtr;
            return 0;
        }
// --- Exemplo n.º 12 ---
    // Returns the eye's projection center in normalized [0,1] coordinates,
    // derived from the device's clipping plane boundary.
    Vector2 GetCenter(WVR_Eye eye)
    {
        // Editor: no device to query; assume a centered projection.
        if (Application.isEditor)
        {
            return new Vector2(0.5f, 0.5f);
        }

        float leftBound = 0f, rightBound = 0f, topBound = 0f, bottomBound = 0f;
        Interop.WVR_GetClippingPlaneBoundary(eye, ref leftBound, ref rightBound, ref topBound, ref bottomBound);
        // Map the (possibly asymmetric) frustum bounds to texture-space center.
        return new Vector2(-leftBound / (rightBound - leftBound), topBound / (topBound - bottomBound));
    }
// --- Exemplo n.º 13 ---
    // Draws the given eye buffer through this eye's distortion pass.
    public void RenderEye(WVR_Eye eye, RenderTexture texture)
    {
        reset();
        bool renderingLeft = eye == WVR_Eye.WVR_Eye_Left;

        WaveVR_Utils.Trace.BeginSection(renderingLeft ? "Distortion_WVR_Eye_Left" : "Distortion_WVR_Eye_Right");

        // Select this eye's distortion target and bind the eye buffer to it.
        current = renderingLeft ? Cleft : Cright;
        current.material.mainTexture = texture;

        // When VR mode is enabled, the camera clean will not follow camera.rect.  Not to clear when drawing right eyes.
        cam.clearFlags = renderingLeft ? CameraClearFlags.SolidColor : CameraClearFlags.Nothing;
        cam.rect       = current.rect;
        cam.enabled    = true;
        cam.Render();
        cam.enabled = false;

        WaveVR_Utils.Trace.EndSection();
    }
// --- Exemplo n.º 14 ---
        // Each eye can have individual value.
        // Stores the clear-vision FOV and peripheral quality for one eye and
        // marks the configuration dirty; throws for non Left/Right eyes.
        public void Set(WVR_Eye eye, float clearVisionFOV, WVR_PeripheralQuality quality)
        {
            ValidateFOV(clearVisionFOV);
            ValidateQuality(quality);

            switch (eye)
            {
            case WVR_Eye.WVR_Eye_Left:
                leftClearVisionFOV    = clearVisionFOV;
                leftPeripheralQuality = quality;
                break;

            case WVR_Eye.WVR_Eye_Right:
                rightClearVisionFOV    = clearVisionFOV;
                rightPeripheralQuality = quality;
                break;

            default:
                throw new System.ArgumentException("Eye (" + eye + ") should be WVR_Eye_Left or WVR_Eye_Right.");
            }
            isDirty = true;
        }
// --- Exemplo n.º 15 ---
    // Renders one eye's camera into its texture-pool target and submits the
    // result to the native runtime.  In editor play mode the result is previewed
    // through the distortion pass instead of being submitted.
    private void RenderEye(Camera camera, WVR_Eye eye)
    {
        WaveVR_Utils.Trace.BeginSection("Render_" + eye);
        Log.gpl.d(LOG_TAG, "Render_" + eye);

        bool isleft = eye == WVR_Eye.WVR_Eye_Left;

        // Native render events are skipped in the editor (no native runtime).
#if UNITY_EDITOR
        if (!Application.isEditor)
#endif
        WaveVR_Utils.SendRenderEventNative(isleft ?
                                           WaveVR_Utils.k_nRenderEventID_RenderEyeL :
                                           WaveVR_Utils.k_nRenderEventID_RenderEyeR);
        WaveVR_CanvasEye.changeEye(camera);
        // Enable the camera only for the duration of the manual Render() call.
        camera.enabled = true;
        RenderTexture rt = textureManager.GetRenderTexture(isleft);
        camera.targetTexture = rt;
        camera.Render();
        camera.enabled = false;
#if UNITY_EDITOR
        if (Application.isEditor)
        {
            // Editor preview: draw through the distortion pass; no native submit.
            distortion.RenderEye(eye, rt);
            return;
        }
#endif
        // Do submit
        WaveVR_Utils.SetRenderTexture(isleft ?
                                      textureManager.left.currentPtr :
                                      textureManager.right.currentPtr);

        WaveVR_Utils.SendRenderEventNative(isleft ?
                                           WaveVR_Utils.k_nRenderEventID_SubmitL :
                                           WaveVR_Utils.k_nRenderEventID_SubmitR);

        WaveVR_Utils.SendRenderEventNative(isleft ?
                                           WaveVR_Utils.k_nRenderEventID_RenderEyeEndL :
                                           WaveVR_Utils.k_nRenderEventID_RenderEyeEndR);
        WaveVR_Utils.Trace.EndSection();
    }
// --- Exemplo n.º 16 ---
    // Builds this eye's projection matrix from the device-reported raw
    // projection bounds and the attached camera's clip planes.
    private Matrix4x4 GetProjection(WVR_Eye eye)
    {
        Log.d(LOG_TAG, "GetProjection()");
        var camera = GetComponent <Camera>();

        // Pick the raw bounds for this eye.  As before, any non-left value
        // (including Both/None) falls back to the right-eye bounds.
        // (The original allocated a throwaway float[4] that was immediately
        // overwritten; that dead allocation is removed.)
        float[] rect = (eye == WVR_Eye.WVR_Eye_Left) ? projRawL : projRawR;

        // The values in ProjectionRaw are made by assuming the near value is 1.
        return(MakeProjection(rect[0], rect[1], rect[2], rect[3], camera.nearClipPlane, camera.farClipPlane));
    }
 // Android backend wrappers: each override forwards directly to the native
 // WaveVR library entry point of the same name.
 // NOTE(review): the "static extern" declarations below would normally carry
 // [DllImport] attributes; they appear to have been stripped when this code
 // was extracted — confirm against the original source file.
 public override void RenderMask(WVR_Eye eye)
 {
     WVR_RenderMask_Android(eye);
 }
 public static extern void WVR_RenderMask_Android(WVR_Eye eye);
 // Per-eye setup before rendering into the given texture parameters.
 public override void PreRenderEye(WVR_Eye eye, [In, Out] WVR_TextureParams_t[] param, [In, Out] WVR_RenderFoveationParams[] foveationParams)
 {
     WVR_PreRenderEye_Android(eye, param, foveationParams);
 }
 public static extern void WVR_PreRenderEye_Android(WVR_Eye eye, [In, Out] WVR_TextureParams_t[] param, [In, Out] WVR_RenderFoveationParams[] foveationParams);
 // Submits a rendered eye texture (with optional pose state) to the runtime.
 public override WVR_SubmitError SubmitFrame(WVR_Eye eye, [In, Out] WVR_TextureParams_t[] param, [In, Out] WVR_PoseState_t[] pose, WVR_SubmitExtend extendMethod)
 {
     return(WVR_SubmitFrame_Android(eye, param, pose, extendMethod));
 }
 public static extern WVR_SubmitError WVR_SubmitFrame_Android(WVR_Eye eye, [In, Out] WVR_TextureParams_t[] param, [In, Out] WVR_PoseState_t[] pose, WVR_SubmitExtend extendMethod);
 // Eye-to-head transform for the requested degrees-of-freedom model.
 public override WVR_Matrix4f_t GetTransformFromEyeToHead(WVR_Eye eye, WVR_NumDoF dof)
 {
     return(WVR_GetTransformFromEyeToHead_Android(eye, dof));
 }
 public static extern WVR_Matrix4f_t WVR_GetTransformFromEyeToHead_Android(WVR_Eye eye, WVR_NumDoF dof);
 // Clipping plane boundary values for the given eye (written via ref params).
 public override void GetClippingPlaneBoundary(WVR_Eye eye, ref float left, ref float right, ref float top, ref float bottom)
 {
     WVR_GetClippingPlaneBoundary_Android(eye, ref left, ref right, ref top, ref bottom);
 }
 public static extern void WVR_GetClippingPlaneBoundary_Android(WVR_Eye eye, ref float left, ref float right, ref float top, ref float bottom);
 // Projection matrix for the given eye and near/far clip distances.
 public override WVR_Matrix4f_t GetProjection(WVR_Eye eye, float near, float far)
 {
     return(WVR_GetProjection_Android(eye, near, far));
 }
 public static extern WVR_Matrix4f_t WVR_GetProjection_Android(WVR_Eye eye, float near, float far);
// --- Exemplo n.º 29 ---
 // P/Invoke declaration for the native frame-submit entry point.
 // NOTE(review): the [DllImport] attribute appears to have been stripped when
 // this code was extracted — confirm against the original source file.
 public static extern uint WVR_SubmitFrame(WVR_Eye eye, ref WVR_TextureParams_t param, ref WVR_Pose_t pose, WVR_SubmitExtend extendMethod);
// --- Exemplo n.º 30 ---
 // Returns the current render texture for the requested eye.
 // Any value other than Left (including Both/None) yields the right-eye texture.
 public RenderTexture GetRenderTexture(WVR_Eye eye)
 {
     if (eye == WVR_Eye.WVR_Eye_Left)
     {
         return left.currentRt;
     }
     return right.currentRt;
 }