Example #1
0
    /// <summary>
    /// Computes and caches the render-target resolution and field of view for the given eye.
    /// </summary>
    private void ConfigureEyeDesc(OVREye eye)
    {
        Vector2 texSize = Vector2.zero;
        Vector2 fovSize = Vector2.zero;

#if !UNITY_ANDROID || UNITY_EDITOR
        // No headset present: nothing to query, so leave the existing entry untouched.
        if (!OVRManager.instance.isVRPresent)
        {
            return;
        }

        FovPort fovPort = OVRManager.capiHmd.GetDesc().DefaultEyeFov[(int)eye];
        // Symmetrize the FOV: use the larger tangent on each axis for both sides.
        fovPort.LeftTan = fovPort.RightTan = Mathf.Max(fovPort.LeftTan, fovPort.RightTan);
        fovPort.UpTan   = fovPort.DownTan = Mathf.Max(fovPort.UpTan, fovPort.DownTan);

        texSize = OVRManager.capiHmd.GetFovTextureSize((Ovr.Eye)eye, fovPort, OVRManager.instance.nativeTextureScale).ToVector2();
        // Convert half-angle tangents back to full angles in degrees.
        fovSize = new Vector2(2f * Mathf.Rad2Deg * Mathf.Atan(fovPort.LeftTan), 2f * Mathf.Rad2Deg * Mathf.Atan(fovPort.UpTan));
#else
        // Android fallback: fixed 1024x1024 target (scaled) and a 90-degree FOV.
        texSize = new Vector2(1024, 1024) * OVRManager.instance.nativeTextureScale;
        fovSize = new Vector2(90, 90);
#endif

        eyeDescs[(int)eye] = new EyeRenderDesc()
        {
            resolution = texSize,
            fov        = fovSize
        };
    }
Example #2
0
    /// <summary>
    /// Computes and caches the render-target size and field of view for the given eye,
    /// querying OVRPlugin directly for the eye texture size and frustum.
    /// </summary>
    private void ConfigureEyeDesc(OVREye eye)
    {
        Vector2 texSize = Vector2.zero;
        Vector2 fovSize = Vector2.zero;

#if !UNITY_ANDROID || UNITY_EDITOR
        // No headset present: nothing to query, so leave the existing entry untouched.
        if (!OVRManager.instance.isVRPresent)
        {
            return;
        }

        OVRPlugin.Sizei    size    = OVRPlugin.GetEyeTextureSize((OVRPlugin.Eye)eye);
        OVRPlugin.Frustumf frustum = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);

        texSize = new Vector2(size.w, size.h);
        // Frustum FOV values are in radians; convert to degrees.
        fovSize = Mathf.Rad2Deg * new Vector2(frustum.fovX, frustum.fovY);
#else
        // Android fallback: fixed 1024x1024 target (scaled) and a 90-degree FOV.
        texSize = new Vector2(1024, 1024) * OVRManager.instance.nativeTextureScale;
        fovSize = new Vector2(90, 90);
#endif

        eyeDescs[(int)eye] = new EyeRenderDesc()
        {
            resolution = texSize,
            fov        = fovSize
        };
    }
Example #3
0
    /// <summary>
    /// Finds (or creates) the anchor transform for the given eye under the given root,
    /// normalizes its name and parent, and resets its local pose.
    /// </summary>
    private Transform ConfigureEyeAnchor(Transform root, OVREye eye)
    {
        // Search order: child of root by current name, then anywhere under this
        // transform by current name, then by the legacy name; create if missing.
        string name = eye.ToString() + eyeAnchorName;

        Transform anchor = transform.Find(root.name + "/" + name);
        if (anchor == null)
        {
            anchor = transform.Find(name);
        }
        if (anchor == null)
        {
            anchor = transform.Find(legacyEyeAnchorName + eye.ToString());
        }
        if (anchor == null)
        {
            anchor = new GameObject(name).transform;
        }

        // Normalize the anchor's name, parenting, and local pose.
        anchor.name          = name;
        anchor.parent        = root;
        anchor.localScale    = Vector3.one;
        anchor.localPosition = Vector3.zero;
        anchor.localRotation = Quaternion.identity;

        return anchor;
    }
Example #4
0
    /// <summary>
    /// Computes and caches the render-target size and field of view for the given eye
    /// from the HMD description (fixed defaults on Android).
    /// </summary>
    private void ConfigureEyeDesc(OVREye eye)
    {
#if !UNITY_ANDROID || UNITY_EDITOR
        HmdDesc desc = OVRManager.capiHmd.GetDesc();
        FovPort fov  = desc.DefaultEyeFov[(int)eye];
        // Symmetrize the FOV: use the larger tangent on each axis for both sides.
        fov.LeftTan = fov.RightTan = Mathf.Max(fov.LeftTan, fov.RightTan);
        fov.UpTan   = fov.DownTan = Mathf.Max(fov.UpTan, fov.DownTan);

        // Configure Stereo settings. Default pixel density is one texel per pixel.
        float desiredPixelDensity = 1f;
        Sizei texSize             = OVRManager.capiHmd.GetFovTextureSize((Ovr.Eye)eye, fov, desiredPixelDensity);

        // Full FOV angles in degrees, derived from the half-angle tangents.
        float fovH = 2f * Mathf.Rad2Deg * Mathf.Atan(fov.LeftTan);
        float fovV = 2f * Mathf.Rad2Deg * Mathf.Atan(fov.UpTan);

        eyeDescs[(int)eye] = new EyeRenderDesc()
        {
            resolution = texSize.ToVector2(),
            fov        = new Vector2(fovH, fovV)
        };
#else
        // Android fallback: fixed 1024x1024 target and a 90-degree FOV.
        eyeDescs[(int)eye] = new EyeRenderDesc()
        {
            resolution = new Vector2(1024, 1024),
            fov        = new Vector2(90, 90)
        };
#endif
    }
Example #5
0
    /// <summary>
    /// Applies the per-eye render settings (FOV, aspect, viewport, target texture)
    /// to the camera attached to the given eye's anchor and returns it.
    /// </summary>
    private Camera ConfigureCamera(OVREye eye)
    {
        Camera cam = ((eye == OVREye.Left) ? leftEyeAnchor : rightEyeAnchor).GetComponent<Camera>();

        OVRDisplay.EyeRenderDesc desc = OVRManager.display.GetEyeRenderDesc(eye);
        float viewScale = OVRManager.instance.virtualTextureScale;

        cam.fieldOfView   = desc.fov.y;
        cam.aspect        = desc.resolution.x / desc.resolution.y;
        cam.rect          = new Rect(0f, 0f, viewScale, viewScale);
        cam.targetTexture = OVRManager.display.GetEyeTexture(eye);

        // AA is documented to have no effect in deferred, but it causes black screens.
        if (cam.actualRenderingPath == RenderingPath.DeferredLighting)
        {
            QualitySettings.antiAliasing = 0;
        }

#if !UNITY_ANDROID || UNITY_EDITOR
#if OVR_USE_PROJ_MATRIX
        cam.projectionMatrix = OVRManager.display.GetProjection((int)eye, cam.nearClipPlane, cam.farClipPlane);
#endif
#endif

        return cam;
    }
Example #6
0
    /// <summary>
    /// Signals the native plugin that rendering for the given eye has finished,
    /// passing the texture it rendered into. Android-only; a no-op elsewhere.
    /// </summary>
    public static void EndEye(OVREye eye, int eyeTextureId)
    {
#if UNITY_ANDROID && !UNITY_EDITOR
        RenderEventType eventType = (eye == OVREye.Left) ?
                                    RenderEventType.LeftEyeEndFrame :
                                    RenderEventType.RightEyeEndFrame;

        OVRPluginEvent.IssueWithData(eventType, eyeTextureId);
#endif
    }
Example #7
0
    /// <summary>
    /// Applies the per-eye render settings to the camera on the given eye's anchor:
    /// FOV, aspect, viewport, target texture, HDR, and (on Android) render order
    /// and clear behavior. Returns the configured camera.
    /// </summary>
    private Camera ConfigureCamera(OVREye eye)
    {
        Transform anchor = (eye == OVREye.Left) ? leftEyeAnchor : rightEyeAnchor;
        Camera    cam    = anchor.GetComponent <Camera>();

        OVRDisplay.EyeRenderDesc eyeDesc = OVRManager.display.GetEyeRenderDesc(eye);

        cam.fieldOfView   = eyeDesc.fov.y;
        cam.aspect        = eyeDesc.resolution.x / eyeDesc.resolution.y;
        cam.rect          = new Rect(0f, 0f, OVRManager.instance.virtualTextureScale, OVRManager.instance.virtualTextureScale);
        cam.targetTexture = OVRManager.display.GetEyeTexture(eye);
        cam.hdr           = OVRManager.instance.hdr;

#if UNITY_ANDROID && !UNITY_EDITOR
        // Enforce camera render order. The depth value doubles as the end-frame
        // event type, which OnPostRender reads back to identify the eye.
        cam.depth = (eye == OVREye.Left) ?
                    (int)RenderEventType.LeftEyeEndFrame :
                    (int)RenderEventType.RightEyeEndFrame;

        // If we don't clear the color buffer with a glClear, tiling GPUs
        // will be forced to do an "unresolve" and read back the color buffer information.
        // The clear is free on PowerVR, and possibly Mali, but it is a performance cost
        // on Adreno, and we would be better off if we had the ability to discard/invalidate
        // the color buffer instead of clearing.

        // NOTE: The color buffer is not being invalidated in skybox mode, forcing an additional,
        // wasted color buffer read before the skybox is drawn.
        bool hasSkybox = ((cam.clearFlags == CameraClearFlags.Skybox) &&
                          ((cam.gameObject.GetComponent <Skybox>() != null) || (RenderSettings.skybox != null)));
        cam.clearFlags = (hasSkybox) ? CameraClearFlags.Skybox : CameraClearFlags.SolidColor;
#endif

        // When rendering monoscopic, we will use the left camera render for both eyes.
        if (eye == OVREye.Right)
        {
            cam.enabled = !OVRManager.instance.monoscopic;
        }

        // AA is documented to have no effect in deferred, but it causes black screens.
        if (cam.actualRenderingPath == RenderingPath.DeferredLighting)
        {
            QualitySettings.antiAliasing = 0;
        }

#if !UNITY_ANDROID || UNITY_EDITOR
#if OVR_USE_PROJ_MATRIX
        cam.projectionMatrix = OVRManager.display.GetProjection((int)eye, cam.nearClipPlane, cam.farClipPlane);
#endif
#endif

        return(cam);
    }
Example #8
0
    /// <summary>
    /// Gets the pose of the given eye, predicted for the time when the current frame will scan out.
    /// </summary>
    /// <description>NOTE: This is safe to call in an Update function, but not in LateUpdate or subsequent callbacks.</description>
    public OVRPose GetEyePose(OVREye eye)
    {
#if !UNITY_ANDROID || UNITY_EDITOR
        // Without a headset, report an identity pose at the origin.
        if (!OVRManager.instance.isVRPresent)
        {
            return(new OVRPose
            {
                position = Vector3.zero,
                orientation = Quaternion.identity,
            });
        }

        // While time warp is frozen, keep returning the cached pose so the view
        // stays pinned; otherwise refresh the cache from the plugin.
        bool updateEyePose = !(OVRManager.instance.timeWarp && OVRManager.instance.freezeTimeWarp);
        if (updateEyePose)
        {
            eyePoses[(int)eye] = OVRPlugin.GetEyePose((OVRPlugin.Eye)eye).ToOVRPose();
        }

        return(eyePoses[(int)eye]);
#else
        // Android: the sensor is sampled only on the left-eye call, into shared
        // w/x/y/z/fov fields declared elsewhere; the right-eye call reuses them.
        if (eye == OVREye.Left)
        {
            OVR_GetSensorState(
                OVRManager.instance.monoscopic,
                ref w,
                ref x,
                ref y,
                ref z,
                ref fov,
                ref timeWarpViewNumber);
        }

        // Quaternion built from the sensor values; the sign flips presumably
        // convert between sensor and Unity coordinate conventions — TODO confirm.
        Quaternion rot = new Quaternion(-x, -y, z, w);

        // Offset each eye horizontally by half the interpupillary distance.
        float eyeOffsetX = 0.5f * OVRManager.profile.ipd;
        eyeOffsetX = (eye == OVREye.Left) ? -eyeOffsetX : eyeOffsetX;

        // Head-neck model: rotate the eye offset plus the neck pivot by head orientation.
        float   neckToEyeHeight = OVRManager.profile.eyeHeight - OVRManager.profile.neckHeight;
        Vector3 headNeckModel   = new Vector3(0.0f, neckToEyeHeight, OVRManager.profile.eyeDepth);
        Vector3 pos             = rot * (new Vector3(eyeOffsetX, 0.0f, 0.0f) + headNeckModel);

        // Subtract the HNM pivot to avoid translating the camera when level
        pos -= headNeckModel;

        return(new OVRPose
        {
            position = pos,
            orientation = rot,
        });
#endif
    }
    // Android-only: after an eye camera finishes rendering, invoke any custom
    // post-render hooks, then signal the plugin that the eye frame has ended.
    // The eye is recovered from Camera.current.depth, which ConfigureCamera set
    // to the matching RenderEventType value.
    void OnPostRender()
    {
#if UNITY_ANDROID && !UNITY_EDITOR
        // Allow custom code to render before we kick off the plugin
        if (OnCustomPostRender != null)
        {
            OnCustomPostRender();
        }

        OVREye eye = ((RenderEventType)Camera.current.depth == RenderEventType.RightEyeEndFrame) ?
                     OVREye.Right : OVREye.Left;
        OVRManager.EndEye(eye);
#endif
    }
Example #10
0
    /// <summary>
    /// Creates the render texture for the given eye within the given buffer,
    /// scaled from the eye's native resolution, and records its native ID.
    /// </summary>
    private void ConfigureEyeTexture(int eyeBufferIndex, OVREye eye, float scale)
    {
        int eyeIndex = eyeBufferIndex + (int)eye;
        EyeRenderDesc desc = eyeDescs[(int)eye];

        // Scale the native resolution and truncate to whole pixels.
        int width  = (int)(desc.resolution.x * scale);
        int height = (int)(desc.resolution.y * scale);

        RenderTexture texture = new RenderTexture(width, height, OVRManager.instance.eyeTextureDepth, OVRManager.instance.eyeTextureFormat);

        // Treat the "AA disabled" quality setting (0) as a single sample.
        int aa = QualitySettings.antiAliasing;
        texture.antiAliasing = (aa != 0) ? aa : 1;
        texture.Create();

        eyeTextures[eyeIndex]   = texture;
        eyeTextureIds[eyeIndex] = texture.GetNativeTextureID();
    }
Example #11
0
    /// <summary>
    /// Creates the render texture for the given eye within the given buffer,
    /// using the cached eye description and manager settings, and records its native ID.
    /// </summary>
    private void ConfigureEyeTexture(int eyeBufferIndex, OVREye eye)
    {
        int eyeIndex = eyeBufferIndex + (int)eye;
        EyeRenderDesc desc = eyeDescs[(int)eye];

        RenderTexture texture = new RenderTexture(
            (int)desc.resolution.x,
            (int)desc.resolution.y,
            (int)OVRManager.instance.eyeTextureDepth,
            OVRManager.instance.eyeTextureFormat);
        texture.antiAliasing = (int)OVRManager.instance.eyeTextureAntiAliasing;
        texture.Create();

        eyeTextures[eyeIndex]   = texture;
        eyeTextureIds[eyeIndex] = texture.GetNativeTextureID();
    }
Example #12
0
    /// <summary>
    /// Applies the per-eye render settings (FOV, aspect, viewport, target texture)
    /// to the camera on the given eye's anchor and returns it.
    /// </summary>
    private Camera ConfigureCamera(OVREye eye)
    {
        // Resolve the anchor for this eye and the camera attached to it.
        Transform anchor = (eye == OVREye.Left) ? leftEyeAnchor : rightEyeAnchor;
        Camera cam = anchor.GetComponent<Camera>();

        OVRDisplay.EyeRenderDesc desc = OVRManager.display.GetEyeRenderDesc(eye);
        float viewScale = OVRManager.instance.virtualTextureScale;

        cam.fieldOfView   = desc.fov.y;
        cam.aspect        = desc.resolution.x / desc.resolution.y;
        cam.rect          = new Rect(0f, 0f, viewScale, viewScale);
        cam.targetTexture = OVRManager.display.GetEyeTexture(eye);

#if !UNITY_ANDROID || UNITY_EDITOR
#if OVR_USE_PROJ_MATRIX
        cam.projectionMatrix = OVRManager.display.GetProjection((int)eye, cam.nearClipPlane, cam.farClipPlane);
#endif
#endif

        return cam;
    }
Example #13
0
    /// <summary>
    /// Finds (or creates) the anchor transform for the given eye and resets its local pose.
    /// </summary>
    /// <param name="eye">Which eye's anchor to configure.</param>
    /// <returns>The anchor transform, parented under this transform.</returns>
    private Transform ConfigureEyeAnchor(OVREye eye)
    {
        string    name   = eye.ToString() + "EyeAnchor";
        Transform anchor = transform.Find(name);

        // Fall back to the legacy "Camera<Eye>" naming used by older rigs.
        if (anchor == null)
        {
            string oldName = "Camera" + eye.ToString();
            anchor = transform.Find(oldName);
        }

        if (anchor == null)
        {
            anchor = new GameObject(name).transform;
        }

        // Rename legacy anchors so future lookups hit the primary name first,
        // consistent with the root-based ConfigureEyeAnchor overload.
        anchor.name          = name;
        anchor.parent        = transform;
        anchor.localScale    = Vector3.one;
        anchor.localPosition = Vector3.zero;
        anchor.localRotation = Quaternion.identity;

        return(anchor);
    }
Example #14
0
    /// <summary>
    /// Applies the per-eye render settings to the camera attached to the given
    /// eye's anchor and returns it. Disables MSAA when deferred lighting is active.
    /// </summary>
    private Camera ConfigureCamera(OVREye eye)
    {
        bool isLeft = (eye == OVREye.Left);
        Camera cam = (isLeft ? leftEyeAnchor : rightEyeAnchor).GetComponent<Camera>();

        OVRDisplay.EyeRenderDesc desc = OVRManager.display.GetEyeRenderDesc(eye);
        float s = OVRManager.instance.virtualTextureScale;

        cam.fieldOfView = desc.fov.y;
        cam.aspect = desc.resolution.x / desc.resolution.y;
        cam.rect = new Rect(0f, 0f, s, s);
        cam.targetTexture = OVRManager.display.GetEyeTexture(eye);

        // AA is documented to have no effect in deferred, but it causes black screens.
        if (cam.actualRenderingPath == RenderingPath.DeferredLighting)
        {
            QualitySettings.antiAliasing = 0;
        }

        #if !UNITY_ANDROID || UNITY_EDITOR
        #if OVR_USE_PROJ_MATRIX
        cam.projectionMatrix = OVRManager.display.GetProjection((int)eye, cam.nearClipPlane, cam.farClipPlane);
        #endif
        #endif

        return cam;
    }
Example #15
0
    /// <summary>
    /// Gets the pose of the given eye, predicted for the time when the current frame will scan out.
    /// </summary>
    public OVRPose GetEyePose(OVREye eye)
    {
#if !UNITY_ANDROID || UNITY_EDITOR
        // While time warp is frozen, return the cached pose; otherwise refresh it.
        bool updateEyePose = !(OVRManager.instance.timeWarp && OVRManager.instance.freezeTimeWarp);
        if (updateEyePose)
        {
            eyePoses[(int)eye] = OVR_GetRenderPose(frameCount, (int)eye).ToPose();
        }

        return(eyePoses[(int)eye]);
#else
        float w = 0, x = 0, y = 0, z = 0;
        float fov = 90.0f;

        // Sample the sensor state (non-monoscopic) into the locals above.
        OVR_GetSensorState(
            false,
            ref w,
            ref x,
            ref y,
            ref z,
            ref fov,
            ref OVRManager.timeWarpViewNumber);

        // Quaternion built from the sensor values; the sign flips presumably
        // convert between sensor and Unity coordinate conventions — TODO confirm.
        Quaternion rot = new Quaternion(-x, -y, z, w);

        // Offset each eye horizontally by half the interpupillary distance.
        float eyeOffsetX = 0.5f * OVRManager.profile.ipd;
        eyeOffsetX = (eye == OVREye.Left) ? -eyeOffsetX : eyeOffsetX;
        Vector3 pos = rot * new Vector3(eyeOffsetX, 0.0f, 0.0f);

        return(new OVRPose
        {
            position = pos,
            orientation = rot,
        });
#endif
    }
Example #16
0
    /// <summary>
    /// Signals the native plugin that rendering for the given eye has finished,
    /// looking up the eye's current texture ID itself. Android-only; a no-op elsewhere.
    /// </summary>
    public static void EndEye(OVREye eye)
    {
#if UNITY_ANDROID && !UNITY_EDITOR
		RenderEventType eventType = (eye == OVREye.Left) ?
			RenderEventType.LeftEyeEndFrame :
			RenderEventType.RightEyeEndFrame;
		int eyeTextureId = display.GetEyeTextureId(eye);

		OVRPluginEvent.IssueWithData(eventType, eyeTextureId);
#endif
    }
Example #17
0
    /// <summary>
    /// Gets the pose of the given eye, predicted for the time when the current frame will scan out.
    /// </summary>
    public OVRPose GetEyePose(OVREye eye)
    {
        #if !UNITY_ANDROID || UNITY_EDITOR
        // While time warp is frozen, return the cached pose; otherwise refresh it.
        bool updateEyePose = !(OVRManager.instance.timeWarp && OVRManager.instance.freezeTimeWarp);
        if (updateEyePose)
        {
            eyePoses[(int)eye] = OVR_GetRenderPose(frameCount, (int)eye).ToPose();
        }

        return eyePoses[(int)eye];
        #else
        // Android: the sensor is sampled only on the left-eye call, into shared
        // w/x/y/z/fov fields declared elsewhere; the right-eye call reuses them.
        if (eye == OVREye.Left)
            OVR_GetSensorState(
                    OVRManager.instance.monoscopic,
                   	ref w,
                   	ref x,
                   	ref y,
                   	ref z,
                   	ref fov,
                   	ref OVRManager.timeWarpViewNumber);

        // Quaternion built from the sensor values; the sign flips presumably
        // convert between sensor and Unity coordinate conventions — TODO confirm.
        Quaternion rot = new Quaternion(-x, -y, z, w);

        // Offset each eye horizontally by half the interpupillary distance.
        float eyeOffsetX = 0.5f * OVRManager.profile.ipd;
        eyeOffsetX = (eye == OVREye.Left) ? -eyeOffsetX : eyeOffsetX;

        // Head-neck model: rotate the eye offset plus the neck pivot by head orientation.
        float neckToEyeHeight = OVRManager.profile.eyeHeight - OVRManager.profile.neckHeight;
        Vector3 headNeckModel = new Vector3(0.0f, neckToEyeHeight, OVRManager.profile.eyeDepth);
        Vector3 pos = rot * (new Vector3(eyeOffsetX, 0.0f, 0.0f) + headNeckModel);

        // Subtract the HNM pivot to avoid translating the camera when level
        pos -= headNeckModel;

        return new OVRPose
        {
            position = pos,
            orientation = rot,
        };
        #endif
    }
Example #18
0
	/// <summary>
	/// Finds (or creates) the anchor transform for the given eye under the given root,
	/// normalizes its name and parent, and resets its local pose.
	/// </summary>
	private Transform ConfigureEyeAnchor(Transform root, OVREye eye)
	{
		// Search order: child of root by current name, then anywhere under this
		// transform by current name, then by the legacy name; create if missing.
		string name = eye.ToString() + eyeAnchorName;

		Transform anchor = transform.Find(root.name + "/" + name);
		if (anchor == null)
			anchor = transform.Find(name);
		if (anchor == null)
			anchor = transform.Find(legacyEyeAnchorName + eye.ToString());
		if (anchor == null)
			anchor = new GameObject(name).transform;

		// Normalize the anchor's name, parenting, and local pose.
		anchor.name = name;
		anchor.parent = root;
		anchor.localScale = Vector3.one;
		anchor.localPosition = Vector3.zero;
		anchor.localRotation = Quaternion.identity;

		return anchor;
	}
Example #19
0
	/// <summary>
	/// Applies the per-eye render settings to the camera on the given eye's anchor:
	/// FOV, aspect, viewport, target texture, HDR, and (on Android) render order
	/// and clear behavior. Returns the configured camera.
	/// </summary>
	private Camera ConfigureCamera(OVREye eye)
	{
		Transform anchor = (eye == OVREye.Left) ? leftEyeAnchor : rightEyeAnchor;
		Camera cam = anchor.GetComponent<Camera>();

		OVRDisplay.EyeRenderDesc eyeDesc = OVRManager.display.GetEyeRenderDesc(eye);

		cam.fieldOfView = eyeDesc.fov.y;
		cam.aspect = eyeDesc.resolution.x / eyeDesc.resolution.y;
		cam.rect = new Rect(0f, 0f, OVRManager.instance.virtualTextureScale, OVRManager.instance.virtualTextureScale);
		cam.targetTexture = OVRManager.display.GetEyeTexture(eye);
		cam.hdr = OVRManager.instance.hdr;

#if UNITY_ANDROID && !UNITY_EDITOR
		// Enforce camera render order. The depth value doubles as the end-frame
		// event type, which can be read back to identify the eye after rendering.
		cam.depth = (eye == OVREye.Left) ?
				(int)RenderEventType.LeftEyeEndFrame :
				(int)RenderEventType.RightEyeEndFrame;

		// If we don't clear the color buffer with a glClear, tiling GPUs
		// will be forced to do an "unresolve" and read back the color buffer information.
		// The clear is free on PowerVR, and possibly Mali, but it is a performance cost
		// on Adreno, and we would be better off if we had the ability to discard/invalidate
		// the color buffer instead of clearing.

		// NOTE: The color buffer is not being invalidated in skybox mode, forcing an additional,
		// wasted color buffer read before the skybox is drawn.
		bool hasSkybox = ((cam.clearFlags == CameraClearFlags.Skybox) &&
		                 ((cam.gameObject.GetComponent<Skybox>() != null) || (RenderSettings.skybox != null)));
		cam.clearFlags = (hasSkybox) ? CameraClearFlags.Skybox : CameraClearFlags.SolidColor;
#endif

		// When rendering monoscopic, we will use the left camera render for both eyes.
		if (eye == OVREye.Right)
		{
			cam.enabled = !OVRManager.instance.monoscopic;
		}

		// AA is documented to have no effect in deferred, but it causes black screens.
		if (cam.actualRenderingPath == RenderingPath.DeferredLighting)
			QualitySettings.antiAliasing = 0;

#if !UNITY_ANDROID || UNITY_EDITOR
#if OVR_USE_PROJ_MATRIX
		cam.projectionMatrix = OVRManager.display.GetProjection((int)eye, cam.nearClipPlane, cam.farClipPlane);
#endif
#endif

		return cam;
	}
Example #20
0
    /// <summary>
    /// Computes and caches the render-target resolution and field of view for the given eye.
    /// </summary>
    private void ConfigureEyeDesc(OVREye eye)
    {
        Vector2 texSize = Vector2.zero;
        Vector2 fovSize = Vector2.zero;

        #if !UNITY_ANDROID || UNITY_EDITOR
        FovPort fovPort = OVRManager.capiHmd.GetDesc().DefaultEyeFov[(int)eye];
        // Symmetrize the FOV: use the larger tangent on each axis for both sides.
        fovPort.LeftTan = fovPort.RightTan = Mathf.Max(fovPort.LeftTan, fovPort.RightTan);
        fovPort.UpTan = fovPort.DownTan = Mathf.Max(fovPort.UpTan, fovPort.DownTan);

        texSize = OVRManager.capiHmd.GetFovTextureSize((Ovr.Eye)eye, fovPort, OVRManager.instance.nativeTextureScale).ToVector2();
        // Convert half-angle tangents back to full angles in degrees.
        fovSize = new Vector2(2f * Mathf.Rad2Deg * Mathf.Atan(fovPort.LeftTan), 2f * Mathf.Rad2Deg * Mathf.Atan(fovPort.UpTan));
        #else
        // Android fallback: fixed 1024x1024 target (scaled) and a 90-degree FOV.
        texSize = new Vector2(1024, 1024) * OVRManager.instance.nativeTextureScale;
        fovSize = new Vector2(90, 90);
        #endif

        eyeDescs[(int)eye] = new EyeRenderDesc()
        {
            resolution = texSize,
            fov = fovSize
        };
    }
Example #21
0
    /// <summary>
    /// Creates the render texture for the given eye within the given buffer,
    /// using the cached eye description and manager settings, and records its native ID.
    /// </summary>
    private void ConfigureEyeTexture(int eyeBufferIndex, OVREye eye)
    {
        // Index into the texture arrays for this buffer/eye pair.
        int eyeIndex = eyeBufferIndex + (int)eye;
        EyeRenderDesc desc = eyeDescs[(int)eye];

        int width  = (int)desc.resolution.x;
        int height = (int)desc.resolution.y;
        int depth  = (int)OVRManager.instance.eyeTextureDepth;

        RenderTexture texture = new RenderTexture(width, height, depth, OVRManager.instance.eyeTextureFormat);
        texture.antiAliasing = (int)OVRManager.instance.eyeTextureAntiAliasing;
        texture.Create();

        eyeTextures[eyeIndex] = texture;
        eyeTextureIds[eyeIndex] = texture.GetNativeTextureID();
    }
Example #22
0
 /// <summary>
 /// Gets the currently active render texture for the given eye.
 /// </summary>
 public RenderTexture GetEyeTexture(OVREye eye)
 {
     int index = currEyeTextureIdx + (int)eye;
     return eyeTextures[index];
 }
Example #23
0
 /// <summary>
 /// Gets the currently active render texture's native ID for the given eye.
 /// </summary>
 public int GetEyeTextureId(OVREye eye)
 {
     int index = currEyeTextureIdx + (int)eye;
     return eyeTextureIds[index];
 }
Example #24
0
    /// <summary>
    /// Computes and caches the render-target size and field of view for the given eye,
    /// querying OVRPlugin directly for the eye texture size and frustum.
    /// </summary>
    private void ConfigureEyeDesc(OVREye eye)
    {
        Vector2 texSize = Vector2.zero;
        Vector2 fovSize = Vector2.zero;

        #if !UNITY_ANDROID || UNITY_EDITOR
        // No headset present: nothing to query, so leave the existing entry untouched.
        if (!OVRManager.instance.isVRPresent)
            return;

        OVRPlugin.Sizei size = OVRPlugin.GetEyeTextureSize((OVRPlugin.Eye)eye);
        OVRPlugin.Frustumf frustum = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);

        texSize = new Vector2(size.w, size.h);
        // Frustum FOV values are in radians; convert to degrees.
        fovSize = Mathf.Rad2Deg * new Vector2(frustum.fovX, frustum.fovY);
        #else
        // Android fallback: fixed 1024x1024 target (scaled) and a 90-degree FOV.
        texSize = new Vector2(1024, 1024) * OVRManager.instance.nativeTextureScale;
        fovSize = new Vector2(90, 90);
        #endif

        eyeDescs[(int)eye] = new EyeRenderDesc()
        {
            resolution = texSize,
            fov = fovSize
        };
    }
Example #25
0
 // Display is not created by OVRManager, so this method queries the render pose
 // for the current frame directly.
 public OVRPose GetEyePose(OVREye eye)
 {
     int frame = Time.frameCount;
     return OVR_GetRenderPose(frame, (int)eye).ToPose();
 }
Example #26
0
    /// <summary>
    /// Applies the per-eye render settings (FOV, aspect, viewport, target texture)
    /// to the camera attached to the given eye's anchor and returns it.
    /// </summary>
    private Camera ConfigureCamera(OVREye eye)
    {
        Transform eyeAnchor = (eye == OVREye.Left) ? leftEyeAnchor : rightEyeAnchor;
        Camera cam = eyeAnchor.GetComponent<Camera>();

        OVRDisplay.EyeRenderDesc desc = OVRManager.display.GetEyeRenderDesc(eye);
        float viewScale = OVRManager.instance.virtualTextureScale;

        cam.fieldOfView = desc.fov.y;
        cam.aspect = desc.resolution.x / desc.resolution.y;
        cam.rect = new Rect(0f, 0f, viewScale, viewScale);
        cam.targetTexture = OVRManager.display.GetEyeTexture(eye);

        #if !UNITY_ANDROID || UNITY_EDITOR
        #if OVR_USE_PROJ_MATRIX
        cam.projectionMatrix = OVRManager.display.GetProjection((int)eye, cam.nearClipPlane, cam.farClipPlane);
        #endif
        #endif

        return cam;
    }
 /// <summary>
 /// Gets the currently active render texture for the given eye.
 /// </summary>
 public RenderTexture GetEyeTexture(OVREye eye)
 {
     int slot = currEyeTextureIdx + (int)eye;
     return eyeTextures[slot];
 }
Example #28
0
	/// <summary>
	/// Finds (or creates) the anchor transform for the given eye and resets its local pose.
	/// </summary>
	private Transform ConfigureEyeAnchor(OVREye eye)
	{
		string name = eye.ToString() + "EyeAnchor";
		Transform anchor = transform.Find(name);

		if (anchor == null)
		{
			// Fall back to the legacy "Camera<Eye>" naming used by older rigs.
			string oldName = "Camera" + eye.ToString();
			anchor = transform.Find(oldName);
		}

		if (anchor == null)
			anchor = new GameObject(name).transform;

		// Rename legacy anchors so future lookups hit the primary name first,
		// consistent with the root-based ConfigureEyeAnchor overload.
		anchor.name = name;
		anchor.parent = transform;
		anchor.localScale = Vector3.one;
		anchor.localPosition = Vector3.zero;
		anchor.localRotation = Quaternion.identity;

		return anchor;
	}
	/// <summary>
	/// Gets the pose of the given eye, predicted for the time when the current frame will scan out.
	/// </summary>
	public OVRPose GetEyePose(OVREye eye)
	{
#if !UNITY_ANDROID || UNITY_EDITOR
		// While time warp is frozen, return the cached pose; otherwise refresh it.
		bool updateEyePose = !(OVRManager.instance.timeWarp && OVRManager.instance.freezeTimeWarp);
		if (updateEyePose)
		{
			eyePoses[(int)eye] = OVR_GetRenderPose(frameCount, (int)eye).ToPose();
		}

		return eyePoses[(int)eye];
#else
		// Android: the sensor is sampled only on the left-eye call, into shared
		// w/x/y/z/fov fields declared elsewhere; the right-eye call reuses them.
		if (eye == OVREye.Left)
			OVR_GetSensorState(
					false,
				   	ref w,
				   	ref x,
				   	ref y,
				   	ref z,
				   	ref fov,
				   	ref OVRManager.timeWarpViewNumber);

		// Quaternion built from the sensor values; the sign flips presumably
		// convert between sensor and Unity coordinate conventions — TODO confirm.
		Quaternion rot = new Quaternion(-x, -y, z, w);

		// Offset each eye horizontally by half the interpupillary distance.
		float eyeOffsetX = 0.5f * OVRManager.profile.ipd;
		eyeOffsetX = (eye == OVREye.Left) ? -eyeOffsetX : eyeOffsetX;
		Vector3 pos = rot * new Vector3(eyeOffsetX, 0.0f, 0.0f);

		return new OVRPose
		{
			position = pos,
			orientation = rot,
		};
#endif
	}
Example #30
0
 /// <summary>
 /// Gets the resolution and field of view for the given eye.
 /// </summary>
 public EyeRenderDesc GetEyeRenderDesc(OVREye eye)
 {
     int index = (int)eye;
     return eyeDescs[index];
 }
 /// <summary>
 /// Gets the resolution and field of view for the given eye.
 /// </summary>
 public EyeRenderDesc GetEyeRenderDesc(OVREye eye)
 {
     int slot = (int)eye;
     return eyeDescs[slot];
 }
	/// <summary>
	/// Computes and caches the render-target size and field of view for the given eye
	/// from the HMD description (fixed defaults on Android).
	/// </summary>
	private void ConfigureEyeDesc(OVREye eye)
	{
#if !UNITY_ANDROID || UNITY_EDITOR
		HmdDesc desc = OVRManager.capiHmd.GetDesc();
		FovPort fov = desc.DefaultEyeFov[(int)eye];
		// Symmetrize the FOV: use the larger tangent on each axis for both sides.
		fov.LeftTan = fov.RightTan = Mathf.Max(fov.LeftTan, fov.RightTan);
		fov.UpTan = fov.DownTan = Mathf.Max(fov.UpTan, fov.DownTan);

		// Configure Stereo settings. Default pixel density is one texel per pixel.
		float desiredPixelDensity = 1f;
		Sizei texSize = OVRManager.capiHmd.GetFovTextureSize((Ovr.Eye)eye, fov, desiredPixelDensity);

		// Full FOV angles in degrees, derived from the half-angle tangents.
		float fovH = 2f * Mathf.Rad2Deg * Mathf.Atan(fov.LeftTan);
		float fovV = 2f * Mathf.Rad2Deg * Mathf.Atan(fov.UpTan);

		eyeDescs[(int)eye] = new EyeRenderDesc()
		{
			resolution = texSize.ToVector2(),
					   fov = new Vector2(fovH, fovV)
		};
#else
		// Android fallback: fixed 1024x1024 target and a 90-degree FOV.
		eyeDescs[(int)eye] = new EyeRenderDesc()
		{
			resolution = new Vector2(1024, 1024),
					   fov = new Vector2(90, 90)
		};
#endif
	}
 /// <summary>
 /// Gets the currently active render texture's native ID for the given eye.
 /// </summary>
 public int GetEyeTextureId(OVREye eye)
 {
     int slot = currEyeTextureIdx + (int)eye;
     return eyeTextureIds[slot];
 }
	/// <summary>
	/// Creates the render texture for the given eye within the given buffer,
	/// scaled from the eye's native resolution, and records its native ID.
	/// </summary>
	private void ConfigureEyeTexture(int eyeBufferIndex, OVREye eye, float scale)
	{
		int eyeIndex = eyeBufferIndex + (int)eye;
		EyeRenderDesc desc = eyeDescs[(int)eye];

		// Scale the native resolution and truncate to whole pixels.
		int width = (int)(desc.resolution.x * scale);
		int height = (int)(desc.resolution.y * scale);

		RenderTexture texture = new RenderTexture(width, height, OVRManager.instance.eyeTextureDepth, OVRManager.instance.eyeTextureFormat);

		// Treat the "AA disabled" quality setting (0) as a single sample.
		int aa = QualitySettings.antiAliasing;
		texture.antiAliasing = (aa != 0) ? aa : 1;
		texture.Create();

		eyeTextures[eyeIndex] = texture;
		eyeTextureIds[eyeIndex] = texture.GetNativeTextureID();
	}