// Convert an OSVR.ClientKit.Viewport (pixel coordinates) into a Unity Rect
// (normalized 0..1 coordinates), accounting for the number of video inputs.
public static Rect ConvertViewport(OSVR.ClientKit.Viewport viewport, OSVR.ClientKit.DisplayDimensions surfaceDisplayDimensions, int numDisplayInputs, int eyeIndex, int totalDisplayWidth)
{
    // Unity expects normalized coordinates, not pixel coordinates.
    float surfaceWidth = (float)surfaceDisplayDimensions.Width;
    float surfaceHeight = (float)surfaceDisplayDimensions.Height;

    switch (numDisplayInputs)
    {
        case 1:
            return new Rect(
                (float)viewport.Left / surfaceWidth,
                (float)viewport.Bottom / surfaceHeight,
                (float)viewport.Width / surfaceWidth,
                (float)viewport.Height / surfaceHeight);

        case 2:
            // With two inputs in fullscreen mode, viewports expect to fill the screen.
            // Unity can only output to one window, so we offset the right eye by half
            // the total width of the displays.
            float left = (eyeIndex == 0)
                ? 0f
                : 0.5f + (float)viewport.Left / (float)totalDisplayWidth;
            return new Rect(
                left,
                (float)viewport.Bottom / surfaceHeight,
                (float)viewport.Width / (float)totalDisplayWidth,
                (float)viewport.Height / surfaceHeight);

        default:
            Debug.LogError("[OSVR-Unity] More than two video inputs is not supported. Using default viewport.");
            return new Rect(0, 0, 0.5f, 1f);
    }
}
/// <summary>
/// Creates a K1RadialDistortion effect and adds it as a component to the given eye, if possible.
/// </summary>
/// <param name="surface">VRSurface to apply the effect to.</param>
/// <returns>K1RadialDistortion object for parameter setting, or null if not supported</returns>
public K1RadialDistortion GetOrCreateDistortion(OSVR.Unity.VRSurface surface)
{
    K1RadialDistortion ret = surface.DistortionEffect;
    if (!Supported)
    {
        // Distortion unsupported on this setup: disable any stale component
        // and report null. (Uses Unity's overloaded bool operator, which is
        // false for destroyed Objects as well as for null.)
        if (ret)
        {
            // shouldn't be able to get here but...
            ret.enabled = false;
            ret = null;
        }
        return ret;
    }
    if (ret == null)
    {
        // First request for this surface: attach the component, cache it on the
        // surface, and build the distortion material from the factory's shader.
        ret = surface.gameObject.AddComponent<K1RadialDistortion>();
        surface.DistortionEffect = ret;
        ret.hideFlags = HideFlags.HideAndDontSave;
        ret.DistortionMaterial = new Material(DistortionShader);
        if (!ret.DistortionMaterial)
        {
            /// weird error case, shouldn't get here.
            Debug.LogWarning("Couldn't create material in OSVR distortion shader factory - shouldn't be able to happen!");
            ret.enabled = false;
            return null;
        }
        ret.DistortionMaterial.hideFlags = HideFlags.HideAndDontSave;
    }
    else
    {
        // Re-using an existing component: only enable it if its material is intact.
        ret.enabled = (ret.DistortionMaterial != null);
    }
    return ret;
}
// Position tracker callback: converts the OSVR-space position report into
// Unity space and applies it.
// When THREAD_ACTIVATED is defined the callback may fire off the main thread,
// so the converted value is published under a lock and the consumer signalled
// via NEW_DATA_EVENT; otherwise the transform is updated directly.
private void callback(IntPtr userdata, ref OSVR.ClientKit.TimeValue timestamp, ref OSVR.ClientKit.PositionReport report)
{
#if THREAD_ACTIVATED
    var temp = Math.ConvertPosition(report.xyz);
    lock (LOCKER)
    {
        data = temp;
    }
    NEW_DATA_EVENT.Set();
#else
    transform.localPosition = Math.ConvertPosition(report.xyz);
#endif
}
// Updates the position and rotation of the eye.
// Optionally, update the viewer associated with this eye.
// RenderManager produces the eyeFromSpace transform, but Unity's camera wants
// the inverse of that, so the RenderManager path inverts the pose.
public void UpdateEyePose(OSVR.ClientKit.Pose3 eyePose)
{
    // Convert from OSVR space into Unity space.
    Vector3 pos = Math.ConvertPosition(eyePose.translation);
    Quaternion rot = Math.ConvertOrientation(eyePose.rotation);

    if (Viewer.DisplayController.UseRenderManager)
    {
        // Invert the transformation. Compute the inverse rotation once and
        // reuse it for both the rotation and the rotated, negated translation
        // (the original computed Quaternion.Inverse(rot) twice).
        Quaternion invRot = Quaternion.Inverse(rot);
        cachedTransform.localRotation = invRot;
        cachedTransform.localPosition = invRot * (-pos);
    }
    else
    {
        cachedTransform.localPosition = pos;
        cachedTransform.localRotation = rot;
    }
}
// Convert OSVR.ClientKit.Matrix44f to Unity Matrix4x4.
// The source elements M0..M15 are laid out so that each group of four
// consecutive values forms one column of the destination matrix.
public static Matrix4x4 ConvertMatrix(OSVR.ClientKit.Matrix44f matrix)
{
    Matrix4x4 result = new Matrix4x4();
    result.SetColumn(0, new Vector4(matrix.M0, matrix.M1, matrix.M2, matrix.M3));
    result.SetColumn(1, new Vector4(matrix.M4, matrix.M5, matrix.M6, matrix.M7));
    result.SetColumn(2, new Vector4(matrix.M8, matrix.M9, matrix.M10, matrix.M11));
    result.SetColumn(3, new Vector4(matrix.M12, matrix.M13, matrix.M14, matrix.M15));
    return result;
}
// Convert an OSVR quaternion into Unity's convention (negated y and z),
// then compose with a 90-degree rotation about Y.
// Fix: the original used the literal 0.707f for sqrt(1/2), which leaves the
// composed quaternion slightly non-unit-length; use the full-precision float
// value so the 90-degree Y rotation is a proper unit quaternion.
public static Quaternion ConvertOrientation2(OSVR.ClientKit.Quaternion quat)
{
    const float HalfSqrt2 = 0.70710678f; // sqrt(0.5) at float precision
    return new Quaternion((float)quat.x, -(float)quat.y, -(float)quat.z, (float)quat.w)
        * new Quaternion(0, HalfSqrt2, 0, HalfSqrt2);
}
// Set the camera's viewport rect.
// Stores the given OSVR viewport in the Viewport property for later use.
public void SetViewport(OSVR.ClientKit.Viewport viewport)
{
    Viewport = viewport;
}
// Given distortion parameters, set up the appropriate distortion method.
// @todo this should be more generalized when we have more distortion options
public void SetDistortion(OSVR.ClientKit.RadialDistortionParameters distortionParameters)
{
    var k1 = distortionParameters.k1;
    var projectionCenter = distortionParameters.centerOfProjection;
    Vector2 distortionCenter = new Vector2((float)projectionCenter.x, (float)projectionCenter.y);
    //@todo figure out which type of distortion to use
    //right now, there is only one option
    SetK1RadialDistortion((float)k1.x, (float)k1.y, (float)k1.z, distortionCenter);
}
/// <summary>
/// Pose (as a 4x4 matrix) wrapper callback, interfacing Managed-OSVR's signatures and more Unity-native datatypes, including coordinate system conversion.
/// </summary>
/// <param name="userdata">Unused</param>
/// <param name="timestamp">Unused</param>
/// <param name="report">Tracker pose report</param>
private void PoseMatrixCb(System.IntPtr userdata, ref OSVR.ClientKit.TimeValue timestamp, ref OSVR.ClientKit.PoseReport report)
{
    if (poseMatrixCallbacks == null)
    {
        return;
    }
    poseMatrixCallbacks(path, Math.ConvertPose(report.pose));
}
// Call the Unity Rendering Plugin to initialize the RenderManager.
// Returns the plugin's result code, or -1 when the native plugin (or one of
// its dependencies) cannot be loaded.
public int CreateRenderManager(OSVR.ClientKit.ClientContext clientContext)
{
    try
    {
        return CreateRenderManagerFromUnity(clientContext.ContextHandle);
    }
    catch (DllNotFoundException e)
    {
        Debug.LogError("[OSVR-Unity] Could not load " + e.Message +
            "\nosvrUnityRenderingPlugin.dll, or one of its dependencies, is missing from the project " +
            "or architecture doesn't match.\n");
        return -1;
    }
}
/// <summary>
/// Orientation wrapper callback, interfacing Managed-OSVR's signatures and more Unity-native datatypes, including coordinate system conversion.
/// </summary>
/// <param name="userdata">Unused</param>
/// <param name="timestamp">Unused</param>
/// <param name="report">Tracker orientation report</param>
private void OrientationCb(System.IntPtr userdata, ref OSVR.ClientKit.TimeValue timestamp, ref OSVR.ClientKit.OrientationReport report)
{
    if (orientationCallbacks == null)
    {
        return;
    }
    orientationCallbacks(path, Math.ConvertOrientation(report.rotation));
}
// Analog interface state-change handler: logs the incoming analog value.
void Interface_StateChanged(object sender, OSVR.ClientKit.TimeValue timestamp, int sensor, double report)
{
    Debug.Log("[OSVR-Unity-Samples] Got analog value " + report);
}
/// <summary>
/// Blink causes the color to change. At most once per second.
/// </summary>
void blinkInterface_StateChanged(object sender, OSVR.ClientKit.TimeValue timestamp, int sensor, bool report)
{
    bool stateChanged = (report != lastBlinkReportState);
    bool cooldownElapsed = (timestamp.seconds - lastBlinkColorChangeTimestampSeconds > 1);
    if (stateChanged && cooldownElapsed)
    {
        ChangeColorNext();
        lastBlinkColorChangeTimestampSeconds = timestamp.seconds;
    }
    lastBlinkReportState = report;
}
// Native entry point into the osvrUnityRenderingPlugin library that creates
// the RenderManager from the given client context handle.
// NOTE(review): the [DllImport] attribute is presumably on a preceding line
// outside this view — confirm it is present, or this declaration won't compile.
private static extern Byte CreateRenderManagerFromUnity(OSVR.ClientKit.SafeClientContextHandle /*OSVR_ClientContext*/ ctx);
// Convert OSVR.ClientKit.Vec2 to Unity Vector2.
// Components are copied unchanged (no handedness adjustment in 2D).
public static Vector2 ConvertPosition(OSVR.ClientKit.Vec2 vec)
{
    float x = (float)vec.x;
    float y = (float)vec.y;
    return new Vector2(x, y);
}
// Convert an OSVR quaternion into Unity's convention by negating the x and y
// components. (Wikipedia may say quaternions are not handed, but these needed
// modification.)
public static Quaternion ConvertOrientation(OSVR.ClientKit.Quaternion quat)
{
    float x = -(float)quat.x;
    float y = -(float)quat.y;
    float z = (float)quat.z;
    float w = (float)quat.w;
    return new Quaternion(x, y, z, w);
}
// Convert OSVR.ClientKit.Vec3 to Unity Vector3, negating z to move into
// Unity's left-handed coordinate system.
public static Vector3 ConvertPosition(OSVR.ClientKit.Vec3 vec)
{
    float x = (float)vec.x;
    float y = (float)vec.y;
    float z = -(float)vec.z; // flip z for left-handedness
    return new Vector3(x, y, z);
}
/// <summary>
/// Analog wrapper callback, interfacing Managed-OSVR's signatures and more Unity-native datatypes.
/// </summary>
/// <param name="userdata">Unused</param>
/// <param name="timestamp">Unused</param>
/// <param name="report">Analog report</param>
private void AnalogCb(System.IntPtr userdata, ref OSVR.ClientKit.TimeValue timestamp, ref OSVR.ClientKit.AnalogReport report)
{
    if (analogCallbacks == null)
    {
        return;
    }
    analogCallbacks(path, (float)report.state);
}
/// <summary>
/// Button wrapper callback, interfacing Managed-OSVR's signatures and more Unity-native datatypes.
/// </summary>
/// <param name="userdata">Unused</param>
/// <param name="timestamp">Unused</param>
/// <param name="report">Button report</param>
private void ButtonCb(System.IntPtr userdata, ref OSVR.ClientKit.TimeValue timestamp, ref OSVR.ClientKit.ButtonReport report)
{
    if (null == buttonCallbacks)
    {
        return;
    }
    // A state of 1 means the button is pressed.
    buttonCallbacks(path, report.state == 1);
}
// Button event handler: logs the sensor index and the raw button state byte.
void handleButton(object sender, OSVR.ClientKit.TimeValue timestamp, int sensor, byte report)
{
    Debug.Log("Got button: " + sensor.ToString() + " state is " + report);
}
/* END GENERATED CODE - unity-generate.lua */
#endregion

/// These wrappers sadly have to be mostly hand-written, despite their similarity, since they convert data types
/// and also data conventions (into Unity's left-handed coordinate system)
#region Private wrapper callbacks/trampolines
/// <summary>
/// Pose (as position and orientation) wrapper callback, interfacing Managed-OSVR's signatures and more Unity-native datatypes, including coordinate system conversion.
/// </summary>
/// <param name="userdata">Unused</param>
/// <param name="timestamp">Unused</param>
/// <param name="report">Tracker pose report</param>
private void PoseCb(System.IntPtr userdata, ref OSVR.ClientKit.TimeValue timestamp, ref OSVR.ClientKit.PoseReport report)
{
    if (poseCallbacks == null)
    {
        return;
    }
    var pose = report.pose;
    poseCallbacks(path, Math.ConvertPosition(pose.translation), Math.ConvertOrientation(pose.rotation));
}
// Convert OSVR.ClientKit.Pose3 into a Unity Matrix4x4 via translation,
// rotation, and unit scale.
// Fix: the scale argument to Matrix4x4.TRS must be Vector3.one — the original
// passed Vector3.zero, which zeroes out the entire rotation/scale portion of
// the resulting matrix, leaving only the translation column meaningful.
public static Matrix4x4 ConvertPose(OSVR.ClientKit.Pose3 pose)
{
    return Matrix4x4.TRS(
        Math.ConvertPosition(pose.translation),
        Math.ConvertOrientation(pose.rotation),
        Vector3.one);
}
/// <summary>
/// Position wrapper callback, interfacing Managed-OSVR's signatures and more Unity-native datatypes, including coordinate system conversion.
/// </summary>
/// <param name="userdata">Unused</param>
/// <param name="timestamp">Unused</param>
/// <param name="report">Tracker position report</param>
private void PositionCb(System.IntPtr userdata, ref OSVR.ClientKit.TimeValue timestamp, ref OSVR.ClientKit.PositionReport report)
{
    if (positionCallbacks == null)
    {
        return;
    }
    positionCallbacks(path, Math.ConvertPosition(report.xyz));
}
// Call the Unity Rendering Plugin to initialize the RenderManager.
// Returns the plugin's result code, or -1 if the native plugin could not be
// loaded. Fix: the other CreateRenderManager implementation in this codebase
// guards against DllNotFoundException (thrown when the native
// osvrUnityRenderingPlugin.dll or a dependency is missing / wrong
// architecture); this one previously let that exception escape to the caller.
public int CreateRenderManager(OSVR.ClientKit.ClientContext clientContext)
{
    try
    {
        return CreateRenderManagerFromUnity(clientContext.ContextHandle);
    }
    catch (DllNotFoundException e)
    {
        Debug.LogError("[OSVR-Unity] Could not load " + e.Message +
            "\nosvrUnityRenderingPlugin.dll, or one of its dependencies, is missing from the project " +
            "or architecture doesn't match.\n");
        return -1;
    }
}
// Convert an OSVR.ClientKit.Viewport from RenderManager into a Unity Rect.
// NOTE(review): `viewport.Width / viewport.Width` is always 1 for any non-zero
// Width, and if the Viewport fields are integers (the sibling ConvertViewport
// casts them to float before dividing), `viewport.Left / viewport.Width` is
// integer division yielding only 0 or 1 — confirm whether float division and a
// half-screen (0.5) horizontal layout were intended here.
public static Rect ConvertViewportRenderManager(OSVR.ClientKit.Viewport viewport)
{
    //Unity expects normalized coordinates, not pixel coordinates
    //@todo below assumes left and right eyes split the screen in half horizontally
    return new Rect(viewport.Left / viewport.Width, viewport.Bottom / viewport.Height, viewport.Width / viewport.Width, 1);
}
// Updates the position and rotation of the head from an OSVR pose,
// converting into Unity's coordinate conventions.
public void UpdateViewerHeadPose(OSVR.ClientKit.Pose3 headPose)
{
    Vector3 unityPosition = Math.ConvertPosition(headPose.translation);
    Quaternion unityRotation = Math.ConvertOrientation(headPose.rotation);
    cachedTransform.localPosition = unityPosition;
    cachedTransform.localRotation = unityRotation;
}
// Updates the position and rotation of the eye from an OSVR pose,
// converting into Unity's coordinate conventions.
// Optionally, update the viewer associated with this eye.
public void UpdateEyePose(OSVR.ClientKit.Pose3 eyePose)
{
    Vector3 unityPosition = Math.ConvertPosition(eyePose.translation);
    Quaternion unityRotation = Math.ConvertOrientation(eyePose.rotation);
    cachedTransform.localPosition = unityPosition;
    cachedTransform.localRotation = unityRotation;
}