Example 1
    void Update()
    {
        if (RUISOVRManager.ovrHmd != null)
        {
            Ovr.Posef headpose = RUISOVRManager.ovrHmd.GetTrackingState().HeadPose.ThePose;
            float     px       = headpose.Position.x;
            float     py       = headpose.Position.y;
            float     pz       = -headpose.Position.z;   // Negated: OVR reports a right-handed frame, Unity is left-handed. TODO: might change with a future OVR version

            Vector3 tempSample = new Vector3(px, py, pz);

            tempSample = coordinateSystem.ConvertRawOculusDK2Location(tempSample);
            Vector3 convertedLocation = coordinateSystem.ConvertLocation(tempSample, RUISDevice.Oculus_DK2);
            this.transform.localPosition = convertedLocation;

            if (OVRManager.capiHmd != null)
            {
                try
                {
                    this.transform.localRotation = OVRManager.capiHmd.GetTrackingState().HeadPose.ThePose.Orientation.ToQuaternion();
                }
                catch (System.Exception e)
                {
                    Debug.LogError(e.Message);
                }
            }
        }
    }
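
The z negation above is a handedness conversion: the Oculus C API reports positions in a right-handed coordinate frame, while Unity uses a left-handed one. A minimal standalone sketch of the same conversion (the class and method names are illustrative, not part of RUIS or OvrCapi):

    using UnityEngine;

    public static class HandednessUtil
    {
        // Convert a right-handed (OVR-style) position into Unity's
        // left-handed convention by negating the z axis.
        public static Vector3 RightToLeftHanded(Vector3 p)
        {
            return new Vector3(p.x, p.y, -p.z);
        }
    }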
Example 2
    private Vector3 getSample(RUISDevice device)
    {
        Vector3 sample = new Vector3(0, 0, 0);
        Vector3 tempSample;

        if (device == RUISDevice.Kinect_1)
        {
            OpenNI.SkeletonJointPosition jointPosition;
            bool success = kinectSelection.GetPlayer(0).GetSkeletonJointPosition(OpenNI.SkeletonJoint.RightHand, out jointPosition);
            if (success && jointPosition.Confidence >= 0.5)
            {
                tempSample = coordinateSystem.ConvertRawKinectLocation(jointPosition.Position);
                if (Vector3.Distance(tempSample, lastKinectSample) > 0.1)
                {
                    sample                 = tempSample;
                    lastKinectSample       = sample;
                    this.guiTextUpperLocal = "";
                }
                else
                {
                    this.guiTextUpperLocal = "Not enough hand movement.";
                }
            }
        }
        if (device == RUISDevice.Oculus_DK2)
        {
            Ovr.Posef headpose = RUISOVRManager.ovrHmd.GetTrackingState().HeadPose.ThePose;
            float     px       = headpose.Position.x;
            float     py       = headpose.Position.y;
            float     pz       = -headpose.Position.z;   // Negated: OVR reports a right-handed frame, Unity is left-handed. TODO: might change with a future OVR version

            tempSample = new Vector3(px, py, pz);
            tempSample = coordinateSystem.ConvertRawOculusDK2Location(tempSample);
            if ((Vector3.Distance(tempSample, lastOculusDK2Sample) > 0.1) &&
                (RUISOVRManager.ovrHmd.GetTrackingState().StatusFlags & (uint)StatusBits.PositionTracked) != 0)                // Code from OVRManager.cs
            {
                sample = tempSample;
                lastOculusDK2Sample    = sample;
                this.guiTextUpperLocal = "";
            }
            else
            {
                this.guiTextUpperLocal = "Not enough hand movement.";
            }
        }

        return sample;
    }
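
Both branches above share the same gating idiom: a candidate sample counts only if it lies more than 0.1 m from the previously accepted sample, which keeps the calibration point set from filling up with near-duplicates. A device-agnostic sketch of that gate, with illustrative names:

    using UnityEngine;

    public class MovementGate
    {
        private Vector3 lastAccepted = Vector3.zero;
        private readonly float minDistance;

        public MovementGate(float minDistance)
        {
            this.minDistance = minDistance;
        }

        // Accepts the candidate only when it has moved far enough away
        // from the last accepted sample; mirrors the 0.1 m checks above.
        public bool TryAccept(Vector3 candidate, out Vector3 accepted)
        {
            if (Vector3.Distance(candidate, lastAccepted) > minDistance)
            {
                lastAccepted = candidate;
                accepted     = candidate;
                return true;
            }
            accepted = Vector3.zero;
            return false;
        }
    }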
Example 3
    private Vector3 getSample(RUISDevice device)
    {
        Vector3 sample = new Vector3(0, 0, 0);
        Vector3 tempSample;

        if (device == RUISDevice.Oculus_DK2)
        {
            Ovr.Posef headpose = RUISOVRManager.ovrHmd.GetTrackingState().HeadPose.ThePose;
            float     px       = headpose.Position.x;
            float     py       = headpose.Position.y;
            float     pz       = -headpose.Position.z;   // Negated: OVR reports a right-handed frame, Unity is left-handed. TODO: might change with a future OVR version

            tempSample = new Vector3(px, py, pz);
            tempSample = coordinateSystem.ConvertRawOculusDK2Location(tempSample);

            if ((Vector3.Distance(tempSample, lastOculusDK2Sample) > 0.1) &&
                (RUISOVRManager.ovrHmd.GetTrackingState().StatusFlags & (uint)StatusBits.PositionTracked) != 0)
            {
                sample = tempSample;
                lastOculusDK2Sample    = sample;
                this.guiTextUpperLocal = "";
            }
            else
            {
                this.guiTextUpperLocal = "Not enough hand movement.";
            }
        }
        if (device == RUISDevice.PS_Move)
        {
            if (psMoveWrapper.sphereVisible[calibratingPSMoveControllerId] &&
                psMoveWrapper.handleVelocity[calibratingPSMoveControllerId].magnitude <= 10.0f)
            {
                tempSample = coordinateSystem.ConvertRawPSMoveLocation(psMoveWrapper.handlePosition[calibratingPSMoveControllerId]);

                if (Vector3.Distance(tempSample, lastPSMoveSample) > 0.1)
                {
                    sample                 = tempSample;
                    lastPSMoveSample       = sample;
                    this.guiTextUpperLocal = "";
                }
                else
                {
                    this.guiTextUpperLocal = "Not enough hand movement.";
                }
            }
        }
        return sample;
    }
Example 4
		[DllImport(LibOVR)]   // assumed: the native-library name constant declared earlier in OvrCapi.cs
		private static extern void ovrHmd_GetEyeTimewarpMatrices(
				IntPtr hmd,
				Eye eye,
				Posef renderPose,
				[MarshalAs(UnmanagedType.LPArray, SizeConst = 2)]
				[Out] Matrix4f_Raw[] twnOut);
Example 5
		/// <summary>
		/// Computes the timewarp matrices used by the distortion mesh shader; these adjust
		/// for head-orientation changes since the last call to ovrHmd_GetEyePoses
		/// when rendering this eye. The ovrDistortionVertex::TimeWarpFactor is used to blend
		/// between the matrices, usually representing two different sides of the screen.
		/// Must be called on the same thread as ovrHmd_BeginFrameTiming.
		/// </summary>
		public Matrix4f[] GetEyeTimewarpMatrices(Eye eye, Posef renderPose)
		{
			Matrix4f_Raw[] rawMats = {new Matrix4f_Raw(), new Matrix4f_Raw()};
			ovrHmd_GetEyeTimewarpMatrices(HmdPtr, eye, renderPose, rawMats);

			Matrix4f[] mats = {new Matrix4f(rawMats[0]), new Matrix4f(rawMats[1])};
			return mats;
		}
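
A hedged usage sketch for the wrapper above, assuming hmd is the Hmd instance from OvrCapi.cs; per the summary, it must run on the same thread as ovrHmd_BeginFrameTiming:

    // 'renderPose' should be the pose actually used to render this eye,
    // e.g. the value returned from ovrHmd_GetEyePoses.
    Matrix4f[] FetchTimewarp(Hmd hmd, Eye eye, Posef renderPose)
    {
        Matrix4f[] twm = hmd.GetEyeTimewarpMatrices(eye, renderPose);
        // twm[0] and twm[1] are handed to the distortion mesh shader,
        // which blends them per vertex via ovrDistortionVertex::TimeWarpFactor.
        return twm;
    }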
Example 6
		/// <summary>
		/// Ends a frame, submitting the rendered textures to the frame buffer.
		/// - RenderViewport within each eyeTexture can change per frame if necessary.
		/// - 'renderPose' will typically be the value returned from ovrHmd_GetEyePoses or
		///   ovrHmd_GetHmdPosePerEye, but can be different if a different head pose was
		///   used for rendering.
		/// - This may perform distortion and scaling internally, assuming it is not
		///   delegated to another thread.
		/// - Must be called on the same thread as BeginFrame.
		/// - *** This function will call Present/SwapBuffers and potentially wait for GPU Sync ***.
		/// </summary>
		public void EndFrame(Posef[] renderPose, Texture[] eyeTexture)
		{
            Texture_Raw[] raw = new Texture_Raw[eyeTexture.Length];
            for (int i = 0; i < eyeTexture.Length; i++)
            {
                raw[i] = eyeTexture[i].ToRaw();
            }
            
            ovrHmd_EndFrame(HmdPtr, renderPose, raw);
		}
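
A sketch of the frame loop around EndFrame, assuming the wrapper also exposes BeginFrame for the underlying ovrHmd_BeginFrame; the pose and texture sources are placeholders for the caller's render path:

    void RenderOneFrame(Hmd hmd, uint frameIndex, Posef[] eyePoses, Texture[] eyeTextures)
    {
        hmd.BeginFrame(frameIndex);      // must pair with EndFrame on this thread

        // ... render both eyes into eyeTextures using eyePoses ...

        // Submits the textures; may distort/scale internally and will call
        // Present/SwapBuffers, potentially waiting for GPU sync (see summary).
        hmd.EndFrame(eyePoses, eyeTextures);
    }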
Example 7
    private Vector3 getSample(RUISDevice device)
    {
        Vector3 sample = new Vector3(0, 0, 0);
        Vector3 tempSample;

        updateBodyData();
        if (device == RUISDevice.Kinect_2)
        {
            Kinect.Body[] data             = kinect2SourceManager.GetBodyData();
            bool          trackedBodyFound = false;
            int           foundBodies      = 0;
            foreach (var body in data)
            {
                foundBodies++;
                if (body.IsTracked)
                {
                    if (trackingIDtoIndex[body.TrackingId] == 0)
                    {
                        trackedBodyFound = true;
                        if (body.Joints[Kinect.JointType.HandRight].TrackingState == Kinect.TrackingState.Tracked)
                        {
                            tempSample = new Vector3(body.Joints[Kinect.JointType.HandRight].Position.X,
                                                     body.Joints[Kinect.JointType.HandRight].Position.Y,
                                                     body.Joints[Kinect.JointType.HandRight].Position.Z);
                            tempSample = coordinateSystem.ConvertRawKinect2Location(tempSample);
                            if (Vector3.Distance(tempSample, lastKinect2Sample) > 0.1)
                            {
                                sample            = tempSample;
                                lastKinect2Sample = sample;
                                device1Error      = false;
                                if (!device2Error)
                                {
                                    this.guiTextUpperLocal = "";
                                }
                            }
                            else
                            {
                                device1Error           = true;
                                this.guiTextUpperLocal = "Not enough hand movement.";
                            }
                        }
                    }
                }
            }
            if (!trackedBodyFound && foundBodies > 1)
            {
                device1Error           = true;
                this.guiTextUpperLocal = "Step out of the Kinect's\nview and come back.";
            }
        }
        if (device == RUISDevice.Oculus_DK2)
        {
            Ovr.Posef headpose = RUISOVRManager.ovrHmd.GetTrackingState().HeadPose.ThePose;
            float     px       = headpose.Position.x;
            float     py       = headpose.Position.y;
            float     pz       = -headpose.Position.z;   // Negated: OVR reports a right-handed frame, Unity is left-handed. TODO: might change with a future OVR version

            tempSample = new Vector3(px, py, pz);
            tempSample = coordinateSystem.ConvertRawOculusDK2Location(tempSample);

            if ((Vector3.Distance(tempSample, lastOculusDK2Sample) > 0.1) &&
                (RUISOVRManager.ovrHmd.GetTrackingState().StatusFlags & (uint)StatusBits.PositionTracked) != 0)                // Code from OVRManager.cs
            {
                sample = tempSample;
                lastOculusDK2Sample = sample;
                device2Error        = false;
                if (!device1Error)
                {
                    this.guiTextUpperLocal = "";
                }
            }
            else
            {
                device2Error           = true;
                this.guiTextUpperLocal = "Not enough hand movement.";
            }
        }

        return sample;
    }
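
The device1Error/device2Error flags above implement a small shared-status idiom: each device clears the GUI message only when the other device is not also reporting a problem. A generic sketch of the same idea (all names illustrative):

    // Derives one shared status line from two per-device error flags.
    public class CalibrationStatus
    {
        private readonly bool[] deviceError = new bool[2];

        public string Message { get; private set; }

        public CalibrationStatus()
        {
            Message = "";
        }

        public void Report(int deviceIndex, bool error, string errorMessage)
        {
            deviceError[deviceIndex] = error;
            if (error)
                Message = errorMessage;                   // latest error wins
            else if (!deviceError[0] && !deviceError[1])
                Message = "";                             // clear only when both are healthy
        }
    }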
Example 8
File: OvrCapi.cs Project: uxl/rdgvr
 [DllImport(LibOVR)]   // assumed: the native-library name constant declared earlier in OvrCapi.cs
 private static extern void ovrHmd_GetEyeTimewarpMatricesDebug(
     IntPtr hmd,
     Eye eye,
     Posef renderPose,
     Quatf extraQuat,
     [MarshalAs(UnmanagedType.LPArray, SizeConst = 2)]
     [Out] Matrix4f_Raw[] twnOut,
     double debugTimingOffsetInSeconds);
Example 9
File: OvrCapi.cs Project: uxl/rdgvr
        public Matrix4f[] GetEyeTimewarpMatricesDebug(Eye eye, Posef renderPose, Quatf extraQuat, double debugTimingOffsetInSeconds)
        {
            Matrix4f_Raw[] rawMats = {new Matrix4f_Raw(), new Matrix4f_Raw()};
            ovrHmd_GetEyeTimewarpMatricesDebug(HmdPtr, eye, renderPose, extraQuat, rawMats, debugTimingOffsetInSeconds);

            Matrix4f[] mats = {new Matrix4f(rawMats[0]), new Matrix4f(rawMats[1])};
            return mats;
        }
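
And a hedged usage sketch for the debug variant, again assuming hmd is the Hmd wrapper instance; the identity extraQuat and the 10 ms offset are illustrative debug inputs:

    Matrix4f[] ProbeTimewarp(Hmd hmd, Posef renderPose)
    {
        // Identity quaternion: no extra orientation delta applied.
        Quatf identity = new Quatf { x = 0f, y = 0f, z = 0f, w = 1f };
        // Ask for the matrices as if the frame were presented 10 ms late.
        return hmd.GetEyeTimewarpMatricesDebug(Eye.Right, renderPose, identity, 0.010);
    }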