コード例 #1
0
 public SkeletonFrame(int index, Body[] bodies, Floor floor, TimeSpan timeSpan)
 {
     // Capture the frame metadata, then wrap each Kinect body in a Skeleton.
     this.relativeTime = timeSpan;
     this.floor = floor;
     this.index = index;
     this.skeletons = Array.ConvertAll(bodies, body => new Skeleton(body));
 }
コード例 #2
0
 public static Windows.Kinect.Vector4  ToMirror(this  Windows.Kinect.Vector4 vector)
 {
     // Produce the mirrored vector: Y and Z are negated, X and W pass through.
     Windows.Kinect.Vector4 mirrored = new Windows.Kinect.Vector4();
     mirrored.X = vector.X;
     mirrored.Y = -vector.Y;
     mirrored.Z = -vector.Z;
     mirrored.W = vector.W;
     return mirrored;
 }
コード例 #3
0
 /// <summary>
 /// Rebuilds a frame from serialized parts: a frame index, the skeleton array,
 /// a 4-element floor clip plane (x, y, z, w) and a timestamp in ticks.
 /// </summary>
 /// <exception cref="ArgumentNullException">floor is null.</exception>
 /// <exception cref="ArgumentException">floor has fewer than 4 elements.</exception>
 public SkeletonFrame(int index, Skeleton[] skeletons, float[] floor, long timestamp)
 {
     // Validate up front: the original indexed floor[0..3] unguarded, which
     // surfaced as an opaque NullReferenceException / IndexOutOfRangeException.
     if (floor == null)
     {
         throw new ArgumentNullException("floor");
     }
     if (floor.Length < 4)
     {
         throw new ArgumentException("floor must contain at least 4 elements (x, y, z, w).", "floor");
     }

     this.skeletons = skeletons;
     this.index = index;
     this.relativeTime = new TimeSpan(timestamp);

     Floor v = new Floor();
     v.X = floor[0];
     v.Y = floor[1];
     v.Z = floor[2];
     v.W = floor[3];
     this.floor = v;
 }
コード例 #4
0
ファイル: RUISInputManager.cs プロジェクト: mauromina/Ensayo
    /// <summary>
    /// Polls the Kinect v2 sensor for its floor clip plane and pushes the derived
    /// floor normal and floor distance into the RUIS coordinate system. Does
    /// nothing when the sensor (or the coordinate system) is not accessible.
    /// </summary>
    private void updateKinect2FloorData()
    {
        Kinect2SourceManager kinect2SourceManager = FindObjectOfType(typeof(Kinect2SourceManager)) as Kinect2SourceManager;

        bool canAccessKinect2 = false;

        try
        {
            // Sensor access can throw while the device is connecting or
            // disconnecting, so the availability probe is wrapped and any
            // failure is treated as "not accessible" (deliberate best-effort,
            // hence the intentionally empty catch below).
            if (coordinateSystem != null && kinect2SourceManager != null &&
                kinect2SourceManager.GetSensor() != null && kinect2SourceManager.GetSensor().IsOpen&& kinect2SourceManager.GetSensor().IsAvailable)
            {
                canAccessKinect2 = true;
            }
        }
        catch {}

        if (canAccessKinect2)
        {
            // Clear any previously accumulated floor data before writing the
            // fresh values (only when Kinect 2 is the root device).
            if (coordinateSystem.rootDevice == RUISDevice.Kinect_2)
            {
                coordinateSystem.ResetFloorNormal(RUISDevice.Kinect_2);
                coordinateSystem.ResetDistanceFromFloor(RUISDevice.Kinect_2);
            }


            Windows.Kinect.Vector4 kinect2FloorPlane = kinect2SourceManager.GetFlootClipPlane();

            //print (kinect2FloorPlane.X + " " + kinect2FloorPlane.Y + " " + kinect2FloorPlane.Z  + " " +  kinect2FloorPlane.W);

            // Floor plane is (nx, ny, nz, w); the xyz part is the floor normal.
            Vector3 kinect2FloorNormal = new Vector3(kinect2FloorPlane.X, kinect2FloorPlane.Y, kinect2FloorPlane.Z);
            kinect2FloorNormal.Normalize();

            // NOTE(review): sqrMagnitude is read AFTER Normalize(), so the
            // divisor is 1 for any valid plane and 0 for a degenerate one
            // (yielding Infinity, which the IsNaN check below does NOT catch).
            // The pre-normalization magnitude was probably intended — confirm
            // before changing.
            float kinect2DistanceFromFloor = kinect2FloorPlane.W / Mathf.Sqrt(kinect2FloorNormal.sqrMagnitude);

            // Fall back to sane defaults when the sensor reports a degenerate plane.
            if (float.IsNaN(kinect2DistanceFromFloor))
            {
                kinect2DistanceFromFloor = 0;
            }
            if (float.IsNaN(kinect2FloorNormal.x) || kinect2FloorNormal.sqrMagnitude < 0.1f)
            {
                kinect2FloorNormal = Vector3.up;
            }

            if (coordinateSystem.rootDevice == RUISDevice.Kinect_2)
            {
                coordinateSystem.SetFloorNormal(kinect2FloorNormal, RUISDevice.Kinect_2);
                coordinateSystem.SetDistanceFromFloor(kinect2DistanceFromFloor, RUISDevice.Kinect_2);
            }
            Debug.Log("Updated Kinect 2 floor normal " + kinect2FloorNormal + " and floor distance (" + kinect2DistanceFromFloor + ")");
        }
    }
コード例 #5
0
    /// <summary>
    /// Pulls the newest multi-source frame and refreshes the body, depth and
    /// body-index buffers from it. Sets isNewFrame so consumers can tell
    /// whether this tick produced fresh data.
    /// </summary>
    void Update () 
    {
        if (_Reader == null)
        {
            return;
        }

        var frame = _Reader.AcquireLatestFrame();
        if (frame == null)
        {
            isNewFrame = false;
            return;
        }

        isNewFrame = true;

        // `using` guarantees each sub-frame is released even if a copy throws;
        // the original disposed only on the happy path, and an undisposed
        // Kinect frame blocks delivery of subsequent frames.

        // Update body data (also carries the floor clip plane).
        using (var bodyFrame = frame.BodyFrameReference.AcquireFrame())
        {
            if (bodyFrame != null)
            {
                floorClipPlane = bodyFrame.FloorClipPlane;
                if (_BodyData == null)
                {
                    // Lazily size the buffer from the sensor's body capacity.
                    _BodyData = new Body[_Sensor.BodyFrameSource.BodyCount];
                }
                bodyFrame.GetAndRefreshBodyData(_BodyData);
            }
        }

        // Update depth data.
        using (var depthFrame = frame.DepthFrameReference.AcquireFrame())
        {
            if (depthFrame != null)
            {
                if (_DepthData == null)
                {
                    _DepthData = new ushort[_Sensor.DepthFrameSource.FrameDescription.LengthInPixels];
                }
                depthFrame.CopyFrameDataToArray(_DepthData);
            }
        }

        // Update body index data.
        using (var bodyIndexFrame = frame.BodyIndexFrameReference.AcquireFrame())
        {
            if (bodyIndexFrame != null)
            {
                if (_BodyIndexData == null)
                {
                    _BodyIndexData = new byte[_Sensor.BodyIndexFrameSource.FrameDescription.LengthInPixels];
                }
                bodyIndexFrame.CopyFrameDataToArray(_BodyIndexData);
            }
        }
    }
コード例 #6
0
    /// <summary>
    /// Acquires the latest body frame, lazily allocates the body buffer,
    /// refreshes it, and captures the floor clip plane for this frame.
    /// </summary>
    void Update()
    {
        if (_Reader == null)
        {
            return;
        }

        // `using` disposes the frame even if GetAndRefreshBodyData throws;
        // the original disposed only on the happy path, which can stall the
        // sensor pipeline (an open frame blocks delivery of the next one).
        using (var frame = _Reader.AcquireLatestFrame())
        {
            if (frame == null)
            {
                return;
            }

            if (_Data == null)
            {
                // Size the buffer from the sensor's maximum tracked-body count.
                _Data = new Body[_Sensor.BodyFrameSource.BodyCount];
            }

            frame.GetAndRefreshBodyData(_Data);

            // Capture the floor clip plane reported with this frame.
            FloorClipPlane = frame.FloorClipPlane;
        }
    }
コード例 #7
0
 public Windows.Kinect.Vector4 ToKinectOrientation()
 {
     // Copy this object's orientation quaternion into a Kinect Vector4.
     return new Windows.Kinect.Vector4
     {
         X = orientation.x,
         Y = orientation.y,
         Z = orientation.z,
         W = orientation.w
     };
 }
コード例 #8
0
ファイル: Vector4.cs プロジェクト: Bebebop/Us
 public bool Equals(Vector4 obj)
 {
     // Component-wise equality via Equals (not ==) — for floating-point
     // components this presumably makes NaN compare equal to NaN; confirm
     // the component type if that matters to callers.
     bool sameX = X.Equals(obj.X);
     bool sameY = Y.Equals(obj.Y);
     bool sameZ = Z.Equals(obj.Z);
     bool sameW = W.Equals(obj.W);
     return sameX && sameY && sameZ && sameW;
 }
コード例 #9
0
 public static UnityEngine.Quaternion ToUnityQuaternion(this Windows.Kinect.Vector4 vector)
 {
     // Reinterpret the Kinect orientation vector (X, Y, Z, W) as a Unity quaternion.
     UnityEngine.Quaternion result = new UnityEngine.Quaternion();
     result.x = vector.X;
     result.y = vector.Y;
     result.z = vector.Z;
     result.w = vector.W;
     return result;
 }
コード例 #10
0
 UnityEngine.Vector4 GetVirtualAngle(Windows.Kinect.Vector4 a)
 {
     // Component-wise conversion from the Kinect vector type to Unity's.
     return new UnityEngine.Vector4(a.X, a.Y, a.Z, a.W);
 }
コード例 #11
0
    /// <summary>
    /// Applies one frame of Kinect body data to the mapped joint transforms,
    /// optionally drawing the joint-connection lines, and re-anchoring the OVR
    /// rig on the head joint when this avatar is currently active.
    /// </summary>
    /// <param name="body">Tracked Kinect body supplying joint positions and orientations.</param>
    public void UpdateBodyData(Kinect.Body body)
    {
        LastBodyData = body;

        // The Kinect SDK allocates a NEW dictionary on every access to
        // .Joints / .JointOrientations, so fetch each exactly once per frame.
        var jointUpdates      = body.Joints;
        var jointOrientations = body.JointOrientations;

        for (Kinect.JointType jt = Kinect.JointType.SpineBase; jt <= Kinect.JointType.ThumbRight; jt++)
        {
            Kinect.Joint   sourceJoint       = jointUpdates[jt];
            Kinect.Vector4 sourceOrientation = jointOrientations[jt].Orientation;
            Kinect.Joint?  targetJoint       = null;

            // Single TryGetValue instead of ContainsKey + indexer (one dictionary
            // lookup instead of two per joint per frame).
            Kinect.JointType connectedType;
            if (JointConnections.TryGetValue(jt, out connectedType))
            {
                targetJoint = jointUpdates[connectedType];
            }

            Transform  jointObj    = JointMapping[jt];
            Vector3    mewPosition = Unmirror(sourceJoint.Position.GetVector3()) * BodyScale;
            Quaternion mewRotation = sourceOrientation.GetQuaternion();
            if (FirstDataSet)
            {
                // First frame: snap directly to the measured pose.
                jointObj.localPosition = mewPosition;
                jointObj.localRotation = mewRotation;
            }
            else
            {
                // Subsequent frames: frame-rate-scaled smoothing toward the new pose.
                jointObj.localPosition = Vector3.Lerp(jointObj.localPosition, mewPosition, Time.deltaTime * PositionSmoothing);
                jointObj.localRotation = Quaternion.Lerp(jointObj.localRotation, mewRotation, Time.deltaTime * RotationSmoothing);
            }

            if (ShowJointConnections)
            {
                LineRenderer lr = JointLineMapping[jt];
                if (targetJoint.HasValue)
                {
                    Transform targetJointObj = JointMapping[targetJoint.Value.JointType];
                    lr.SetPosition(0, jointObj.position);
                    lr.SetPosition(1, targetJointObj.position);
                    lr.SetColors(GetColorForState(sourceJoint.TrackingState), GetColorForState(targetJoint.Value.TrackingState));
                }
                else
                {
                    // No connection defined for this joint: hide its line.
                    lr.enabled = false;
                }
            }
        }

        if (FirstDataSet)
        {
            FirstDataSet = false;

            Initialize();
        }

        if (CurrentlyActiveAvatar)
        {
            // Keep the VR camera rig anchored to the avatar's head joint.
            OVRManager.instance.transform.position = JointMapping[Kinect.JointType.Head].position;
        }
    }
コード例 #12
0
 public SkeletonJoint(JointType type, int confidence, CameraSpacePoint position, Orientation orientation)
 {
     // Build a joint record from raw Kinect data, converting the position and
     // orientation into Unity Vector3 / Quaternion form. State starts at 0.
     Type = type;
     Confidence = confidence;
     State = 0;
     Position = new Vector3(position.X, position.Y, position.Z);
     Rotation = new Quaternion(orientation.X, orientation.Y, orientation.Z, orientation.W);
 }
コード例 #13
0
    /// <summary>
    /// One smoothing step for a single joint of a single body, using a
    /// double-exponential (Holt) filter: jitter-filter the raw position and
    /// orientation, smooth them against the stored history, predict ahead to
    /// reduce latency, clamp the prediction against the raw data, then write
    /// the result into m_pFilteredJoints / m_pFilteredOrientations and update
    /// m_pHistory for the next frame.
    /// </summary>
    /// <param name="joints">This frame's joints, keyed by joint type.</param>
    /// <param name="orients">This frame's joint orientations, keyed by joint type.</param>
    /// <param name="JointID">Index of the joint to filter (cast to Kinect.JointType for lookups).</param>
    /// <param name="type">Joint type written to the output joint.</param>
    /// <param name="state">Tracking state written to the output joint.</param>
    /// <param name="smoothingParams">Smoothing / correction / prediction / jitter-radius / max-deviation tuning.</param>
    /// <param name="body_id">Row into the per-body history table; defaults to body 0.</param>
    public void JointUpdate(Dictionary <Kinect.JointType, Kinect.Joint> joints, Dictionary <Kinect.JointType, Kinect.JointOrientation> orients, int JointID, Kinect.JointType type, Kinect.TrackingState state, TRANSFORM_SMOOTH_PARAMETERS smoothingParams, int body_id = 0)
    {
        //printf("body: %d\n", body_id);
        // Working variables for the position half of the filter.
        Vector3 vPrevRawPosition;
        Vector3 vPrevFilteredPosition;
        Vector3 vPrevTrend;
        Vector3 vRawPosition;
        Vector3 vFilteredPosition;
        Vector3 vPredictedPosition;
        Vector3 vDiff;
        Vector3 vTrend;
        float   vLength;
        float   fDiff;
        bool    bJointIsValid;

        // Working variables for the orientation half of the filter.
        Quaternion filteredOrientation;
        Quaternion trend;
        Quaternion rawOrientation = new Quaternion(orients[(Kinect.JointType)JointID].Orientation.X, orients[(Kinect.JointType)JointID].Orientation.Y, orients[(Kinect.JointType)JointID].Orientation.Z, orients[(Kinect.JointType)JointID].Orientation.W);

        // Previous frame's filtered orientation and orientation trend.
        Quaternion prevFilteredOrientation = m_pHistory[body_id, JointID].m_vFilteredOrientation;
        Quaternion prevTrend = m_pHistory[body_id, JointID].m_vTrendOrientation;

        Kinect.Joint            joint  = joints[(Kinect.JointType)JointID];
        Kinect.JointOrientation orient = orients[(Kinect.JointType)JointID];

        // NOTE(review): a Vector4 is assigned to a Vector3 here — presumably
        // relying on Unity's implicit Vector4->Vector3 conversion that drops w;
        // confirm against the UnityEngine version in use.
        vRawPosition          = new Vector4(joint.Position.X, joint.Position.Y, joint.Position.Z, 0.0f);
        vPrevFilteredPosition = m_pHistory[body_id, JointID].m_vFilteredPosition;
        vPrevTrend            = m_pHistory[body_id, JointID].m_vTrend;
        vPrevRawPosition      = m_pHistory[body_id, JointID].m_vRawPosition;
        bJointIsValid         = JointPositionIsValid(vRawPosition);

        // If joint is invalid, reset the filter: reuse the last filtered
        // orientation and restart the frame counter so the branches below
        // treat this as a fresh start.
        if (!bJointIsValid)
        {
            rawOrientation = m_pHistory[body_id, JointID].m_vFilteredOrientation;
            m_pHistory[body_id, JointID].m_dwFrameCount = 0;
        }

        // Initial start values: frame 0 simply adopts the raw data with a
        // zero/identity trend.
        if (m_pHistory[body_id, JointID].m_dwFrameCount == 0)
        {
            vFilteredPosition = vRawPosition;
            vTrend            = Vector3.zero;
            m_pHistory[body_id, JointID].m_dwFrameCount++;

            filteredOrientation = rawOrientation;
            trend = new Quaternion(0.0f, 0.0f, 0.0f, 1.0f);
        }
        else if (m_pHistory[body_id, JointID].m_dwFrameCount == 1)
        {
            // Frame 1: average the first two raw samples and seed the trend
            // from the resulting change, blended by the correction factor.
            vFilteredPosition = (vRawPosition + vPrevRawPosition) * 0.5f;
            vDiff             = vFilteredPosition - vPrevFilteredPosition;
            vTrend            = (vDiff * smoothingParams.fCorrection) + vPrevTrend * (1.0f - smoothingParams.fCorrection);
            m_pHistory[body_id, JointID].m_dwFrameCount++;

            Quaternion prevRawOrientation = m_pHistory[body_id, JointID].m_vRawOrientation;
            filteredOrientation = EnhansedQuaternionSlerp(prevRawOrientation, rawOrientation, 0.5f);

            Quaternion diffStarted = RotationBetweenQuaternions(filteredOrientation, prevFilteredOrientation);
            trend = EnhansedQuaternionSlerp(prevTrend, diffStarted, smoothingParams.fCorrection);
        }
        else
        {
            // First apply jitter filter: small deviations from the previous
            // filtered value are blended proportionally; large ones pass through.
            vDiff   = vRawPosition - vPrevFilteredPosition;
            vLength = Mathf.Sqrt(vDiff.sqrMagnitude);
            fDiff   = Mathf.Abs(vLength);

            if (fDiff <= smoothingParams.fJitterRadius)
            {
                vFilteredPosition = vRawPosition * (fDiff / smoothingParams.fJitterRadius) + vPrevFilteredPosition * (1.0f - fDiff / smoothingParams.fJitterRadius);
            }
            else
            {
                vFilteredPosition = vRawPosition;
            }

            // Same jitter gate for orientation, measured as rotation angle.
            Quaternion diffJitter    = RotationBetweenQuaternions(rawOrientation, prevFilteredOrientation);
            float      diffValJitter = Mathf.Abs(QuaternionAngle(diffJitter));
            if (diffValJitter <= smoothingParams.fJitterRadius)
            {
                filteredOrientation = EnhansedQuaternionSlerp(prevFilteredOrientation, rawOrientation, diffValJitter / smoothingParams.fJitterRadius);
            }
            else
            {
                filteredOrientation = rawOrientation;
            }

            // Now the double exponential smoothing filter
            vFilteredPosition = vFilteredPosition * (1.0f - smoothingParams.fSmoothing) + (vPrevFilteredPosition + vPrevTrend) * smoothingParams.fSmoothing;


            vDiff  = vFilteredPosition - vPrevFilteredPosition;
            vTrend = ((vDiff * smoothingParams.fCorrection) + (vPrevTrend * (1.0f - smoothingParams.fCorrection)));

            // Now the double exponential smoothing filter
            filteredOrientation = EnhansedQuaternionSlerp(filteredOrientation, (prevFilteredOrientation * prevTrend), smoothingParams.fSmoothing);

            diffJitter = RotationBetweenQuaternions(filteredOrientation, prevFilteredOrientation);
            trend      = EnhansedQuaternionSlerp(prevTrend, diffJitter, smoothingParams.fCorrection);
        }

        // Predict into the future to reduce latency
        vPredictedPosition = (vFilteredPosition + (vTrend * smoothingParams.fPrediction));
        Quaternion predictedOrientation = (filteredOrientation * (EnhansedQuaternionSlerp(new Quaternion(0.0f, 0.0f, 0.0f, 1.0f), trend, smoothingParams.fPrediction)));

        // Check that we are not too far away from raw data
        vDiff   = (vRawPosition - vPrevFilteredPosition);
        vLength = Mathf.Sqrt(vDiff.sqrMagnitude);
        fDiff   = Mathf.Abs(vLength);

        Quaternion diff    = RotationBetweenQuaternions(predictedOrientation, filteredOrientation);
        float      diffVal = Mathf.Abs(QuaternionAngle(diff));

        // Clamp the prediction back toward the raw data when it deviates too far.
        if (fDiff > smoothingParams.fMaxDeviationRadius)
        {
            vPredictedPosition = ((vPredictedPosition * (smoothingParams.fMaxDeviationRadius / fDiff)) + (vRawPosition * (1.0f - smoothingParams.fMaxDeviationRadius / fDiff)));
        }
        if (diffVal > smoothingParams.fMaxDeviationRadius)
        {
            predictedOrientation = EnhansedQuaternionSlerp(filteredOrientation, predictedOrientation, smoothingParams.fMaxDeviationRadius / diffVal);
        }
        // Re-normalize so accumulated slerp/multiply error does not drift the
        // quaternions away from unit length.
        predictedOrientation = QuaternionNormalise(predictedOrientation);
        filteredOrientation  = QuaternionNormalise(filteredOrientation);
        trend = QuaternionNormalise(trend);

        // Save the data from this frame
        m_pHistory[body_id, JointID].m_vRawPosition      = vRawPosition;
        m_pHistory[body_id, JointID].m_vFilteredPosition = vFilteredPosition;
        m_pHistory[body_id, JointID].m_vTrend            = vTrend;

        // NOTE(review): if Quaternion here is UnityEngine's struct, these
        // `== null` comparisons are lifted and always false, so this condition
        // is always true — confirm the intent (likely a guard ported from a
        // reference-type implementation).
        if (!(rawOrientation == null) && !(filteredOrientation == null) && !(trend == null))
        {
            m_pHistory[body_id, JointID].m_vRawOrientation      = rawOrientation;
            m_pHistory[body_id, JointID].m_vFilteredOrientation = filteredOrientation;
            m_pHistory[body_id, JointID].m_vTrendOrientation    = trend;
        }

        // Output the data
        //vPredictedPosition = new Vector4(vPredictedPosition , 1.0f);
        Kinect.Joint            j = new Windows.Kinect.Joint();
        Kinect.CameraSpacePoint CameraSpacePoint = new Windows.Kinect.CameraSpacePoint();

        CameraSpacePoint.X = vPredictedPosition.x;
        CameraSpacePoint.Y = vPredictedPosition.y;
        CameraSpacePoint.Z = vPredictedPosition.z;

        j.Position      = CameraSpacePoint;
        j.JointType     = type;
        j.TrackingState = state;
        m_pFilteredJoints[(Kinect.JointType)JointID] = j;


        // HipCenter has no parent and is the root of our skeleton - leave the HipCenter absolute set as it is
        if (type != Kinect.JointType.SpineBase)
        {
            Kinect.JointOrientation jo = new Windows.Kinect.JointOrientation();

            Kinect.Vector4 v4 = new Kinect.Vector4();
            v4.X = predictedOrientation.x;
            v4.Y = predictedOrientation.y;
            v4.Z = predictedOrientation.z;
            v4.W = predictedOrientation.w;

            jo.Orientation = v4;

            jo.JointType = type;
            m_pFilteredOrientations[(Kinect.JointType)JointID] = jo;
        }
        else
        {
            // Root joint: pass the raw orientation through unfiltered.
            m_pFilteredOrientations[(Kinect.JointType)JointID] = orients[(Kinect.JointType)JointID];
        }
        //m_pFilteredJoints[JointID]
    }
コード例 #14
0
ファイル: BodySourceView.cs プロジェクト: Logomorph/GhettoVR
    private Vector3 EulerAnglesFromKinect(Kinect.Vector4 quaternation)
    {
        // Wrap the Kinect orientation in a Unity quaternion and read its Euler angles.
        return new Quaternion(quaternation.X, quaternation.Y, quaternation.Z, quaternation.W).eulerAngles;
    }
コード例 #15
0
 public static Quaternion  ToQuaternion(this  Windows.Kinect.Vector4 vactor, Quaternion comp)
 {
     // Negate X and Y (coordinate-system flip), then express the result
     // relative to the compensation rotation `comp`.
     Quaternion flipped = new Quaternion(-vactor.X, -vactor.Y, vactor.Z, vactor.W);
     return Quaternion.Inverse(comp) * flipped;
 }
コード例 #16
0
 public static UnityEngine.Vector4 ToUnityVector4(this Windows.Kinect.Vector4 vector)
 {
     // Component-wise copy from the Kinect Vector4 type into Unity's.
     UnityEngine.Vector4 result = new UnityEngine.Vector4();
     result.x = vector.X;
     result.y = vector.Y;
     result.z = vector.Z;
     result.w = vector.W;
     return result;
 }
コード例 #17
0
 public static Quaternion ToQuaternion(this Vector4 vector4, bool mirror)
 {
     // When mirroring, flip the rotation by negating the Y and Z components.
     float sign = mirror ? -1f : 1f;
     return new Quaternion(vector4.X, sign * vector4.Y, sign * vector4.Z, vector4.W);
 }