/// <summary>
/// Acquires the default Kinect sensor, initializes the joint-smoothing filter,
/// recorder and gesture detector, hooks the availability/frame events, and
/// starts the polling timer.
/// </summary>
/// <exception cref="Exception">Thrown when no Kinect sensor is available.</exception>
private void InitializeKinect()
{
    kinect = KinectSensor.GetDefault();
    filter = new KinectJointFilter();
    filter.Init();
    if (kinect == null)
    {
        throw new Exception("Kinectを開けません");
    }
    // Update the selected-device information.
    Constants.deviceSelect = Constants.SET_KINECT;
    record = new KinectRecorder(main);
    gesture = new KinectGesture(main);
    // Register the sensor plug/unplug (availability) event.
    kinect.IsAvailableChanged += Kinect_IsAvailableChanged;
    handler = (s, e) => { ColorFrameReader_FrameArrived(s, e); };
    // Prepare the frame readers (populates colorFrameDesc used below).
    PrepareFrame();
    // NOTE(review): assumes main.ImageColor.Height is non-zero here; if both
    // operands are integers this truncates — confirm the intended ratio type.
    Constants.kinectImageRate = colorFrameDesc.Height / main.ImageColor.Height;
    kinectTimer.Start();
}
/// <summary>
/// Resets the tracking state: no human is tracked, the positional offset is
/// cleared, and a freshly initialized joint-smoothing filter is installed.
/// </summary>
private void Start()
{
    IsTrackingHuman = false;
    Offset = Vector3.zero;

    var smoothing = new KinectJointFilter();
    smoothing.Init();
    positionSmoothingfilter = smoothing;
}
/// <summary>
/// Unity initialization: prepares the body lookup table, locates the body
/// source manager, loads joint/bone materials, and initializes the joint
/// smoothing filter.
/// </summary>
void Start()
{
    _Bodies = new Dictionary<ulong, Body>();
    _BodyManager = FindObjectOfType<BodySourceManager>();

    // Materials live under Resources/Materials; Load returns null (via the
    // 'as' cast) if the asset is missing.
    jointMaterial = Resources.Load("Materials/JointMaterial") as Material;
    boneMaterial = Resources.Load("Materials/BoneMaterial") as Material;

    filter = new KinectJointFilter();
    filter.Init();
}
/// <summary>
/// Wires up the Kinect-driven cursor: maps the gesture rectangle onto the
/// primary screen, subscribes to body-tracking events, and arms the hover
/// timer and joint filter.
/// </summary>
public KinectCursor()
{
    var screenRect = new MRect(
        0,
        0,
        SystemParameters.PrimaryScreenWidth,
        SystemParameters.PrimaryScreenHeight);
    cursorMapper = new CursorMapper(gestureRect, screenRect, CursorMapper.ScaleAlignment.LongerRange);

    sensorReader = new KinectReader(false);
    sensorReader.OnTrackedBody += Kinect_OnTrackedBody;
    sensorReader.OnLostTracking += Kinect_OnLostTracking;

    // A tick fires once the cursor has hovered for HoverDuration seconds.
    hoverTimer.Interval = TimeSpan.FromSeconds(HoverDuration);
    hoverTimer.Tick += HoverTimer_Tick;

    kinectJointFilter = new KinectJointFilter();
    kinectJointFilter.Reset(fJitterRadius: 0.03f, fMaxDeviationRadius: 0.05f);
}
/// <summary>
/// Starts (or resumes) delivery of body-frame data from the attached sensor.
/// On first use it creates the joint filter and opens a body-frame reader;
/// on subsequent calls it simply un-pauses the existing reader.
/// </summary>
public void ListenBoneData()
{
    if (sensor == null)
    {
        listener.OnLogMessageReceived("No sensor attached!");
        return;
    }

    if (bodyFrameReader == null)
    {
        // First call: build the filter and subscribe to body frames.
        jointFilter = new KinectJointFilter();
        bodyFrameReader = sensor.BodyFrameSource.OpenReader();
        bodyFrameReader.FrameArrived += BodyFrameReaderOnFrameArrived;
    }
    else
    {
        bodyFrameReader.IsPaused = false;
    }

    listener.OnLogMessageReceived("Started listening to body frames");
}
/// <summary>
/// Creates and initializes the joint-smoothing filter before any Update runs.
/// </summary>
void Awake()
{
    var jointFilter = new KinectJointFilter();
    // Hand-tuned filter constants — presumably smoothing / correction /
    // prediction / jitter radius / max deviation radius (TODO confirm against
    // KinectJointFilter.Init's parameter list).
    jointFilter.Init(0.55f, 0.25f, 2.0f, 0.30f, 1.25f);
    m_jointFilter = jointFilter;
}
/// <summary>
/// Service start-up: opens the default Kinect sensor, wires the multi-source
/// and audio frame readers, allocates per-frame buffers, and opens one
/// network connector (listening socket) per data stream.
/// </summary>
/// <param name="args">Service start arguments (unused).</param>
/// <exception cref="KinectException">
/// Thrown when no sensor is detected (ExitCode -1), the multi-source reader
/// cannot be opened (ExitCode -2), or the audio source is unavailable
/// (ExitCode -3).
/// </exception>
protected override void OnStart(string[] args)
{
    // Try to open the first available Kinect sensor.
    this.kinect = KinectSensor.GetDefault();
    if (this.kinect == null)
    {
        EventLog.WriteEntry("No Kinect device was detected.");
        ExitCode = -1;
        throw new KinectException("No kinect device was detected.");
    }
    else
    {
        this.filter = new KinectJointFilter();
        this.filter.Init(0.5f, 0.5f, 0.5f, 0.05f, 0.04f); // change params if you want
        this.kinect.Open();
        this.kinect.IsAvailableChanged += this.OnAvailableChanged;
    }

    // Register as a handler for the image data being returned by the Kinect.
    this.reader = this.kinect.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared | FrameSourceTypes.Body);
    this.audioSource = this.kinect.AudioSource;
    if (this.reader == null)
    {
        EventLog.WriteEntry("Unable to connect to Kinect data stream.");
        ExitCode = -2;
        throw new KinectException("Unable to connect to Kinect data stream.");
    }
    else
    {
        this.reader.MultiSourceFrameArrived += this.OnFrameArrived;
    }

    if (this.audioSource == null)
    {
        EventLog.WriteEntry("Unable to open audio source on kinect");
        ExitCode = -3;
        throw new KinectException("Unable to connect to kinect audio source");
    }
    else
    {
        this.audioReader = this.audioSource.OpenReader();
        if (this.audioReader == null)
        {
            // FIX: a Windows service has no console, so this failure was
            // silently lost via Console.WriteLine. Report it through the
            // event log like every other failure path in this method.
            EventLog.WriteEntry("Issues with audio reader");
        }
        else
        {
            this.audioReader.FrameArrived += this.onAudioFrameArrived;
        }
    }

    // Allocate storage for the data from the Kinect.
    // (Frame descriptions hoisted into locals to avoid repeated lookups.)
    var colorDesc = this.kinect.ColorFrameSource.FrameDescription;
    var depthDesc = this.kinect.DepthFrameSource.FrameDescription;
    var irDesc = this.kinect.InfraredFrameSource.FrameDescription;

    this.colorArray = new byte[colorDesc.Height * colorDesc.Width * BYTES_PER_COLOR_PIXEL];
    this.depthArray = new ushort[depthDesc.Height * depthDesc.Width];
    this.irArray = new ushort[irDesc.Height * irDesc.Width];
    // The byte* arrays carry a trailing double (timestamp) after the pixels.
    this.byteColorArray = new byte[(colorDesc.Height * colorDesc.Width * BYTES_PER_COLOR_PIXEL) + sizeof(double)];
    this.byteDepthArray = new byte[depthDesc.Height * depthDesc.Width * BYTES_PER_DEPTH_PIXEL + sizeof(double)];
    this.byteIRArray = new byte[irDesc.Height * irDesc.Width * BYTES_PER_IR_PIXEL + sizeof(double)];
    this.bodyArray = new Body[this.kinect.BodyFrameSource.BodyCount];

    // Audio: 16 kHz sampling with 16 ms frames -> 256 samples per frame.
    this.audioContainer = new AudioContainer();
    this.audioContainer.samplingFrequency = 16000;
    this.audioContainer.frameLifeTime = 0.016;
    this.audioContainer.numSamplesPerFrame = (int)(this.audioContainer.samplingFrequency * this.audioContainer.frameLifeTime);
    this.audioContainer.numBytesPerSample = sizeof(float);
    this.audioContainer.audioStream = new float[256];

    // Create network connectors that will send out the data when it is received.
    this.colorConnector = new AsyncNetworkConnector(Properties.Settings.Default.RgbImagePort);
    this.depthConnector = new AsyncNetworkConnector(Properties.Settings.Default.DepthImagePort);
    this.irConnector = new AsyncNetworkConnector(Properties.Settings.Default.IrImagePort);
    this.bodyConnector = new AsyncNetworkConnector(Properties.Settings.Default.BodyPort);
    this.audioConnector = new AsyncNetworkConnector(Properties.Settings.Default.AudioPort);

    // Open the server connections.
    this.colorConnector.Listen();
    this.depthConnector.Listen();
    this.irConnector.Listen();
    this.bodyConnector.Listen();
    this.audioConnector.Listen();
}
/// <summary>
/// Updates one tracked body's joint transforms from smoothed (filtered) Kinect
/// joint data: runs the joint filter for this frame, derives per-joint
/// rotations, synthesizes rotations for joints Kinect does not orient, then
/// positions/orients each joint transform and records its tracking state.
/// </summary>
/// <param name="body">Raw Kinect body for this frame.</param>
/// <param name="bodyFilter">Per-body smoothing filter; updated in place.</param>
/// <param name="bodyObject">Root GameObject for this body (unused here — joints
/// are resolved via jointTransforms instead; see commented lookup below).</param>
/// <param name="id">Tracking id used to look up this body's joint transforms.</param>
//update body from filtered data
private void RefreshBodyObject(Kinect.Body body, KinectJointFilter bodyFilter, GameObject bodyObject, ulong id)
{
    // Smooth this frame's joints and fetch the filtered camera-space positions.
    bodyFilter.UpdateFilter(body);
    filteredJoints = bodyFilter.GetFilteredJoints();
    // NOTE(review): Clear() is redundant — orientJoints is immediately replaced
    // by the collection returned from CalculateJointRotations.
    orientJoints.Clear();
    orientJoints = CalculateJointRotations(body.JointOrientations);

    // Walk every Kinect joint type (SpineBase .. ThumbRight).
    for (Kinect.JointType jt = Kinect.JointType.SpineBase; jt <= Kinect.JointType.ThumbRight; jt++)
    {
        filteredJointPos = GetVector3FromCameraSpacePoint(filteredJoints[(int)jt]);
        sourceJoint = body.Joints[jt];
        targetJoint = null;
        filteredTargetJointPos = null;
        // _BoneMap maps a joint to the joint its bone connects to ("target").
        if (_BoneMap.ContainsKey(jt))
        {
            targetJoint = body.Joints[_BoneMap[jt]];
            filteredTargetJointPos = GetVector3FromCameraSpacePoint(filteredJoints[(int)_BoneMap[jt]]);
        }
        //jointObj = bodyObject.transform.Find(jt.ToString());
        jointObj = jointTransforms[id][jt];

        //calculate orientations of end joints that are not captured by the kinect
        // (a zero quaternion from CalculateJointRotations means Kinect supplied
        // no orientation for this joint, so one is synthesized here).
        if (zeroQuaternion.Equals(orientJoints[jt]) && filteredTargetJointPos.HasValue)
        {
            // Vector from the mapped (target) joint toward this joint.
            Vector3 direction = filteredJointPos - filteredTargetJointPos.Value;
            if (jt == Kinect.JointType.AnkleLeft || jt == Kinect.JointType.AnkleRight) //the ankle rotations have to be pointed at the foot to match the mesh
            {
                if (jt == Kinect.JointType.AnkleLeft)
                {
                    direction = GetVector3FromCameraSpacePoint(filteredJoints[(int)Kinect.JointType.FootLeft]) - filteredJointPos;
                }
                else
                {
                    direction = GetVector3FromCameraSpacePoint(filteredJoints[(int)Kinect.JointType.FootRight]) - filteredJointPos;
                }
                // Build an orthogonal frame from the ankle->foot direction.
                Vector3 perpendicular = Vector3.Cross(direction, Vector3.up);
                Vector3 normal = Vector3.Cross(direction, perpendicular);
                // Guard against degenerate (zero-length) vectors before LookRotation.
                if (normal.sqrMagnitude != 0 && direction.sqrMagnitude != 0)
                {
                    orientJoints[jt] = Quaternion.LookRotation(normal, direction); //normal, direction
                }
                else
                {
                    orientJoints[jt] = Quaternion.identity;
                }
            }
            else if (jt == Kinect.JointType.ThumbLeft || jt == Kinect.JointType.ThumbRight) //the thumbs are along their parents forward vector so they are calculated
            {
                // NOTE: cross-product order is swapped relative to the ankle
                // branch (perpendicular x direction vs direction x perpendicular),
                // which mirrors the resulting forward axis.
                Vector3 perpendicular = Vector3.Cross(direction, Vector3.up);
                Vector3 normal = Vector3.Cross(perpendicular, direction);
                if (normal.sqrMagnitude != 0 && direction.sqrMagnitude != 0)
                {
                    orientJoints[jt] = Quaternion.LookRotation(normal, direction);
                }
                else
                {
                    orientJoints[jt] = Quaternion.identity;
                }
            }
            else if (jt == Kinect.JointType.Neck) //rotate the neck from side to side while keeping the Z axis pointing forwards
            {
                // Keep the parent's forward axis; point the up axis at the child joint.
                Vector3 forward = orientJoints[_BoneMap[jt]] * Vector3.forward;
                Vector3 childFilteredJointPos = GetVector3FromCameraSpacePoint(filteredJoints[(int)_JointChildMap[jt]]);
                Vector3 y = childFilteredJointPos - filteredJointPos;
                orientJoints[jt] = Quaternion.LookRotation(forward, y);
            }
            else //by default set the up axis to point away from the joint and forward axis towards the parent's forward axis
            {
                Vector3 forward = orientJoints[_BoneMap[jt]] * Vector3.forward;
                // calculate a rotation, Y forward for Kinect
                if (direction.sqrMagnitude != 0)
                {
                    orientJoints[jt] = Quaternion.LookRotation(forward, direction);
                }
                else
                {
                    orientJoints[jt] = Quaternion.identity;
                }
            }
        }
        else //if joints are not computed in above, then point their Y value at the next joint while keeping their current Z value
        {
            if (_JointChildMap.ContainsKey(jt) && rotateToNextJoint)
            {
                Vector3 childFilteredJointPos = GetVector3FromCameraSpacePoint(filteredJoints[(int)_JointChildMap[jt]]);
                Vector3 y = childFilteredJointPos - filteredJointPos;
                orientJoints[jt] = Quaternion.LookRotation(orientJoints[jt] * Vector3.forward, y);
            }
        }

        //check tracking state of joint to make sure it is tracked, turn off rendered objects if not.
        if (debugJoints)
        {
            switch (sourceJoint.TrackingState)
            {
                case Kinect.TrackingState.NotTracked:
                    // NotTracked joints keep their previous position/rotation;
                    // only visibility and the tracked flag are updated.
                    if (jointObj.GetChild(0).gameObject.activeSelf)
                    {
                        jointObj.GetChild(0).gameObject.SetActive(useUntrackedJoints);
                    }
                    JointTracked[body.TrackingId][jt.ToString()] = useUntrackedJoints;
                    break;
                case Kinect.TrackingState.Inferred:
                    if (jointObj.GetChild(0).gameObject.activeSelf)
                    {
                        jointObj.GetChild(0).gameObject.SetActive(useInferredJoints);
                    }
                    jointObj.localPosition = filteredJointPos;
                    jointObj.localRotation = orientJoints[jt];
                    JointTracked[body.TrackingId][jt.ToString()] = useInferredJoints;
                    break;
                case Kinect.TrackingState.Tracked:
                    if (!jointObj.GetChild(0).gameObject.activeSelf)
                    {
                        jointObj.GetChild(0).gameObject.SetActive(true);
                    }
                    jointObj.localPosition = filteredJointPos;
                    jointObj.localRotation = orientJoints[jt];
                    JointTracked[body.TrackingId][jt.ToString()] = true;
                    break;
                default:
                    break;
            }
            // Debug bone line from this joint to its bone-map target, expressed
            // in the joint's local space.
            // NOTE(review): assumes every joint object has a LineRenderer —
            // GetComponent returning null would throw below; verify the prefab.
            LineRenderer lr = jointObj.GetComponent<LineRenderer>();
            if (targetJoint.HasValue)
            {
                lr.useWorldSpace = false;
                lr.SetPosition(0, Vector3.zero);
                lr.SetPosition(1, Quaternion.Inverse(jointObj.localRotation) * (filteredTargetJointPos.Value - filteredJointPos));
                lr.startColor = GetColorForState(sourceJoint.TrackingState);
                lr.endColor = GetColorForState(targetJoint.Value.TrackingState);
            }
            else
            {
                // NOTE(review): the renderer is disabled here but never re-enabled
                // in this method — confirm it is re-enabled elsewhere if needed.
                lr.enabled = false;
            }
        }
        else
        {
            // Non-debug path: only record the tracked flag per state...
            switch (sourceJoint.TrackingState)
            {
                case Kinect.TrackingState.NotTracked:
                    JointTracked[body.TrackingId][jt.ToString()] = useUntrackedJoints;
                    break;
                case Kinect.TrackingState.Inferred:
                    JointTracked[body.TrackingId][jt.ToString()] = useInferredJoints;
                    break;
                case Kinect.TrackingState.Tracked:
                    JointTracked[body.TrackingId][jt.ToString()] = true;
                    break;
                default:
                    break;
            }
            // ...then apply the pose unconditionally (unlike the debug path,
            // which skips the pose for NotTracked joints).
            jointObj.localPosition = filteredJointPos;
            jointObj.localRotation = orientJoints[jt];
        }
        /*if(jointObj.localRotation == zeroQuaternion)
         * {
         *  Vector3 perpendicular = Vector3.Cross(jointObj.localPosition, Vector3.up);
         *  Vector3 normal = Vector3.Cross(perpendicular, jointObj.localPosition);
         *
         *  // calculate a rotation
         *  jointObj.rotation.SetLookRotation(normal, jointObj.localPosition);
         * }*/
    }
}