/// <summary>
/// Timer tick handler: processes the next Kinect frame, shows the selected
/// image type in the preview box, and demonstrates the pose / hand-tracking
/// API on the first tracked user.
/// </summary>
private void Timer_Tick(object sender, EventArgs e)
{
    // Bail out until the PoseRecognizer has been properly initialized.
    if (!initialized)
    {
        return;
    }

    // Whether the next frame from the Kinect was successfully processed.
    bool nextFrame = false;
    try
    {
        // Process the next frame from the Kinect. This does everything:
        // detects people, extracts skeletons/joints, recognizes the pose, etc.
        nextFrame = poseRecognizer.processNextFrame();
    }
    catch
    {
        MessageBox.Show("next frame exception");
        timer.Stop();
        return;
    }

    if (nextFrame)
    {
        Bitmap frame = null;
        try
        {
            // Extract the currently selected image type from the Kinect.
            switch (toggleType)
            {
                case ImageType.Skeleton:
                    // "Index" image with the user's outline and overlaid skeleton.
                    frame = poseRecognizer.getSkeletonImage();
                    break;
                case ImageType.Rgb:
                    // Normal 1080p RGB frame.
                    frame = poseRecognizer.getRgbFrame();
                    break;
                case ImageType.Depth:
                    // Depth map.
                    frame = poseRecognizer.getDepthMap();
                    break;
                default:
                    break;
            }
        }
        catch
        {
            MessageBox.Show("get frame exception");
        }

        bool userPresent = false;
        try
        {
            // Is there at least one user in the Kinect's point of view?
            userPresent = poseRecognizer.userPresent();
        }
        catch
        {
            MessageBox.Show("user present exception");
        }
        label2.Text = "User present: " + userPresent.ToString();

        if (userPresent)
        {
            // List of currently seen users: 64-bit IDs generated by the
            // Kinect API, unique for every tracked user. Use these IDs to
            // check how many users are present and to query per-user
            // properties (pose, hands, etc.).
            UInt64[] list = poseRecognizer.getUserList();
            try
            {
                // Recognize the pose (out of all available ones) for THE FIRST seen user.
                label5AllPoses.Text = poseRecognizer.getPoseName();
                // Per-user variant: string poseName = poseRecognizer.getPoseName(list[0]);

                if (comboBox1.Text != "")
                {
                    string poseName = comboBox1.Text;
                    // expectPose(poseName) returns the similarity score for the
                    // pose in [0..1], for THE FIRST AVAILABLE user.
                    // (poseName[0] replaces the non-idiomatic ElementAt<char>(0).)
                    label3.Text = poseName[0] + ": " + poseRecognizer.expectPose(poseName);
                    // Per-user variant: double score = poseRecognizer.expectPose(list[0], "Flute");

                    // Demo of the per-user hand-tracking properties:
                    // hand state, movement flag, speed, direction, direction change.
                    Direction rightHandMovementDirection = poseRecognizer.getHandDirection(list[0], Hand.RIGHT);
                    if (rightHandMovementDirection == Direction.RIGHT)
                    {
                        // Hand is moving right.
                    }

                    double rightHandMovementSpeed = poseRecognizer.getHandMovementSpeed(list[0], Hand.RIGHT);
                    if (rightHandMovementSpeed > 0.85)
                    {
                        // The hand is moving very fast!
                        // (Speed presumably in m/sec — TODO confirm against the API.)
                    }

                    bool isRightHandMoving = poseRecognizer.isHandMoving(list[0], Hand.RIGHT);
                    if (!isRightHandMoving)
                    {
                        // Right hand stopped moving.
                    }

                    bool rightHandChangedMovementDirection = poseRecognizer.handChangedDirection(list[0], Hand.RIGHT);
                    if (rightHandChangedMovementDirection)
                    {
                        // Hand changed direction! Do something: play a different
                        // sound, show some effect!
                    }

                    HandState rightHandState = poseRecognizer.getHandState(list[0], Hand.RIGHT);
                    if (rightHandState == HandState.Closed)
                    {
                        // Right hand is clenched into a fist!
                    }
                }

                // Overlay the extracted joints when showing the skeleton image.
                if (toggleType == ImageType.Skeleton)
                {
                    drawSkeleton(ref frame);
                }
            }
            catch (NullReferenceException)
            {
                // Exception variable removed — it was unused (compiler warning CS0168).
                MessageBox.Show("get pose exception");
            }
        }

        // Bitmap derives from Image, so the redundant "as Image" cast is dropped;
        // frame may legitimately be null here (get-frame failure), which simply
        // clears the picture box.
        pictureBox1.Image = frame;
    }

    timer.Start();
}
/// <summary>
/// Timer tick handler: processes the next Kinect frame, refreshes the tracked
/// user list, triggers per-user music playback, and updates the preview image.
/// </summary>
private void Timer_Tick(object sender, EventArgs e)
{
    // Bail out until the PoseRecognizer has been properly initialized.
    if (!initialized)
    {
        return;
    }

    // Whether the next frame from the Kinect was successfully processed.
    bool nextFrame = false;
    try
    {
        nextFrame = poseRecognizer.processNextFrame();
    }
    catch
    {
        MessageBox.Show("next frame exception");
        timer.Stop();
        return;
    }

    if (nextFrame)
    {
        Bitmap frame = null;
        try
        {
            // Extract the currently selected image type from the Kinect.
            // (Commented-out leftovers from earlier revisions removed.)
            switch (toggleType)
            {
                case ImageType.Skeleton:
                    frame = poseRecognizer.getSkeletonImage();
                    break;
                case ImageType.Rgb:
                    frame = poseRecognizer.getRgbFrame();
                    break;
                case ImageType.Depth:
                    frame = poseRecognizer.getDepthMap();
                    break;
                default:
                    break;
            }
        }
        catch
        {
            MessageBox.Show("get frame exception");
        }

        bool userPresent = false;
        try
        {
            userPresent = poseRecognizer.userPresent();
        }
        catch
        {
            MessageBox.Show("user present exception");
        }
        // NOTE(review): this shows the user count from the PREVIOUS tick,
        // because updateUsers() only runs further down — move this assignment
        // after updateUsers(list) if the label should reflect the current frame.
        label2.Text = "Users present: " + userList.Count;

        if (userPresent)
        {
            // List of currently seen users: 64-bit IDs generated by the
            // Kinect API, unique for every tracked user.
            UInt64[] list = poseRecognizer.getUserList();
            updateUsers(list);
            foreach (UInt64 user in list)
            {
                playMusic(user);
            }

            try
            {
                // Recognize the current pose out of all available poses.
                label5AllPoses.Text = poseRecognizer.getPoseName();

                // Recognize one specified pose and show its similarity score in [0..1].
                if (comboBox1.Text != "")
                {
                    string poseName = comboBox1.Text;
                    // (poseName[0] replaces the non-idiomatic ElementAt<char>(0).)
                    label3.Text = poseName[0] + ": " + poseRecognizer.expectPose(poseName);
                }

                // Overlay the extracted joints when showing the skeleton image.
                if (toggleType == ImageType.Skeleton)
                {
                    drawSkeleton(ref frame);
                }
            }
            catch (NullReferenceException)
            {
                // Exception variable removed — it was unused (compiler warning CS0168).
                MessageBox.Show("get pose exception");
            }
        }

        // Bitmap derives from Image, so the redundant "as Image" cast is dropped.
        pictureBox1.Image = frame;
    }

    timer.Start();
}