// Audio-demo hook: walks every joint in the current 2D skeleton frame.
// Joint order is fixed: 0=hand left, 1=wrist left, 2=elbow left,
// 3=elbow right, 4=wrist right, 5=hand right.
// NOTE(review): all per-joint branches are currently empty or commented
// out, so this method is a no-op scaffold for future per-joint audio control.
void audioDemoTracking(Skeleton2DdataCoordEventArgs e)
{
    Point[] points = e._points;

    // Fix: use the array's Length property instead of LINQ Count<Point>(),
    // which allocates an enumerator just to count a plain array.
    for (int i = 0; i < points.Length; i++)
    {
        // Normalize the raw joint position before any audio mapping.
        Point tmp = normalize_skeleton_point(points[i]);

        if (i == 0) // hand left
        {
        }
        else if (i == 1) // wrist left
        {
        }
        else if (i == 2) // elbow left
        {
        }
        else if (i == 3) // elbow right
        {
        }
        else if (i == 4) // wrist right
        {
        }
        else if (i == 5) // hand right
        {
            if (isPlaying)
            {
                //Shift up by about half for nicer looking demo
                //mp.Volume = Math.Abs(tmp.Y+0.5);
                //mp.Balance = tmp.X;
            }
        }
    }
}
// Event handler: transmits the current 2D skeleton frame as an OSC bundle.
void Skeleton2DFrameReady_Do_Packet_Send(object sender, Skeleton2DdataCoordEventArgs e)
{
    // Only transmit once an endpoint has been configured.
    if (transmitter_ipendpoint != null)
    {
        // Package the frame into an OSC bundle and send it to the endpoint.
        OscBundle bundle = skeleton2DdataCoordEventArgsToOSCBundle(e);
        bundle.Send(transmitter_ipendpoint);
        //osc_transmitter.Send(bundle);

        // Count outgoing bundles (used as a packet sequence number elsewhere).
        sent_bundles++;
    }
}
//Return an OSC bundle for skeleton index provided
/*
OscBundle skeletonToOSCBundle(Skeleton s, int index)
{
    Skeleton2DdataCoordEventArgs e = Skeleton2DDataExtract.skeletonToSkeleton2DdataCoordEventArgs(s, index);
    return skeleton2DdataCoordEventArgsToOSCBundle(e);
}
*/

// Package one 2D skeleton frame as an OSC bundle tagged SKELETON_METHOD.
// Message layout (order matters to the receiving C++ code):
//   [0]     sent_bundles - packet sequence number
//   [1]     e.index      - skeleton index
//   [2..]   normalized (x, y) float pair for each joint, in joint order
//   [last]  kinect_id    - identifies which Kinect produced this frame
// If you append more values you must change numSkeletonOSCArgs in the
// C++ code to match (the original comment counted 19 appended items).
OscBundle skeleton2DdataCoordEventArgsToOSCBundle(Skeleton2DdataCoordEventArgs e)
{
    // Bundles are collections of messages; a bundle must carry at least
    // one message, labeled with its method name.
    OscBundle bundle = new OscBundle(transmitter_ipendpoint);
    OscMessage message = new OscMessage(transmitter_ipendpoint, SKELETON_METHOD);

    // Packet sequence number first.
    message.Append(sent_bundles);
    //Debug.WriteLine("Sent bundle: " + sent_bundles);

    // Then the skeleton index.
    message.Append(e.index);

    // Fix: _points is a plain array, so read Length directly instead of
    // enumerating it with LINQ's Count().
    int num_points = e._points.Length;
    for (int i = 0; i < num_points; i++)
    {
        Point normalized_point = normalize_skeleton_point(e._points[i]);
        message.Append((float)normalized_point.X);
        message.Append((float)normalized_point.Y);
    }

    message.Append(kinect_id); //#19

    bundle.Append(message);
    return bundle;
}
// Ask the DTW recognizer for the name of the gesture currently held in
// this skeleton's frame buffer. Returns UNKNOWN_GESTURE_NAME when the
// buffer is too short or a gesture recording is in progress.
string recognizeGesture(Skeleton2DdataCoordEventArgs e)
{
    bool haveEnoughFrames = skeleton_buffers[e.index].Count > MIN_NUM_FRAMES;

    // Bail out early unless we have enough frames and are not recording.
    if (!haveEnoughFrames || recording_gesture)
    {
        return UNKNOWN_GESTURE_NAME;
    }

    return DTWRecognizers[e.index].Recognize(skeleton_buffers[e.index]);
}
// Event handler that drives the DTW gesture-recognition pipeline for a frame.
void Skeleton2DFrameReady_Do_DTW_Process(object sender, Skeleton2DdataCoordEventArgs e)
{
    // The incoming args carry shoulder points at indices 6 and 7; the
    // pipeline only wants the first six joints, so clone the event args
    // and swap in a truncated copy of the point array.
    Skeleton2DdataCoordEventArgs trimmed = new Skeleton2DdataCoordEventArgs(e._points, e.index, e.center_point, e.distance_normalizer);
    Point[] firstSixJoints = new Point[6];
    for (int j = 0; j < 6; j++)
    {
        firstSixJoints[j] = e._points[j];
    }
    trimmed._points = firstSixJoints;

    // Feed the trimmed frame to the audio demo.
    audioDemoTracking(trimmed);

    // Buffer the frame, then attempt recognition on the buffered window.
    updateSkeletonBuffersPreRecognize(trimmed);
    string gesture = recognizeGesture(trimmed);

    if (gesture.Contains(UNKNOWN_GESTURE_NAME))
    {
        // Not recognized - nothing to display.
        // (Buffer reset could happen here if needed.)
    }
    else
    {
        // Recognized: surface the gesture name in the UI.
        largeStatusText.Text = gesture;
    }

    // Slide / commit the buffer window after recognition.
    updateSkeletonBuffersPostRecognize(trimmed);
}
// After recognition: keep the per-skeleton frame buffer within its window.
void updateSkeletonBuffersPostRecognize(Skeleton2DdataCoordEventArgs e)
{
    // Nothing to do until the buffer exceeds the window length.
    if (skeleton_buffers[e.index].Count <= SKELETON_BUFFER_LENGTH)
    {
        return;
    }

    if (recording_gesture)
    {
        // Capturing hit the maximum buffer size: automatically commit the
        // recorded frames into the recognizer (that call resolves which
        // skeleton, gesture name, etc.).
        addGestureToRecognizer();
    }
    else
    {
        // Slide the window by dropping the oldest frame.
        skeleton_buffers[e.index].RemoveAt(0);
    }
}
// Before recognition: append the current frame to this skeleton's buffer,
// down-sampled and filtered for invalid (NaN) joint data.
void updateSkeletonBuffersPreRecognize(Skeleton2DdataCoordEventArgs e)
{
    // Skeleton points sometimes come in as NaN - skip those frames.
    if (double.IsNaN(e.GetPoint(0).X))
    {
        return;
    }

    // Down-sample: only keep one out of every IGNORE_FRAMES frames.
    IGNORE_COUNTERS[e.index] = (IGNORE_COUNTERS[e.index] + 1) % IGNORE_FRAMES;
    if (IGNORE_COUNTERS[e.index] == 0)
    {
        // Store this frame's coordinates in the matching skeleton buffer.
        skeleton_buffers[e.index].Add(e.GetCoords());
    }
}
// Package one 2D skeleton frame as an OSC bundle tagged DEMO_METHOD.
// Message layout: [0] e.index, then a normalized (x, y) float pair per
// joint in joint order. Keep this in sync with the receiver.
OscBundle skeleton2DdataCoordEventArgsToOSCBundle(Skeleton2DdataCoordEventArgs e)
{
    // Bundles are collections of messages; a bundle must carry at least
    // one message, labeled with its method name.
    OscBundle bundle = new OscBundle(transmitter_ipendpoint);
    OscMessage message = new OscMessage(transmitter_ipendpoint, DEMO_METHOD);

    // First send the skeleton index.
    message.Append(e.index);

    // Fix: _points is a plain array, so read Length directly instead of
    // enumerating it with LINQ's Count().
    int num_points = e._points.Length;
    for (int i = 0; i < num_points; i++)
    {
        Point normalized_point = normalize_skeleton_point(e._points[i]);
        message.Append((float)normalized_point.X);
        message.Append((float)normalized_point.Y);
    }

    bundle.Append(message);
    return bundle;
}
// Event handler that drives the DTW gesture-recognition pipeline for a frame.
void Skeleton2DFrameReady_Do_DTW_Process(object sender, Skeleton2DdataCoordEventArgs e)
{
    // Feed the 2D points to the audio demo first.
    audioDemoTracking(e);

    // Buffer the frame, then attempt recognition on the buffered window.
    updateSkeletonBuffersPreRecognize(e);
    string gesture = recognizeGesture(e);

    if (gesture.Contains(UNKNOWN_GESTURE_NAME))
    {
        // Not recognized - nothing to display.
        // (Buffer reset could happen here if needed.)
    }
    else
    {
        // Recognized: surface the gesture name in the UI.
        largeStatusText.Text = gesture;
    }

    // Slide / commit the buffer window after recognition.
    updateSkeletonBuffersPostRecognize(e);
}
// Audio-demo hook: converts each joint back to pixel coordinates and maps
// the right hand's position onto playback volume while audio is playing.
// Joint order is fixed: 0=hand left, 1=wrist left, 2=elbow left,
// 3=elbow right, 4=wrist right, 5=hand right.
void audioDemoTracking(Skeleton2DdataCoordEventArgs e)
{
    Point[] points = e._points;

    // Fix: use the array's Length property instead of LINQ Count<Point>(),
    // which allocates an enumerator just to count a plain array.
    for (int i = 0; i < points.Length; i++)
    {
        // Undo the normalization: back to pixel coordinates relative to
        // the skeleton's center point and distance normalizer.
        Point tmp = new Point((points[i].X * e.distance_normalizer) + e.center_point.X,
                              (points[i].Y * e.distance_normalizer) + e.center_point.Y);

        //Convert to screen percentages
        //Debug.WriteLine(tmp.Y);
        //tmp.X =tmp.X / 640.0;
        //tmp.Y = tmp.Y / 480.0;
        //Debug.WriteLine(tmp.X);

        if (i == 0) //handleft
        {
        }
        else if (i == 1) //wrist left
        {
        }
        else if (i == 2) //elbow left
        {
        }
        else if (i == 3) //elbowright
        {
        }
        else if (i == 4) //wristright
        {
        }
        else if (i == 5) //handright
        {
            if (isPlaying)
            {
                // NOTE(review): tmp.Y is in pixels at this point (the
                // normalization was undone above), but if mp is a WPF
                // MediaPlayer its Volume expects a 0.0-1.0 linear value -
                // larger inputs just pin the volume at maximum. The
                // commented-out "/ 480.0" screen-percentage scaling above
                // looks like the intended conversion; confirm before
                // enabling it.
                mp.Volume = Math.Abs(tmp.Y);
            }
        }
    }
}