// Initialization:
void Start()
{
    HUDText = GameObject.Find("HUDText").GetComponent<Text>();
    me = GCHandle.Alloc(this);
    // Create a first gesture to record samples for.
    string random_word = getRandomWord();
    recording_gesture = gr.createGesture(random_word);
    HUDText.text = "\n\n[TOUCH AND HOLD HERE]\nto record gesture sample.\nGesture keyword:\n" + random_word;
    Input.gyro.enabled = true;
}
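Start() above calls a getRandomWord() helper that is not shown in this section. A minimal sketch of such a helper, assuming it only needs to produce a keyword for the HUD prompt; the word list and the use of UnityEngine.Random are assumptions, not part of the plugin API:

// Hypothetical helper: returns a random keyword to label the next recorded gesture.
// The word list is a placeholder; the real project may draw keywords from any source.
private static readonly string[] gesture_words = { "circle", "swipe", "shake", "triangle", "wave" };

private string getRandomWord()
{
    return gesture_words[UnityEngine.Random.Range(0, gesture_words.Length)];
}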
public override void init(ref GestureRecognition gr)
{
    this.completed = false;
    Sample_Pixie.HUDText.text = "Let's try it again!\nInvent a new gesture and do it 20 times.\n(0/20)";
    int record_gesture_id = gr.createGesture("Peekaboo"); // record_gesture_id should be gestureid_peekaboo
    if (record_gesture_id != gestureid_peekaboo) {
        Sample_Pixie.HUDText.text = "[ERROR: FAILED TO CREATE NEW GESTURE]";
    }
    this.recorded_samples = 0;
}
public override void init(ref GestureRecognition gr)
{
    this.completed = false;
    Sample_Pixie.HUDText.text = "Now teach your pixie something new!\nInvent a new gesture and do it 20 times.\n(0/20)";
    this.recorded_samples = 0;
    int record_gesture_id = gr.createGesture("Flip"); // record_gesture_id should be gestureid_flip
    if (record_gesture_id != gestureid_flip) {
        Sample_Pixie.HUDText.text = "[ERROR: FAILED TO CREATE NEW GESTURE]";
    }
}
// Update:
void Update()
{
    float escape = Input.GetAxis("escape");
    if (escape > 0.0f) {
        Application.Quit();
    }
    float trigger_left = Input.GetAxis("LeftControllerTrigger");
    float trigger_right = Input.GetAxis("RightControllerTrigger");

    // If recording_gesture is -3, that means that the AI has recently finished learning a new gesture.
    if (recording_gesture == -3) {
        // Show "finished" message.
        double performance = gr.recognitionScore();
        HUDText.text = "Training finished!\n(Final recognition performance = " + (performance * 100.0) + "%)\nFeel free to use your new gesture.";
        // Set recording_gesture to -1 to indicate normal operation (learning finished).
        recording_gesture = -1;
    }

    // If recording_gesture is -2, that means that the AI is currently learning a new gesture.
    if (recording_gesture == -2) {
        // Show "please wait" message.
        HUDText.text = "...training...\n(Current recognition performance = " + (last_performance_report * 100.0) + "%)\nPress the 'A'/'X'/Menu button to cancel training.";
        // In this mode, the user may press the "A/X/menu" button to cancel the learning process.
        bool button_a_left = Input.GetButton("LeftControllerButtonA");
        bool button_a_right = Input.GetButton("RightControllerButtonA");
        /*
        if (button_a_left || button_a_right) {
            // Button pressed: stop the learning process.
            gr.stopTraining();
            recording_gesture = -3;
        }
        */
        return;
    }

    // Else: if we arrive here, we're not in training/learning mode,
    // so the user can draw gestures.

    // If recording_gesture is -1, we're currently not recording a new gesture.
    if (recording_gesture == -1) {
        bool button_a_left = Input.GetButton("LeftControllerButtonA");
        bool button_a_right = Input.GetButton("RightControllerButtonA");
        // In this mode, the user can press button A/X/menu to create a new gesture.
        if (button_a_left || button_a_right) {
            recording_gesture = gr.createGesture("custom gesture " + (gr.numberOfGestures() - 3));
            // From now on: recording a new gesture.
            HUDText.text = "Learning a new gesture (custom gesture " + (recording_gesture - 4) + "):\nPlease perform the gesture 25 times.\n(0 / 25)";
        }
    }

    // If the user is not yet dragging (pressing the trigger) on either controller, he hasn't started a gesture yet.
    if (active_controller == null) {
        // If the user presses either controller's trigger, we start a new gesture.
        if (trigger_right > 0.8) {
            // Right controller trigger pressed.
            active_controller = GameObject.Find("Right Hand");
        } else if (trigger_left > 0.8) {
            // Left controller trigger pressed.
            active_controller = GameObject.Find("Left Hand");
        } else {
            // If we arrive here, the user is pressing neither controller's trigger:
            // nothing to do.
            return;
        }
        // If we arrive here: either trigger was pressed, so we start the gesture.
        GameObject hmd = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
        Vector3 hmd_p = hmd.transform.localPosition;
        Quaternion hmd_q = hmd.transform.localRotation;
        gr.startStroke(hmd_p, hmd_q, recording_gesture);
    }

    // If we arrive here, the user is currently dragging with one of the controllers.
    // Check if the user is still dragging or if he let go of the trigger button.
    if (trigger_left > 0.3 || trigger_right > 0.3) {
        // The user is still dragging with the controller: continue the gesture.
        Vector3 p = active_controller.transform.position;
        Quaternion q = active_controller.transform.rotation;
        gr.contdStroke(p, q);
        // Show the stroke by instantiating new objects.
        GameObject star_instance = Instantiate(GameObject.Find("star"));
        GameObject star = new GameObject("stroke_" + stroke_index++);
        star_instance.name = star.name + "_instance";
        star_instance.transform.SetParent(star.transform, false);
        System.Random random = new System.Random();
        star.transform.localPosition = new Vector3(p.x + (float)random.NextDouble() / 80, p.y + (float)random.NextDouble() / 80, p.z + (float)random.NextDouble() / 80);
        star.transform.localRotation = new Quaternion((float)random.NextDouble() - 0.5f, (float)random.NextDouble() - 0.5f, (float)random.NextDouble() - 0.5f, (float)random.NextDouble() - 0.5f).normalized;
        //star.transform.localRotation.Normalize();
        float star_scale = (float)random.NextDouble() + 0.3f;
        star.transform.localScale = new Vector3(star_scale, star_scale, star_scale);
        stroke.Add(star.name);
        return;
    }
    // else: if we arrive here, the user let go of the trigger, ending a gesture.
    active_controller = null;

    // Delete the objects that we used to display the gesture.
    foreach (string star in stroke) {
        Destroy(GameObject.Find(star));
    }
    stroke.Clear();
    stroke_index = 0;

    double similarity = 0;        // This will receive a value of how similar the performed gesture was to previous recordings.
    Vector3 pos = Vector3.zero;   // This will receive the position where the gesture was performed.
    double scale = 0;             // This will receive the scale at which the gesture was performed.
    Vector3 dir0 = Vector3.zero;  // This will receive the primary direction in which the gesture was performed (greatest expansion).
    Vector3 dir1 = Vector3.zero;  // This will receive the secondary direction of the gesture.
    Vector3 dir2 = Vector3.zero;  // This will receive the minor direction of the gesture (direction of smallest expansion).
    int gesture_id = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);
    // if (similarity < ???) {
    //     ...maybe this is not the gesture I was looking for...
    // }

    // If we are currently recording samples for a custom gesture, check if we have recorded enough samples yet.
    if (recording_gesture >= 0) {
        // Currently recording samples for a custom gesture - check how many we have recorded so far.
        int num_samples = gr.getGestureNumberOfSamples(recording_gesture);
        if (num_samples < 25) {
            // Not enough samples recorded yet.
            HUDText.text = "Learning a new gesture (custom gesture " + (recording_gesture - 3) + "):\nPlease perform the gesture 25 times.\n(" + num_samples + " / 25)";
        } else {
            // Enough samples recorded. Start the learning process.
            HUDText.text = "Learning gestures - please wait...\n(press A/X/menu button to stop the learning process)";
            // Set up the call-backs to receive information about the learning process.
            gr.setTrainingUpdateCallback(trainingUpdateCallback);
            gr.setTrainingUpdateCallbackMetadata((IntPtr)me);
            gr.setTrainingFinishCallback(trainingFinishCallback);
            gr.setTrainingFinishCallbackMetadata((IntPtr)me);
            gr.setMaxTrainingTime(10000);
            // Set recording_gesture to -2 to indicate that we're currently in learning mode.
            recording_gesture = -2;
            if (gr.startTraining() == false) {
                Debug.Log("COULD NOT START TRAINING");
            }
        }
        return;
    }
    // else: if we arrive here, we're not recording new samples for custom gestures,
    // but instead have identified a new gesture.
    // Perform the action associated with that gesture.
    if (gesture_id < 0) {
        // Error trying to identify any gesture.
        HUDText.text = "Failed to identify gesture.";
    } else if (gesture_id == 0) {
        // "loop"-gesture: create cylinder.
        HUDText.text = "Identified a CIRCLE/LOOP gesture!";
        GameObject cylinder = Instantiate(GameObject.Find("controller_dummy"));
        cylinder.transform.localPosition = pos;
        cylinder.transform.localRotation = Quaternion.FromToRotation(new Vector3(0, 1, 0), dir2);
        cylinder.transform.localScale = new Vector3((float)scale * 2, (float)scale, (float)scale * 2);
        created_objects.Add(cylinder);
    } else if (gesture_id == 1) {
        // "swipe left"-gesture: rotate left.
        HUDText.text = "Identified a SWIPE LEFT gesture!";
        GameObject closest_object = getClosestObject(pos);
        if (closest_object != null) {
            closest_object.transform.Rotate(new Vector3(0, 1, 0), (float)scale * 400, Space.World);
        }
    } else if (gesture_id == 2) {
        // "swipe right"-gesture: rotate right.
        HUDText.text = "Identified a SWIPE RIGHT gesture!";
        GameObject closest_object = getClosestObject(pos);
        if (closest_object != null) {
            closest_object.transform.Rotate(new Vector3(0, 1, 0), -(float)scale * 400, Space.World);
        }
    } else if (gesture_id == 3) {
        // "shake" or "scrap" gesture: delete closest object.
        HUDText.text = "Identified a SHAKE gesture!";
        GameObject closest_object = getClosestObject(pos);
        if (closest_object != null) {
            Destroy(closest_object);
            created_objects.Remove(closest_object);
        }
    } else if (gesture_id == 4) {
        // "draw sword" gesture.
        HUDText.text = "MAGIC!";
        Color col = RenderSettings.skybox.GetColor("_Tint");
        if (col.r < 0.51) {
            RenderSettings.skybox.SetColor("_Tint", new Color(0.53f, 0.17f, 0.17f, 1.0f));
        } else {
            // Reset the tint.
            RenderSettings.skybox.SetColor("_Tint", new Color(0.5f, 0.5f, 0.5f, 1.0f));
        }
    } else {
        // Other ID: one of the user-registered gestures:
        HUDText.text = "Identified custom registered gesture " + (gesture_id - 4);
    }
}
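The swipe and shake branches above rely on a getClosestObject(pos) helper that is not shown in this section. A minimal sketch, assuming it simply searches the created_objects list (the List<GameObject> that the loop gesture appends to) for the object nearest to the gesture position; the distance metric is an assumption:

// Sketch of the helper used above: find the previously created object closest to 'pos'.
// Assumes 'created_objects' is the List<GameObject> populated by the loop gesture.
GameObject getClosestObject(Vector3 pos)
{
    GameObject closest = null;
    float closest_distance = float.MaxValue;
    foreach (GameObject obj in created_objects)
    {
        if (obj == null) {
            continue; // object may have been destroyed by an earlier "shake" gesture
        }
        float d = (obj.transform.position - pos).magnitude;
        if (d < closest_distance) {
            closest_distance = d;
            closest = obj;
        }
    }
    return closest;
}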
// It may need a string parameter once user input is implemented.
public void Register(string name)
{
    gr.createGesture(name);
    gestureList.Add(new Gesture(name));
}
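Register() keeps a Gesture object alongside the plugin-side gesture it creates, but the Gesture type itself is not shown here. A minimal sketch of what such a container might look like; everything beyond the name field is an assumption:

// Hypothetical container mirroring one plugin-side gesture.
// Only the name is required by Register(); the sample counter is an assumed convenience.
public class Gesture
{
    public string name;
    public int sampleCount;

    public Gesture(string name)
    {
        this.name = name;
        this.sampleCount = 0;
    }
}

In practice it would probably also make sense to keep the integer ID returned by gr.createGesture(name), so later calls can reference the gesture without a name lookup.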
// Update:
void Update()
{
    float escape = Input.GetAxis("escape");
    if (escape > 0.0f) {
        Application.Quit();
    }
    float trigger_left = Input.GetAxis("LeftControllerTrigger");
    float trigger_right = Input.GetAxis("RightControllerTrigger");
    bool button_a_left = Input.GetButton("LeftControllerButtonA");
    bool button_a_right = Input.GetButton("RightControllerButtonA");
    if (button_a_pressed) {
        if (!button_a_left && !button_a_right) {
            button_a_pressed = false;
        }
        return;
    }

    // If recording_gesture is -3, that means that the AI has recently finished learning a new gesture.
    if (recording_gesture == -3) {
        // Save the data to file.
#if UNITY_EDITOR
        string GesturesFilePath = "Assets/GestureRecognition";
#elif UNITY_ANDROID
        string GesturesFilePath = Application.persistentDataPath;
#else
        string GesturesFilePath = Application.streamingAssetsPath;
#endif
        if (this.SaveGesturesFile == null) {
            this.SaveGesturesFile = "Sample_Continuous_MyRecordedGestures.dat";
        }
        this.gr.saveToFile(GesturesFilePath + "/" + this.SaveGesturesFile);
        // Show "finished" message.
        double performance = gr.recognitionScore();
        HUDText.text = "Training finished!\n(Final recognition performance = " + (performance * 100.0) + "%)\nFeel free to use your new gesture.";
        // Set recording_gesture to -1 to indicate normal operation (learning finished).
        recording_gesture = -1;
        return;
    }

    // If recording_gesture is -2, that means that the AI is currently learning a new gesture.
    if (recording_gesture == -2) {
        // Show "please wait" message.
        HUDText.text = "...training...\n(Current recognition performance = " + (last_performance_report * 100.0) + "%)\nPress the 'A'/'X'/Menu button to stop training.";
        // In this mode, the user may press the "A/X/menu" button to cancel the learning process.
        if (button_a_left || button_a_right) {
            // Button pressed: stop the learning process.
            gr.stopTraining();
            button_a_pressed = true;
        }
        return;
    }

    // Else: if we arrive here, we're not in training/learning mode,
    // so the user can draw gestures.
    if (button_a_left || button_a_right) {
        button_a_pressed = true;
        // If recording_gesture is -1, we're currently not recording a new gesture.
        if (recording_gesture == -1) {
            // In this mode, the user can press button A/X/menu to create a new gesture.
            recording_gesture = gr.createGesture("Your gesture #" + (gr.numberOfGestures() - 4));
            // From now on: recording a new gesture.
            HUDText.text = "Learning a new gesture (" + (gr.getGestureName(recording_gesture)) + "):\nPlease perform the gesture for a while.\n(0 samples recorded)";
            gr.contdIdentificationPeriod = (int)(this.GesturePeriod * 1000.0f); // to milliseconds
        } else {
            HUDText.text = "Learning gestures - please wait...\n(press A/X/menu button to stop the learning process)";
            // Set up the call-backs to receive information about the learning process.
            gr.setTrainingUpdateCallback(trainingUpdateCallback);
            gr.setTrainingUpdateCallbackMetadata((IntPtr)me);
            gr.setTrainingFinishCallback(trainingFinishCallback);
            gr.setTrainingFinishCallbackMetadata((IntPtr)me);
            gr.setMaxTrainingTime(30);
            // Set recording_gesture to -2 to indicate that we're currently in learning mode.
            recording_gesture = -2;
            if (gr.startTraining() == false) {
                Debug.Log("COULD NOT START TRAINING");
            }
        }
        return;
    }

    // If the user is not yet dragging (pressing the trigger) on either controller, he hasn't started a gesture yet.
    if (active_controller == null) {
        // If the user presses either controller's trigger, we start a new gesture.
        if (trigger_right > 0.9) {
            // Right controller trigger pressed.
            active_controller = GameObject.Find("Right Hand");
        } else if (trigger_left > 0.9) {
            // Left controller trigger pressed.
            active_controller = GameObject.Find("Left Hand");
        } else {
            // If we arrive here, the user is pressing neither controller's trigger:
            // nothing to do.
            return;
        }
        // If we arrive here: either trigger was pressed, so we start the gesture.
        gr.contdIdentificationPeriod = (int)(this.GesturePeriod * 1000.0f); // to milliseconds
        GameObject hmd = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
        Vector3 hmd_p = hmd.transform.position;
        Quaternion hmd_q = hmd.transform.rotation;
        gr.startStroke(hmd_p, hmd_q, recording_gesture);
        this.stroke_start_time = Time.time;
    }

    double similarity = -1.0; // This will receive a value of how similar the performed gesture was to previous recordings.
    int gesture_id = -1;      // This will receive the ID of the gesture.

    // If we arrive here, the user is currently dragging with one of the controllers.
    // Check if the user is still dragging or if he let go of the trigger button.
    if (trigger_left > 0.85 || trigger_right > 0.85) {
        Vector3 p = active_controller.transform.position;
        Quaternion q = active_controller.transform.rotation;
        gr.contdStrokeQ(p, q);
        // The user is still dragging with the controller: continue the gesture.
        if (Time.time - this.stroke_start_time > GesturePeriod && Time.time - this.last_recognition_time > RecognitionInterval) {
            GameObject hmd = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
            Vector3 hmd_p = hmd.transform.position;
            Quaternion hmd_q = hmd.transform.rotation;
            if (recording_gesture >= 0) {
                gr.contdRecord(hmd_p, hmd_q);
                int num_samples = gr.getGestureNumberOfSamples(recording_gesture);
                HUDText.text = "Learning a new gesture (" + (gr.getGestureName(recording_gesture)) + ").\n\n(" + num_samples + " samples recorded)";
            } else {
                gesture_id = gr.contdIdentify(hmd_p, hmd_q, ref similarity);
                if (gesture_id >= 0) {
                    string gesture_name = gr.getGestureName(gesture_id);
                    HUDText.text = "Identifying gesture '" + gesture_name + "'.\n\n(similarity: " + similarity + "%)";
                }
            }
            this.last_recognition_time = Time.time;
        }
        // Prune the stroke of all items that are too old to be used.
        float cutoff_time = Time.time - GesturePeriod;
        while (stroke.Count > 0 && stroke[0].time < cutoff_time) {
            GameObject star_object = GameObject.Find(stroke[0].name);
            if (star_object != null) {
                Destroy(star_object);
            }
            stroke.RemoveAt(0);
        }
        // Extend the stroke by instantiating new objects.
        stroke.Add(new StrokePoint(p));
        return;
    }
    // else: if we arrive here, the user let go of the trigger, ending a gesture.
    active_controller = null;
    gr.cancelStroke();

    // Delete the objects that we used to display the gesture.
    foreach (StrokePoint star in stroke) {
        GameObject star_object = GameObject.Find(star.name);
        if (star_object != null) {
            Destroy(star_object);
        }
    }
    stroke.Clear();

    // If we are currently recording samples for a custom gesture, check if we have recorded enough samples yet.
    if (recording_gesture >= 0) {
        // Currently recording samples for a custom gesture - check how many we have recorded so far.
        int num_samples = gr.getGestureNumberOfSamples(recording_gesture);
        HUDText.text = "Learning a new gesture (" + (gr.getGestureName(recording_gesture)) + "):\n"
                     + "Please perform the gesture for a while.\n(" + num_samples + " samples recorded)\n"
                     + "\nor: press 'A'/'X'/Menu button\nto finish recording and start training.";
        return;
    }
    // else: if we arrive here, we're not recording new samples for custom gestures,
    // but instead have identified a new gesture.
    // Perform the action associated with that gesture.
    HUDText = GameObject.Find("HUDText").GetComponent<Text>();
    HUDText.text = "Hold the trigger to draw gestures.\nAvailable gestures:";
    for (int i = 0; i < gr.numberOfGestures(); i++) {
        HUDText.text += "\n" + (i + 1) + " : " + gr.getGestureName(i);
    }
    HUDText.text += "\nor: press 'A'/'X'/Menu button\nto create new gesture.";
}
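The continuous-recognition Update() above stores stroke visuals as StrokePoint entries (a name plus a timestamp, constructed from a controller position) and wires trainingUpdateCallback / trainingFinishCallback into the recognizer, but neither is shown in this section. Below is a minimal sketch of both. The StrokePoint fields are inferred from how they are used above; the visualization and naming scheme are assumptions. The callbacks assume the plugin's training callback passes (double performance, IntPtr metadata) and that the metadata pointer is the GCHandle allocated via GCHandle.Alloc(this); the class name Sample_Continuous is only a guess derived from the save-file name, and the exact delegate signature should be checked against the plugin's GestureRecognition class.

// Requires: using System; using System.Runtime.InteropServices; using UnityEngine;

// Sketch of the stroke bookkeeping type referenced above: one visual marker per sampled point.
struct StrokePoint
{
    static int counter = 0;   // running index used to give each marker a unique name (assumed scheme)
    public string name;       // name of the GameObject visualizing this point
    public float time;        // Time.time when the point was added, used for pruning

    public StrokePoint(Vector3 p)
    {
        // One possible visualization: a small sphere at the sampled controller position.
        GameObject star = GameObject.CreatePrimitive(PrimitiveType.Sphere);
        star.name = "stroke_point_" + (counter++);
        star.transform.position = p;
        star.transform.localScale = new Vector3(0.01f, 0.01f, 0.01f);
        this.name = star.name;
        this.time = Time.time;
    }
}

// Sketch of the training callbacks, assuming the delegate signature (double performance, IntPtr metadata)
// and that 'metadata' is the GCHandle allocated in Start() via GCHandle.Alloc(this).
public static void trainingUpdateCallback(double performance, IntPtr metadata)
{
    GCHandle handle = (GCHandle)metadata;
    var sample = handle.Target as Sample_Continuous; // class name is an assumption
    if (sample != null) {
        sample.last_performance_report = performance;
    }
}

public static void trainingFinishCallback(double performance, IntPtr metadata)
{
    GCHandle handle = (GCHandle)metadata;
    var sample = handle.Target as Sample_Continuous; // class name is an assumption
    if (sample != null) {
        sample.last_performance_report = performance;
        sample.recording_gesture = -3; // signals Update() that training has finished (per the state machine above)
    }
}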