Example #1 — continuous gesture recognition sample (Update loop)
    // Update: per-frame driver for the continuous gesture-recognition sample.
    //
    // State machine encoded in the `recording_gesture` field:
    //   -3  : AI just finished training       -> save gesture file, report score
    //   -2  : AI is currently training        -> show progress, allow cancel via A/X/menu
    //   -1  : normal operation                -> continuously identify gestures
    //   >=0 : recording samples for the gesture with this ID
    //
    // NOTE(review): `gr`, `HUDText`, `recording_gesture`, `button_a_pressed`,
    // `active_controller`, `stroke`, `stroke_start_time`, `last_recognition_time`,
    // `last_performance_report`, `GesturePeriod`, `RecognitionInterval`, `me`,
    // and `SaveGesturesFile` are fields declared elsewhere in this class —
    // their exact types/semantics are inferred from usage here; verify against
    // the class declaration.
    void Update()
    {
        // "escape" axis used as a quit signal (any positive deflection quits).
        float escape = Input.GetAxis("escape");

        if (escape > 0.0f)
        {
            Application.Quit();
        }
        float trigger_left  = Input.GetAxis("LeftControllerTrigger");
        float trigger_right = Input.GetAxis("RightControllerTrigger");

        bool button_a_left  = Input.GetButton("LeftControllerButtonA");
        bool button_a_right = Input.GetButton("RightControllerButtonA");

        // Debounce: once the A/X/menu press has been handled, ignore further
        // frames until both buttons are released again.
        if (button_a_pressed)
        {
            if (!button_a_left && !button_a_right)
            {
                button_a_pressed = false;
            }
            return;
        }

        // If recording_gesture is -3, that means that the AI has recently finished learning a new gesture.
        if (recording_gesture == -3)
        {
            // Save the data to file.
            // Pick a writable/readable location per platform: project assets in
            // the editor, persistent storage on Android, streaming assets elsewhere.
#if UNITY_EDITOR
            string GesturesFilePath = "Assets/GestureRecognition";
#elif UNITY_ANDROID
            string GesturesFilePath = Application.persistentDataPath;
#else
            string GesturesFilePath = Application.streamingAssetsPath;
#endif
            // Fall back to a default file name if none was configured.
            if (this.SaveGesturesFile == null)
            {
                this.SaveGesturesFile = "Sample_Continuous_MyRecordedGestures.dat";
            }
            this.gr.saveToFile(GesturesFilePath + "/" + this.SaveGesturesFile);
            // Show "finished" message.
            double performance = gr.recognitionScore();
            HUDText.text = "Training finished!\n(Final recognition performance = " + (performance * 100.0) + "%)\nFeel free to use your new gesture.";
            // Set recording_gesture to -1 to indicate normal operation (learning finished).
            recording_gesture = -1;
            return;
        }
        // If recording_gesture is -2, that means that the AI is currently learning a new gesture.
        if (recording_gesture == -2)
        {
            // Show "please wait" message
            HUDText.text = "...training...\n(Current recognition performance = " + (last_performance_report * 100.0) + "%)\nPress the 'A'/'X'/Menu button to stop training.";
            // In this mode, the user may press the "A/X/menu" button to cancel the learning process.
            if (button_a_left || button_a_right)
            {
                // Button pressed: stop the learning process.
                // (The training-finish callback is expected to advance the state
                // to -3 — TODO confirm against the callback implementation.)
                gr.stopTraining();
                button_a_pressed = true;
            }
            return;
        }
        // Else: if we arrive here, we're not in training/learning mode,
        // so the user can draw gestures.

        if (button_a_left || button_a_right)
        {
            button_a_pressed = true;
            // If recording_gesture is -1, we're currently not recording a new gesture.
            if (recording_gesture == -1)
            {
                // In this mode, the user can press button A/X/menu to create a new gesture
                // NOTE(review): the "- 4" offset presumably skips 4 predefined/sample
                // gestures when numbering user-created ones — verify against setup code.
                recording_gesture = gr.createGesture("Your gesture #" + (gr.numberOfGestures() - 4));
                // from now on: recording a new gesture
                HUDText.text = "Learning a new gesture (" + (gr.getGestureName(recording_gesture)) + "):\nPlease perform the gesture for a while.\n(0 samples recorded)";
                gr.contdIdentificationPeriod = (int)(this.GesturePeriod * 1000.0f); // to milliseconds
            }
            else
            {
                // recording_gesture >= 0: the user pressed the button to finish
                // recording samples and start the training process.
                HUDText.text = "Learning gestures - please wait...\n(press A/X/menu button to stop the learning process)";
                // Set up the call-backs to receive information about the learning process.
                gr.setTrainingUpdateCallback(trainingUpdateCallback);
                gr.setTrainingUpdateCallbackMetadata((IntPtr)me);
                gr.setTrainingFinishCallback(trainingFinishCallback);
                gr.setTrainingFinishCallbackMetadata((IntPtr)me);
                gr.setMaxTrainingTime(30);
                // Set recording_gesture to -2 to indicate that we're currently in learning mode.
                recording_gesture = -2;
                if (gr.startTraining() == false)
                {
                    Debug.Log("COULD NOT START TRAINING");
                }
            }
            return;
        }

        // If the user is not yet dragging (pressing the trigger) on either controller, he hasn't started a gesture yet.
        if (active_controller == null)
        {
            // If the user presses either controller's trigger, we start a new gesture.
            // Start threshold is 0.9; the "keep dragging" check below uses 0.85,
            // giving a small hysteresis band so the stroke doesn't flicker on/off.
            if (trigger_right > 0.9)
            {
                // Right controller trigger pressed.
                active_controller = GameObject.Find("Right Hand");
            }
            else if (trigger_left > 0.9)
            {
                // Left controller trigger pressed.
                active_controller = GameObject.Find("Left Hand");
            }
            else
            {
                // If we arrive here, the user is pressing neither controller's trigger:
                // nothing to do.
                return;
            }
            // If we arrive here: either trigger was pressed, so we start the gesture.
            gr.contdIdentificationPeriod = (int)(this.GesturePeriod * 1000.0f); // to milliseconds
            // The headset pose is passed as the frame of reference for the stroke.
            GameObject hmd   = GameObject.Find("Main Camera");                  // alternative: Camera.main.gameObject
            Vector3    hmd_p = hmd.transform.position;
            Quaternion hmd_q = hmd.transform.rotation;
            gr.startStroke(hmd_p, hmd_q, recording_gesture);
            this.stroke_start_time = Time.time;
        }

        double similarity = -1.0; // This will receive a value of how similar the performed gesture was to previous recordings.
        int    gesture_id = -1;   // This will receive the ID of the gesture.

        // If we arrive here, the user is currently dragging with one of the controllers.
        // Check if the user is still dragging or if he let go of the trigger button.
        // (0.85 is deliberately below the 0.9 start threshold — see hysteresis note above.)
        if (trigger_left > 0.85 || trigger_right > 0.85)
        {
            Vector3    p = active_controller.transform.position;
            Quaternion q = active_controller.transform.rotation;
            gr.contdStrokeQ(p, q);
            // The user is still dragging with the controller: continue the gesture.
            // Only record/identify once the stroke has run for at least GesturePeriod,
            // and throttle recognition attempts to one per RecognitionInterval.
            if (Time.time - this.stroke_start_time > GesturePeriod && Time.time - this.last_recognition_time > RecognitionInterval)
            {
                GameObject hmd   = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
                Vector3    hmd_p = hmd.transform.position;
                Quaternion hmd_q = hmd.transform.rotation;
                if (recording_gesture >= 0)
                {
                    // Recording mode: add the current stroke window as a new sample.
                    gr.contdRecord(hmd_p, hmd_q);
                    int num_samples = gr.getGestureNumberOfSamples(recording_gesture);
                    HUDText.text = "Learning a new gesture (" + (gr.getGestureName(recording_gesture)) + ").\n\n(" + num_samples + " samples recorded)";
                }
                else
                {
                    // Identification mode: ask the AI which known gesture this looks like.
                    gesture_id = gr.contdIdentify(hmd_p, hmd_q, ref similarity);
                    if (gesture_id >= 0)
                    {
                        string gesture_name = gr.getGestureName(gesture_id);
                        HUDText.text = "Identifying gesture '" + gesture_name + "'.\n\n(similarity: " + similarity + "%)";
                    }
                }
                this.last_recognition_time = Time.time;
            }
            // Prune the stroke of all items that are too old to be used
            // NOTE(review): each StrokePoint apparently names a visual ("star")
            // scene object that is found by name and destroyed with it — verify
            // against the StrokePoint definition.
            float cutoff_time = Time.time - GesturePeriod;
            while (stroke.Count > 0 && stroke[0].time < cutoff_time)
            {
                GameObject star_object = GameObject.Find(stroke[0].name);
                if (star_object != null)
                {
                    Destroy(star_object);
                }
                stroke.RemoveAt(0);
            }
            // Extend the stroke by instatiating new objects
            stroke.Add(new StrokePoint(p));
            return;
        }
        // else: if we arrive here, the user let go of the trigger, ending a gesture.
        active_controller = null;
        gr.cancelStroke();
        // Delete the objects that we used to display the gesture.
        foreach (StrokePoint star in stroke)
        {
            GameObject star_object = GameObject.Find(star.name);
            if (star_object != null)
            {
                Destroy(star_object);
            }
        }
        stroke.Clear();

        // If we are currently recording samples for a custom gesture, check if we have recorded enough samples yet.
        if (recording_gesture >= 0)
        {
            // Currently recording samples for a custom gesture - check how many we have recorded so far.
            int num_samples = gr.getGestureNumberOfSamples(recording_gesture);
            HUDText.text = "Learning a new gesture (" + (gr.getGestureName(recording_gesture)) + "):\n"
                           + "Please perform the gesture for a while.\n(" + num_samples + " samples recorded)\n"
                           + "\nor: press 'A'/'X'/Menu button\nto finish recording and start training.";
            return;
        }
        // else: if we arrive here, we're not recording new sampled for custom gestures,
        // but instead have identified a new gesture.
        // Perform the action associated with that gesture.


        // NOTE(review): re-fetching the HUDText component by scene lookup every
        // time a gesture ends looks redundant (the field is already used above);
        // left as-is since the field's initialization is not visible here.
        HUDText      = GameObject.Find("HUDText").GetComponent <Text>();
        HUDText.text = "Hold the trigger to draw gestures.\nAvailable gestures:";
        for (int i = 0; i < gr.numberOfGestures(); i++)
        {
            HUDText.text += "\n" + (i + 1) + " : " + gr.getGestureName(i);
        }
        HUDText.text += "\nor: press 'A'/'X'/Menu button\nto create new gesture.";
    }