Example no. 1
0
        /// <summary>
        /// Called when the user releases the trigger, ending a gesture stroke.
        /// Identifies the completed gesture and triggers the matching pixie action,
        /// updating the HUD with feedback for the recognized gesture.
        /// </summary>
        /// <param name="gr">Gesture recognition engine whose current stroke is ended.</param>
        /// <param name="pixie">Pixie character that performs the action associated with the gesture.</param>
        public override void dragStop(ref GestureRecognition gr, ref Pixie pixie)
        {
            // endStroke fills the similarity/pos/scale/dir0..dir2 fields of this class
            // and returns the id of the recognized gesture (negative on failure).
            int gesture_id = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);

            if (gesture_id == gestureid_peekaboo)
            {
                pixie.triggerPeekaboo();
                // Fixed garbled HUD text: "Feel try to try out" -> "Feel free to try out".
                Sample_Pixie.HUDText.text = "That's your new gesture. Awesome!\nFeel free to try out any gesture.";
            }
            else if (gesture_id == gestureid_flip)
            {
                pixie.triggerFlip(pos, dir0);
                Sample_Pixie.HUDText.text = "That's your previous gesture. Cool!\nFeel free to try out any gesture.";
            }
            else if (gesture_id == gestureid_spin)
            {
                pixie.triggerSpin(pos);
                Sample_Pixie.HUDText.text = "That's a 'spin' gesture. Nice!\nFeel free to try out any gesture.";
            }
            else if (gesture_id == gestureid_go)
            {
                // Send the pixie 6 units along the gesture's primary direction,
                // scaled by the size at which the gesture was performed.
                pixie.triggerGo(pos + (dir0 * 6.0f * (float)scale));
                // Fixed typo in HUD text: "Splended" -> "Splendid".
                Sample_Pixie.HUDText.text = "That's a 'go there' gesture. Splendid!\nMake it larger to send the pixie further away.";
            }
            else if (gesture_id == gestureid_come)
            {
                pixie.triggerCome(pos);
                Sample_Pixie.HUDText.text = "That's a 'come here' gesture. Not bad!\nFeel free to try out any gesture.";
            }
            // else: gesture not recognized (gesture_id < 0) - no action, HUD unchanged.
        }
Example no. 2
0
        /// <summary>
        /// Ends the current gesture stroke, triggers the pixie action matching the
        /// recognized gesture, and tracks progress toward completing this tutorial
        /// step (four commands issued; only the new "flip" gesture increments the count).
        /// </summary>
        /// <param name="gr">Gesture recognition engine whose current stroke is ended.</param>
        /// <param name="pixie">Pixie character that performs the gesture's action.</param>
        public override void dragStop(ref GestureRecognition gr, ref Pixie pixie)
        {
            int recognizedId = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);

            string hudMessage = null;
            if (recognizedId == gestureid_flip)
            {
                pixie.triggerFlip(pos, dir0);
                this.num_commands_issued += 1;
                hudMessage = "That's your new gesture. Nice!\nTry it a few more times.\n(" + this.num_commands_issued + "/4)";
            }
            else if (recognizedId == gestureid_spin)
            {
                pixie.triggerSpin(pos);
                hudMessage = "That's a 'spin' gesture. Nice!\nTry a few more times.\n(" + this.num_commands_issued + "/4)";
            }
            else if (recognizedId == gestureid_go)
            {
                // Destination: 6 units along the gesture's main direction, scaled by gesture size.
                pixie.triggerGo(pos + (dir0 * 6.0f * (float)scale));
                hudMessage = "That's a 'go there' gesture. Nice!\nTry a few more times.\n(" + this.num_commands_issued + "/4)";
            }
            else if (recognizedId == gestureid_come)
            {
                pixie.triggerCome(pos);
                hudMessage = "That's a 'come here' gesture. Nice!\nTry a few more times.\n(" + this.num_commands_issued + "/4)";
            }

            if (hudMessage != null)
            {
                Sample_Pixie.HUDText.text = hudMessage;
            }

            // Enough commands issued: mark the tutorial step done and clear the HUD.
            if (this.num_commands_issued >= 4)
            {
                this.completed            = true;
                Sample_Pixie.HUDText.text = "";
            }
        }
Example no. 3
0
    /// <summary>
    /// Ends the current gesture stroke and, if a gesture was recognized, notifies
    /// every subscriber with the gesture plus the main camera's position and
    /// forward direction.
    /// </summary>
    /// <param name="gr">Gesture recognition engine whose current stroke is ended.</param>
    private void DragStop(ref GestureRecognition gr)
    {
        var recognized   = (InteractionMachine.Gesture)gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);
        var camTransform = Camera.main.transform;

        if (recognized == InteractionMachine.Gesture.None)
        {
            // Nothing recognized - no notifications.
            return;
        }

        foreach (var subscriber in subs)
        {
            subscriber.Notify(recognized, camTransform.position, camTransform.forward);
        }
    }
    /// <summary>
    /// Finishes the current gesture stroke and reports which gesture was identified.
    /// </summary>
    /// <returns>
    /// Index of the identified gesture (negative on failure); consumed by an
    /// external class when identifying the gesture.
    /// </returns>
    public int EndRead()
    {
        double strokeSimilarity = 0;

        // endStroke ends the recording and yields the matched gesture's index.
        int gestureIndex = gr.endStroke(ref strokeSimilarity);

        isPerforming = false;

        //increase recognition rate using similarity??

        return(gestureIndex);
    }
Example no. 5
0
        /// <summary>
        /// Ends the current stroke; when a "go there" gesture is recognized, sends
        /// the pixie to the indicated position and marks this tutorial step completed.
        /// </summary>
        /// <param name="gr">Gesture recognition engine whose current stroke is ended.</param>
        /// <param name="pixie">Pixie character that performs the gesture's action.</param>
        public override void dragStop(ref GestureRecognition gr, ref Pixie pixie)
        {
            int recognizedId = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);

            if (recognizedId != gestureid_go)
            {
                return; // Not the gesture this step is waiting for.
            }

            // Destination: 6 units along the gesture's main direction, scaled by gesture size.
            pixie.triggerGo(pos + (dir0 * 6.0f * (float)scale));
            this.completed            = true;
            Sample_Pixie.HUDText.text = "";
        }
Example no. 6
0
        /// <summary>
        /// Ends the current stroke while recording samples of the new gesture;
        /// once 20 samples have been collected, starts training and marks this
        /// tutorial step completed.
        /// </summary>
        /// <param name="gr">Gesture recognition engine whose current stroke is ended.</param>
        /// <param name="pixie">Pixie character (unused in this step).</param>
        public override void dragStop(ref GestureRecognition gr, ref Pixie pixie)
        {
            int recognizedId = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);

            if (recognizedId == gestureid_peekaboo)
            {
                recorded_samples         += 1;
                Sample_Pixie.HUDText.text = "Let's try it again!\nInvent a new gesture and do it 20 times.\n(" + recorded_samples + "/20)";
            }

            if (recorded_samples < 20)
            {
                return; // Still collecting samples.
            }

            this.completed = true;
            gr.startTraining();
            Sample_Pixie.HUDText.text = "Please wait while your pixie is learning the new gesture...";
        }
Example no. 7
0
        /// <summary>
        /// Ends the current stroke; each recognized "spin" gesture makes the pixie
        /// spin and counts toward completing this tutorial step (three spins).
        /// </summary>
        /// <param name="gr">Gesture recognition engine whose current stroke is ended.</param>
        /// <param name="pixie">Pixie character that performs the spin.</param>
        public override void dragStop(ref GestureRecognition gr, ref Pixie pixie)
        {
            int recognizedId = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);

            if (recognizedId == gestureid_spin)
            {
                pixie.triggerSpin(pos);
                this.num_commands_issued += 1;
                Sample_Pixie.HUDText.text = "Great!\nTry it again!\nMake a twirl (whirling) gesture\nto make the pixie spin. (" + this.num_commands_issued + "/3)";
            }

            if (this.num_commands_issued < 3)
            {
                return; // Step not finished yet.
            }

            this.completed            = true;
            Sample_Pixie.HUDText.text = "";
        }
Example no. 8
0
    // Update: per-frame driver for the gesture manager.
    // Shows HUD status while no recognizer exists or training is running, then
    // handles either one-handed gesture strokes (this.gr) or two-handed gesture
    // combinations (this.gc), driven by the controllers' trigger axes.
    void Update()
    {
        if (this.gr == null && this.gc == null)
        {
            // Neither a single-gesture nor a combination recognizer exists yet.
            HUDText.text = "Welcome to MARUI Gesture Plug-in!\n"
                           + "This manager allows you to create and record gestures,\n"
                           + "and organize gesture files.\n"
                           + "Please use the Inspector for the XR rig.\n"
                           + "[Currently, no gesture recognition object is created].";
            return;
        }
        if (training_started)
        {
            if ((this.gr != null && this.gr.isTraining()) || (this.gc != null && this.gc.isTraining()))
            {
                // Training still in progress: show the latest performance report and skip gesture handling.
                HUDText.text = "Currently training...\n"
                               + "Current recognition performance: " + (this.last_performance_report * 100).ToString() + "%.\n"
                               + "You can stop training in the Inspector for the XR rig.\n";
                return;
            }
            else
            {
                // Training just finished: report the final score and fall through to normal operation.
                training_started = false;
                HUDText.text     = "Training finished!\n"
                                   + "Final recognition performance: " + (this.last_performance_report * 100).ToString() + "%.\n";
            }
        }

        // Trigger axes are expected in the 0..1 range (see thresholds below).
        float trigger_left  = Input.GetAxis("LeftControllerTrigger");
        float trigger_right = Input.GetAxis("RightControllerTrigger");

        // Single Gesture recognition / 1-handed operation
        if (this.gr != null)
        {
            // If the user is not yet dragging (pressing the trigger) on either controller, he hasn't started a gesture yet.
            if (active_controller == null)
            {
                // If the user presses either controller's trigger, we start a new gesture.
                if (trigger_right > 0.85)
                {
                    // Right controller trigger pressed.
                    active_controller = GameObject.Find("Right Hand");
                }
                else if (trigger_left > 0.85)
                {
                    // Left controller trigger pressed.
                    active_controller = GameObject.Find("Left Hand");
                }
                else
                {
                    // If we arrive here, the user is pressing neither controller's trigger:
                    // nothing to do.
                    return;
                }
                // If we arrive here: either trigger was pressed, so we start the gesture.
                // The stroke is started relative to the headset's pose; record_gesture_id
                // selects the gesture being recorded (or is negative for identification).
                GameObject hmd   = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
                Vector3    hmd_p = hmd.transform.position;
                Quaternion hmd_q = hmd.transform.rotation;
                gr.startStroke(hmd_p, hmd_q, record_gesture_id);
                return;
            }

            // If we arrive here, the user is currently dragging with one of the controllers.
            // Check if the user is still dragging or if he let go of the trigger button.
            // NOTE(review): either trigger keeps the stroke alive, regardless of which
            // controller started it - confirm this is intended.
            if (trigger_left > 0.85 || trigger_right > 0.85)
            {
                // The user is still dragging with the controller: continue the gesture.
                Vector3    p = active_controller.transform.position;
                Quaternion q = active_controller.transform.rotation;
                gr.contdStrokeQ(p, q);
                addToStrokeTrail(p);
                return;
            }
            // else: if we arrive here, the user let go of the trigger, ending a gesture.
            active_controller = null;

            // Delete the objectes that we used to display the gesture.
            // NOTE(review): stroke_index is reset inside the loop body; a single reset
            // after the loop would be equivalent - confirm intent.
            foreach (string star in stroke)
            {
                Destroy(GameObject.Find(star));
                stroke_index = 0;
            }

            double  similarity = 0;            // This will receive the similarity value (0~1)
            Vector3 pos        = Vector3.zero; // This will receive the position where the gesture was performed.
            double  scale      = 0;            // This will receive the scale at which the gesture was performed.
            Vector3 dir0       = Vector3.zero; // This will receive the primary direction in which the gesture was performed (greatest expansion).
            Vector3 dir1       = Vector3.zero; // This will receive the secondary direction of the gesture.
            Vector3 dir2       = Vector3.zero; // This will receive the minor direction of the gesture (direction of smallest expansion).
            int     gesture_id = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);

            // If we are currently recording samples for a custom gesture, check if we have recorded enough samples yet.
            if (record_gesture_id >= 0)
            {
                // Currently recording samples for a custom gesture - check how many we have recorded so far.
                HUDText.text = "Recorded a gesture sample for " + gr.getGestureName(record_gesture_id) + ".\n"
                               + "Total number of recorded samples for this gesture: " + gr.getGestureNumberOfSamples(record_gesture_id) + ".\n"
                               + "You can stop recording samples in the Inspector for the XR rig.\n";
                return;
            }
            // else: if we arrive here, we're not recording new samples,
            // but instead have identified a gesture.
            if (gesture_id < 0)
            {
                // Error trying to identify any gesture
                HUDText.text = "Failed to identify gesture.";
            }
            else
            {
                string gesture_name = gr.getGestureName(gesture_id);
                HUDText.text = "Identified gesture " + gesture_name + "(" + gesture_id + ")\n(Similarity: " + similarity + ")";
            }
            return;
        }


        // GestureCombination recognition / 2-handed operation
        if (this.gc != null)
        {
            // Press threshold (0.9) is higher than the release threshold (0.85) below,
            // giving hysteresis so jitter around the threshold doesn't toggle the stroke.
            // If the user presses either controller's trigger, we start a new gesture.
            if (trigger_pressed_left == false && trigger_left > 0.9)
            {
                // Controller trigger pressed.
                trigger_pressed_left = true;
                GameObject hmd        = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
                Vector3    hmd_p      = hmd.transform.position;
                Quaternion hmd_q      = hmd.transform.rotation;
                int        gesture_id = -1;
                if (record_combination_id >= 0)
                {
                    // Recording: look up which left-hand gesture belongs to the combination being recorded.
                    gesture_id = gc.getCombinationPartGesture(record_combination_id, lefthand_combination_part);
                }
                gc.startStroke(lefthand_combination_part, hmd_p, hmd_q, gesture_id);
                gesture_started = true;
            }
            if (trigger_pressed_right == false && trigger_right > 0.9)
            {
                // Controller trigger pressed.
                trigger_pressed_right = true;
                GameObject hmd        = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
                Vector3    hmd_p      = hmd.transform.position;
                Quaternion hmd_q      = hmd.transform.rotation;
                int        gesture_id = -1;
                if (record_combination_id >= 0)
                {
                    // Recording: look up which right-hand gesture belongs to the combination being recorded.
                    gesture_id = gc.getCombinationPartGesture(record_combination_id, righthand_combination_part);
                }
                gc.startStroke(righthand_combination_part, hmd_p, hmd_q, gesture_id);
                gesture_started = true;
            }
            if (gesture_started == false)
            {
                // nothing to do.
                return;
            }

            // If we arrive here, the user is currently dragging with one of the controllers.
            if (trigger_pressed_left == true)
            {
                if (trigger_left < 0.85)
                {
                    // User let go of a trigger and held controller still
                    gc.endStroke(lefthand_combination_part);
                    trigger_pressed_left = false;
                }
                else
                {
                    // User still dragging or still moving after trigger pressed
                    GameObject left_hand = GameObject.Find("Left Hand");
                    gc.contdStrokeQ(lefthand_combination_part, left_hand.transform.position, left_hand.transform.rotation);
                    // Show the stroke by instatiating new objects
                    addToStrokeTrail(left_hand.transform.position);
                }
            }

            if (trigger_pressed_right == true)
            {
                if (trigger_right < 0.85)
                {
                    // User let go of a trigger and held controller still
                    gc.endStroke(righthand_combination_part);
                    trigger_pressed_right = false;
                }
                else
                {
                    // User still dragging or still moving after trigger pressed
                    GameObject right_hand = GameObject.Find("Right Hand");
                    gc.contdStrokeQ(righthand_combination_part, right_hand.transform.position, right_hand.transform.rotation);
                    // Show the stroke by instatiating new objects
                    addToStrokeTrail(right_hand.transform.position);
                }
            }

            if (trigger_pressed_left || trigger_pressed_right)
            {
                // User still dragging with either hand - nothing left to do
                return;
            }
            // else: if we arrive here, the user let go of both triggers, ending the gesture.
            gesture_started = false;

            // Delete the objectes that we used to display the gesture.
            // NOTE(review): stroke_index is reset inside the loop body; a single reset
            // after the loop would be equivalent - confirm intent.
            foreach (string star in stroke)
            {
                Destroy(GameObject.Find(star));
                stroke_index = 0;
            }

            double similarity = 0; // This will receive a similarity value (0~1).
            int    recognized_combination_id = gc.identifyGestureCombination(ref similarity);

            // If we are currently recording samples for a custom gesture, check if we have recorded enough samples yet.
            if (record_combination_id >= 0)
            {
                // Currently recording samples for a custom gesture - check how many we have recorded so far.
                int connected_gesture_id_left  = gc.getCombinationPartGesture(record_combination_id, lefthand_combination_part);
                int connected_gesture_id_right = gc.getCombinationPartGesture(record_combination_id, righthand_combination_part);
                int num_samples_left           = gc.getGestureNumberOfSamples(lefthand_combination_part, connected_gesture_id_left);
                int num_samples_right          = gc.getGestureNumberOfSamples(righthand_combination_part, connected_gesture_id_right);
                // Currently recording samples for a custom gesture - check how many we have recorded so far.
                HUDText.text = "Recorded a gesture sample for " + gc.getGestureCombinationName(record_combination_id) + ".\n"
                               + "Total number of recorded samples for this gesture: " + num_samples_left + " left / " + num_samples_right + " right.\n"
                               + "You can stop recording samples in the Inspector for the XR rig.\n";
                return;
            }
            // else: if we arrive here, we're not recording new samples for custom gestures,
            // but instead have identified a new gesture.
            // Perform the action associated with that gesture.
            if (recognized_combination_id < 0)
            {
                // Error trying to identify any gesture
                HUDText.text = "Failed to identify gesture.";
            }
            else
            {
                string combination_name = gc.getGestureCombinationName(recognized_combination_id);
                HUDText.text = "Identified gesture combination '" + combination_name + "' (" + recognized_combination_id + ")\n(Similarity: " + similarity + ")";
            }
        }
    }
Example no. 9
0
    // Update: per-frame driver for the mobile (touch + gyro) gesture demo.
    // Maps screen-touch regions to virtual buttons, then handles the
    // record/train/identify state machine. Sentinel values of recording_gesture:
    //   -1 = normal operation, -2 = training in progress, -3 = training just finished,
    //   >= 0 = index of the gesture currently being recorded.
    void Update()
    {
        if (Input.touchCount > 0)
        {
            // Normalize the first touch's position to the 0..1 range.
            // NOTE(review): Screen.currentResolution may not match the actual window
            // size in windowed mode - Screen.width/height is the usual choice; confirm.
            Touch      touch     = Input.GetTouch(0);
            Vector2    touch_pos = touch.position;
            Resolution res       = Screen.currentResolution;
            touch_pos.x = touch_pos.x / res.width;
            touch_pos.y = touch_pos.y / res.height;
            if (touch.phase == TouchPhase.Began)
            {
                // Map the touch location to one of the virtual buttons:
                // middle band = Record, top band = Train, bottom band = Exit.
                if (touch_pos.x > 0.1f && touch_pos.x < 0.9f)
                {
                    button_state = ButtonState_Pressed;
                    if (touch_pos.y > 0.3f && touch_pos.y < 0.7f)
                    {
                        button_id = ButtonID_Record;
                    }
                    else if (touch_pos.y > 0.8f)
                    {
                        button_id = ButtonID_Train;
                    }
                    else if (touch_pos.y < 0.2f)
                    {
                        button_id = ButtonID_Exit;
                    }
                }
            }
            else if (touch.phase == TouchPhase.Ended)
            {
                button_state = ButtonState_Released;
            }
        }
        else
        {
            // No touch this frame: clear the virtual-button state.
            button_id    = ButtonID_None;
            button_state = ButtonState_Idle;
        }

        // Requires an axis named "escape" in Unity's Input Manager.
        float escape = Input.GetAxis("escape");

        if (escape > 0.0f)
        {
            Application.Quit();
        }

        if (button_id == ButtonID_Exit && button_state == ButtonState_Released)
        {
            Application.Quit();
        }

        // If recording_gesture is -3, that means that the AI has recently finished learning a new gesture.
        if (recording_gesture == -3)
        {
            // Save the data to file.
            // Platform-dependent save location (editor assets folder / Android
            // persistent data / streaming assets elsewhere).
#if UNITY_EDITOR
            string save_file_path = "Assets/GestureRecognition";
#elif UNITY_ANDROID
            string save_file_path = Application.persistentDataPath;
#else
            string save_file_path = Application.streamingAssetsPath;
#endif
            this.gr.saveToFile(save_file_path + "/gestures.dat");
            // Show "finished" message.
            double performance = gr.recognitionScore();
            HUDText.text = "Training finished!\n(Performance = " + (performance * 100.0) + "%)\n\n[TOUCH AND HOLD HERE]\nto perform a gesture\n";
            // Set recording_gesture to -1 to indicate normal operation (learning finished).
            recording_gesture = -1;
        }
        // If recording_gesture is -2, that means that the AI is currently learning a new gesture.
        if (recording_gesture == -2)
        {
            // Show "please wait" message
            HUDText.text = "[TOUCH HERE]\nto stop training\n\n\n\n\n\n\n...training...\n\n(" + (last_performance_report * 100.0) + " %)\n\n\n\n\n";
            if (button_id == ButtonID_Train && button_state == ButtonState_Released)
            {
                // Button pressed: stop the learning process.
                gr.stopTraining();
            }
            return;
        }
        // Else: if we arrive here, we're not in training/learning mode,
        // so the user can draw gestures.


        // If recording_gesture is -1, we're currently not recording a new gesture.
        if (recording_gesture == -1)
        {
            if (button_id == ButtonID_Train && button_state == ButtonState_Released)
            {
                // Create a new gesture under a random keyword and switch into recording mode.
                string random_word = getRandomWord();
                recording_gesture = gr.createGesture(random_word);
                // from now on: recording a new gesture
                HUDText.text = "Learning a new gesture.\nKeyword:\n'" + random_word + "'\n\n[TOUCH AND HOLD HERE]\nto record gesture sample\n\n\n";
                return;
            }
        }

        // If the user is not yet dragging (pressing the trigger) on either controller, he hasn't started a gesture yet.
        if (button_id == ButtonID_Record)
        {
            if (!making_stroke)
            {
                // If we arrive here: either trigger was pressed, so we start the gesture.
                // On mobile there is no headset pose, so an identity pose is used.
                Vector3    hmd_p = new Vector3(0.0f, 0.0f, 0.0f);
                Quaternion hmd_q = new Quaternion(0.0f, 0.0f, 0.0f, 1.0f);
                gr.startStroke(hmd_p, hmd_q, recording_gesture);
                making_stroke = true;
                // Tint the skybox red as visual feedback that a stroke is in progress.
                RenderSettings.skybox.SetColor("_Tint", new Color(0.53f, 0.17f, 0.17f, 1.0f));
                HUDText.text = "Hold and move phone\nto make a gesture.\n\n\n\n";
            }
            // the user is dragging with the controller: continue the gesture.

            // Get phone position / motion:

            Vector3 p = Input.gyro.userAcceleration;

            // We could also sample over all recent acceleration events as a

            /*
             * Vector3 p = new Vector3(0.0f, 0.0f, 0.0f);
             * if (Input.accelerationEventCount > 1)
             * {
             *  foreach (AccelerationEvent acc_event in Input.accelerationEvents)
             *  {
             *      p += acc_event.acceleration * acc_event.deltaTime;
             *  }
             * } else
             * {
             *  p = Input.acceleration;
             * }
             */
            // We use the
            //Vector3 p = Input.gyro.gravity;

            // Get phone rotation / orientation:

            // When using Input.gyro.attitude, the compass reading is included in the phone's orientation.
            // That means that a gesture performed northward can be a different gesture from the
            // same motion performed southwards. Usually, this is not what people expect,
            // so we're using "gravity" instead to detect the phone's orientation.
            // Quaternion q = Input.gyro.attitude;

            // As an alternative, we can calculate the phone's orientation from the gravity ("down" vector).
            Quaternion q = Quaternion.FromToRotation(new Vector3(0, 1, 0), Input.gyro.gravity);

            // Or we can use the rotational acceleration directly as a pseudo orientation.
            // Quaternion q = Quaternion.FromToRotation(new Vector3(1, 0, 0), Input.gyro.rotationRateUnbiased);

            // Debug display of the raw sensor readings.
            HUDText.text = "acc=\n" + Input.gyro.userAcceleration.x.ToString("0.00") + " " + Input.gyro.userAcceleration.y.ToString("0.00") + " " + Input.gyro.userAcceleration.z.ToString("0.00") + "\n"
                           + "grav=\n" + Input.gyro.gravity.x.ToString("0.00") + " " + Input.gyro.gravity.y.ToString("0.00") + " " + Input.gyro.gravity.z.ToString("0.00");

            gr.contdStrokeQ(p, q);
            return;
        }

        // Touch released while a stroke was in progress: finish the gesture.
        if (making_stroke && button_id == ButtonID_None)
        {
            double  similarity = 0;            // This will receive a value of how similar the performed gesture was to previous recordings.
            Vector3 pos        = Vector3.zero; // This will receive the position where the gesture was performed.
            double  scale      = 0;            // This will receive the scale at which the gesture was performed.
            Vector3 dir0       = Vector3.zero; // This will receive the primary direction in which the gesture was performed (greatest expansion).
            Vector3 dir1       = Vector3.zero; // This will receive the secondary direction of the gesture.
            Vector3 dir2       = Vector3.zero; // This will receive the minor direction of the gesture (direction of smallest expansion).
            int     gesture_id = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);
            // Restore the neutral skybox tint now that the stroke has ended.
            RenderSettings.skybox.SetColor("_Tint", new Color(0.5f, 0.5f, 0.5f, 1.0f));
            if (recording_gesture >= 0)
            {
                int    num_samples  = gr.getGestureNumberOfSamples(recording_gesture);
                string gesture_name = gr.getGestureName(recording_gesture);
                HUDText.text = "[TOUCH HERE]\nto stop recording samples\nand start training the AI\n\n\n\n\n[TOUCH AND HOLD HERE]\nto record gesture sample.\n" + num_samples
                               + " samples recorded\n(record at least 20)\n\nGesture keyword:\n" + gesture_name + "\n";
            }
            else
            {
                string gesture_name = gr.getGestureName(gesture_id);
                HUDText.text = "[TOUCH HERE]\nto record a new gesture\n\n\n\n\n\n\n[TOUCH AND HOLD HERE]\nto perform a gesture\n\n\n identified gesture: \n " + gesture_name + "\n\n\n[TOUCH HERE TO EXIT]";
            }
            making_stroke = false;
            return;
        }

        // Train button released: start the learning process on the recorded samples.
        if (button_id == ButtonID_Train && button_state == ButtonState_Released)
        {
            // Currently recording samples for a custom gesture - check how many we have recorded so far.
            // Enough samples recorded. Start the learning process.
            HUDText.text = "Learning gestures...";
            // Set up the call-backs to receive information about the learning process.
            gr.setTrainingUpdateCallback(trainingUpdateCallback);
            gr.setTrainingUpdateCallbackMetadata((IntPtr)me);
            gr.setTrainingFinishCallback(trainingFinishCallback);
            gr.setTrainingFinishCallbackMetadata((IntPtr)me);
            gr.setMaxTrainingTime(20);
            // Set recording_gesture to -2 to indicate that we're currently in learning mode.
            recording_gesture = -2;
            if (gr.startTraining() == false)
            {
                HUDText.text = "Failed to start training";
            }
            return;
        }
    }
Example no. 10
0
    // Update is called once per frame
    void Update()
    {
        if (hitDetect.gameOver)
        {
            //HUDText.text = "You lost the game";
        }

        float trigger_left  = Input.GetAxis("LeftControllerTrigger");
        float trigger_right = Input.GetAxis("RightControllerTrigger");

        // Single Gesture recognition / 1-handed operation
        if (this.gr != null)
        {
            // If the user is not yet dragging (pressing the trigger) on either controller, he hasn't started a gesture yet.
            if (active_controller == null)
            {
                // If the user presses either controller's trigger, we start a new gesture.
                if (handAction.skeletonAction.fingerCurls[0] > fireHand[0] && handAction.skeletonAction.fingerCurls[1] < fireHand[1] && handAction.skeletonAction.fingerCurls[2] < fireHand[2] && handAction.skeletonAction.fingerCurls[3]
                    > fireHand[3] && handAction.skeletonAction.fingerCurls[4] > fireHand[4])
                {
                    // Right controller trigger pressed.
                    active_controller = GameObject.Find("Controller (right)");
                }
                else if (trigger_left > 0.8)
                {
                    // Left controller trigger pressed.
                    active_controller = GameObject.Find("Controller (left)");
                }
                else
                {
                    // If we arrive here, the user is pressing neither controller's trigger:
                    // nothing to do.
                    return;
                }
                // If we arrive here: either trigger was pressed, so we start the gesture.
                GameObject hmd   = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
                Vector3    hmd_p = hmd.transform.localPosition;
                Quaternion hmd_q = hmd.transform.localRotation;
                gr.startStroke(hmd_p, hmd_q, record_gesture_id);
                return;
            }

            // If we arrive here, the user is currently dragging with one of the controllers.
            // Check if the user is still dragging or if he let go of the trigger button.
            if (trigger_left > 0.3 ||
                handAction.skeletonAction.fingerCurls[0] > fireHand[0] && handAction.skeletonAction.fingerCurls[1] < fireHand[1] && handAction.skeletonAction.fingerCurls[2] < fireHand[2] && handAction.skeletonAction.fingerCurls[3]
                > fireHand[3] && handAction.skeletonAction.fingerCurls[4] > fireHand[4])
            {
                // The user is still dragging with the controller: continue the gesture.
                Vector3    p = active_controller.transform.position;
                Quaternion q = active_controller.transform.rotation;
                gr.contdStroke(p, q);
                addToStrokeTrail(p);
                return;
            }
            // else: if we arrive here, the user let go of the trigger, ending a gesture.
            active_controller = null;

            // Delete the objectes that we used to display the gesture.
            foreach (string star in stroke)
            {
                Destroy(GameObject.Find(star));
                stroke_index = 0;
            }

            double  similarity = 0;            // This will receive the similarity value (0~1)
            Vector3 pos        = Vector3.zero; // This will receive the position where the gesture was performed.
            double  scale      = 0;            // This will receive the scale at which the gesture was performed.
            Vector3 dir0       = Vector3.zero; // This will receive the primary direction in which the gesture was performed (greatest expansion).
            Vector3 dir1       = Vector3.zero; // This will receive the secondary direction of the gesture.
            Vector3 dir2       = Vector3.zero; // This will receive the minor direction of the gesture (direction of smallest expansion).
            int     gesture_id = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);

            // If we are currently recording samples for a custom gesture, check if we have recorded enough samples yet.
            if (record_gesture_id >= 0)
            {
                // Currently recording samples for a custom gesture - check how many we have recorded so far.
                //HUDText.text = "Recorded a gesture sample for " + gr.getGestureName(record_gesture_id) + ".\n"
                //+ "Total number of recorded samples for this gesture: " + gr.getGestureNumberOfSamples(record_gesture_id) + ".\n"
                //+ "You can stop recording samples in the Inspector for the XR rig.\n";
                return;
            }
            // else: if we arrive here, we're not recording new samples,
            // but instead have identified a gesture.
            if (gesture_id < 0)
            {
                // Error trying to identify any gesture
                //HUDText.text = "Failed to identify gesture.";
            }
            else
            {
                string gesture_name = gr.getGestureName(gesture_id);
                //HUDText.text = "Identified gesture " + gesture_name + "(" + gesture_id + ")\n(Similarity: " + similarity + ")";
                switch (gesture_name)
                {
                case "Basic Fire":
                    sc.spell = SpellController.CurrentSpell.Fireball;
                    if (HUDText.enabled)
                    {
                        HUDText.enabled = false;
                    }
                    break;

                case "Basic Lightning":
                    sc.spell = SpellController.CurrentSpell.LightningBolt;
                    if (HUDText.enabled)
                    {
                        HUDText.enabled = false;
                    }
                    break;

                case "Basic Ward":
                    sc.spell = SpellController.CurrentSpell.MiniWard;
                    if (HUDText.enabled)
                    {
                        HUDText.enabled = false;
                    }
                    break;

                case "Basic Air":
                    sc.spell = SpellController.CurrentSpell.AirBlast;
                    if (HUDText.enabled)
                    {
                        HUDText.enabled = false;
                    }
                    break;
                }
            }
            return;
        }
    }
Esempio n. 11
0
    // Update: per-frame driver of the gesture demo.
    // The field recording_gesture encodes the current mode:
    //   -3 : training just finished (show the result, then return to normal mode)
    //   -2 : training in progress (show progress; user may cancel)
    //   -1 : normal operation (identify gestures; A/X/menu starts a new recording)
    //  >=0 : id of the custom gesture we are currently recording samples for
    void Update()
    {
        // Allow quitting the application via the "escape" input axis.
        float escape = Input.GetAxis("escape");

        if (escape > 0.0f)
        {
            Application.Quit();
        }
        float trigger_left  = Input.GetAxis("LeftControllerTrigger");
        float trigger_right = Input.GetAxis("RightControllerTrigger");

        // If recording_gesture is -3, that means that the AI has recently finished learning a new gesture.
        if (recording_gesture == -3)
        {
            // Show "finished" message.
            double performance = gr.recognitionScore();
            HUDText.text = "Training finished!\n(Final recognition performance = " + (performance * 100.0) + "%)\nFeel free to use your new gesture.";
            // Set recording_gesture to -1 to indicate normal operation (learning finished).
            recording_gesture = -1;
        }
        // If recording_gesture is -2, that means that the AI is currently learning a new gesture.
        if (recording_gesture == -2)
        {
            // Show "please wait" message
            HUDText.text = "...training...\n(Current recognition performance = " + (last_performance_report * 100.0) + "%)\nPress the 'A'/'X'/Menu button to cancel training.";
            // In this mode, the user may press the "A/X/menu" button to cancel the learning process.
            // (Kept even though the cancel branch below is commented out, so the
            // commented code still compiles as written if re-enabled.)
            bool button_a_left  = Input.GetButton("LeftControllerButtonA");
            bool button_a_right = Input.GetButton("RightControllerButtonA");

            /*
             * if (button_a_left || button_a_right) {
             *  // Button pressed: stop the learning process.
             *  gr.stopTraining();
             *  recording_gesture = -3;
             * }
             */
            return;
        }
        // Else: if we arrive here, we're not in training/learning mode,
        // so the user can draw gestures.

        // If recording_gesture is -1, we're currently not recording a new gesture.
        if (recording_gesture == -1)
        {
            bool button_a_left  = Input.GetButton("LeftControllerButtonA");
            bool button_a_right = Input.GetButton("RightControllerButtonA");
            // In this mode, the user can press button A/X/menu to create a new gesture
            if (button_a_left || button_a_right)
            {
                recording_gesture = gr.createGesture("custom gesture " + (gr.numberOfGestures() - 3));
                // From now on: recording samples for the new gesture.
                // BUGFIX: use (recording_gesture - 3) so the number shown here matches
                // the "(n / 25)" progress message below, which also uses "- 3".
                HUDText.text = "Learning a new gesture (custom gesture " + (recording_gesture - 3) + "):\nPlease perform the gesture 25 times.\n(0 / 25)";
            }
        }

        // If the user is not yet dragging (pressing the trigger) on either controller, he hasn't started a gesture yet.
        if (active_controller == null)
        {
            // If the user presses either controller's trigger, we start a new gesture.
            if (trigger_right > 0.8)
            {
                // Right controller trigger pressed.
                active_controller = GameObject.Find("Right Hand");
            }
            else if (trigger_left > 0.8)
            {
                // Left controller trigger pressed.
                active_controller = GameObject.Find("Left Hand");
            }
            else
            {
                // Neither controller's trigger is pressed: nothing to do this frame.
                return;
            }
            // If we arrive here: either trigger was pressed, so we start the gesture
            // at the current headset pose.
            GameObject hmd   = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
            Vector3    hmd_p = hmd.transform.localPosition;
            Quaternion hmd_q = hmd.transform.localRotation;
            gr.startStroke(hmd_p, hmd_q, recording_gesture);
        }

        // If we arrive here, the user is currently dragging with one of the controllers.
        // Check if the user is still dragging or if he let go of the trigger button.
        if (trigger_left > 0.3 || trigger_right > 0.3)
        {
            // The user is still dragging with the controller: continue the gesture.
            Vector3    p = active_controller.transform.position;
            Quaternion q = active_controller.transform.rotation;
            gr.contdStroke(p, q);
            // Show the stroke by instantiating a new "star" object at a slightly
            // jittered position/rotation/scale along the controller path.
            // BUGFIX: use UnityEngine.Random instead of allocating a new
            // System.Random every frame (wasteful, and instances created in the
            // same time slice can produce identical sequences).
            GameObject star_instance = Instantiate(GameObject.Find("star"));
            GameObject star          = new GameObject("stroke_" + stroke_index++);
            star_instance.name = star.name + "_instance";
            star_instance.transform.SetParent(star.transform, false);
            star.transform.localPosition = new Vector3(
                p.x + UnityEngine.Random.value / 80,
                p.y + UnityEngine.Random.value / 80,
                p.z + UnityEngine.Random.value / 80);
            star.transform.localRotation = new Quaternion(
                UnityEngine.Random.value - 0.5f,
                UnityEngine.Random.value - 0.5f,
                UnityEngine.Random.value - 0.5f,
                UnityEngine.Random.value - 0.5f).normalized;
            float star_scale = UnityEngine.Random.value + 0.3f;
            star.transform.localScale = new Vector3(star_scale, star_scale, star_scale);
            stroke.Add(star.name);
            return;
        }
        // else: if we arrive here, the user let go of the trigger, ending a gesture.
        active_controller = null;

        // Delete the objects that we used to display the gesture.
        foreach (string star in stroke)
        {
            Destroy(GameObject.Find(star));
        }
        // BUGFIX: clear the name list (it previously grew without bound across
        // gestures, re-Finding dead names), and reset the counter once after the
        // loop instead of on every iteration.
        stroke.Clear();
        stroke_index = 0;

        double  similarity = 0;            // This will receive a value of how similar the performed gesture was to previous recordings.
        Vector3 pos        = Vector3.zero; // This will receive the position where the gesture was performed.
        double  scale      = 0;            // This will receive the scale at which the gesture was performed.
        Vector3 dir0       = Vector3.zero; // This will receive the primary direction in which the gesture was performed (greatest expansion).
        Vector3 dir1       = Vector3.zero; // This will receive the secondary direction of the gesture.
        Vector3 dir2       = Vector3.zero; // This will receive the minor direction of the gesture (direction of smallest expansion).
        int     gesture_id = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);

        // if (similarity < ???) {
        //     ...maybe this is not the gesture I was looking for...
        // }

        // If we are currently recording samples for a custom gesture, check if we have recorded enough samples yet.
        if (recording_gesture >= 0)
        {
            // Currently recording samples for a custom gesture - check how many we have recorded so far.
            int num_samples = gr.getGestureNumberOfSamples(recording_gesture);
            if (num_samples < 25)
            {
                // Not enough samples recorded yet.
                HUDText.text = "Learning a new gesture (custom gesture " + (recording_gesture - 3) + "):\nPlease perform the gesture 25 times.\n(" + num_samples + " / 25)";
            }
            else
            {
                // Enough samples recorded. Start the learning process.
                HUDText.text = "Learning gestures - please wait...\n(press A/X/menu button to stop the learning process)";
                // Set up the call-backs to receive information about the learning process.
                gr.setTrainingUpdateCallback(trainingUpdateCallback);
                gr.setTrainingUpdateCallbackMetadata((IntPtr)me);
                gr.setTrainingFinishCallback(trainingFinishCallback);
                gr.setTrainingFinishCallbackMetadata((IntPtr)me);
                gr.setMaxTrainingTime(10000);
                // Set recording_gesture to -2 to indicate that we're currently in learning mode.
                recording_gesture = -2;
                if (gr.startTraining() == false)
                {
                    Debug.Log("COULD NOT START TRAINING");
                }
            }
            return;
        }
        // else: if we arrive here, we're not recording new samples for custom gestures,
        // but instead have identified a new gesture.
        // Perform the action associated with that gesture.

        if (gesture_id < 0)
        {
            // Error trying to identify any gesture
            HUDText.text = "Failed to identify gesture.";
        }
        else if (gesture_id == 0)
        {
            // "loop"-gesture: create cylinder
            HUDText.text = "Identified a CIRCLE/LOOP gesture!";
            GameObject cylinder = Instantiate(GameObject.Find("controller_dummy"));
            cylinder.transform.localPosition = pos;
            // Align the cylinder's axis with the gesture's minor direction.
            cylinder.transform.localRotation = Quaternion.FromToRotation(new Vector3(0, 1, 0), dir2);
            cylinder.transform.localScale    = new Vector3((float)scale * 2, (float)scale, (float)scale * 2);
            created_objects.Add(cylinder);
        }
        else if (gesture_id == 1)
        {
            // "swipe left"-gesture: rotate left
            HUDText.text = "Identified a SWIPE LEFT gesture!";
            GameObject closest_object = getClosestObject(pos);
            if (closest_object != null)
            {
                closest_object.transform.Rotate(new Vector3(0, 1, 0), (float)scale * 400, Space.World);
            }
        }
        else if (gesture_id == 2)
        {
            // "swipe right"-gesture: rotate right
            HUDText.text = "Identified a SWIPE RIGHT gesture!";
            GameObject closest_object = getClosestObject(pos);
            if (closest_object != null)
            {
                closest_object.transform.Rotate(new Vector3(0, 1, 0), -(float)scale * 400, Space.World);
            }
        }
        else if (gesture_id == 3)
        {
            // "shake" or "scrap" gesture: delete closest object
            HUDText.text = "Identified a SHAKE gesture!";
            GameObject closest_object = getClosestObject(pos);
            if (closest_object != null)
            {
                Destroy(closest_object);
                created_objects.Remove(closest_object);
            }
        }
        else if (gesture_id == 4)
        {
            // "draw sword" gesture: toggle the skybox tint between red and neutral.
            HUDText.text = "MAGIC!";
            Color col = RenderSettings.skybox.GetColor("_Tint");
            if (col.r < 0.51)
            {
                RenderSettings.skybox.SetColor("_Tint", new Color(0.53f, 0.17f, 0.17f, 1.0f));
            }
            else // reset the tint
            {
                RenderSettings.skybox.SetColor("_Tint", new Color(0.5f, 0.5f, 0.5f, 1.0f));
            }
        }
        else
        {
            // Other ID: one of the user-registered gestures:
            HUDText.text = "Identified custom registered gesture " + (gesture_id - 4);
        }
    }