/// <summary>
/// Initializes continuous gesture recognition: creates the recognition engine,
/// loads the gesture database file, configures the identification parameters
/// and starts the headset-relative data stroke.
/// </summary>
private void Start()
{
    // Create the gesture recognition engine.
    gestureRecognition = new GestureRecognition();

    // Load the gestures from a gesture database file.
    string gesturesFilePath = "Assets/GestureRecognition";
    bool loaded = gestureRecognition.loadFromFile(gesturesFilePath + "/" + LoadGesturesFile);
    if (!loaded)
    {
        Debug.LogError("Error cargando gestos");
        return;
    }
    gestureRecognition.contdIdentificationSmoothing = 5;

    // Print the available gestures.
    for (int gestureIndex = 0; gestureIndex < gestureRecognition.numberOfGestures(); gestureIndex++)
    {
        Debug.Log("\n" + (gestureIndex + 1) + " : " + gestureRecognition.getGestureName(gestureIndex));
    }

    // Begin gesture detection, anchored to the current headset pose.
    gestureRecognition.contdIdentificationPeriod = (int)(this.GesturePeriod * 1000.0f); // seconds -> milliseconds
    Transform headset = Camera.main.gameObject.transform;
    gestureRecognition.startStroke(headset.position, headset.rotation);
}
/// <summary>
/// Continuous gesture detection: streams the right-hand pose into the
/// recognizer every frame and queries for the current gesture once every
/// 'RecognitionInterval' seconds.
/// </summary>
private void Update()
{
    // Feed the controller pose while the gesture stroke is being performed.
    Transform hand = rightHand.transform;
    gestureRecognition.contdStrokeQ(hand.position, hand.rotation);

    // Only attempt identification once per 'RecognitionInterval' seconds.
    if (Time.time - lastRecognitionTime <= RecognitionInterval)
    {
        return;
    }

    Transform headset = Camera.main.gameObject.transform;

    // Ask the recognizer which gesture is currently being performed.
    double similarity = -1.0; // [0, 1]
    int gestureID = gestureRecognition.contdIdentify(headset.position, headset.rotation, ref similarity);

    // A non-negative id means a valid gesture was identified.
    if (gestureID >= 0)
    {
        string gestureName = gestureRecognition.getGestureName(gestureID);
        // Dispatch the recognized gesture for processing.
        GestureManagement(gestureID, similarity, gestureName);
    }
    lastRecognitionTime = Time.time;
}
// Loads the default gesture suggestions file ("gestureSuggestions.dat") into the
// gesture recognizer 'gr' and rebuilds 'gestureList' from it.
// Returns true on success, false on failure.
// The file location differs per platform:
// - Unity editor: read directly from Assets/StreamingAssets.
// - Android: the file is packed inside the .apk, so it is first extracted via
//   UnityWebRequest into the app's cache directory and loaded from there.
// - Other builds: read from the streamingAssets folder.
public bool LoadDeafaultTrainedData() //load default gesture suggestions file in Assets/StreamingAssets/
{
    string trainedData = "gestureSuggestions.dat";
    string trainedDataPath;
#if UNITY_EDITOR
    trainedDataPath = "Assets/StreamingAssets";
#elif UNITY_ANDROID
    // On Android, StreamingAssets lives inside the .apk and cannot be read with
    // normal file I/O; "download" the file into the app's cache folder first.
    var unityPlayer = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
    var activity = unityPlayer.GetStatic<AndroidJavaObject>("currentActivity");
    var unityWebRequest = UnityWebRequest.Get($"{Application.streamingAssetsPath}/{trainedData}");
    trainedDataPath = activity.Call<AndroidJavaObject>("getCacheDir").Call<string>("getCanonicalPath");
    unityWebRequest.SendWebRequest();
    // NOTE(review): busy-wait blocks the main thread until the extraction finishes.
    while (!unityWebRequest.isDone)
    {
        // wait for file extraction to finish
    }
    if (unityWebRequest.isNetworkError)
    {
        Debug.Log("Load failed. Network must be connected for loading the default data.");
        return(false);
    }
    // Write the extracted bytes to the cache folder so loadFromFile can read them.
    File.WriteAllBytes($"{trainedDataPath}/{trainedData}", unityWebRequest.downloadHandler.data);
#else
    trainedDataPath = Application.streamingAssetsPath;
#endif
    if (gr.loadFromFile($"{trainedDataPath}/{trainedData}"))
    {
        Debug.Log("Load completed");
        // Rebuild the in-memory gesture list from the freshly loaded database.
        gestureList = new List<Gesture>();
        for (int i = 0; i < gr.numberOfGestures(); i++)
        {
            gestureList.Add(new Gesture(gr.getGestureName(i), gr.getGestureNumberOfSamples(i)));
        }
        return(true);
    }
    else
    {
        Debug.Log("Load failed");
        return(false);
    }
}
// Update:
// Per-frame driver for gesture recording and recognition in the XR rig manager.
// Handles two mutually exclusive modes:
// - gr (single-gesture recognition, 1-handed operation), and
// - gc (gesture-combination recognition, 2-handed operation).
// A gesture stroke starts when a controller trigger is pressed, is extended each
// frame while the trigger is held, and on release is either recorded as a
// training sample (record_gesture_id / record_combination_id >= 0) or identified.
void Update()
{
    // No recognition object exists yet: just show the welcome message.
    if (this.gr == null && this.gc == null)
    {
        HUDText.text = "Welcome to MARUI Gesture Plug-in!\n"
            + "This manager allows you to create and record gestures,\n"
            + "and organize gesture files.\n"
            + "Please use the Inspector for the XR rig.\n"
            + "[Currently, no gesture recognition object is created].";
        return;
    }
    // While training is running, only report progress; when it ends, show the result.
    if (training_started)
    {
        if ((this.gr != null && this.gr.isTraining()) || (this.gc != null && this.gc.isTraining()))
        {
            HUDText.text = "Currently training...\n"
                + "Current recognition performance: " + (this.last_performance_report * 100).ToString() + "%.\n"
                + "You can stop training in the Inspector for the XR rig.\n";
            return;
        }
        else
        {
            training_started = false;
            HUDText.text = "Training finished!\n"
                + "Final recognition performance: " + (this.last_performance_report * 100).ToString() + "%.\n";
        }
    }
    float trigger_left = Input.GetAxis("LeftControllerTrigger");
    float trigger_right = Input.GetAxis("RightControllerTrigger");
    // Single Gesture recognition / 1-handed operation
    if (this.gr != null)
    {
        // If the user is not yet dragging (pressing the trigger) on either controller, he hasn't started a gesture yet.
        if (active_controller == null)
        {
            // If the user presses either controller's trigger, we start a new gesture.
            if (trigger_right > 0.85)
            {
                // Right controller trigger pressed.
                active_controller = GameObject.Find("Right Hand");
            }
            else if (trigger_left > 0.85)
            {
                // Left controller trigger pressed.
                active_controller = GameObject.Find("Left Hand");
            }
            else
            {
                // If we arrive here, the user is pressing neither controller's trigger:
                // nothing to do.
                return;
            }
            // If we arrive here: either trigger was pressed, so we start the gesture.
            GameObject hmd = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
            Vector3 hmd_p = hmd.transform.position;
            Quaternion hmd_q = hmd.transform.rotation;
            // record_gesture_id >= 0 records a sample for that gesture; otherwise identify.
            gr.startStroke(hmd_p, hmd_q, record_gesture_id);
            return;
        }
        // If we arrive here, the user is currently dragging with one of the controllers.
        // Check if the user is still dragging or if he let go of the trigger button.
        if (trigger_left > 0.85 || trigger_right > 0.85)
        {
            // The user is still dragging with the controller: continue the gesture.
            Vector3 p = active_controller.transform.position;
            Quaternion q = active_controller.transform.rotation;
            gr.contdStrokeQ(p, q);
            addToStrokeTrail(p);
            return;
        }
        // else: if we arrive here, the user let go of the trigger, ending a gesture.
        active_controller = null;
        // Delete the objects that we used to display the gesture stroke.
        foreach (string star in stroke)
        {
            Destroy(GameObject.Find(star));
            stroke_index = 0;
        }
        double similarity = 0; // This will receive the similarity value (0~1)
        Vector3 pos = Vector3.zero; // This will receive the position where the gesture was performed.
        double scale = 0; // This will receive the scale at which the gesture was performed.
        Vector3 dir0 = Vector3.zero; // This will receive the primary direction in which the gesture was performed (greatest expansion).
        Vector3 dir1 = Vector3.zero; // This will receive the secondary direction of the gesture.
        Vector3 dir2 = Vector3.zero; // This will receive the minor direction of the gesture (direction of smallest expansion).
        int gesture_id = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);
        // If we are currently recording samples for a custom gesture, check if we have recorded enough samples yet.
        if (record_gesture_id >= 0)
        {
            // Currently recording samples for a custom gesture - check how many we have recorded so far.
            HUDText.text = "Recorded a gesture sample for " + gr.getGestureName(record_gesture_id) + ".\n"
                + "Total number of recorded samples for this gesture: " + gr.getGestureNumberOfSamples(record_gesture_id) + ".\n"
                + "You can stop recording samples in the Inspector for the XR rig.\n";
            return;
        }
        // else: if we arrive here, we're not recording new samples,
        // but instead have identified a gesture.
        if (gesture_id < 0)
        {
            // Error trying to identify any gesture
            HUDText.text = "Failed to identify gesture.";
        }
        else
        {
            string gesture_name = gr.getGestureName(gesture_id);
            HUDText.text = "Identified gesture " + gesture_name + "(" + gesture_id + ")\n(Similarity: " + similarity + ")";
        }
        return;
    }
    // GestureCombination recognition / 2-handed operation
    if (this.gc != null)
    {
        // If the user presses either controller's trigger, we start a new gesture.
        if (trigger_pressed_left == false && trigger_left > 0.9)
        {
            // Controller trigger pressed.
            trigger_pressed_left = true;
            GameObject hmd = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
            Vector3 hmd_p = hmd.transform.position;
            Quaternion hmd_q = hmd.transform.rotation;
            // When recording a combination, look up which gesture this hand's part uses.
            int gesture_id = -1;
            if (record_combination_id >= 0)
            {
                gesture_id = gc.getCombinationPartGesture(record_combination_id, lefthand_combination_part);
            }
            gc.startStroke(lefthand_combination_part, hmd_p, hmd_q, gesture_id);
            gesture_started = true;
        }
        if (trigger_pressed_right == false && trigger_right > 0.9)
        {
            // Controller trigger pressed.
            trigger_pressed_right = true;
            GameObject hmd = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
            Vector3 hmd_p = hmd.transform.position;
            Quaternion hmd_q = hmd.transform.rotation;
            int gesture_id = -1;
            if (record_combination_id >= 0)
            {
                gesture_id = gc.getCombinationPartGesture(record_combination_id, righthand_combination_part);
            }
            gc.startStroke(righthand_combination_part, hmd_p, hmd_q, gesture_id);
            gesture_started = true;
        }
        if (gesture_started == false)
        {
            // nothing to do.
            return;
        }
        // If we arrive here, the user is currently dragging with one of the controllers.
        if (trigger_pressed_left == true)
        {
            if (trigger_left < 0.85)
            {
                // User let go of a trigger and held controller still
                gc.endStroke(lefthand_combination_part);
                trigger_pressed_left = false;
            }
            else
            {
                // User still dragging or still moving after trigger pressed
                GameObject left_hand = GameObject.Find("Left Hand");
                gc.contdStrokeQ(lefthand_combination_part, left_hand.transform.position, left_hand.transform.rotation);
                // Show the stroke by instantiating new objects
                addToStrokeTrail(left_hand.transform.position);
            }
        }
        if (trigger_pressed_right == true)
        {
            if (trigger_right < 0.85)
            {
                // User let go of a trigger and held controller still
                gc.endStroke(righthand_combination_part);
                trigger_pressed_right = false;
            }
            else
            {
                // User still dragging or still moving after trigger pressed
                GameObject right_hand = GameObject.Find("Right Hand");
                gc.contdStrokeQ(righthand_combination_part, right_hand.transform.position, right_hand.transform.rotation);
                // Show the stroke by instantiating new objects
                addToStrokeTrail(right_hand.transform.position);
            }
        }
        if (trigger_pressed_left || trigger_pressed_right)
        {
            // User still dragging with either hand - nothing left to do
            return;
        }
        // else: if we arrive here, the user let go of both triggers, ending the gesture.
        gesture_started = false;
        // Delete the objects that we used to display the gesture stroke.
        foreach (string star in stroke)
        {
            Destroy(GameObject.Find(star));
            stroke_index = 0;
        }
        double similarity = 0; // This will receive a similarity value (0~1).
        int recognized_combination_id = gc.identifyGestureCombination(ref similarity);
        // If we are currently recording samples for a custom gesture, check if we have recorded enough samples yet.
        if (record_combination_id >= 0)
        {
            // Currently recording samples for a custom gesture - check how many we have recorded so far.
            int connected_gesture_id_left = gc.getCombinationPartGesture(record_combination_id, lefthand_combination_part);
            int connected_gesture_id_right = gc.getCombinationPartGesture(record_combination_id, righthand_combination_part);
            int num_samples_left = gc.getGestureNumberOfSamples(lefthand_combination_part, connected_gesture_id_left);
            int num_samples_right = gc.getGestureNumberOfSamples(righthand_combination_part, connected_gesture_id_right);
            HUDText.text = "Recorded a gesture sample for " + gc.getGestureCombinationName(record_combination_id) + ".\n"
                + "Total number of recorded samples for this gesture: " + num_samples_left + " left / " + num_samples_right + " right.\n"
                + "You can stop recording samples in the Inspector for the XR rig.\n";
            return;
        }
        // else: if we arrive here, we're not recording new samples for custom gestures,
        // but instead have identified a new gesture.
        // Perform the action associated with that gesture.
        if (recognized_combination_id < 0)
        {
            // Error trying to identify any gesture
            HUDText.text = "Failed to identify gesture.";
        }
        else
        {
            string combination_name = gc.getGestureCombinationName(recognized_combination_id);
            HUDText.text = "Identified gesture combination '" + combination_name + "' (" + recognized_combination_id + ")\n(Similarity: " + similarity + ")";
        }
    }
}
// Update:
// Per-frame driver for the mobile (touch + gyroscope) gesture demo.
// Screen regions act as virtual buttons (middle band = record, top = train,
// bottom = exit); gestures are drawn by holding the record region and moving
// the phone. recording_gesture states: -3 = training just finished,
// -2 = currently training, -1 = normal operation, >= 0 = recording samples
// for that gesture id.
void Update()
{
    // Map the first touch onto one of the virtual screen buttons.
    if (Input.touchCount > 0)
    {
        Touch touch = Input.GetTouch(0);
        Vector2 touch_pos = touch.position;
        Resolution res = Screen.currentResolution;
        // Normalize the touch position to [0,1] in both axes.
        touch_pos.x = touch_pos.x / res.width;
        touch_pos.y = touch_pos.y / res.height;
        if (touch.phase == TouchPhase.Began)
        {
            if (touch_pos.x > 0.1f && touch_pos.x < 0.9f)
            {
                button_state = ButtonState_Pressed;
                if (touch_pos.y > 0.3f && touch_pos.y < 0.7f)
                {
                    button_id = ButtonID_Record;
                }
                else if (touch_pos.y > 0.8f)
                {
                    button_id = ButtonID_Train;
                }
                else if (touch_pos.y < 0.2f)
                {
                    button_id = ButtonID_Exit;
                }
            }
        }
        else if (touch.phase == TouchPhase.Ended)
        {
            button_state = ButtonState_Released;
        }
    }
    else
    {
        // No touch at all: reset the virtual-button state.
        button_id = ButtonID_None;
        button_state = ButtonState_Idle;
    }
    // Allow quitting via the "escape" input axis.
    float escape = Input.GetAxis("escape");
    if (escape > 0.0f)
    {
        Application.Quit();
    }
    if (button_id == ButtonID_Exit && button_state == ButtonState_Released)
    {
        Application.Quit();
    }
    // If recording_gesture is -3, that means that the AI has recently finished learning a new gesture.
    if (recording_gesture == -3)
    {
        // Save the data to file. The writable location differs per platform.
#if UNITY_EDITOR
        string save_file_path = "Assets/GestureRecognition";
#elif UNITY_ANDROID
        string save_file_path = Application.persistentDataPath;
#else
        string save_file_path = Application.streamingAssetsPath;
#endif
        this.gr.saveToFile(save_file_path + "/gestures.dat");
        // Show "finished" message.
        double performance = gr.recognitionScore();
        HUDText.text = "Training finished!\n(Performance = " + (performance * 100.0) + "%)\n\n[TOUCH AND HOLD HERE]\nto perform a gesture\n";
        // Set recording_gesture to -1 to indicate normal operation (learning finished).
        recording_gesture = -1;
    }
    // If recording_gesture is -2, that means that the AI is currently learning a new gesture.
    if (recording_gesture == -2)
    {
        // Show "please wait" message
        HUDText.text = "[TOUCH HERE]\nto stop training\n\n\n\n\n\n\n...training...\n\n(" + (last_performance_report * 100.0) + " %)\n\n\n\n\n";
        if (button_id == ButtonID_Train && button_state == ButtonState_Released)
        {
            // Button pressed: stop the learning process.
            gr.stopTraining();
        }
        return;
    }
    // Else: if we arrive here, we're not in training/learning mode,
    // so the user can draw gestures.
    // If recording_gesture is -1, we're currently not recording a new gesture.
    if (recording_gesture == -1)
    {
        // Train button released: create a new gesture under a random keyword
        // and switch into sample-recording mode.
        if (button_id == ButtonID_Train && button_state == ButtonState_Released)
        {
            string random_word = getRandomWord();
            recording_gesture = gr.createGesture(random_word); // from now on: recording a new gesture
            HUDText.text = "Learning a new gesture.\nKeyword:\n'" + random_word + "'\n\n[TOUCH AND HOLD HERE]\nto record gesture sample\n\n\n";
            return;
        }
    }
    // While the record region is held, a gesture stroke is in progress.
    if (button_id == ButtonID_Record)
    {
        if (!making_stroke)
        {
            // Start a new stroke. On a phone there is no headset, so use an
            // identity position/orientation as the reference frame.
            Vector3 hmd_p = new Vector3(0.0f, 0.0f, 0.0f);
            Quaternion hmd_q = new Quaternion(0.0f, 0.0f, 0.0f, 1.0f);
            gr.startStroke(hmd_p, hmd_q, recording_gesture);
            making_stroke = true;
            // Tint the skybox red to signal that a stroke is being recorded.
            RenderSettings.skybox.SetColor("_Tint", new Color(0.53f, 0.17f, 0.17f, 1.0f));
            HUDText.text = "Hold and move phone\nto make a gesture.\n\n\n\n";
        }
        // the user is dragging with the controller: continue the gesture.
        // Get phone position / motion:
        Vector3 p = Input.gyro.userAcceleration;
        // We could also sample over all recent acceleration events as an alternative:
        /*
         * Vector3 p = new Vector3(0.0f, 0.0f, 0.0f);
         * if (Input.accelerationEventCount > 1)
         * {
         *     foreach (AccelerationEvent acc_event in Input.accelerationEvents)
         *     {
         *         p += acc_event.acceleration * acc_event.deltaTime;
         *     }
         * } else
         * {
         *     p = Input.acceleration;
         * }
         */
        // Or the gravity vector could be used as the position input:
        //Vector3 p = Input.gyro.gravity;
        // Get phone rotation / orientation:
        // When using Input.gyro.attitude, the compass reading is included in the phone's orientation.
        // That means that a gesture performed northward can be a different gesture from the
        // same motion performed southwards. Usually, this is not what people expect,
        // so we're using "gravity" instead to detect the phone's orientation.
        // Quaternion q = Input.gyro.attitude;
        // As an alternative, we can calculate the phone's orientation from the gravity ("down" vector).
        Quaternion q = Quaternion.FromToRotation(new Vector3(0, 1, 0), Input.gyro.gravity);
        // Or we can use the rotational acceleration directly as a pseudo orientation.
        // Quaternion q = Quaternion.FromToRotation(new Vector3(1, 0, 0), Input.gyro.rotationRateUnbiased);
        // Debug readout of the raw sensor values.
        HUDText.text = "acc=\n" + Input.gyro.userAcceleration.x.ToString("0.00") + " " + Input.gyro.userAcceleration.y.ToString("0.00") + " " + Input.gyro.userAcceleration.z.ToString("0.00") + "\n"
            + "grav=\n" + Input.gyro.gravity.x.ToString("0.00") + " " + Input.gyro.gravity.y.ToString("0.00") + " " + Input.gyro.gravity.z.ToString("0.00");
        gr.contdStrokeQ(p, q);
        return;
    }
    // The record region was released while a stroke was in progress: end it.
    if (making_stroke && button_id == ButtonID_None)
    {
        double similarity = 0; // This will receive a value of how similar the performed gesture was to previous recordings.
        Vector3 pos = Vector3.zero; // This will receive the position where the gesture was performed.
        double scale = 0; // This will receive the scale at which the gesture was performed.
        Vector3 dir0 = Vector3.zero; // This will receive the primary direction in which the gesture was performed (greatest expansion).
        Vector3 dir1 = Vector3.zero; // This will receive the secondary direction of the gesture.
        Vector3 dir2 = Vector3.zero; // This will receive the minor direction of the gesture (direction of smallest expansion).
        int gesture_id = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);
        // Restore the neutral skybox tint now that the stroke ended.
        RenderSettings.skybox.SetColor("_Tint", new Color(0.5f, 0.5f, 0.5f, 1.0f));
        if (recording_gesture >= 0)
        {
            // The stroke was a new training sample: report the sample count.
            int num_samples = gr.getGestureNumberOfSamples(recording_gesture);
            string gesture_name = gr.getGestureName(recording_gesture);
            HUDText.text = "[TOUCH HERE]\nto stop recording samples\nand start training the AI\n\n\n\n\n[TOUCH AND HOLD HERE]\nto record gesture sample.\n" + num_samples + " samples recorded\n(record at least 20)\n\nGesture keyword:\n" + gesture_name + "\n";
        }
        else
        {
            // The stroke was an identification attempt: report the result.
            string gesture_name = gr.getGestureName(gesture_id);
            HUDText.text = "[TOUCH HERE]\nto record a new gesture\n\n\n\n\n\n\n[TOUCH AND HOLD HERE]\nto perform a gesture\n\n\n identified gesture: \n " + gesture_name + "\n\n\n[TOUCH HERE TO EXIT]";
        }
        making_stroke = false;
        return;
    }
    // Train button released while recording samples: start the learning process.
    if (button_id == ButtonID_Train && button_state == ButtonState_Released)
    {
        // Enough samples recorded. Start the learning process.
        HUDText.text = "Learning gestures...";
        // Set up the call-backs to receive information about the learning process.
        gr.setTrainingUpdateCallback(trainingUpdateCallback);
        gr.setTrainingUpdateCallbackMetadata((IntPtr)me);
        gr.setTrainingFinishCallback(trainingFinishCallback);
        gr.setTrainingFinishCallbackMetadata((IntPtr)me);
        gr.setMaxTrainingTime(20);
        // Set recording_gesture to -2 to indicate that we're currently in learning mode.
        recording_gesture = -2;
        if (gr.startTraining() == false)
        {
            HUDText.text = "Failed to start training";
        }
        return;
    }
}
// Update is called once per frame
// Per-frame gesture detection for the spell-casting demo: a stroke starts when
// the right hand matches the configured fingerCurls/fireHand thresholds (or the
// left trigger is pressed), and the identified gesture name selects the active
// spell on the SpellController 'sc'.
void Update()
{
    if (hitDetect.gameOver)
    {
        //HUDText.text = "You lost the game";
    }
    float trigger_left = Input.GetAxis("LeftControllerTrigger");
    float trigger_right = Input.GetAxis("RightControllerTrigger");
    // Single Gesture recognition / 1-handed operation
    if (this.gr != null)
    {
        // If the user is not yet dragging (pressing the trigger) on either controller, he hasn't started a gesture yet.
        if (active_controller == null)
        {
            // Start a new gesture when each right-hand finger curl crosses its
            // configured fireHand threshold (presumably a "casting" hand pose —
            // confirm against the fireHand values set in the Inspector).
            if (handAction.skeletonAction.fingerCurls[0] > fireHand[0] &&
                handAction.skeletonAction.fingerCurls[1] < fireHand[1] &&
                handAction.skeletonAction.fingerCurls[2] < fireHand[2] &&
                handAction.skeletonAction.fingerCurls[3] > fireHand[3] &&
                handAction.skeletonAction.fingerCurls[4] > fireHand[4])
            {
                // Right controller trigger pressed.
                active_controller = GameObject.Find("Controller (right)");
            }
            else if (trigger_left > 0.8)
            {
                // Left controller trigger pressed.
                active_controller = GameObject.Find("Controller (left)");
            }
            else
            {
                // If we arrive here, the user is pressing neither controller's trigger:
                // nothing to do.
                return;
            }
            // If we arrive here: either trigger was pressed, so we start the gesture.
            GameObject hmd = GameObject.Find("Main Camera"); // alternative: Camera.main.gameObject
            Vector3 hmd_p = hmd.transform.localPosition;
            Quaternion hmd_q = hmd.transform.localRotation;
            gr.startStroke(hmd_p, hmd_q, record_gesture_id);
            return;
        }
        // If we arrive here, the user is currently dragging with one of the controllers.
        // Check if the user is still dragging or if he let go of the trigger button.
        // Note: && binds tighter than ||, so this reads:
        // (left trigger still held) OR (the full right-hand pose is still held).
        if (trigger_left > 0.3 || handAction.skeletonAction.fingerCurls[0] > fireHand[0] &&
            handAction.skeletonAction.fingerCurls[1] < fireHand[1] &&
            handAction.skeletonAction.fingerCurls[2] < fireHand[2] &&
            handAction.skeletonAction.fingerCurls[3] > fireHand[3] &&
            handAction.skeletonAction.fingerCurls[4] > fireHand[4])
        {
            // The user is still dragging with the controller: continue the gesture.
            Vector3 p = active_controller.transform.position;
            Quaternion q = active_controller.transform.rotation;
            gr.contdStroke(p, q);
            addToStrokeTrail(p);
            return;
        }
        // else: if we arrive here, the user let go of the trigger, ending a gesture.
        active_controller = null;
        // Delete the objects that we used to display the gesture stroke.
        foreach (string star in stroke)
        {
            Destroy(GameObject.Find(star));
            stroke_index = 0;
        }
        double similarity = 0; // This will receive the similarity value (0~1)
        Vector3 pos = Vector3.zero; // This will receive the position where the gesture was performed.
        double scale = 0; // This will receive the scale at which the gesture was performed.
        Vector3 dir0 = Vector3.zero; // This will receive the primary direction in which the gesture was performed (greatest expansion).
        Vector3 dir1 = Vector3.zero; // This will receive the secondary direction of the gesture.
        Vector3 dir2 = Vector3.zero; // This will receive the minor direction of the gesture (direction of smallest expansion).
        int gesture_id = gr.endStroke(ref similarity, ref pos, ref scale, ref dir0, ref dir1, ref dir2);
        // If we are currently recording samples for a custom gesture, check if we have recorded enough samples yet.
        if (record_gesture_id >= 0)
        {
            // Currently recording samples for a custom gesture - check how many we have recorded so far.
            //HUDText.text = "Recorded a gesture sample for " + gr.getGestureName(record_gesture_id) + ".\n"
            //+ "Total number of recorded samples for this gesture: " + gr.getGestureNumberOfSamples(record_gesture_id) + ".\n"
            //+ "You can stop recording samples in the Inspector for the XR rig.\n";
            return;
        }
        // else: if we arrive here, we're not recording new samples,
        // but instead have identified a gesture.
        if (gesture_id < 0)
        {
            // Error trying to identify any gesture
            //HUDText.text = "Failed to identify gesture.";
        }
        else
        {
            string gesture_name = gr.getGestureName(gesture_id);
            //HUDText.text = "Identified gesture " + gesture_name + "(" + gesture_id + ")\n(Similarity: " + similarity + ")";
            // Map the recognized gesture name to the corresponding spell
            // and hide the HUD once a spell has been selected.
            switch (gesture_name)
            {
                case "Basic Fire":
                    sc.spell = SpellController.CurrentSpell.Fireball;
                    if (HUDText.enabled)
                    {
                        HUDText.enabled = false;
                    }
                    break;
                case "Basic Lightning":
                    sc.spell = SpellController.CurrentSpell.LightningBolt;
                    if (HUDText.enabled)
                    {
                        HUDText.enabled = false;
                    }
                    break;
                case "Basic Ward":
                    sc.spell = SpellController.CurrentSpell.MiniWard;
                    if (HUDText.enabled)
                    {
                        HUDText.enabled = false;
                    }
                    break;
                case "Basic Air":
                    sc.spell = SpellController.CurrentSpell.AirBlast;
                    if (HUDText.enabled)
                    {
                        HUDText.enabled = false;
                    }
                    break;
            }
        }
        return;
    }
}
// Initialization:
// Loads the gesture database for the current platform, hides the controller
// models that do not match the connected XR headset, and sets up the HUD
// welcome message listing the available gestures.
void Start()
{
    // Pin a GCHandle to this object so native training callbacks can reference it.
    me = GCHandle.Alloc(this);
    // Fall back to defaults for unset Inspector values.
    if (RecognitionInterval == 0)
    {
        RecognitionInterval = 0.1f;
    }
    if (GesturePeriod == 0)
    {
        GesturePeriod = 1.0f;
    }
    // Load the set of gestures.
    if (LoadGesturesFile == null)
    {
        LoadGesturesFile = "Sample_Continuous_Gestures.dat";
    }
    // Find the location for the gesture database (.dat) file
#if UNITY_EDITOR
    // When running the scene inside the Unity editor,
    // we can just load the file from the Assets/ folder:
    string GesturesFilePath = "Assets/GestureRecognition";
#elif UNITY_ANDROID
    // On android, the file is in the .apk,
    // so we need to first "download" it to the apps' cache folder.
    AndroidJavaClass unityPlayer = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
    AndroidJavaObject activity = unityPlayer.GetStatic<AndroidJavaObject>("currentActivity");
    string GesturesFilePath = activity.Call<AndroidJavaObject>("getCacheDir").Call<string>("getCanonicalPath");
    UnityWebRequest request = UnityWebRequest.Get(Application.streamingAssetsPath + "/" + LoadGesturesFile);
    request.SendWebRequest();
    // NOTE(review): busy-wait blocks the main thread until the extraction finishes.
    while (!request.isDone)
    {
        // wait for file extraction to finish
    }
    if (request.isNetworkError)
    {
        HUDText.text = "Failed to extract sample gesture database file from apk.";
        return;
    }
    File.WriteAllBytes(GesturesFilePath + "/" + LoadGesturesFile, request.downloadHandler.data);
#else
    // This will be the case when exporting a stand-alone PC app.
    // In this case, we can load the gesture database file from the streamingAssets folder.
    string GesturesFilePath = Application.streamingAssetsPath;
#endif
    // loadFromFile returns 0 on success and a non-zero error code on failure.
    int ret = gr.loadFromFile(GesturesFilePath + "/" + LoadGesturesFile);
    if (ret != 0)
    {
        HUDText.text = $"Failed to load sample gesture database file ({ret})\n";
        return;
    }
    gr.contdIdentificationSmoothing = 5;
    // Hide unused models in the scene.
    // NOTE(review): GameObject.Find results are not null-checked — these
    // objects are assumed to exist in the scene; confirm before reuse.
    GameObject controller_oculus_left = GameObject.Find("controller_oculus_left");
    GameObject controller_oculus_right = GameObject.Find("controller_oculus_right");
    GameObject controller_vive_left = GameObject.Find("controller_vive_left");
    GameObject controller_vive_right = GameObject.Find("controller_vive_right");
    GameObject controller_microsoft_left = GameObject.Find("controller_microsoft_left");
    GameObject controller_microsoft_right = GameObject.Find("controller_microsoft_right");
    GameObject controller_dummy_left = GameObject.Find("controller_dummy_left");
    GameObject controller_dummy_right = GameObject.Find("controller_dummy_right");
    controller_oculus_left.SetActive(false);
    controller_oculus_right.SetActive(false);
    controller_vive_left.SetActive(false);
    controller_vive_right.SetActive(false);
    controller_microsoft_left.SetActive(false);
    controller_microsoft_right.SetActive(false);
    controller_dummy_left.SetActive(false);
    controller_dummy_right.SetActive(false);
    // Detect the connected headset by name to pick which controller models to show.
    var input_devices = new List<UnityEngine.XR.InputDevice>();
    UnityEngine.XR.InputDevices.GetDevices(input_devices);
    String input_device = "";
    foreach (var device in input_devices)
    {
        if (device.characteristics.HasFlag(InputDeviceCharacteristics.HeadMounted))
        {
            input_device = device.name;
            break;
        }
    }
    if (input_device.Length >= 6 && input_device.Substring(0, 6) == "Oculus")
    {
        controller_oculus_left.SetActive(true);
        controller_oculus_right.SetActive(true);
    }
    else if (input_device.Length >= 4 && input_device.Substring(0, 4) == "Vive")
    {
        controller_vive_left.SetActive(true);
        controller_vive_right.SetActive(true);
    }
    else if (input_device.Length >= 4 && input_device.Substring(0, 4) == "DELL")
    {
        // "DELL"-named devices get the Microsoft controller models.
        controller_microsoft_left.SetActive(true);
        controller_microsoft_right.SetActive(true);
    }
    else // no recognized headset name: fall back to the dummy controllers
    {
        controller_dummy_left.SetActive(true);
        controller_dummy_right.SetActive(true);
    }
    // Hide the stroke-display template objects by scaling them to zero.
    GameObject star = GameObject.Find("star");
    star.transform.localScale = new Vector3(0.0f, 0.0f, 0.0f);
    GameObject controller_dummy = GameObject.Find("controller_dummy");
    controller_dummy.transform.localScale = new Vector3(0.0f, 0.0f, 0.0f);
    // Set the welcome message.
    HUDText = GameObject.Find("HUDText").GetComponent<Text>();
    HUDText.text = "Welcome to MARUI Gesture Plug-in!\n" + "Hold the trigger to draw gestures.\nAvailable gestures:";
    for (int i = 0; i < gr.numberOfGestures(); i++)
    {
        HUDText.text += "\n" + (i + 1) + " : " + gr.getGestureName(i);
    }
    HUDText.text += "\nor: press 'A'/'X'/Menu button\nto create new gesture.";
}