// Update is called once per frame.
// Detects a blink (both eyes closed, then reopened within blinkThreshold
// seconds) and toggles blinkLight each time one is detected.
void Update()
{
    // Arm the timer as soon as both eyes are seen closed.
    if (FoveInterface.CheckEyesClosed() == Fove.EFVR_Eye.Both)
    {
        t.started = true;
    }

    // While armed, accumulate how long the eyes have been shut.
    if (t.started)
    {
        t.value += Time.deltaTime;
    }

    // Eyes reopened while the timer was armed: decide whether it was a blink.
    if (FoveInterface.CheckEyesClosed() == Fove.EFVR_Eye.Neither && t.started)
    {
        // A closure shorter than the threshold counts as a deliberate blink.
        if (t.value < blinkThreshold)
        {
            blinkLight.enabled = !blinkLight.enabled;
        }

        // Disarm and reset for the next blink.
        t.started = false;
        t.value = 0;
    }
}
// Update is called once per frame.
// Casts a ray along the HMD's forward direction and launches a ball along
// that ray whenever both eyes are closed, rate-limited by blinkThreshold.
private void Update()
{
    // Ray's constructor normalizes the direction, so the original scaling of
    // the forward vector by `distance` had no effect and has been removed.
    Ray ray = new Ray(this.transform.position, FoveInterface.GetHMDRotation() * Vector3.forward);
    // Visualize the gaze ray in the Scene view.
    Debug.DrawRay(ray.origin, ray.direction * distance, Color.green);

    Debug.LogFormat("{0}", Mathf.Round(FoveInterface.GetHMDPosition().z));

    // Launch a ball on a blink: both eyes closed AND enough time elapsed
    // since the previous shot (prevents one long closure firing every frame).
    countTime += Time.deltaTime;
    if (FoveInterface.CheckEyesClosed() == EFVR_Eye.Both && countTime > blinkThreshold)
    {
        GameObject tempBall = MakeBall();
        tempBall.GetComponent<BaketBall>().Shot(ray.direction * power);
        // (removed dead `tempBall = null;` — the local goes out of scope anyway)
        countTime = 0;
    }
}
// Update is called once per frame.
// Places this object at the user's gaze point: the midpoint of both eye
// hits when both eyes are open, or the single open eye's hit otherwise.
// The raycast + zero-point fallback logic was triplicated across the three
// switch cases; it is factored into TryGazeHit below.
void Update()
{
    FoveInterface.EyeRays eyes = FoveInterface.GetEyeRays();

    switch (FoveInterface.CheckEyesClosed())
    {
        case Fove.EFVR_Eye.Neither:
        {
            // Both eyes open: midpoint of the two hits, or midpoint of two
            // fixed 3 m points when either ray missed.
            Vector3 leftPoint, rightPoint;
            bool leftHit = TryGazeHit(eyes.left, out leftPoint);
            bool rightHit = TryGazeHit(eyes.right, out rightPoint);
            if (leftHit && rightHit)
            {
                transform.position = leftPoint + ((rightPoint - leftPoint) / 2);
            }
            else
            {
                transform.position = eyes.left.GetPoint(3.0f) + ((eyes.right.GetPoint(3.0f) - eyes.left.GetPoint(3.0f)) / 2);
            }
            break;
        }
        case Fove.EFVR_Eye.Left:
        {
            // Left eye closed: follow the right eye only.
            Vector3 rightPoint;
            transform.position = TryGazeHit(eyes.right, out rightPoint) ? rightPoint : eyes.right.GetPoint(3.0f);
            break;
        }
        case Fove.EFVR_Eye.Right:
        {
            // Right eye closed: follow the left eye only.
            Vector3 leftPoint;
            transform.position = TryGazeHit(eyes.left, out leftPoint) ? leftPoint : eyes.left.GetPoint(3.0f);
            break;
        }
    }
}

// Raycasts along the given gaze ray. Returns true and the hit point when
// something was hit. RaycastHit.point stays Vector3.zero on a miss, which is
// what the zero comparison detects (Vector3 is a struct — it can never be
// null, so this is the only available sentinel).
private bool TryGazeHit(Ray ray, out Vector3 point)
{
    RaycastHit hit;
    Physics.Raycast(ray, out hit, Mathf.Infinity);
    point = hit.point;
    return hit.point != Vector3.zero;
}
// Moves this object to the point the right-eye gaze ray hits.
// NOTE(review): the original comment claimed "Latepdate ensures that the
// object doesn't lag behind the user's head motion", but this runs in
// Update, not LateUpdate — it may lag one frame behind head tracking;
// confirm whether LateUpdate was intended.
void Update()
{
    // Grab the latest (immediate, unfiltered) gaze rays and eye-closed state.
    rays = fove.GetGazeRays_Immediate();
    myEyeStruct = FoveInterface.CheckEyesClosed();

    // Raycast along the right-eye gaze ray only.
    Physics.Raycast(rays.right, out hit, Mathf.Infinity);
    // RaycastHit.point stays Vector3.zero on a miss; only move on a hit.
    if (hit.point != Vector3.zero) //&& (myEyeStruct == EFVR_Eye.Left || myEyeStruct == EFVR_Eye.Both))
    {
        transform.position = hit.point;
    }
}
// Called once per frame by the owning component.
// Moves this transform to the user's gaze point, preferring physics hits and
// falling back to a point 3 m along the gaze ray(s) when nothing is hit.
// Pattern borrowed for robustness against lost tracking:
// https://github.com/twday/Fove-Unity-Examples/blob/master/Assets/Examples/FoveCursor/Scripts/FoveCursor.cs
private void UpdatePositionBasedOnEyes()
{
    FoveInterface.EyeRays gazeRays = FoveInterface.GetEyeRays();
    RaycastHit leftHit, rightHit;

    switch (FoveInterface.CheckEyesClosed())
    {
        case Fove.EFVR_Eye.Neither:
            // Both eyes open: use the midpoint between the two gaze points.
            Physics.Raycast(gazeRays.left, out leftHit, Mathf.Infinity);
            Physics.Raycast(gazeRays.right, out rightHit, Mathf.Infinity);
            // RaycastHit.point is left at Vector3.zero when nothing was hit
            // (Vector3 is a value type, so a null comparison is meaningless).
            if (leftHit.point != Vector3.zero && rightHit.point != Vector3.zero)
            {
                transform.position = leftHit.point + ((rightHit.point - leftHit.point) / 2);
            }
            else
            {
                transform.position = gazeRays.left.GetPoint(3.0f)
                    + ((gazeRays.right.GetPoint(3.0f) - gazeRays.left.GetPoint(3.0f)) / 2);
            }
            break;

        case Fove.EFVR_Eye.Left:
            // Only the right eye is open: track it alone.
            Physics.Raycast(gazeRays.right, out rightHit, Mathf.Infinity);
            transform.position = rightHit.point != Vector3.zero
                ? rightHit.point
                : gazeRays.right.GetPoint(3.0f);
            break;

        case Fove.EFVR_Eye.Right:
            // Only the left eye is open: track it alone.
            Physics.Raycast(gazeRays.left, out leftHit, Mathf.Infinity);
            transform.position = leftHit.point != Vector3.zero
                ? leftHit.point
                : gazeRays.left.GetPoint(3.0f);
            break;
    }
}
// Runs in FixedUpdate (the original comment incorrectly claimed LateUpdate).
// Follows the left-eye gaze: moves this object to the gazed point and
// starts/stops a timerScript on the looked-at object when the gaze target
// changes (`prev` remembers the previous target collider).
void FixedUpdate()
{
    rays = fove.GetGazeRays_Immediate();
    myEyeStruct = FoveInterface.CheckEyesClosed();
    Debug.Log(myEyeStruct.ToString());

    // Bail out when the gaze ray hits nothing. Previously the Raycast return
    // value was ignored: a miss left hit.collider null (passed straight into
    // Gazecast) and the position was overwritten unconditionally BEFORE the
    // zero-point guard, which defeated that guard entirely.
    if (!Physics.Raycast(rays.left, out hit, Mathf.Infinity))
    {
        return;
    }

    if (fove.Gazecast(hit.collider) && hit.point != Vector3.zero)
    {
        transform.position = hit.point;

        if (prev == null)
        {
            // First target seen: remember it and start its timer if present.
            prev = hit.collider;
            Debug.Log("hit collider set collider to my object " + prev.GetType());
            if (prev.GetComponent<timerScript>() != null)
            {
                prev.SendMessage("StartTimer");
            }
        }
        else if (prev.name != hit.collider.name)
        {
            // Gaze moved to a different object: stop the old timer, start the new one.
            if (prev.GetComponent<timerScript>() != null)
            {
                prev.SendMessage("StopTimer");
            }
            if (hit.collider.GetComponent<timerScript>() != null)
            {
                hit.collider.SendMessage("StartTimer");
            }
            prev = hit.collider;
        }
    }
}
/// <summary>
/// Sets the _rayOrigin variable based on the used VRHMD.
/// For the FOVE with eyetracking, the origin is derived from whichever eyes
/// are currently open (midpoint of both, a single eye, or Vector3.zero when
/// both are closed); for head tracking and the VIVE it is this transform's
/// position.
/// </summary>
private void FindGazeOrigin()
{
    switch (TestController.Instance.TestBlockData.SelectedVRHMD)
    {
        case TestBlock.VRHMD.VIVE:
            _rayOrigin = transform.position;
            break;

        case TestBlock.VRHMD.FOVE:
            if (CurrentControlMethod == TestBlock.ControlMethod.Eyetracking)
            {
                FoveInterface.EyeRays gazeRays = _foveInterface.GetGazeRays();
                EFVR_Eye closedState = FoveInterface.CheckEyesClosed();

                if (closedState == EFVR_Eye.Neither)
                {
                    // Both eyes open: midpoint of the two eye-ray origins.
                    _rayOrigin = (gazeRays.left.origin + gazeRays.right.origin) * 0.5f;
                }
                else if (closedState == EFVR_Eye.Left)
                {
                    _rayOrigin = gazeRays.right.origin;
                }
                else if (closedState == EFVR_Eye.Right)
                {
                    _rayOrigin = gazeRays.left.origin;
                }
                else if (closedState == EFVR_Eye.Both)
                {
                    _rayOrigin = Vector3.zero;
                }
            }
            else if (CurrentControlMethod == TestBlock.ControlMethod.Headtracking)
            {
                _rayOrigin = transform.position;
            }
            break;
    }
}
// Update is called once per frame.
// Positions this object a fixed distance along the current gaze: the
// midpoint of both eyes' points when both are open, otherwise the point
// from the single open eye.
void Update()
{
    FoveInterface.EyeRays gaze = FoveInterface.GetEyeRays();

    switch (FoveInterface.CheckEyesClosed())
    {
        case Fove.EFVR_Eye.Neither:
        {
            Vector3 leftPoint = gaze.left.GetPoint(this.distanceFromEye);
            Vector3 rightPoint = gaze.right.GetPoint(this.distanceFromEye);
            // Midpoint: left plus half the vector from left to right.
            transform.position = leftPoint + ((rightPoint - leftPoint) / 2);
            break;
        }
        case Fove.EFVR_Eye.Left:
            // Left eye closed -> follow the right eye.
            transform.position = gaze.right.GetPoint(this.distanceFromEye);
            break;
        case Fove.EFVR_Eye.Right:
            // Right eye closed -> follow the left eye.
            transform.position = gaze.left.GetPoint(this.distanceFromEye);
            break;
    }
}
// Called once per frame.
// Re-raycasts along the gaze-convergence ray whenever the convergence data
// changes and moves this object to the hit point. Also tracks the last
// eye-closed state in `prev` (empty string means "not yet initialized").
private void Update()
{
    RaycastHit hit;
    myEyeStruct = FoveInterface.CheckEyesClosed();
    gcd = FoveInterface.GetGazeConvergence();

    // Only recompute when the convergence data actually changed, or on the
    // very first frame (signalled by `prev` still being empty).
    if (!prevgcd.Equals(gcd) || prev == "")
    {
        // BUG FIX: the Raycast return value was ignored, so a miss left
        // hit.point at Vector3.zero and teleported this object to the world
        // origin. The position is now only updated on an actual hit.
        if (Physics.Raycast(gcd.ray, out hit, Mathf.Infinity))
        {
            transform.position = hit.point;
        }
        prevgcd = gcd;
        //Debug.Log("(accuracy,distance,ray) " + gcd.accuracy + " " + gcd.distance + " " + gcd.ray);
    }

    // Record eye-closed state transitions.
    if (prev == "" || prev != myEyeStruct.ToString())
    {
        prev = myEyeStruct.ToString();
        //Debug.Log(prev);
    }
}
/// <summary>
/// Update cursor position on screen based on the eye gaze ray from VRHMD (if using eyetracking as control method).
/// This function is also called if RecordGazePosition is true, in which case the ray is still handled but the cursor position is not updated.
/// This is in case we want to record gaze position regardless of using eyetracking as control method.
/// </summary>
private void TrackEyes()
{
    // One direction per open eye (or a corrected single direction for VIVE).
    List<Vector3> eyeDirections = new List<Vector3>();
    switch (TestController.Instance.TestBlockData.SelectedVRHMD)
    {
        case TestBlock.VRHMD.VIVE:
            Vector3 gaze = Pupil.values.GazePoint3D;
            //Transform and correct eye-tracking
            gaze = (transform.rotation * gaze).normalized;
            // gaze + 2*(forward - gaze) == 2*forward - gaze: reflects the
            // measured direction about the head's forward axis.
            Vector3 delta = transform.forward.normalized - gaze;
            gaze = gaze + delta * 2;
            //float eyeConfidence = (Pupil.values.Confidences[0] + Pupil.values.Confidences[1]) / 2.0f;
            //if (eyeConfidence > 0.7f)
            //{
            eyeDirections.Add(gaze);
            //}
            break;
        case TestBlock.VRHMD.FOVE:
            FoveInterface.EyeRays rays = _foveInterface.GetGazeRays();
            EFVR_Eye eyeClosed = FoveInterface.CheckEyesClosed();
            // Use only the open eye(s); when both are closed a zero vector is
            // added so the average below degrades to a zero direction.
            switch (eyeClosed)
            {
                case (EFVR_Eye.Neither):
                    eyeDirections.Add(rays.left.direction);
                    eyeDirections.Add(rays.right.direction);
                    break;
                case (EFVR_Eye.Left):
                    eyeDirections.Add(rays.right.direction);
                    break;
                case (EFVR_Eye.Right):
                    eyeDirections.Add(rays.left.direction);
                    break;
                case (EFVR_Eye.Both):
                    eyeDirections.Add(Vector3.zero);
                    break;
            }
            break;
    }
    // Average the collected directions.
    // NOTE(review): if SelectedVRHMD matches neither case, eyeDirections
    // stays empty and the division below is by zero (NaN components) —
    // confirm the enum cannot take other values.
    Vector3 direction = Vector3.zero;
    foreach (Vector3 eyeDirection in eyeDirections)
    {
        direction += eyeDirection;
    }
    direction = direction / eyeDirections.Count;
    Ray ray = new Ray(_rayOrigin, direction);
    ray = GetAverageEyeRay(ray);
    // Project the gaze point (a fixed distance along the ray) to screen space.
    CurrentEyeGazeScreenPoint = VRCamera.WorldToScreenPoint(_rayOrigin + ray.direction * POINT_CALCULATION_DISTANCE);
    // Eyetracking control drives the cursor; otherwise the ray is only recorded.
    if (CurrentControlMethod == TestBlock.ControlMethod.Eyetracking)
    {
        HandleRay(ray);
    }
    else
    {
        HandleGazeTrackingRay(ray);
    }
    Debug.DrawRay(ray.origin, ray.direction * 100);
}
// Update is called once per frame.
// Moves a "tunneling" (vignette) object along the current gaze direction.
// Every branch runs the same pipeline on a gaze point `originhit`:
//   origindirection  = |originhit|^2        (squared distance)
//   origindirection2 = |originhit|          (distance)
//   origindirection3 = sqrt(|originhit|)
//   newtunnering     = originhit * root10 / origindirection3
// NOTE(review): the original comment claims this produces a length-10
// vector, but if root10 == sqrt(10) the resulting length is
// sqrt(10 * |originhit|), which equals 10 only when |originhit| == 10 —
// confirm the intended scaling.
void Update()
{
    FoveInterface.EyeRays eyes = FoveInterface.GetEyeRays();
    RaycastHit hitLeft, hitRight;
    switch (FoveInterface.CheckEyesClosed())
    {
        case Fove.EFVR_Eye.Neither:
            Physics.Raycast(eyes.left, out hitLeft, Mathf.Infinity);
            Physics.Raycast(eyes.right, out hitRight, Mathf.Infinity);
            if (hitLeft.point != Vector3.zero && hitRight.point != Vector3.zero)
            {
                // Gaze point = midpoint between the two eye hits.
                originhit = hitLeft.point + ((hitRight.point - hitLeft.point) / 2);
                origindirection = originhit.x * originhit.x + originhit.y * originhit.y + originhit.z * originhit.z; // squared distance
                origindirection2 = Mathf.Sqrt(origindirection); // distance
                origindirection3 = Mathf.Sqrt(origindirection2); // sqrt of distance
                newtunnering = new Vector3(originhit.x * root10 / origindirection3, originhit.y * root10 / origindirection3, originhit.z * root10 / origindirection3); // rescaled gaze vector (see NOTE above)
                transform.position = newtunnering; // move the tunneling object
                // Log the squared length of the result for debugging.
                kyori = newtunnering.x * newtunnering.x + newtunnering.y * newtunnering.y + newtunnering.z * newtunnering.z;
                Debug.Log(kyori.ToString());
            }
            else
            {
                // Raycast missed: fall back to a point 3 m along the gaze.
                originhit = eyes.left.GetPoint(3.0f) + ((eyes.right.GetPoint(3.0f) - eyes.left.GetPoint(3.0f)) / 2);
                origindirection = originhit.x * originhit.x + originhit.y * originhit.y + originhit.z * originhit.z; // squared distance
                origindirection2 = Mathf.Sqrt(origindirection);
                origindirection3 = Mathf.Sqrt(origindirection2);
                newtunnering = new Vector3(originhit.x * root10 / origindirection3, originhit.y * root10 / origindirection3, originhit.z * root10 / origindirection3); // rescaled gaze vector
                transform.position = newtunnering; // move the tunneling object
            }
            break;
        case Fove.EFVR_Eye.Left:
            // Left eye closed: use the right eye only.
            Physics.Raycast(eyes.right, out hitRight, Mathf.Infinity);
            if (hitRight.point != Vector3.zero) // Vector3 is non-nullable; comparing to null is always false
            {
                originhit = hitRight.point; // gaze point from the right eye
                origindirection = originhit.x * originhit.x + originhit.y * originhit.y + originhit.z * originhit.z;
                // squared distance
                origindirection2 = Mathf.Sqrt(origindirection);
                origindirection3 = Mathf.Sqrt(origindirection2);
                newtunnering = new Vector3(originhit.x * root10 / origindirection3, originhit.y * root10 / origindirection3, originhit.z * root10 / origindirection3); // rescaled gaze vector
                transform.position = newtunnering; // move the tunneling object
            }
            else
            {
                originhit = eyes.right.GetPoint(3.0f); // fallback gaze point
                origindirection = originhit.x * originhit.x + originhit.y * originhit.y + originhit.z * originhit.z; // squared distance
                origindirection2 = Mathf.Sqrt(origindirection);
                origindirection3 = Mathf.Sqrt(origindirection2);
                newtunnering = new Vector3(originhit.x * root10 / origindirection3, originhit.y * root10 / origindirection3, originhit.z * root10 / origindirection3); // rescaled gaze vector
                transform.position = newtunnering; // move the tunneling object
            }
            break;
        case Fove.EFVR_Eye.Right:
            // Right eye closed: use the left eye only.
            Physics.Raycast(eyes.left, out hitLeft, Mathf.Infinity);
            if (hitLeft.point != Vector3.zero) // Vector3 is non-nullable; comparing to null is always false
            {
                originhit = hitLeft.point; // gaze point from the left eye
                origindirection = originhit.x * originhit.x + originhit.y * originhit.y + originhit.z * originhit.z; // squared distance
                origindirection2 = Mathf.Sqrt(origindirection);
                origindirection3 = Mathf.Sqrt(origindirection2);
                newtunnering = new Vector3(originhit.x * root10 / origindirection3, originhit.y * root10 / origindirection3, originhit.z * root10 / origindirection3); // rescaled gaze vector
                transform.position = newtunnering; // move the tunneling object
            }
            else
            {
                originhit = eyes.left.GetPoint(3.0f); // fallback gaze point
                origindirection = originhit.x * originhit.x + originhit.y * originhit.y + originhit.z * originhit.z; // squared distance
                origindirection2 = Mathf.Sqrt(origindirection);
                origindirection3 = Mathf.Sqrt(origindirection2);
                newtunnering = new Vector3(originhit.x * root10 / origindirection3, originhit.y * root10 / origindirection3, originhit.z * root10 / origindirection3); // rescaled gaze vector
                transform.position = newtunnering; // move the tunneling object
            }
            break;
    }
}
// Update is called once per frame.
// Highlights one of nine calibration targets selected via the QWE/ASD/ZXC
// keys, compares the measured gaze direction against the true direction to
// the selected target, and appends the angular error to the CSV writer.
void Update()
{
    FoveInterface.EyeRays eyes = FoveInterface.GetEyeRays();

    // Reset all targets to the default material each frame.
    for (int i = 0; i < 9; i++)
    {
        _object[i].GetComponent<Renderer>().material = _material[0];
    }

    k = 0;
    eyevector.x = eyevector.y = eyevector.z = 0;
    truthvector.x = truthvector.y = truthvector.z = 0;

    // Map the 3x3 key grid to target indices 1..9 (0 = no selection).
    // (The original wrapped these in `if (k == 0)` immediately after `k = 0;`,
    // which was always true and has been removed.)
    if (Input.GetKey(KeyCode.Q)) { k = 1; }
    if (Input.GetKey(KeyCode.W)) { k = 2; }
    if (Input.GetKey(KeyCode.E)) { k = 3; }
    if (Input.GetKey(KeyCode.A)) { k = 4; }
    if (Input.GetKey(KeyCode.S)) { k = 5; }
    if (Input.GetKey(KeyCode.D)) { k = 6; }
    if (Input.GetKey(KeyCode.Z)) { k = 7; }
    if (Input.GetKey(KeyCode.X)) { k = 8; }
    if (Input.GetKey(KeyCode.C)) { k = 9; }

    if (k > 0)
    {
        // Highlight the selected target.
        _object[k - 1].GetComponent<Renderer>().material = _material[1];

        // Acquire the measured and ground-truth gaze vectors.
        switch (FoveInterface.CheckEyesClosed())
        {
            case Fove.EFVR_Eye.Neither:
                // BUG FIX: the original averaged the LEFT eye vector with
                // itself. The right-eye vector belongs in the average,
                // matching the midpoint of both eye origins used below.
                eyevector = (FoveInterface.GetLeftEyeVector() + FoveInterface.GetRightEyeVector()) / 2;
                truthvector = _object[k - 1].GetComponent<Transform>().position - ((eyes.left.origin + eyes.right.origin) / 2);
                break;
            case Fove.EFVR_Eye.Left:
                // One eye closed: skip the measurement this frame.
                eyevector.x = eyevector.y = eyevector.z = 0;
                truthvector.x = truthvector.y = truthvector.z = 0;
                //eyevector = FoveInterface.GetRightEyeVector();
                //truthvector = _object[k - 1].GetComponent<Transform>().position - eyes.right.origin;
                break;
            case Fove.EFVR_Eye.Right:
                eyevector.x = eyevector.y = eyevector.z = 0;
                truthvector.x = truthvector.y = truthvector.z = 0;
                //eyevector = FoveInterface.GetLeftEyeVector();
                //truthvector = _object[k - 1].GetComponent<Transform>().position - eyes.left.origin;
                break;
        }

        // Angle (degrees) between the measured and true gaze directions.
        // NOTE(review): when both vectors are zero (eye-closed cases) this is
        // 0/0 -> NaN, which is still written to the CSV — confirm intended.
        theta = Mathf.Acos(Vector3.Dot(eyevector, truthvector) / (eyevector.magnitude * truthvector.magnitude)) * Mathf.Rad2Deg;
        Debug.Log(theta); // angular error in degrees

        // CSV row: key index, angular error, measured vector, true vector.
        streamWriter.Write(k.ToString() + ',' + theta.ToString() + ',' + eyevector.x.ToString() + ',' + eyevector.y.ToString() + ',' + eyevector.z.ToString() + ',' + truthvector.x.ToString() + ',' + truthvector.y.ToString() + ',' + truthvector.z.ToString());
        streamWriter.WriteLine(); // end of CSV row
    }

    Debug.Log(k);
}
// Update is called once per frame.
// Computes the gaze point from the open eye(s) and feeds its x/y (scaled
// into roughly 0..1 by dividing by a hard-coded plane size) into the
// "_UX"/"_VY" properties of the `plane` shader material.
// NOTE(review): the hit branches divide by (25, 14) but two of the fallback
// branches divide by (26, 14) — this looks like a copy/paste inconsistency;
// confirm which divisor is correct.
void Update()
{
    FoveInterface.EyeRays eyes = FoveInterface.GetEyeRays();
    RaycastHit hitLeft, hitRight;
    //pos = mousemove / new Vector2(Screen.width, Screen.height);
    switch (FoveInterface.CheckEyesClosed())
    {
        case Fove.EFVR_Eye.Neither:
            // Both eyes open: raycast both and use the midpoint of the hits.
            Physics.Raycast(eyes.left, out hitLeft, Mathf.Infinity);
            Physics.Raycast(eyes.right, out hitRight, Mathf.Infinity);
            if (hitLeft.point != Vector3.zero && hitRight.point != Vector3.zero)
            {
                //transform.position = hitLeft.point + ((hitRight.point - hitLeft.point) / 2);
                eyepos = hitLeft.point + ((hitRight.point - hitLeft.point) / 2);
                eyepos = eyepos / new Vector2(25, 14); // scale into shader UV space by the plane size
                //tunpos = eyepos / new Vector2(Screen.width, Screen.height); // divide by screen size to map into 0..1 for the shader
                //tunpos -= new Vector2(0.5f, 0.5f); // correct the center offset
                //plane.SetFloat("_UX", tunpos.x);
                //plane.SetFloat("_VY", tunpos.y);
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            else
            {
                //transform.position = eyes.left.GetPoint(3.0f) + ((eyes.right.GetPoint(3.0f) - eyes.left.GetPoint(3.0f)) / 2);
                // Raycast missed: fall back to a point 3 m along the gaze.
                eyepos = eyes.left.GetPoint(3.0f) + ((eyes.right.GetPoint(3.0f) - eyes.left.GetPoint(3.0f)) / 2);;
                eyepos = eyepos / new Vector2(25, 14); // scale into shader UV space
                //tunpos = eyepos / new Vector2(Screen.width, Screen.height);
                //tunpos -= new Vector2(0.5f, 0.5f);
                //plane.SetFloat("_UX", tunpos.x);
                //plane.SetFloat("_VY", tunpos.y);
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            break;
        case Fove.EFVR_Eye.Left:
            // Left eye closed: use the right eye only.
            Physics.Raycast(eyes.right, out hitRight, Mathf.Infinity);
            if (hitRight.point != Vector3.zero) // Vector3 is non-nullable; comparing to null is always false
            {
                //transform.position = hitRight.point;
                eyepos = hitRight.point;
                eyepos = eyepos / new Vector2(25, 14); // scale into shader UV space
                //tunpos = eyepos / new Vector2(Screen.width, Screen.height);
                //tunpos -= new Vector2(0.5f, 0.5f);
                //plane.SetFloat("_UX", tunpos.x);
                //plane.SetFloat("_VY", tunpos.y);
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            else
            {
                //transform.position = eyes.right.GetPoint(3.0f);
                eyepos = eyes.right.GetPoint(3.0f);
                eyepos = eyepos / new Vector2(26, 14); // NOTE(review): 26 here vs 25 elsewhere — verify
                //tunpos = eyepos / new Vector2(Screen.width, Screen.height);
                //tunpos -= new Vector2(0.5f, 0.5f);
                //plane.SetFloat("_UX", tunpos.x);
                //plane.SetFloat("_VY", tunpos.y);
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            break;
        case Fove.EFVR_Eye.Right:
            // Right eye closed: use the left eye only.
            Physics.Raycast(eyes.left, out hitLeft, Mathf.Infinity);
            if (hitLeft.point != Vector3.zero) // Vector3 is non-nullable; comparing to null is always false
            {
                //transform.position = hitLeft.point;
                eyepos = hitLeft.point;
                eyepos = eyepos / new Vector2(25, 14); // scale into shader UV space
                //tunpos = eyepos / new Vector2(Screen.width, Screen.height);
                //tunpos -= new Vector2(0.5f, 0.5f);
                //plane.SetFloat("_UX", tunpos.x);
                //plane.SetFloat("_VY", tunpos.y);
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            else
            {
                //transform.position = eyes.left.GetPoint(3.0f);
                eyepos = eyes.left.GetPoint(3.0f);
                eyepos = eyepos / new Vector2(26, 14); // NOTE(review): 26 here vs 25 elsewhere — verify
                //tunpos = eyepos / new Vector2(Screen.width, Screen.height);
                //tunpos -= new Vector2(0.5f, 0.5f);
                //plane.SetFloat("_UX", tunpos.x);
                //plane.SetFloat("_VY", tunpos.y);
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            break;
    }
    //Debug.Log("Eyepos" + eyepos);
    //Debug.Log("Tunpos" + tunpos);
    //Debug.Log("Pos(" + pos.x + "," + pos.y + ")");
}
// Update is called once per frame.
// Positions a "tunneling" (vignette) object 10 units along the gaze of the
// open eye(s); on a raycast miss it falls back to rescaling a 3 m gaze point
// (same sqrt-of-sqrt pipeline as the sibling tunneling scripts).
// NOTE(review): the single-eye hit branches compute
//   tunneringpos = root10 * originvector - origineye;
// i.e. they SUBTRACT the eye origin, while the both-eyes branch ADDS the
// origin (`eyes.origin + vector * 10`). These disagree — confirm which is
// intended.
void Update()
{
    FoveInterface.EyeRays eyes = FoveInterface.GetEyeRays();
    RaycastHit hitLeft, hitRight;
    switch (FoveInterface.CheckEyesClosed())
    {
        case Fove.EFVR_Eye.Neither:
            Physics.Raycast(eyes.left, out hitLeft, Mathf.Infinity);
            Physics.Raycast(eyes.right, out hitRight, Mathf.Infinity);
            if (hitLeft.point != Vector3.zero && hitRight.point != Vector3.zero)
            {
                //origineye = (eyes.left.origin + eyes.right.origin) / 2; // midpoint between the eyes
                //originvector = (FoveInterface.GetLeftEyeVector() + FoveInterface.GetRightEyeVector()) / 2; // average of both eye vectors
                //origineye = eyes.right.origin;
                //originvector = FoveInterface.GetRightEyeVector();
                //tunneringpos = root10 * originvector - origineye;
                // Average of "10 units along each eye's vector from that eye's origin".
                tunneringpos = ((eyes.left.origin + FoveInterface.GetLeftEyeVector() * 10.0f) + (eyes.right.origin + FoveInterface.GetRightEyeVector() * 10.0f)) / 2;
                //tunneringpos = eyes.right.origin + FoveInterface.GetRightEyeVector() * 10.0f;
                transform.position = tunneringpos; // move the tunneling object
                //kyori = tunneringpos.x * tunneringpos.x + tunneringpos.y * tunneringpos.y + tunneringpos.z * tunneringpos.z;
                /*Debug.Log(origineye.ToString());
                 * Debug.Log(originvector.ToString());
                 * Debug.Log(tunneringpos.ToString());*/
                //Debug.Log(tunneringpos.x.ToString() + ":" + tunneringpos.y.ToString() + ":" + tunneringpos.z.ToString() + ":" + kyori.ToString());
            }
            else
            {
                // Raycast missed: rescale a 3 m midpoint gaze point instead.
                originhit = eyes.left.GetPoint(3.0f) + ((eyes.right.GetPoint(3.0f) - eyes.left.GetPoint(3.0f)) / 2);
                origindirection = originhit.x * originhit.x + originhit.y * originhit.y + originhit.z * originhit.z; // squared distance
                origindirection2 = Mathf.Sqrt(origindirection);
                origindirection3 = Mathf.Sqrt(origindirection2);
                newtunnering = new Vector3(originhit.x * root10 / origindirection3, originhit.y * root10 / origindirection3, originhit.z * root10 / origindirection3); // rescaled gaze vector
                transform.position = newtunnering; // move the tunneling object
            }
            break;
        case Fove.EFVR_Eye.Left:
            // Left eye closed: use the right eye only.
            Physics.Raycast(eyes.right, out hitRight, Mathf.Infinity);
            if (hitRight.point != Vector3.zero) // Vector3 is non-nullable; comparing to null is always false
            {
                origineye = eyes.right.origin; // right-eye origin
                originvector = FoveInterface.GetRightEyeVector(); // right-eye gaze vector
                tunneringpos = root10 * originvector - origineye; // see NOTE above: origin is subtracted here
                transform.position = tunneringpos; // move the tunneling object
                //kyori = tunneringpos.x * tunneringpos.x + tunneringpos.y * tunneringpos.y + tunneringpos.z * tunneringpos.z;
                //Debug.Log(tunneringpos.x.ToString() + ":" + tunneringpos.y.ToString() + ":" + tunneringpos.z.ToString() + ":" + kyori.ToString());
            }
            else
            {
                originhit = eyes.right.GetPoint(3.0f); // fallback gaze point
                origindirection = originhit.x * originhit.x + originhit.y * originhit.y + originhit.z * originhit.z; // squared distance
                origindirection2 = Mathf.Sqrt(origindirection);
                origindirection3 = Mathf.Sqrt(origindirection2);
                newtunnering = new Vector3(originhit.x * root10 / origindirection3, originhit.y * root10 / origindirection3, originhit.z * root10 / origindirection3); // rescaled gaze vector
                transform.position = newtunnering; // move the tunneling object
            }
            break;
        case Fove.EFVR_Eye.Right:
            // Right eye closed: use the left eye only.
            Physics.Raycast(eyes.left, out hitLeft, Mathf.Infinity);
            if (hitLeft.point != Vector3.zero) // Vector3 is non-nullable; comparing to null is always false
            {
                origineye = eyes.left.origin; // left-eye origin
                originvector = FoveInterface.GetLeftEyeVector(); // left-eye gaze vector
                tunneringpos = root10 * originvector - origineye; // see NOTE above: origin is subtracted here
                transform.position = tunneringpos; // move the tunneling object
                //kyori = tunneringpos.x * tunneringpos.x + tunneringpos.y * tunneringpos.y + tunneringpos.z * tunneringpos.z;
                //Debug.Log(tunneringpos.x.ToString() + ":" + tunneringpos.y.ToString() + ":" + tunneringpos.z.ToString() + ":" + kyori.ToString());
            }
            else
            {
                originhit = eyes.left.GetPoint(3.0f); // fallback gaze point
                origindirection = originhit.x * originhit.x + originhit.y * originhit.y + originhit.z * originhit.z; // squared distance
                origindirection2 =
                    Mathf.Sqrt(origindirection);
                origindirection3 = Mathf.Sqrt(origindirection2);
                newtunnering = new Vector3(originhit.x * root10 / origindirection3, originhit.y * root10 / origindirection3, originhit.z * root10 / origindirection3); // rescaled gaze vector
                transform.position = newtunnering; // move the tunneling object
            }
            break;
    }
}
// Update is called once per frame.
// Computes the gaze point from the open eye(s), normalizes its x/y by the
// `displaysize` field, and feeds the result into the "_UX"/"_VY" properties
// of the `plane` shader material.
void Update()
{
    FoveInterface.EyeRays eyes = FoveInterface.GetEyeRays();
    RaycastHit hitLeft, hitRight;
    switch (FoveInterface.CheckEyesClosed())
    {
        case Fove.EFVR_Eye.Neither:
            // Both eyes open: raycast both and use the midpoint of the hits.
            Physics.Raycast(eyes.left, out hitLeft, Mathf.Infinity);
            Physics.Raycast(eyes.right, out hitRight, Mathf.Infinity);
            if (hitLeft.point != Vector3.zero && hitRight.point != Vector3.zero)
            {
                eyepos = hitLeft.point + ((hitRight.point - hitLeft.point) / 2);
                //eyepos = eyepos / displaysize;
                eyepos.x = eyepos.x / displaysize.x; // normalize by display size
                eyepos.y = eyepos.y / displaysize.y;
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            else
            {
                // Raycast missed: fall back to a point 3 m along the gaze.
                eyepos = eyes.left.GetPoint(3.0f) + ((eyes.right.GetPoint(3.0f) - eyes.left.GetPoint(3.0f)) / 2);;
                //eyepos = eyepos / displaysize;
                eyepos.x = eyepos.x / displaysize.x;
                eyepos.y = eyepos.y / displaysize.y;
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            break;
        case Fove.EFVR_Eye.Left:
            // Left eye closed: use the right eye only.
            Physics.Raycast(eyes.right, out hitRight, Mathf.Infinity);
            if (hitRight.point != Vector3.zero) // Vector3 is non-nullable; comparing to null is always false
            {
                eyepos = hitRight.point;
                //eyepos = eyepos / displaysize;
                eyepos.x = eyepos.x / displaysize.x;
                eyepos.y = eyepos.y / displaysize.y;
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            else
            {
                eyepos = eyes.right.GetPoint(3.0f);
                //eyepos = eyepos / displaysize;
                eyepos.x = eyepos.x / displaysize.x;
                eyepos.y = eyepos.y / displaysize.y;
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            break;
        case Fove.EFVR_Eye.Right:
            // Right eye closed: use the left eye only.
            Physics.Raycast(eyes.left, out hitLeft, Mathf.Infinity);
            if (hitLeft.point != Vector3.zero) // Vector3 is non-nullable; comparing to null is always false
            {
                eyepos = hitLeft.point;
                //eyepos = eyepos / displaysize;
                eyepos.x = eyepos.x / displaysize.x;
                eyepos.y = eyepos.y / displaysize.y;
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            else
            {
                eyepos = eyes.left.GetPoint(3.0f);
                //eyepos = eyepos / displaysize;
                eyepos.x = eyepos.x / displaysize.x;
                eyepos.y = eyepos.y / displaysize.y;
                plane.SetFloat("_UX", eyepos.x); // gaze x into the shader's x
                plane.SetFloat("_VY", eyepos.y); // gaze y into the shader's y
            }
            break;
    }
}
//Gets point where user is looking every frame and interacts with any intersecting gazeobjects if possible.
// Flow: pick an input ray based on _selectedControlType, always recompute the
// eye-gaze ray afterwards (overwriting `ray` — so head/mouse rays chosen in
// the switch are replaced below; NOTE(review): confirm that overwrite is
// intended for the Head/Mouse cases), position the cursor, then raycast and
// dispatch hover/command logic to the hit GazeObject.
void Update()
{
    if (!_initialized)
    {
        return;
    }
    // Global hotkeys: recenter the head, or quit.
    if (Input.GetKeyDown(KeyCode.H))
    {
        CenterHead();
    }
    if (Input.GetKeyDown(KeyCode.Escape))
    {
        Application.Quit();
    }
    Ray ray = new Ray();
    switch (_selectedControlType)
    {
        case StreamController.ControlType.Head:
            ray = new Ray(Head.position, Head.forward * 1000);
            break;
        case StreamController.ControlType.Eyes_Mouse:
        case StreamController.ControlType.Mouse:
            // NOTE(review): empty GetMouseButtonDown(1) block — dead code.
            if (Input.GetMouseButtonDown(1)) { }
            // Right mouse button drags the head orientation (roll locked to 0).
            if (Input.GetMouseButton(1))
            {
                Head.Rotate(Vector3.up, Input.GetAxis("Mouse X") * _mouseRotationSpeed, Space.Self);
                Head.Rotate(Vector3.right, -Input.GetAxis("Mouse Y") * _mouseRotationSpeed, Space.Self);
                Head.localRotation = Quaternion.Euler(Head.localEulerAngles.x, Head.localEulerAngles.y, 0);
            }
            if (Input.GetMouseButton(0) || _selectedControlType == StreamController.ControlType.Eyes_Mouse)
            {
                ray = Camera.main.ScreenPointToRay(Input.mousePosition);
            }
            else
            {
                ResetHoveredObject();
                return;
            }
            break;
        //both of the code for the two input cases was moved further down, since we want gaze data to be recorded for both inputs.
        case StreamController.ControlType.Eyes:
            //List<Vector3> eyeDirections = new List<Vector3>();
            //FoveInterfaceBase.EyeRays rays = _foveInterface.GetGazeRays();
            //EFVR_Eye eyeClosed = FoveInterface.CheckEyesClosed();
            //if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Left)
            //    eyeDirections.Add(rays.left.direction);
            //if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Right)
            //    eyeDirections.Add(rays.right.direction);
            //Vector3 direction = Vector3.zero;
            //foreach (Vector3 eyeDirection in eyeDirections)
            //{
            //    direction += eyeDirection;
            //}
            //direction = direction / eyeDirections.Count;
            //ray = new Ray(Head.transform.position, direction * 1000);
            break;
        case StreamController.ControlType.Joystick:
        {
            // // Joystick input
            //Vector2 JoyInput = new Vector2(Input.GetAxis("Horizontal"), Input.GetAxis("Vertical"));
            ////if the virtual environment is on, send the command to the VirtualUnityController
            //if (StreamController.Instance.VirtualEnvironment)
            //{
            //    if (VirtualUnityController.Instance.IsActive)
            //    {
            //        VirtualUnityController.Instance.JoystickCommand(JoyInput);
            //    }
            //}
            //// Othewise send it to the robotinterface
            //else
            //{
            //    if (RobotInterface.Instance.IsConnected)
            //    {
            //        RobotInterface.Instance.DirectCommandRobot(JoyInput);
            //    }
            //}
            break;
        }
    }
    //--Eye direction calculation for all occasions
    List<Vector3> eyeDirections = new List<Vector3>();
    FoveInterfaceBase.EyeRays rays = _foveInterface.GetGazeRays();
    EFVR_Eye eyeClosed = FoveInterface.CheckEyesClosed();
    // Collect only the open eyes' directions.
    if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Left)
    {
        eyeDirections.Add(rays.left.direction);
    }
    if (eyeClosed != EFVR_Eye.Both && eyeClosed != EFVR_Eye.Right)
    {
        eyeDirections.Add(rays.right.direction);
    }
    // NOTE(review): when both eyes are closed, eyeDirections is empty and the
    // division below is by zero (NaN direction) — confirm that case is handled
    // upstream or acceptable here.
    Vector3 direction = Vector3.zero;
    foreach (Vector3 eyeDirection in eyeDirections)
    {
        direction += eyeDirection;
    }
    direction = direction / eyeDirections.Count;
    ray = new Ray(Head.transform.position, direction * 1000);
    //---------------------------------------------------------
    //Positioning of the cursor
    _cursorCanvas.position = Head.position + ray.direction * _cursorDistance;
    Debug.DrawRay(ray.origin, ray.direction);
    RaycastHit hit;
    if (Physics.Raycast(ray, out hit))
    {
        GazeObject gazeObject = hit.collider.GetComponent<GazeObject>();
        if (gazeObject == null)
        {
            ResetHoveredObject();
            return;
        }
        // For this reason we also check if the tag of the gazeobject is the correct one
        RobotControlTrackPad robotControl = gazeObject.GetComponent<RobotControlTrackPad>();
        if (robotControl != null && gazeObject.CompareTag("EyeControlPanel"))
        {
            //Control result is provided on hit. This is updated for both cases of input
            controlResult = robotControl.GetControlResult(hit.point);
            //If the robotcontrols are activated and the eye tracking is used for motion then send the command to the appropriate controller
            // NOTE(review): `&` here is the non-short-circuit boolean AND, so
            // IsExternallyDisabled() is always evaluated — confirm `&&` was
            // not intended.
            if (robotControl.IsActivated & !robotControl.IsExternallyDisabled() && _selectedControlType == StreamController.ControlType.Eyes)
            {
                if (StreamController.Instance.VirtualEnvironment)
                {
                    if (VirtualUnityController.Instance.IsActive)
                    {
                        // Debug.Log("Sending gaze command to robot");
                        VirtualUnityController.Instance.GazeCommand(controlResult);
                    }
                    else
                    {
                        Debug.Log("VirtualUnityController is not connected");
                    }
                }
                // Othewise send it to the robotinterface
                else
                {
                    if (RobotInterface.Instance.IsConnected)
                    {
                        RobotInterface.Instance.SendCommand(controlResult);
                    }
                    else
                    {
                        Debug.Log("RobotInterface controller is not connected");
                    }
                }
                //Instead of robotinterface here
            }
            //---Joystick Input---
            else if (robotControl.IsActivated & !robotControl.IsExternallyDisabled() && _selectedControlType == StreamController.ControlType.Joystick)
            {
                // Joystick input
                Vector2 JoyInput = new Vector2(Input.GetAxis("Horizontal"), Input.GetAxis("Vertical"));
                //if the virtual environment is on, send the command to the VirtualUnityController
                if (StreamController.Instance.VirtualEnvironment)
                {
                    if (VirtualUnityController.Instance.IsActive)
                    {
                        VirtualUnityController.Instance.JoystickCommand(JoyInput);
                    }
                }
                // Othewise send it to the robotinterface
                else
                {
                    if (RobotInterface.Instance.IsConnected)
                    {
                        RobotInterface.Instance.DirectCommandRobot(JoyInput);
                    }
                }
            }
        }
        else
        {
            //this result means not staring at panel.
            controlResult = new Vector2(-2, -2);
            //TODO : SendStopCommandToRobot instead of a zero vector. The zero vector is filtered and still adds movemenet to the robot
            // RobotInterface.Instance.SendCommand(Vector2.zero);
        }
        // Hover bookkeeping: unhover the previous object, hover the new one.
        if (gazeObject == _hoveredGazeObject)
        {
            return;
        }
        if (_hoveredGazeObject != null)
        {
            _hoveredGazeObject.OnUnhover();
        }
        gazeObject.OnHover();
        _hoveredGazeObject = gazeObject;
    }
    else
    {
        ResetHoveredObject();
    }
}
// Update is called once per frame: place this object at the gaze
// "tunneling" point, using whichever eyes are currently open.
void Update()
{
    FoveInterface.EyeRays rays = FoveInterface.GetEyeRays();
    RaycastHit leftHit, rightHit;

    switch (FoveInterface.CheckEyesClosed()) // wink / blink detection
    {
        case Fove.EFVR_Eye.Neither: // both eyes open
            Physics.Raycast(rays.left, out leftHit, Mathf.Infinity);   // cast along the left-eye gaze
            Physics.Raycast(rays.right, out rightHit, Mathf.Infinity); // cast along the right-eye gaze
            if (leftHit.point != Vector3.zero && rightHit.point != Vector3.zero)
            {
                // Tunneling point for each eye: a fixed distance (root10) out along its gaze ray.
                eyerightdistance = rays.right.direction * root10;
                eyerighttunnering = rays.right.origin + eyerightdistance;
                eyeleftdistance = rays.left.direction * root10;
                eyelefttunnering = rays.left.origin + eyeleftdistance;
                // Settle on the midpoint between the two per-eye points.
                eyetunnering = (eyerighttunnering + eyelefttunnering) / 2;
                transform.position = eyetunnering;
            }
            else
            {
                // Neither ray hit anything: fall back to the midpoint of both rays 3 m out.
                transform.position = rays.left.GetPoint(3.0f)
                    + ((rays.right.GetPoint(3.0f) - rays.left.GetPoint(3.0f)) / 2);
            }
            break;

        case Fove.EFVR_Eye.Left: // left eye closed — follow the right eye only
            Physics.Raycast(rays.right, out rightHit, Mathf.Infinity);
            if (rightHit.point != Vector3.zero)
            {
                eyerightdistance = rays.right.direction * root10;
                eyerighttunnering = rays.right.origin + eyerightdistance;
                transform.position = eyerighttunnering;
            }
            else
            {
                // No hit: park the cursor 3 m out along the right-eye ray.
                transform.position = rays.right.GetPoint(3.0f);
            }
            break;

        case Fove.EFVR_Eye.Right: // right eye closed — follow the left eye only
            Physics.Raycast(rays.left, out leftHit, Mathf.Infinity);
            if (leftHit.point != Vector3.zero)
            {
                eyeleftdistance = rays.left.direction * root10;
                eyelefttunnering = rays.left.origin + eyeleftdistance;
                transform.position = eyelefttunnering;
            }
            else
            {
                // No hit: park the cursor 3 m out along the left-eye ray.
                transform.position = rays.left.GetPoint(3.0f);
            }
            break;
    }
}
// Update is called once per frame. Figure out what the user is looking at,
// accumulate per-item gaze duration and blink counts, and refresh the label.
void Update()
{
    // Timestamp for this frame; the delta against lastUpdateTimeTicks gives the
    // per-frame gaze duration. (NOTE(review): DateTime.Now is wall-clock time —
    // a clock/DST change mid-session would skew durations; UtcNow would be safer.)
    long currentTimeTicks = DateTime.Now.Ticks;

    List<string> currentLookAtItemPath = null;

    // Get a normalized ray of the combined (converged) gaze direction.
    FoveInterfaceBase.GazeConvergenceData gazeConvergenceData = FoveInterface.GetGazeConvergence();

    // Determine whether the gaze ray hits scene geometry. Fix: use Raycast's
    // bool return value instead of comparing hit.point to Vector3.zero — a
    // legitimate hit exactly at the world origin would otherwise be misread
    // as a miss.
    RaycastHit eyeRayHit;
    if (Physics.Raycast(gazeConvergenceData.ray, out eyeRayHit, Mathf.Infinity))
    {
        // Put the cursor at the hit point and resolve the hit object's path.
        transform.position = eyeRayHit.point;
        currentLookAtItemPath = TransformToObjectPath(eyeRayHit.collider.transform);
    }
    // Else, just set it as a point 3 meters away in the direction of the ray
    // and determine what the user is looking at in the skybox.
    else
    {
        transform.position = gazeConvergenceData.ray.GetPoint(3.0f);
        Vector3 gazeDirection = gazeConvergenceData.ray.direction;

        // Convert from spherical coordinates to longitude and latitude.
        float magnitude = gazeDirection.magnitude;
        float longitude = Mathf.PI - Mathf.Acos(gazeDirection.y / magnitude);   // 0 to PI
        float latitude = Mathf.Atan2(gazeDirection.x, gazeDirection.z) + Mathf.PI; // 0 to 2 * PI

        // Map longitude/latitude over to UV coordinates on the label texture.
        float U = (latitude / (Mathf.PI * 2.0f)) % 1.0f;
        float V = (longitude / Mathf.PI) % 1.0f;

        // Sample the label texture to see if the user is looking at a labeled area.
        Color gazeItemColor = labelTexture.GetPixel((int)(U * labelTexture.width), (int)(V * labelTexture.height));

        // Snap channels to the nearest 0.01 so floating-point noise doesn't
        // break the color-keyed dictionary lookup.
        gazeItemColor.r = (float)Math.Round(gazeItemColor.r, 2);
        gazeItemColor.g = (float)Math.Round(gazeItemColor.g, 2);
        gazeItemColor.b = (float)Math.Round(gazeItemColor.b, 2);

        // Single lookup instead of ContainsKey + indexer; on a miss, the out
        // parameter is set to null, which is exactly what we want.
        labelDictionary.TryGetValue(gazeItemColor, out currentLookAtItemPath);
    }

    // Keep track of eye blink: count one blink on the closed -> fully-open transition.
    Fove.Managed.EFVR_Eye eyeClosedStatus = FoveInterface.CheckEyesClosed();
    if ((lastEyeClosedStatus != Fove.Managed.EFVR_Eye.Neither) && (eyeClosedStatus == Fove.Managed.EFVR_Eye.Neither))
    {
        currentEyeBlinkCount++;
    }

    // Keep track of the duration we've looked at that item and the blink count.
    long totalLookAtDuration = eyeTrackingLogger.UpdateLabels(
        currentLookAtItemPath,
        currentTimeTicks - lastUpdateTimeTicks,
        currentEyeBlinkCount - lastEyeBlinkCount);

    // Make sure the labeled text is facing the user.
    transform.LookAt(foveHeadset.transform);
    transform.RotateAround(transform.position, transform.up, 180.0f);

    // Set the labeler to the item name and how long we've looked at it, in ms.
    string itemName = (currentLookAtItemPath == null) ? "null" : currentLookAtItemPath[currentLookAtItemPath.Count - 1];
    ((TextMesh)eyeLabeler.GetComponent(typeof(TextMesh))).text =
        itemName + Environment.NewLine + (totalLookAtDuration / TimeSpan.TicksPerMillisecond) + " ms";

    // Only render if the user specified it.
    eyeCursor.GetComponent<Renderer>().enabled = renderCursor;
    eyeLabeler.GetComponent<Renderer>().enabled = renderCursor;

    // Carry state forward to the next frame.
    lastUpdateTimeTicks = currentTimeTicks;
    lastEyeClosedStatus = eyeClosedStatus;
    lastEyeBlinkCount = currentEyeBlinkCount;
}