void Start()
        {
            // Hide the blinkers until the exercise needs them.
            blinker1.SetActive(false);
            blinker2.SetActive(false);

            Debug.Log("Starting Virtually Real Realignment");
            cameraRig = FindObjectOfType <EyeSkillsCameraRig>();

            // Register this scene and its remote-control affordances with the practitioner UI.
            NetworkManager.instance.RegisterScene("Eye Straightening", "How long can the participant hold fusion as we straighten up their world?");
            NetworkManager.instance.RegisterButton("start", "Start/Re-start straightening", "Start/Re-start straightening the eye from the misaligned position");
            NetworkManager.instance.RegisterButton("store", "Store best fusion loss angle", "Store the greatest angle at which fusion was lost");
            //NetworkManager.instance.RegisterButton("stop", "Stop straightening", "Pause the straightening at the point fusion was lost");
            NetworkManager.instance.RegisterFloat("degree", -45f, 45f, 1f, "Misalignment", "Angle between the eyes.");

            // "1" in this PlayerPrefs key means practitioner mode is on (simplified from a redundant "? true : false").
            practitionerMode = PlayerPrefs.GetString("EyeSkills.practitionerMode") == "1";

            //Pick up user calibration
            esInput = EyeSkillsInput.instance;
            FetchEyeCalibration();

            // Remember the calibrated suppression ratio so it can be restored later.
            originalSuppressionRatio = cameraRig.config.binocularSuppressionRatio;
            cameraRig.config.leastMisalignmentBeforeFusionLost = 180f; //Make our initial "best" as bad as can be.
            ResetEyeMisalignment();
            userWantsStraightening = true;                             //This will cause the camera to rotate in the Update phase - ought to be a coroutine really.
        }
        /// <summary>
        /// Scene setup for the "Detect Aniseikonia" exercise: wires up the practitioner
        /// remote controls, positions the observables, and starts tracking the misalignment angle.
        /// </summary>
        void Start()
        {
            cameraRig    = FindObjectOfType <EyeSkillsCameraRig>();
            audioManager = AudioManager.instance;

            // Expose this scene's remote controls to the practitioner app.
            NetworkManager net = NetworkManager.instance;
            net.RegisterScene("Detect Aniseikonia", "What scaling and position are needed to fuse?");
            net.RegisterButton("unlockRight", "Unlock right eye", "Unlock the right eye");
            net.RegisterButton("unlockLeft", "Unlock left eye", "Unlock the left eye");
            net.RegisterButton("lock", "Lock both", "Lock both eyes");
            net.RegisterButton("change", "Change observables", "Change Fixation Object");
            net.RegisterButton("enlarge", "Enlarge observable", "Enlarge the fixation object");
            net.RegisterButton("shrink", "Shrink observable", "Shrink the fixation object");
            net.RegisterButton("save", "Save misalignment", "Save misalignment angle");
            net.RegisterFloat("degree", -45f, 45f, 1f, "Misalignment", "Angle between the eyes.");

            // NOTE(review): disabled auto-lock based on the calibrated strabismic eye — kept for reference.
            //audioManager.Say("ChooseStrabismicEye");
            //if (EyeSkills.UserCalibrationManager.instance.userCalibration.leftEyeIsStrabismic)
            //    lockRightEye();
            //else if (EyeSkills.UserCalibrationManager.instance.userCalibration.rightEyeIsStrabismic)
            //lockLeftEye();

            esInput = EyeSkillsInput.instance;

            // Place both observables at the configured starting distance.
            observables.AlterDistanceLeft((float)startingDistance, true);
            observables.AlterDistanceRight((float)startingDistance, true);

            StartCoroutine(followMisalignmentAngle());
        }
// Example No. 3 — snippet separator from the code-aggregation source (score: 0)
        /// <summary>
        /// Scene setup for the "Detect Eye Misalignment" exercise: announces and unlocks
        /// the strabismic eye, registers the practitioner remote controls, and begins
        /// reporting the misalignment angle.
        /// </summary>
        public void Start()
        {
            cameraRig = FindObjectOfType <EyeSkillsCameraRig>();
            esInput   = EyeSkillsInput.instance;

            audioManager = AudioManager.instance;
            //audioManager.Say("ChooseStrabismicEye");

            // Unlock whichever eye the calibration marked as strabismic, announcing the choice.
            if (cameraRig.config.leftEyeIsStrabismic)
            {
                audioManager.Say("LeftStrabismic");
                unlockLeftEye();
            }
            else
            {
                audioManager.Say("RightStrabismic");
                unlockRightEye();
            }

            // "1" in this PlayerPrefs key means practitioner mode is on (simplified from a redundant "? true : false").
            practitionerMode = PlayerPrefs.GetString("EyeSkills.practitionerMode") == "1";

            NetworkManager.instance.RegisterScene("Detect Eye Misalignment", "To what degree are the participants eyes misaligned?");
            NetworkManager.instance.RegisterButton("unlockRight", "Unlock right eye", "Right eye is strabismic");
            NetworkManager.instance.RegisterButton("unlockLeft", "Unlock left eye", "Left eye is strabismic");
            NetworkManager.instance.RegisterButton("save", "Save misalignment", "Save misalignment angle");
            NetworkManager.instance.RegisterFloat("degree", -45f, 45f, 1f, "Misalignment", "Angle between the eyes.");

            StartCoroutine(trackAndReportEyeMisalignment());
        }
        /// <summary>
        /// The second phase of initialisation. Driven to this approach because embedding a closure in SwitchTo3D causes
        /// AndroidPlayer([email protected]:34999) [EGL] Failed to create window surface: EGL_BAD_ALLOC: EGL failed to allocate resources for the requested operation.
        /// Fetches the VR camera rig and its AssetSwitcher, then signals readiness via initCallback.
        /// </summary>
        /// <param name="sucess">Whether the preceding 3D switch succeeded. NOTE(review): this argument is
        /// currently ignored — initCallback is always invoked with <c>true</c>. Confirm whether failure
        /// should instead be propagated.</param>
        public virtual void InitPart2(bool sucess)
        {
            //Get an explicit reference to the EyeSkillCameraRig
            cameraRig = multiCameraController.GetVRCameraRig();

            assetSwitcher = cameraRig.GetComponent <AssetSwitcher>();

            initCallback(true); //We know that the 3D switch has completed, and we are ready to continue to "Start"
        }
        /// <summary>
        /// Scene setup for the "Eye Straightening" exercise: registers the practitioner
        /// remote controls, then restores the user's calibration and eye misalignment.
        /// </summary>
        void Start()
        {
            cameraRig = FindObjectOfType <EyeSkillsCameraRig>();

            // Expose this scene's remote controls to the practitioner app.
            NetworkManager net = NetworkManager.instance;
            net.RegisterScene("Eye Straightening", "How long can the participant hold fusion as we straighten up their world?");
            net.RegisterButton("start", "Start/Re-start straightening", "Start/Re-start straightening the eye from the misaligned position");
            net.RegisterButton("stop", "Stop straightening", "Pause the straightening at the point fusion was lost");
            net.RegisterFloat("degree", -45f, 45f, 1f, "Misalignment", "Angle between the eyes.");

            // Load the stored calibration, then put the eyes back to their calibrated misalignment.
            FetchEyeCalibration();
            ResetEyeMisalignment();
        }
        /// <summary>
        /// Coroutine that continuously maps the headset's vertical tilt onto the camera rig's
        /// binocular suppression (relative brightness) ratio. Runs until the coroutine is stopped
        /// externally — the loop itself never exits.
        /// </summary>
        /// <param name="ratioController">Source of the tilt signal driving the ratio.</param>
        /// <param name="model">The model providing the conflict images (unused here; kept for interface parity).</param>
        /// <param name="cameraRig">The cameras whose relative brightness is being altered.</param>
        IEnumerator DemonstrateBinocularSuppressionRatio(EyeSkillsVRHeadsetInput ratioController,
                                                         ConflictZoneModel model,
                                                         EyeSkillsCameraRig cameraRig)
        {
            while (true)
            {
                // Map head tilt onto a luminance ratio, clamped to the legal [-1, 1] range.
                float ratio = Mathf.Clamp(ratioController.getVerticalDirection(), -1, 1);
                cameraRig.SetBinocularSuppressionRatio(ratio);

                // Wait a frame before sampling the tilt again.
                yield return null;
            }
        }
// Example No. 7 — snippet separator from the code-aggregation source (score: 0)
        /// <summary>
        /// Scene setup for the binocular-conflict exercise: registers the practitioner remote
        /// controls, wires up head-tilt input, and puts the model into conflict.
        /// </summary>
        void Start()
        {
            NetworkManager.instance.RegisterButton("inConflict", "Add Conflict", "Put the eyes into conflict");
            NetworkManager.instance.RegisterButton("outOfConflict", "Remove Conflict", "Remove conflict");
            NetworkManager.instance.RegisterButton("save", "Save ratio", "Save the luminance ratio between the eyes");
            NetworkManager.instance.RegisterFloat("brightnessRatio", -1f, 1f, 0.05f, "Luminance ratio", "Brightness ratio between the eyes.");

            // "1" in this PlayerPrefs key means practitioner mode is on (simplified from a redundant "? true : false").
            ignoreStillnessSensor = PlayerPrefs.GetString("EyeSkills.practitionerMode") == "1";

            ratioController = new EyeSkillsVRHeadsetInput(trackedCamera);
            esInput         = EyeSkillsInput.instance;
            cameraRig       = cameraRigObject.GetComponent <EyeSkillsCameraRig>();
            brightnessRatio = 0;

            // Start the exercise in conflict, announce it, and show the model.
            model.IntoConflict();
            AudioManager.instance.Say("inConflict");
            model.Show(true);
        }
        /// <summary>
        /// Identifies the binocular suppression ratio: lets the user steer the luminance ratio by
        /// head tilt, and selects the current ratio once the headset has been held still long enough.
        /// </summary>
        /// <returns>The coroutine enumerator.</returns>
        /// <param name="ratioController">Ratio controller. Where we get our signal from to alter the ratio - e.g. tilt angle of head</param>
        /// <param name="model">Model. The model that provides us the conflict images.</param>
        /// <param name="cameraRig">Camera rig. The cameras whose relative brightness we wish to alter.</param>
        /// <param name="stillness">Stillness. Where we get information from about whether or not the headset is still. Might have been better to pass this headset specific mechanism to the VRHeadsetInput, which could have implemented a generic interface for reporting "stillness"</param>
        /// <param name="indicator">Indicator. The element which informs the user about the progress of the stillness based selection</param>
        /// <param name="NextStep">Callback invoked with the selected suppression ratio.</param>
        /// <param name="secondsOfStillnessForSelect">Seconds of stillness required to confirm the selection.</param>
        IEnumerator IdentifyBinocularSuppressionRatio(EyeSkillsVRHeadsetInput ratioController,
                                                      ConflictZoneModel model,
                                                      EyeSkillsCameraRig cameraRig,
                                                      EyeSkillsVRHeadsetSelectByStillness stillness,
                                                      SelectionIndicatorScaler indicator,
                                                      Action <float> NextStep,
                                                      float secondsOfStillnessForSelect)
        {
            float suppressionRatio = 0;
            float still            = 0;
            float brightnessRatio;

            stillness.StartTracking();

            //TODO : We probably need a maximum timeout for if the headset never appears to settle!
            still = stillness.getTimeStill();
            while (still < secondsOfStillnessForSelect)
            {
                //Update the luminance ratio for each eye
                brightnessRatio = Mathf.Clamp(ratioController.getVerticalDirection(), -1, 1);

                cameraRig.SetBinocularSuppressionRatio(brightnessRatio);

                // BUG FIX: suppressionRatio was previously never updated, so NextStep always
                // received 0 regardless of the ratio the user settled on. Track the latest value.
                suppressionRatio = brightnessRatio;

                //Redraw the selection-progress indicator.
                float percentage = (still / secondsOfStillnessForSelect);
                indicator.SetIndicatorPercentage(percentage);
                yield return(null);

                still = stillness.getTimeStill();
            }

            // Selection confirmed: stop tracking, tidy up, and hand the chosen ratio onward.
            stillness.StopTracking();
            indicator.Reset();
            StopConflictEnvironment();
            NextStep(suppressionRatio);
        }