/// <summary>
/// Sets the sprite shown on a UIMolecule's "Image" child, or hides the image entirely.
/// </summary>
/// <param name="m">The molecule whose "Image" child should be updated.</param>
/// <param name="image">Resources path of the sprite to load; null or empty hides the image.</param>
public void SetDefaultVRImage(UIMolecule m, string image)
{
    GameObject imageChild = GetMolecularChild(m, "Image");
    if (imageChild == null)
    {
        // GetMolecularChild can return null when the child is missing; bail out instead of throwing.
        Debug.Log("SetDefaultVRImage could not find the Image child.");
        return;
    }

    SpriteRenderer i = imageChild.GetComponent<SpriteRenderer>();
    if (!string.IsNullOrEmpty(image))
    {
        i.enabled = true;
        Sprite sprite = Resources.Load<Sprite>(image);
        if (sprite != null)
        {
            // Load the sprite - this should work for each file.
            i.sprite = sprite;
        }
        else
        {
            Debug.Log("SetDefaultVRImage could not load sprite " + image);
        }
    }
    else
    {
        // No image requested: hide the renderer and clear any previous sprite.
        i.enabled = false;
        i.sprite = null;
    }
}
/// <summary>
/// Initialise this instance. Virtual so unit tests can override default
/// initialisation with mocks.
/// </summary>
/// <param name="_atom">The training atom this experience presents.</param>
/// <param name="_initCallback">Invoked with true once initialisation completes.</param>
public virtual void Initialise(ESTrainingAtom _atom, Action<bool> _initCallback)
{
    atom = _atom;
    audioManager = AudioManager.instance;

    // Fall back to the default display molecule when none was specified.
    if (atom.molecule == "")
    {
        atom.molecule = "default";
    }

    // Fetch our default display element, show it, and cache its parts.
    molecule = GetMolecule(atom.molecule);
    molecule.container.SetActive(true);
    image = GetMolecularChild(molecule, "Image");
    titleText = GetMolecularChild(molecule, "TitleText");
    hitKeyToContinue = GetMolecularChild(molecule, "HitKeyToContinue");

    // Naiive but for now, ok.
    _initCallback(true);
}
/// <summary>
/// Finds the atom's UIMolecule and activates its container - this only shows us the Sphere.
/// </summary>
/// <returns>The activated molecule, or null when it could not be found.</returns>
protected UIMolecule ActivateUIMolecule()
{
    molecule = FindUIMolecule(atom.molecule);
    // Guard against a missing molecule (mirrors the guarded variant elsewhere in the
    // file) instead of throwing a NullReferenceException on .container.
    if (molecule != null)
    {
        molecule.container.SetActive(true);
        return molecule;
    }
    Debug.Log("ActivateUIMolecule could not find molecule.");
    return null;
}
/// <summary>
/// Looks up a UIMolecule component via the scene GameObject named "UIMolecule-" + id.
/// </summary>
/// <param name="id">The molecule id suffix.</param>
/// <returns>The UIMolecule component, or null when no matching GameObject exists.</returns>
public UIMolecule GetMolecule(string id)
{
    GameObject go = GameObject.Find("UIMolecule-" + id);
    // GameObject.Find returns null for inactive or missing objects; report rather than throw.
    if (go == null)
    {
        Debug.Log("GetMolecule could not find GameObject UIMolecule-" + id);
        return null;
    }
    return go.GetComponentInChildren<UIMolecule>();
}
/// <summary>
/// Finds the child of a molecule whose id matches the given id.
/// </summary>
/// <param name="molecule">The molecule to search; may be null.</param>
/// <param name="id">The child id to look for.</param>
/// <returns>The child's GameObject, or null when it cannot be found.</returns>
public GameObject GetMolecularChild(UIMolecule molecule, string id)
{
    // Explicit null checks instead of exception-driven control flow: the original
    // catch-all also hid unrelated errors thrown inside the lookup.
    if (molecule != null && molecule.children != null)
    {
        var child = molecule.children.Find(i => i.id == id);
        if (child != null)
        {
            return child.gameObject;
        }
    }
    Debug.Log("Could not load Molecular Child " + id);
    return null;
}
/// <summary>
/// Reviews what the participant experienced during the blinker phase: shows the
/// molecule with the atom's image, plays the atom's audio, then waits for a
/// vertical head tilt as their answer.
/// </summary>
public void ReviewBlinkerExperience()
{
    // Now we want to check what the person experienced.
    UIMolecule reviewMolecule = ActivateUIMolecule();
    SetDefaultVRImage(reviewMolecule, atom.image);

    audioManager.Say(atom.audioFile);

    // Wait 60 seconds before timing out.
    DetectVerticalHeadTilt(EndBlinkerSelection, 60f);
}
/// <summary>
/// Hides the atom's UIMolecule container if it can still be found.
/// </summary>
protected void DeActivateUIMolecule()
{
    molecule = FindUIMolecule(atom.molecule);
    if (molecule == null)
    {
        Debug.Log("DeActivateUIMolecule could not find molecule. Was it already deactivated?");
        return;
    }
    molecule.container.SetActive(false);
}
/// <summary>
/// Finds the atom's UIMolecule and activates it - this only shows us the Sphere.
/// </summary>
/// <returns>The activated molecule, or null when it could not be found.</returns>
protected UIMolecule ActivateUIMolecule()
{
    molecule = FindUIMolecule(atom.molecule);
    if (molecule == null)
    {
        Debug.Log("ActivateUIMolecule could not find molecule.");
        return null;
    }
    molecule.container.SetActive(true);
    return molecule;
}
/// <summary>
/// Coordinating what the participant experiences on the basis of the atom's supplied physicsState.
/// Doing things this way should actually simplify things like certifying the app is in known states.
/// Every branch resets the camera/UI to a known baseline first, then dispatches on physicsState.
/// </summary>
/// <param name="_completionCallback">Completion callback, invoked (via AtomExpiry) when the atom finishes.</param>
public override void Start(Action<String> _completionCallback)
{
    base.Start(_completionCallback);

    // Baseline: camera looks forward and does not follow headset rotation.
    Debug.Log("Making the camera look forward by default, without following headset rotation.");
    cameraRig.ImmediatelyLookToOrigin();
    cameraRig.SetLeftEyePositionOnly();
    cameraRig.SetRightEyePositionOnly();

    // Baseline: UI hidden until a state explicitly shows it.
    Debug.Log("UIMolecule disabled by default.");
    DeActivateUIMolecule();
    completionCallback = _completionCallback;

    if (atom.physicsState == 1) //Let the participant know what is coming.
    {
        ActivateUIMolecule();
        // Expire the atom as soon as the audio finishes.
        audioManager.Say(atom.audioFile, delegate() { AtomExpiry(0, false, "Physics state 1 - After audio expires"); });
    }
    else if (atom.physicsState == 2) //Present them with a conflict image
    {
        // Conflict stops 5 seconds after the audio ends.
        audioManager.Say(atom.audioFile, StopConflictEnvironment, 5);
        StartConflictEnvironment();
    }
    else if (atom.physicsState == 3) //How did they perceive the image? The gallery.
    {
        //Allow the user to look around
        cameraRig.SetLeftEyeRotationAndPosition();
        cameraRig.SetRightEyeRotationAndPosition();
        audioManager.Say(atom.audioFile);
        StartPerceptionGallery(SelectedPerceptionImage);
    }
    else if (atom.physicsState == 5) //Now find out if blinkers can remove conflict for them
    {
        StartConflictEnvironment();
        conflictController.OutOfConflict();
        blinkerExperienceInSeconds = 6; //Allow some time after the audio has finished to keep observing
        audioManager.Say(atom.audioFile, delegate() { coRoutineHelper.BeginCoroutine(TimeBlinkerExperience()); });
    }
    else if (atom.physicsState == 6) //Did the blinkers remove conflict for them?
    {
        ReviewBlinkerExperience();
    }
    else if (atom.physicsState == 7) //TODO: Day1 - OBSOLETE?
    {
        UIMolecule m = ActivateUIMolecule();
        SetDefaultVRImage(m, atom.image);
        ActivateContinueButton(false);
        ExplainBlinkerExperience();
    }
    else if (atom.physicsState == 8) //Day2 - Present a conflict environment for ten seconds after audio ends : Similar to state 2
    {
        audioManager.Say(atom.audioFile, StopConflictEnvironment, 10);
        StartConflictEnvironment();
    }
    else if (atom.physicsState == 9) //Day2 - Show the static Blinker
    {
        Debug.Log("Explaining about Blinkers, after audio ends, we start blinking...");
        //Manually start the conflict environment.
        StartConflictEnvironment();
        conflictController.OutOfConflict();
        blinkerExperienceInSeconds = 10;
        audioManager.Say(atom.audioFile, delegate() { coRoutineHelper.BeginCoroutine(TimeBlinkerExperience()); });
    }
    else if (atom.physicsState == 10) //Now show the alternating Blinker toggling
    {
        blinkerExperienceInSeconds = 30;
        StartBlinkerEnvironment();
        audioManager.Say(atom.audioFile);
    }
    else if (atom.physicsState == 11) //Present them with a conflict image
    {
        // Unlike states 2/8, the conflict stops immediately when the audio ends (no delay argument).
        audioManager.Say(atom.audioFile, StopConflictEnvironment);
        StartConflictEnvironment();
    }
    else if (atom.physicsState == 12)
    {
        //Test they can tilt their heads upwards
        //Get hold of the HeadsetVRInput
        DemoSuppression();
        if (data.numberOfTiltAttempts >= maxNumberOfTiltAttempts)
        {
            data.numberOfTiltAttempts = 0; //A hack for testing. Shouldn't affect production
            data.Save();
        }
        audioManager.Say(atom.audioFile, delegate() {
            //We pass "0" to the EndUpwardTiltCheck to signify we ran out of time.
            DetectVerticalHeadTilt(EndUpwardTiltCheck, timeToWaitForHeadTiltCheck);
        });
    }
    else if (atom.physicsState == 13)
    {
        DemoSuppression();
        if (data.numberOfTiltAttempts >= maxNumberOfTiltAttempts)
        {
            data.numberOfTiltAttempts = 0; //A hack for testing. Shouldn't affect production
            data.Save();
        }
        //Test they can tilt their heads downwards
        audioManager.Say(atom.audioFile, delegate() { DetectVerticalHeadTilt(EndDownwardTiltCheck, timeToWaitForHeadTiltCheck); });
    }
    else if (atom.physicsState == 14) //Decide whether to repeat the tilt tests or not
    {
        if ((data.couldTiltUpward == false) || (data.couldTiltDownward == false))
        {
            //Debug.Log("One of the tilts failed " + data.couldTiltUpward + data.couldTiltDownward);
            if (data.numberOfTiltAttempts == (maxNumberOfTiltAttempts - 1))
            //We've already tried enough times and still don't have adequate tilt
            //Debug.Log("Too many attempts");
            {
                data.numberOfTiltAttempts = maxNumberOfTiltAttempts;
                AtomExpiry(0, true, "Failed to tilt head adequately");
            }
            else
            {
                //Let's have another go
                AtomExpiry(1, true, "Trying another round of head tilt tests");
            }
        }
        else //They must have succeeded
        {
            AtomExpiry(2, true, "Head tilt tests succeeded");
        }
    }
    else if (atom.physicsState == 15) //Time to detect the suppression ratio
    {
        //Enable head rotation
        cameraRig.SetLeftEyeRotationAndPosition();
        cameraRig.SetRightEyeRotationAndPosition();

        //Get hold of the HeadsetVRInput
        // NOTE(review): assumes a GameObject named "Head" exists in the scene - confirm.
        EyeSkillsVRHeadsetInput ratioController = GameObject.Find("Head").GetComponent<EyeSkillsVRHeadsetInput>();

        //Get hold of the conflict objects
        StartConflictEnvironment(); //Sets conflictZoneModel
        SelectionIndicatorScaler indicator = cameraRig.GetComponentInChildren<SelectionIndicatorScaler>();

        //Get hold of our Stillness Selector - adding it to the Head object
        EyeSkillsVRHeadsetSelectByStillness stillness = EyeSkillsVRHeadsetSelectByStillness.instance;

        audioManager.Say(atom.audioFile, delegate() {
            //Start a couroutine which manages the brightness control and calls a finishing function with a suppression ratio
            Coroutine c = coRoutineHelper.BeginCoroutine(IdentifyBinocularSuppressionRatio(
                ratioController, conflictController, cameraRig, stillness, indicator,
                delegate(float suppressionRatio) {
                    // Persist the measured ratio before expiring the atom.
                    data.binocularSuppressionRatio = suppressionRatio;
                    data.Save();
                    AtomExpiry(0, false, "SuppressionRatio captured");
                }, 6f));
            // Track the coroutine so it can be cancelled on shutdown.
            coroutinesToShutdown.Add(c);
        });
    }
    else if (atom.physicsState == 16)
    {
        ActivateUIMolecule();
        //Allow the user to look around
        cameraRig.SetLeftEyeRotationAndPosition();
        cameraRig.SetRightEyeRotationAndPosition();
        audioManager.Say(atom.audioFile);
        StartPerceptionGallery(SelectedTolerancePerceptionImage);
    }
}
/// <summary>
/// Hides the atom's UIMolecule container if it can still be found.
/// </summary>
protected void DeActivateUIMolecule()
{
    molecule = FindUIMolecule(atom.molecule);
    // Guard against a missing molecule (matches the guarded variant elsewhere in the
    // file) instead of throwing a NullReferenceException on .container.
    if (molecule != null)
    {
        molecule.container.SetActive(false);
    }
    else
    {
        Debug.Log("DeActivateUIMolecule could not find molecule. Was it already deactivated?");
    }
}