/// <summary>
/// Make sure that the recorder and player are unallocated when the app goes into the background
/// </summary>
protected override void OnPause()
{
    base.OnPause();

    if (audioManager != null)
    {
        audioManager.CleanUp();
        audioManager = null;
    }

    if (mediaPlayer != null)
    {
        mediaPlayer.Release();
        mediaPlayer = null;
    }

    if (eventVideo.IsPlaying)
    {
        eventVideo.StopPlayback();
    }

    if (tts != null)
    {
        tts.Clean();
        tts = null;
    }
}
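// A minimal sketch (not part of the original class) of how the video position could be
// preserved across a pause/resume cycle instead of stopping playback outright. The
// videoPosition field and both helper methods are assumptions introduced here for
// illustration; VideoView's IsPlaying, CurrentPosition, Pause(), SeekTo() and Start()
// are standard Android APIs.
private int videoPosition;

private void PauseVideoIfPlaying()
{
    if (eventVideo != null && eventVideo.IsPlaying)
    {
        videoPosition = eventVideo.CurrentPosition; // remember where playback stopped
        eventVideo.Pause();
    }
}

private void ResumeVideoIfNeeded()
{
    if (eventVideo != null && videoPosition > 0)
    {
        eventVideo.SeekTo(videoPosition); // jump back to the saved position
        eventVideo.Start();
    }
}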
/// <summary>
/// Progress to the next part of the scenario
/// </summary>
private void ShowNextEvent()
{
    currIndex++;
    string ttsHelp = "Tap the text to listen to it spoken aloud!";

    mainButton.Text = "Record Response";
    mainButton.SetBackgroundResource(Resource.Drawable.recordButtonBlue);

    if (tts == null)
    {
        tts = new TTSManager(this, null);
    }

    canSpeak = true;

    if (tts.IsSpeaking())
    {
        tts.StopSpeaking();
    }

    // Check if the scenario is complete
    if (currIndex >= scenario.ParticipantTasks.Length)
    {
        ExportRecordings();
        return;
    }

    Title = scenario.Title + " | " + (currIndex + 1) + " of " + scenario.ParticipantTasks.Length;
    inputHint.Visibility = ViewStates.Visible;

    // Use the alternative layout for giving the user a choice between 2 items
    if (scenario.ParticipantTasks[currIndex].ParticipantTaskResponse.Type == TaskResponse.ResponseType.Choice)
    {
        mainLayout.Visibility = ViewStates.Gone;
        choiceLayout.Visibility = ViewStates.Visible;

        choicePrompt.Text = scenario.ParticipantTasks[currIndex].ParticipantTaskResponse.Prompt;

        // Load the choice images (guard against a missing or short Related array)
        // TODO allow for more choices
        string[] related = scenario.ParticipantTasks[currIndex].ParticipantTaskResponse.Related;

        if (related != null && related.Length > 0)
        {
            string choice1Key = related[0];

            if (resources.ContainsKey(choice1Key))
            {
                choiceImage1.SetImageURI(Android.Net.Uri.FromFile(new Java.IO.File(resources[choice1Key])));
            }
        }

        if (related != null && related.Length > 1)
        {
            string choice2Key = related[1];

            if (resources.ContainsKey(choice2Key))
            {
                choiceImage2.SetImageURI(Android.Net.Uri.FromFile(new Java.IO.File(resources[choice2Key])));
            }
        }

        helpText = ttsHelp + "\nRead or listen to the prompt and decide which image is most likely the solution. Tap the image to make your choice!";
    }
    else
    {
        // Use the standard layout
        mainLayout.Visibility = ViewStates.Visible;
        choiceLayout.Visibility = ViewStates.Gone;

        if (scenario.ParticipantTasks[currIndex].ParticipantTaskResponse.Type == TaskResponse.ResponseType.None)
        {
            canSpeak = false;
            inputHint.Visibility = ViewStates.Gone;
            eventPrompt.Text = "";
            eventPrompt.SetTypeface(null, TypefaceStyle.Normal);
            mainButton.Text = "Continue";
            helpText = ttsHelp + "\nPress the Continue button to advance the practice activity.";
        }
        // Load text
        else if (scenario.ParticipantTasks[currIndex].ParticipantTaskResponse.Type == TaskResponse.ResponseType.Freeform)
        {
            // Make freeform prompts italic
            string given = scenario.ParticipantTasks[currIndex].ParticipantTaskResponse.Prompt;
            eventPrompt.SetTypeface(null, TypefaceStyle.BoldItalic);
            eventPrompt.Text = (given != null) ? given : "";
            inputHint.Text = "Your response:";
            helpText = ttsHelp + "\nPress the record button and follow the instruction below \"Your response\". Speak as clearly and loudly as you can!";
        }
        else
        {
            eventPrompt.Text = scenario.ParticipantTasks[currIndex].ParticipantTaskResponse.Prompt;
            eventPrompt.SetTypeface(null, TypefaceStyle.Normal);
            inputHint.Text = "Please say this:";
            helpText = ttsHelp + "\nPress the record button and read the text below \"Please say this\". Speak as clearly and loudly as you can, trying to be as accurate to the text as possible!";
        }
    }

    eventTranscript.Text = scenario.ParticipantTasks[currIndex].ParticipantTaskContent.Text;

    if (scenario.ParticipantTasks[currIndex].ParticipantTaskContent.Type == TaskContent.ContentType.Video)
    {
        // Load a video instead of audio + image
        SetDefaultWindowColours();
        eventVideo.Visibility = ViewStates.Visible;
        eventImage.Visibility = ViewStates.Gone;

        string vidKey = scenario.ParticipantTasks[currIndex].ParticipantTaskContent.Visual;

        if (vidKey != null && resources.ContainsKey(vidKey))
        {
            var vidUri = Android.Net.Uri.Parse(resources[vidKey]);
            eventVideo.SetVideoURI(vidUri);
            eventVideo.Start();
        }
    }
    else
    {
        eventVideo.Visibility = ViewStates.Gone;
        eventImage.Visibility = ViewStates.Visible;

        // Load the image if it exists
        string visualKey = scenario.ParticipantTasks[currIndex].ParticipantTaskContent.Visual;

        if (visualKey != null && resources.ContainsKey(visualKey))
        {
            Android.Net.Uri imageUri = Android.Net.Uri.FromFile(new Java.IO.File(resources[visualKey]));
            Bitmap bitmap = MediaStore.Images.Media.GetBitmap(ContentResolver, imageUri);
            eventImage.SetImageBitmap(bitmap);
            Palette.GenerateAsync(bitmap, this);
        }
        else
        {
            SetDefaultWindowColours();
        }

        if (scenario.ParticipantTasks[currIndex].ParticipantTaskContent.Type == TaskContent.ContentType.Audio)
        {
            // Load audio
            string audioKey = scenario.ParticipantTasks[currIndex].ParticipantTaskContent.Audio;

            if (audioKey != null && resources.ContainsKey(audioKey))
            {
                if (mediaPlayer == null)
                {
                    mediaPlayer = new MediaPlayer();
                }
                else
                {
                    mediaPlayer.Reset();
                }

                mediaPlayer.SetDataSource(resources[audioKey]);
                mediaPlayer.Prepare();
                mediaPlayer.Looping = false;
                mediaPlayer.Start();
            }
            else if (autoSpeak)
            {
                tts.SayLine(eventTranscript.Text, null, true);
            }
        }
        else if (autoSpeak)
        {
            tts.SayLine(eventTranscript.Text, null, true);
        }
    }
}
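// A possible refactor (hypothetical, not in the original source): ShowNextEvent indexes
// scenario.ParticipantTasks[currIndex] many times. A small read-only property would keep
// each branch shorter and avoid repeating the index expression. "ParticipantTask" is an
// assumed name for the element type of scenario.ParticipantTasks.
private ParticipantTask CurrentTask
{
    get { return scenario.ParticipantTasks[currIndex]; }
}

// Example usage inside ShowNextEvent:
//   eventTranscript.Text = CurrentTask.ParticipantTaskContent.Text;
//   if (CurrentTask.ParticipantTaskResponse.Type == TaskResponse.ResponseType.Choice) { ... }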
protected override void OnResume()
{
    base.OnResume();

    audioManager = new AndroidUtils.RecordAudioManager(this);
    mainButton.SetBackgroundResource(Resource.Drawable.recordButtonBlue);

    if (tts == null)
    {
        tts = new TTSManager(this, null);
    }

    ISharedPreferences userPrefs = PreferenceManager.GetDefaultSharedPreferences(this);
    autoSpeak = userPrefs.GetBoolean("autoTTS", true);

    // Reload the resources for this stage of the scenario, in case they were lost (e.g. audio, video)
    if (currIndex >= 0)
    {
        currIndex--;
        ShowNextEvent();
    }
    else
    {
        ImageView icon = FindViewById<ImageView>(Resource.Id.scenarioIcon);
        AndroidUtils.PrepareIcon(icon, scenario);
    }
}
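// A minimal sketch (assumed, not from the original source) of persisting the scenario
// position across activity recreation (e.g. rotation), so OnResume can rebuild the same
// event. KeyCurrIndex is a hypothetical constant; OnSaveInstanceState and
// Bundle.PutInt/GetInt are standard Android APIs.
private const string KeyCurrIndex = "currIndex";

protected override void OnSaveInstanceState(Bundle outState)
{
    base.OnSaveInstanceState(outState);
    outState.PutInt(KeyCurrIndex, currIndex); // remember how far through the scenario we are
}

// In OnCreate, the saved value could be restored before the first ShowNextEvent call:
//   if (savedInstanceState != null)
//   {
//       currIndex = savedInstanceState.GetInt(KeyCurrIndex, -1);
//   }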