private void FinishedRecordEventHandler(AudioClip clip)
{
    if (_startRecordButton.interactable)
    {
        _speechRecognitionState.color = Color.yellow;
    }

    if (clip == null)
    {
        return;
    }

    RecognitionConfig config = RecognitionConfig.GetDefault();
    config.languageCode = ((Enumerators.LanguageCode)_languageDropdown.value).Parse();
    config.audioChannelCount = clip.channels;
    // configure other parameters of the config if needed

    GeneralRecognitionRequest recognitionRequest = new GeneralRecognitionRequest()
    {
        audio = new RecognitionAudioContent()
        {
            content = clip.ToBase64()
        },
        //audio = new RecognitionAudioUri() // for a Google Cloud Storage object
        //{
        //    uri = "gs://bucketName/object_name"
        //},
        config = config
    };

    _speechRecognition.Recognize(recognitionRequest);
}
private void FinishedRecordEventHandler(AudioClip clip)
{
    if (!_voiceDetectionToggle.isOn && _startRecordButton.interactable)
    {
        _speechRecognitionState.color = Color.yellow;
    }

    if (clip == null || !_recognizeDirectlyToggle.isOn)
    {
        return;
    }

    RecognitionConfig config = RecognitionConfig.GetDefault();
    config.languageCode = ((Enumerators.LanguageCode)_languageDropdown.value).Parse();
    config.speechContexts = new SpeechContext[]
    {
        new SpeechContext()
        {
            phrases = _contextPhrasesInputField.text.Replace(" ", string.Empty).Split(',')
        }
    };
    config.audioChannelCount = clip.channels;
    // configure other parameters of the config if needed

    GeneralRecognitionRequest recognitionRequest = new GeneralRecognitionRequest()
    {
        audio = new RecognitionAudioContent()
        {
            content = clip.ToBase64()
        },
        //audio = new RecognitionAudioUri() // for a Google Cloud Storage object
        //{
        //    uri = "gs://bucketName/object_name"
        //},
        config = config
    };

    if (_longRunningRecognizeToggle.isOn)
    {
        _speechRecognition.LongRunningRecognize(recognitionRequest);
    }
    else
    {
        _speechRecognition.Recognize(recognitionRequest);
    }
}
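For context, here is a minimal sketch of how a handler like the one above might be wired up and how a successful recognition result could be read back. The event names (FinishedRecordEvent, RecognizeSuccessEvent) and the response types (RecognitionResponse and its results/alternatives fields) are assumptions based on this snippet and the plugin's typical API; check the plugin's example scene for the exact signatures in your version.

private void Start()
{
    // Assumed event names; subscribe once when the component starts.
    _speechRecognition.FinishedRecordEvent += FinishedRecordEventHandler;
    _speechRecognition.RecognizeSuccessEvent += RecognizeSuccessEventHandler;
}

private void OnDestroy()
{
    // Unsubscribe to avoid callbacks on a destroyed object.
    _speechRecognition.FinishedRecordEvent -= FinishedRecordEventHandler;
    _speechRecognition.RecognizeSuccessEvent -= RecognizeSuccessEventHandler;
}

private void RecognizeSuccessEventHandler(RecognitionResponse response)
{
    // Each result typically carries one or more alternatives ordered by confidence;
    // the first alternative's transcript is usually what you display.
    if (response != null && response.results != null && response.results.Length > 0)
    {
        var alternative = response.results[0].alternatives[0];
        Debug.Log("Transcript: " + alternative.transcript + " (confidence: " + alternative.confidence + ")");
    }
}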