/// <summary>
/// Wires up the OpenEars speech pipeline for iOS: resolves the mapping and
/// text-to-speech services from the MvvmCross IoC container, creates the
/// OpenEars observer/controllers, and records the bundled language-model,
/// dictionary, and acoustic-model paths.
/// </summary>
public TouchSpeechToTextService()
{
    MappingService = Mvx.Resolve<ITextFromSpeechMappingService>();
    TextToSpeechService = Mvx.Resolve<ITextToSpeechService>();

    // WeakDelegate avoids a strong reference cycle between the observer
    // and this service.
    observer = new OEEventsObserver();
    observer.WeakDelegate = new MyOpenEarsEventsObserverDelegate(this);

    pocketSphinxController = new OEPocketsphinxController();
    fliteController = new OEFliteController();

    // Flite voice names shipped with OpenEars (female / male).
    firstVoiceToUse = "cmu_us_slt";
    secondVoiceToUse = "cmu_us_rms";

    // Path.Combine handles the separator instead of hand-built concatenation.
    string resourcePath = NSBundle.MainBundle.ResourcePath;
    pathToLanguageModel = System.IO.Path.Combine(resourcePath, "OpenEars1.languagemodel");
    pathToDictionary = System.IO.Path.Combine(resourcePath, "OpenEars1.dic");
    pathToAcousticModel = System.IO.Path.Combine(resourcePath, "AcousticModelEnglish.bundle");
}
/// <summary>
/// Points the shared Pocketsphinx recognizer at the given language model and
/// dictionary: if it is already listening the model is swapped in place,
/// otherwise recognition is started from scratch with the English acoustic model.
/// </summary>
/// <param name="pathLangModel">Full path to the OpenEars language model file.</param>
/// <param name="pathDictionary">Full path to the matching pronunciation dictionary.</param>
private void startListening(string pathLangModel, string pathDictionary)
{
    var recognizer = OEPocketsphinxController.SharedInstance();

    if (recognizer.isListening)
    {
        // Already running — just switch the active model/dictionary.
        recognizer.ChangeLanguageModelToFile(pathLangModel, pathDictionary);
        return;
    }

    // Cold start: resolve the bundled acoustic model and begin listening
    // (final 'false' = language model is not JSGF grammar).
    var acousticModelPath = OEAcousticModel.PathToModel("AcousticModelEnglish");
    recognizer.StartListeningWithLanguageModelAtPath(
        pathLangModel, pathDictionary, acousticModelPath, false);
}