/// <summary>
/// Initializes the main screen: layout, helper objects, view lookups and the
/// speech recognizer, plus the record-button trigger.
/// </summary>
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    SetContentView(Resource.Layout.Main);

    // Helper objects used by the rest of the activity.
    mWork = new Work(this);
    mAdditional = new Additional(this);

    // View lookups.
    Text_Title = FindViewById<TextView>(Resource.Id.textView1);
    Text_Speech = FindViewById<TextView>(Resource.Id.textView2);
    Text_Information = FindViewById<TextView>(Resource.Id.textView3);
    Text_Error = FindViewById<TextView>(Resource.Id.textView4);
    image = FindViewById<ImageView>(Resource.Id.imageView1);
    recButton = FindViewById<Button>(Resource.Id.btnRecord);

    // The recognizer reports its callbacks to this activity.
    sr = SpeechRecognizer.CreateSpeechRecognizer(this);
    sr.SetRecognitionListener(this);

    // Start a recognition session on tap; ignore taps while one is in progress.
    recButton.Click += (sender, e) =>
    {
        if (RecordingOn)
        {
            return;
        }
        RecordingOn = true;
        speech_recognition();
    };
}
/// <summary>
/// Recreates repositories and services on resume; when the session continues,
/// also rebuilds the Spanish speech recognition and TTS pipeline.
/// </summary>
protected override void OnResume()
{
    base.OnResume();

    FakeSessionDelete = new FakeSessionDelete();

    // Database and repositories.
    database = new SQLiteRepository();
    userRepo = new UserRepository(database);
    contactRepo = new ContactRepository(database);
    configRepo = new ConfigRepository(database);
    database.CreateDatabase();

    // Services.
    loginService = new LoginService();
    contactService = new ContactService();
    errorText = new ErrorText();

    if (!_continue)
    {
        return;
    }

    // Speech pipeline is only rebuilt when the flow is continuing.
    speechReco = SpeechRecognizer.CreateSpeechRecognizer(this.ApplicationContext);
    speechReco.SetRecognitionListener(this);
    intentReco = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguagePreference, "es");
    intentReco.PutExtra(RecognizerIntent.ExtraCallingPackage, this.PackageName);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelWebSearch);
    intentReco.PutExtra(RecognizerIntent.ExtraMaxResults, 1);
    toSpeech = new TextToSpeech(this, this);
}
/// <summary>
/// Prepares the speech recognizer and a pre-configured recognition intent.
/// The layout/button/progress wiring and the StartListening trigger that once
/// existed here are disabled; nothing starts recognition in this method.
/// </summary>
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);

    speech = SpeechRecognizer.CreateSpeechRecognizer(this);
    speech.SetRecognitionListener(this);

    // NOTE(review): 50-second complete-silence and minimum-length values look
    // unusually large for interactive use — confirm they are intentional.
    Intent recognizerIntent = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    recognizerIntent.PutExtra(RecognizerIntent.ExtraLanguagePreference, "en-US");
    recognizerIntent.PutExtra(RecognizerIntent.ExtraSpeechInputCompleteSilenceLengthMillis, 50000);
    recognizerIntent.PutExtra(RecognizerIntent.ExtraSpeechInputPossiblyCompleteSilenceLengthMillis, 1500);
    recognizerIntent.PutExtra(RecognizerIntent.ExtraSpeechInputMinimumLengthMillis, 50000);
    recognizerIntent.PutExtra(RecognizerIntent.ExtraMaxResults, 1);
    recognizerIntent.PutExtra(RecognizerIntent.ExtraPartialResults, true);
}
/// <summary>
/// Wires settings-item click handlers, restores services and repositories, and
/// rebuilds the Spanish speech recognition + TTS pipeline.
/// </summary>
protected override void OnResume()
{
    base.OnResume();

    // Touching any settings item stops ongoing speech/recognition first.
    nomusuario.Click += delegate { StopItems(); };
    tipo.Click += delegate { StopItems(); };
    velocidad.Click += delegate { StopItems(); };
    activacion.Click += delegate { StopItems(); };

    loginService = new LoginService();
    userService = new UserService();
    try
    {
        client = loginService.Connect();
        if (client.IsUserAuthorized())
        {
            usuario = client.Session.TLUser;
        }
    }
    catch (Exception) // fix: the exception variable was captured but never used
    {
        // Without a connection the app cannot function; close the whole task stack.
        this.FinishAffinity();
    }

    database = new SQLiteRepository();
    userRepository = new UserRepository(database);
    configRepository = new ConfigRepository(database);
    errorText = new ErrorText();
    configuracion = configRepository.GetConfig();

    // Spanish speech recognition + TTS.
    speechReco = SpeechRecognizer.CreateSpeechRecognizer(this.ApplicationContext);
    speechReco.SetRecognitionListener(this);
    intentReco = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguagePreference, "es");
    intentReco.PutExtra(RecognizerIntent.ExtraCallingPackage, this.PackageName);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelWebSearch);
    intentReco.PutExtra(RecognizerIntent.ExtraMaxResults, 1);
    toSpeech = new TextToSpeech(this, this);
    gestureDetector = new GestureDetector(this);
}
/// <summary>
/// Creates a fresh recognizer on the application context, applies the audio
/// flag for first-time initialization, and begins listening with VoiceIntent.
/// </summary>
void StartRecognizer()
{
    _speechRecognizer = SpeechRecognizer.CreateSpeechRecognizer(Application.Context);

    // Audio feedback is only toggled according to whether this is the first init.
    platformService.SetAudioEnabled(_firstInit);

    _speechRecognizer.SetRecognitionListener(this);
    _speechRecognizer.StartListening(VoiceIntent);
}
/// <summary>
/// Starts a one-shot recognition session and asynchronously waits for a listener
/// callback to complete <c>_tcs</c> with the result.
/// </summary>
/// <returns>The integer result posted to the TaskCompletionSource.</returns>
private async Task<int> listenRequest()
{
    _speech = SpeechRecognizer.CreateSpeechRecognizer(_context);
    _speech.SetRecognitionListener(this);

    // RunContinuationsAsynchronously keeps awaiting code off the recognizer's
    // callback thread.
    _tcs = new TaskCompletionSource<Java.Lang.Object>(TaskCreationOptions.RunContinuationsAsynchronously);
    try
    {
        _speech.StartListening(_stt.IntentSTT());
    }
    catch (Exception e)
    {
        // fix: the exception used to be silently swallowed, leaving the task below
        // awaited forever because no listener callback would ever complete it.
        _tcs.TrySetException(e);
    }
    return (int)await _tcs.Task;
}
/// <summary>
/// Tears down the current recognizer, builds a replacement with one-second
/// silence timeouts, and resumes listening.
/// </summary>
void startover()
{
    // Release the old recognizer before creating a new one.
    _speech.Destroy();
    _speech = SpeechRecognizer.CreateSpeechRecognizer(this._context);
    _speech.SetRecognitionListener(this);

    _speechIntent = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    _speechIntent.PutExtra(RecognizerIntent.ExtraSpeechInputCompleteSilenceLengthMillis, 1000);
    _speechIntent.PutExtra(RecognizerIntent.ExtraSpeechInputPossiblyCompleteSilenceLengthMillis, 1000);
    _speechIntent.PutExtra(RecognizerIntent.ExtraSpeechInputMinimumLengthMillis, 1500);

    StartListening();
}
/// <summary>
/// Builds the "get started" screen: connectivity guard, location provider
/// selection, TTS/recognizer setup, loading dialog, styled title and button wiring.
/// </summary>
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);

    // Without connectivity the screen is unusable; show the error and bail out.
    if (!isOnline())
    {
        mErrorDialog.Show();
        return;
    }

    SetContentView(Resource.Layout.activity_get_started);

    mLocationManager = GetSystemService(LocationService) as LocationManager;
    mTts = new TextToSpeech(this, this, "com.google.android.tts");
    speech = SpeechRecognizer.CreateSpeechRecognizer(this);
    speech.SetRecognitionListener(this);

    // Pick the first coarse/medium-power location provider, if any is enabled.
    Criteria mLocationServiceCriteria = new Criteria
    {
        Accuracy = Accuracy.Coarse,
        PowerRequirement = Power.Medium
    };
    IList<string> acceptableLocationProviders =
        mLocationManager.GetProviders(mLocationServiceCriteria, true);
    mLocationProvider = acceptableLocationProviders.Any()
        ? acceptableLocationProviders.First()
        : string.Empty;

    mSharedPreference = GetSharedPreferences(Constants.MY_PREF, 0);
    token = mSharedPreference.GetString("token", " ");
    mEditor = mSharedPreference.Edit();

    // Full-screen, non-cancelable loading dialog with a transparent background.
    mLoadingDialog = new LoadingDialog(this, Resource.Drawable.main);
    mLoadingDialog.SetCancelable(false);
    Window window = mLoadingDialog.Window;
    window.SetLayout(WindowManagerLayoutParams.MatchParent, WindowManagerLayoutParams.MatchParent);
    window.SetBackgroundDrawable(new ColorDrawable(Resources.GetColor(Resource.Color.trans)));

    // Styled action-bar title.
    // NOTE(review): the asset loaded here is JosefinSans, but the span references
    // "Amaranth-Regular.ttf" — confirm which font is actually intended.
    SpannableString s = new SpannableString("Contoso Cabs");
    typeface = Typeface.CreateFromAsset(this.Assets, "JosefinSans-SemiBold.ttf");
    s.SetSpan(new TypefaceSpan("Amaranth-Regular.ttf"), 0, s.Length(), SpanTypes.ExclusiveExclusive);
    s.SetSpan(new ForegroundColorSpan(this.Resources.GetColor(Resource.Color.title)), 0, s.Length(), SpanTypes.ExclusiveExclusive);
    this.TitleFormatted = s;

    // View wiring.
    mGetStarted = FindViewById<Button>(Resource.Id.btnGetStarted);
    mUri = FindViewById<Button>(Resource.Id.btnuri);
    mSpeak = FindViewById<ImageView>(Resource.Id.imagespeech);
    mTextView = FindViewById<TextView>(Resource.Id.bookText);
    mTextView.SetTypeface(typeface, TypefaceStyle.Normal);
    mGetStarted.SetTypeface(typeface, TypefaceStyle.Normal);
    mGetStarted.SetOnClickListener(this);
    mUri.SetOnClickListener(this);
    mSpeak.SetOnClickListener(this);
}
/// <summary>
/// Creates a recognizer bound to this listener and pre-builds a free-form
/// recognition intent with one-second silence timeouts.
/// </summary>
/// <param name="_context">Android context used to create the recognizer.</param>
public CustomRecognizer(Context _context)
{
    this._context = _context;
    Words = "";
    _speech = SpeechRecognizer.CreateSpeechRecognizer(this._context);
    _speech.SetRecognitionListener(this);

    _speechIntent = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    _speechIntent.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelFreeForm);
    // fix: the original called PutExtra(ActionRecognizeSpeech, ExtraPreferOffline),
    // using the action string as the extra key — the offline preference was never
    // actually set. EXTRA_PREFER_OFFLINE expects a boolean value.
    _speechIntent.PutExtra(RecognizerIntent.ExtraPreferOffline, true);
    _speechIntent.PutExtra(RecognizerIntent.ExtraSpeechInputCompleteSilenceLengthMillis, 1000);
    _speechIntent.PutExtra(RecognizerIntent.ExtraSpeechInputPossiblyCompleteSilenceLengthMillis, 1000);
    _speechIntent.PutExtra(RecognizerIntent.ExtraSpeechInputMinimumLengthMillis, 1500);
}
/// <summary>
/// Lazily creates the Android recognizer and begins listening; does nothing if a
/// recognizer already exists. Always returns a completed task.
/// </summary>
static Task DoStart()
{
    if (AndroidRecognizer != null)
    {
        return Task.CompletedTask;
    }

    StandardListener = new RecognitionListener();
    AndroidRecognizer = SpeechRecognizer.CreateSpeechRecognizer(UIRuntime.NativeRootScreen as AndroidOS.BaseActivity);
    AndroidRecognizer.SetRecognitionListener(StandardListener);
    AndroidRecognizer.StartListening(CreateIntent());
    IsStopped = false;
    return Task.CompletedTask;
}
/// <summary>
/// Initializes the recognizer, mutes the music stream (JellyBean+), wires the
/// record button and disables recording when the device has no microphone.
/// </summary>
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    Context context = this;

    // Initialize the speech recognizer; callbacks come to this activity.
    sr = SpeechRecognizer.CreateSpeechRecognizer(this);
    sr.SetRecognitionListener(this);

    // Mute the music stream so playback doesn't mix with recognition (JB+ only).
    if (Android.OS.Build.VERSION.SdkInt >= Android.OS.BuildVersionCodes.JellyBean)
    {
        AudioManager am = (AudioManager)Application.Context.GetSystemService(Context.AudioService);
        am.SetStreamMute(Stream.Music, true);
    }

    // Set our view from the "main" layout resource.
    SetContentView(Resource.Layout.Main);
    recButton = FindViewById<Button>(Resource.Id.btnRecord);
    textBox = FindViewById<TextView>(Resource.Id.textYourText);
    recButton.Click += RecButton_Click;

    // fix: the original compared the FeatureMicrophone constant against its own
    // literal value ("android.hardware.microphone"), which can never be unequal,
    // so the "no microphone" branch was unreachable. Ask the package manager
    // whether the feature actually exists on this device.
    if (!PackageManager.HasSystemFeature(Android.Content.PM.PackageManager.FeatureMicrophone))
    {
        // No microphone, no recording: alert the user and disable the button.
        var alert = new AlertDialog.Builder(recButton.Context);
        alert.SetTitle("You don't seem to have a microphone to record with");
        alert.SetPositiveButton("OK", (sender, e) =>
        {
            textBox.Text = "No microphone present";
            recButton.Enabled = false;
        });
        alert.Show();
    }
}
/// <summary>
/// Creates a recognizer on the current activity and starts free-form listening
/// with partial results and 1.5s silence windows.
/// </summary>
public void Start()
{
    var activity = Plugin.CurrentActivity.CrossCurrentActivity.Current.Activity;

    speechRecognizer = SpeechRecognizer.CreateSpeechRecognizer(activity);
    speechRecognizer.SetRecognitionListener(this);

    var intent = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    intent.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelFreeForm);
    intent.PutExtra(RecognizerIntent.ExtraMaxResults, 1);
    intent.PutExtra(RecognizerIntent.ExtraPartialResults, true);
    intent.PutExtra(RecognizerIntent.ExtraCallingPackage, activity.PackageName);
    intent.PutExtra(RecognizerIntent.ExtraSpeechInputCompleteSilenceLengthMillis, 1500);
    intent.PutExtra(RecognizerIntent.ExtraSpeechInputPossiblyCompleteSilenceLengthMillis, 1500);
    intent.PutExtra(RecognizerIntent.ExtraSpeechInputMinimumLengthMillis, 15000);

    speechRecognizer.StartListening(intent);
}
/// <summary>
/// Builds the recognizer and a reusable free-form recognition intent
/// (2-second silence windows, default locale, single result).
/// </summary>
private void CreateSpeechRecognizer()
{
    mSpeechRecognizer = SpeechRecognizer.CreateSpeechRecognizer(this);
    mSpeechRecognizer.SetRecognitionListener(this);

    var intent = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    intent.PutExtra(RecognizerIntent.ExtraCallingPackage, Application.PackageName);
    intent.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelFreeForm);
    intent.PutExtra(RecognizerIntent.ExtraPrompt, "Speak now");
    intent.PutExtra(RecognizerIntent.ExtraSpeechInputCompleteSilenceLengthMillis, 2000);
    intent.PutExtra(RecognizerIntent.ExtraSpeechInputPossiblyCompleteSilenceLengthMillis, 2000);
    intent.PutExtra(RecognizerIntent.ExtraSpeechInputMinimumLengthMillis, 1500);
    intent.PutExtra(RecognizerIntent.ExtraMaxResults, 1);
    intent.PutExtra(RecognizerIntent.ExtraLanguage, Java.Util.Locale.Default);
    mSpeechRecognizerIntent = intent;
}
/// <summary>
/// Toggles recording on button text: starts listening, or tears the recognizer
/// down and rebuilds it (instead of calling StopListening).
/// </summary>
private void RecButton_Click(object sender, System.EventArgs e)
{
    if (recButton.Text == "Start Recording")
    {
        recButton.Text = "End Recording";
        sr.StartListening(this.CreateSpeechIntent());
        return;
    }

    recButton.Text = "Start Recording";
    // Recreate the recognizer rather than stopping the current session.
    sr.Destroy();
    sr = SpeechRecognizer.CreateSpeechRecognizer(this);
    sr.SetRecognitionListener(this);
}
/// <summary>
/// Recognition error callback: retries on NoMatch; rebuilds the recognizer and
/// retries on SpeechTimeout. Other errors are only logged.
/// </summary>
public void OnError([GeneratedEnum] SpeechRecognizerError error)
{
    Android.Util.Log.WriteLine(Android.Util.LogPriority.Debug, "OnError:", error.ToString());

    switch (error)
    {
        case Android.Speech.SpeechRecognizerError.NoMatch:
            // Nothing was understood; try again with a fresh intent.
            sr.StartListening(this.CreateSpeechIntent());
            break;

        case Android.Speech.SpeechRecognizerError.SpeechTimeout:
            // Rebuild the recognizer before retrying after a timeout.
            sr.Destroy();
            sr = SpeechRecognizer.CreateSpeechRecognizer(this);
            sr.SetRecognitionListener(this);
            sr.StartListening(this.CreateSpeechIntent());
            break;
    }
}
/// <summary>
/// Wires navigation and logout handlers, then rebuilds the Spanish speech
/// recognition + TTS pipeline and the gesture detector.
/// </summary>
protected override void OnResume()
{
    base.OnResume();

    // Navigation buttons: stop ongoing speech before switching screens.
    b.Click += (s, e) => { StopItems(); StartActivity(typeof(Enviar)); };
    b2.Click += (s, e) => { StopItems(); StartActivity(typeof(MisChats)); };
    b3.Click += (s, e) => { StopItems(); StartActivity(typeof(MisContactos)); };
    b4.Click += (s, e) => { StopItems(); StartActivity(typeof(Configuracion)); };

    // Logout asks for spoken confirmation via TTS.
    logout.Click += (s, e) =>
    {
        count = 1;
        record = true;
        textToSpeak = "¿Está seguro que quiere cerrar la sesión? Hasta dentro de 2 horas no podrá volverse a loguear con el mismo número de teléfono.";
        toSpeech = new TextToSpeech(this, this);
    };

    speechReco = SpeechRecognizer.CreateSpeechRecognizer(this.ApplicationContext);
    speechReco.SetRecognitionListener(this);
    intentReco = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguagePreference, "es");
    intentReco.PutExtra(RecognizerIntent.ExtraCallingPackage, this.PackageName);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelWebSearch);
    intentReco.PutExtra(RecognizerIntent.ExtraMaxResults, 1);
    toSpeech = new TextToSpeech(this, this);
    gestureDetector = new GestureDetector(this);
}
/// <summary>
/// Hardware key handling: the headset hook arms a 1-second timer, the volume
/// keys start speech recognition sessions. Always reports the event as handled.
/// </summary>
public override bool OnKeyUp(Keycode keyCode, KeyEvent e)
{
    if (keyCode == Keycode.Headsethook)
    {
        // First press arms a one-second timer; later presses only bump the counter.
        if (ButtonCount < 1) { TimeSpan tt = new TimeSpan(0, 0, 1); Device.StartTimer(tt, TestHandleFunc); }
        ButtonCount++;
    }
    if (keyCode == Keycode.VolumeDown)
    {
        sr = SpeechRecognizer.CreateSpeechRecognizer(this);
        Intent intent = new Intent(RecognizerIntent.ActionRecognizeSpeech);
        intent.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelFreeForm);
        intent.PutExtra(RecognizerIntent.ExtraCallingPackage, "this package");
        intent.PutExtra(RecognizerIntent.ExtraMaxResults, 5);
        // NOTE(review): StartListening runs without a listener here
        // (SetRecognitionListener is commented out below), so this session's
        // results are never delivered anywhere — verify this is intended.
        sr.StartListening(intent);
        //sr.SetRecognitionListener(RecognitionListener);
        // NOTE(review): both StartListening and StartActivityForResult fire for
        // one key press, starting two recognition flows — confirm intent.
        StartActivityForResult(intent, VOICE);
    }
    if (keyCode == Keycode.VolumeUp)
    {
        sr = SpeechRecognizer.CreateSpeechRecognizer(this);
        sr.SetRecognitionListener(listener);
        Intent intent = new Intent(RecognizerIntent.ActionRecognizeSpeech);
        // NOTE(review): ExtraLanguage is set twice ("en-US" here, Locale.English
        // below); the later PutExtra overwrites this value.
        intent.PutExtra(RecognizerIntent.ExtraLanguage, "en-US");
        intent.PutExtra(RecognizerIntent.ExtraPreferOffline, true);
        intent.PutExtra(RecognizerIntent.ExtraPrompt, "CHADCS Is Listening");
        intent.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelFreeForm);
        intent.PutExtra(RecognizerIntent.ExtraLanguage, Java.Util.Locale.English);
        intent.PutExtra(RecognizerIntent.ExtraCallingPackage, "this package");
        intent.PutExtra(RecognizerIntent.ExtraMaxResults, 5);
        // NOTE(review): as above, this path also starts recognition twice.
        sr.StartListening(intent);
        StartActivityForResult(intent, VOICE);
    }
    // The event is always consumed; the base implementation is deliberately skipped.
    return(true); /* base.OnKeyUp(keyCode, e); */
}
/// <summary>
/// Restores data-access objects and rebuilds the Spanish speech recognition,
/// gesture detection and TTS objects.
/// </summary>
protected override void OnResume()
{
    base.OnResume();

    // Data access.
    database = new SQLiteRepository();
    contactRepository = new ContactRepository(database);
    configRepository = new ConfigRepository(database);
    configuracion = configRepository.GetConfig();
    errorText = new ErrorText();

    // Spanish speech recognition.
    speechReco = SpeechRecognizer.CreateSpeechRecognizer(this.ApplicationContext);
    speechReco.SetRecognitionListener(this);

    var reco = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    reco.PutExtra(RecognizerIntent.ExtraLanguagePreference, "es");
    reco.PutExtra(RecognizerIntent.ExtraCallingPackage, this.PackageName);
    reco.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelWebSearch);
    reco.PutExtra(RecognizerIntent.ExtraMaxResults, 1);
    intentReco = reco;

    gestureDetector = new GestureDetector(this);
    toSpeech = new TextToSpeech(this, this);
}
/// <summary>
/// Wires the STT demo screen. The recognizer is created once up front and
/// reused, so repeated "listen" taps don't leak SpeechRecognizer instances.
/// </summary>
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);
    SetContentView(Resource.Layout.STT_Layout);

    Button btn_ins = (Button)FindViewById(Resource.Id.btn_ins);
    Button btn_listen = (Button)FindViewById(Resource.Id.btn_lis);
    TextView txt_view = (TextView)FindViewById(Resource.Id.txt_view);

    // fix: the original created (and never destroyed) a fresh SpeechRecognizer on
    // every click of btn_listen — a resource leak. Create one and reuse it.
    SpeechRecognizer speech = SpeechRecognizer.CreateSpeechRecognizer(this);
    speech.SetRecognitionListener(this);

    btn_ins.Click += delegate { _stt = STTLib.Instance(); };
    btn_listen.Click += delegate { speech.StartListening(_stt.IntentSTT()); };
}
/// <summary>
/// Builds the voice-assistant UI, the Russian recognition intent, the Google TTS
/// engine, and the toggle that starts/stops listening.
/// </summary>
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);
    SetContentView(Resource.Layout.Main);

    // Toolbar.
    toolbar = FindViewById<Android.Support.V7.Widget.Toolbar>(Resource.Id.toolbar);
    SetSupportActionBar(toolbar);
    SupportActionBar.Title = "Voice Assistant";

    // Text panes, progress and toggle.
    annotationTextView = FindViewById<TextView>(Resource.Id.annotationTextView);
    speechTextView = FindViewById<TextView>(Resource.Id.speechTextView);
    speechIntentTextView = FindViewById<TextView>(Resource.Id.speechIntentTextView);
    speechStateTextView = FindViewById<TextView>(Resource.Id.speechActionTextView);
    progressBar = FindViewById<ProgressBar>(Resource.Id.progressBar);
    toggleButton = FindViewById<ToggleButton>(Resource.Id.toggleButton);
    progressBar.Visibility = ViewStates.Invisible;

    random = new System.Random();

    // Speech recognition (Russian) + the Google TTS engine.
    speech = SpeechRecognizer.CreateSpeechRecognizer(this);
    Log.Debug(LOG_TAG, "IsRecognitionAvailable: " + SpeechRecognizer.IsRecognitionAvailable(this));
    speech.SetRecognitionListener(this);
    tts = new TextToSpeech(this, this, "com.google.android.com");
    locale = new Locale("ru");

    var intent = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    intent.PutExtra(RecognizerIntent.ExtraLanguagePreference, "ru");
    intent.PutExtra(RecognizerIntent.ExtraSpeechInputCompleteSilenceLengthMillis, 1500);
    intent.PutExtra(RecognizerIntent.ExtraSpeechInputPossiblyCompleteSilenceLengthMillis, 1500);
    intent.PutExtra(RecognizerIntent.ExtraSpeechInputMinimumLengthMillis, 5000);
    intent.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelFreeForm);
    intent.PutExtra(RecognizerIntent.ExtraMaxResults, 3);
    recognizerIntent = intent;

    toggleButton.CheckedChange += ToggleButton_CheckedChange;
}
/// <summary>
/// Minimal demo: a single button starts free-form recognition; results arrive
/// through this activity's listener callbacks.
/// </summary>
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    // Set our view from the "main" layout resource.
    SetContentView(Resource.Layout.Main);

    sr = SpeechRecognizer.CreateSpeechRecognizer(this);
    sr.SetRecognitionListener(this);

    Button btn = FindViewById<Button>(Resource.Id.btn);
    btn.Click += delegate
    {
        Intent intent = new Intent(RecognizerIntent.ActionRecognizeSpeech);
        intent.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelFreeForm);
        intent.PutExtra(RecognizerIntent.ExtraCallingPackage, "this package");
        intent.PutExtra(RecognizerIntent.ExtraMaxResults, 5);
        sr.StartListening(intent);
    };

    tv = FindViewById<TextView>(Resource.Id.tv);
}
/// <summary>
/// Reloads configuration, ensures the background receive service is running,
/// and rebuilds the Spanish speech pipeline.
/// </summary>
protected override void OnResume()
{
    base.OnResume();

    configRepo = new ConfigRepository(database);
    configuracion = configRepo.GetConfig();

    // Make sure the message-receiving service is alive.
    service = new Intent(this, typeof(ReceiveService));
    if (!IsMyServiceRunning(service))
    {
        StartService(service);
    }

    speechReco = SpeechRecognizer.CreateSpeechRecognizer(this.ApplicationContext);
    speechReco.SetRecognitionListener(this);

    intentReco = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguagePreference, "es");
    intentReco.PutExtra(RecognizerIntent.ExtraCallingPackage, this.PackageName);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelWebSearch);
    intentReco.PutExtra(RecognizerIntent.ExtraMaxResults, 1);

    gestureDetector = new GestureDetector(this);
    toSpeech = new TextToSpeech(this, this);
}
/// <summary>
/// Disposes any previous audio/recognizer objects and builds a fresh recognizer
/// plus a free-form intent tuned for near-continuous listening (1ms silence
/// windows, partial results, up to 20 candidates).
/// </summary>
public void SetupSpeechReco()
{
    // Flag signals that the pipeline is being rebuilt.
    isRecoing = true;

    // Release previous instances before recreating them.
    audioManager?.Dispose();
    audioManager = null;
    speechRecognizer?.Dispose();
    speechRecognizer = null;
    speechRecognizerIntent?.Dispose();
    speechRecognizerIntent = null;

    audioManager = (AudioManager)this.GetSystemService(Context.AudioService);
    speechRecognizer = SpeechRecognizer.CreateSpeechRecognizer(this, speechComp);
    speechRecognizer.SetRecognitionListener(new SpeechRecognitionListener(this));

    speechRecognizerIntent = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    speechRecognizerIntent.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelFreeForm);
    speechRecognizerIntent.PutExtra(RecognizerIntent.ExtraCallingPackage, this.PackageName);
    speechRecognizerIntent.PutExtra(RecognizerIntent.ExtraPartialResults, true);
    speechRecognizerIntent.PutExtra(RecognizerIntent.ExtraSpeechInputMinimumLengthMillis, 700);
    speechRecognizerIntent.PutExtra(RecognizerIntent.ExtraSpeechInputPossiblyCompleteSilenceLengthMillis, 1);
    speechRecognizerIntent.PutExtra(RecognizerIntent.ExtraSpeechInputCompleteSilenceLengthMillis, 1);
    speechRecognizerIntent.PutExtra(RecognizerIntent.ExtraMaxResults, 20);

    isRecoing = false;
}
/// <summary>
/// Reconnects services, composes the Spanish TTS prompt summarizing unread
/// messages per sender, and rebuilds the speech pipeline.
/// </summary>
protected override void OnResume()
{
    base.OnResume();
    configuracion = configRepository.GetConfig();
    loginService = new LoginService();
    messageService = new MessageService();
    errorText = new ErrorText();
    try
    {
        client = loginService.Connect();
        if (client.IsUserAuthorized()) { usuario = client.Session.TLUser; }
    }
    catch (Exception ex)
    {
        // No connection: close the entire task stack.
        this.FinishAffinity();
    }
    try
    {
        var ch = messageRepository.GetMessages();
        if (ch.Count > 0)
        {
            _chatsNotReaded = messageRepository.CountMessagesNotReaded();
            // Total number of unread messages across all senders.
            var total = _chatsNotReaded.Sum(x => x.Counter);
            if (_chatsNotReaded.Count > 0 && _chatsNotReaded.Count <= 5)
            {
                // Up to five senders: enumerate each one by name (or phone number).
                if (total != 1) { textToSpeak = $"Tiene {total} mensajes nuevos de "; }
                else { textToSpeak = $"Tiene ún mensaje nuevo de "; }
                for (var i = 0; i < _chatsNotReaded.Count; i++)
                {
                    var contact = contactRepository.GetContactByPhone(_chatsNotReaded[i].FromTo);
                    // The last of several senders is prefixed with "y" (and).
                    if (_chatsNotReaded.Count > 1 && i == _chatsNotReaded.Count - 1)
                    {
                        if (contact != null) { textToSpeak += $"y {contact.FirstName} {contact.LastName}. "; }
                        else { textToSpeak += $"y {_chatsNotReaded[i].FromTo}. "; }
                    }
                    else
                    {
                        if (contact != null) { textToSpeak += $"{contact.FirstName} {contact.LastName}, "; }
                        else { textToSpeak += $"{_chatsNotReaded[i].FromTo}, "; }
                    }
                }
                // Singular/plural closing question.
                if (_chatsNotReaded.Count != 1) { textToSpeak += "¿Quiere leerlos, entrar a una conversación, borrarla, no hacer nada o volver atrás?"; }
                else { textToSpeak += "¿Quiere leerlo, entrar a una conversación, borrarla, no hacer nada o volver atrás?"; }
            }
            else if (_chatsNotReaded.Count > 5) { textToSpeak = $"Tiene {total} mensajes nuevos de más de 5 contactos. ¿Quiere leerlos, entrar a una conversación, borrarla, no hacer nada o volver atrás?"; }
            else if (_chatsNotReaded.Count == 0) { textToSpeak = $"No tiene mensajes nuevos. ¿Quiere entrar a una conversación, borrarla, no hacer nada o volver atrás?"; }
        }
        else
        {
            record = false;
            textToSpeak = "No tiene ninguna conversación.";
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): this interpolated string spans a raw source line break —
        // confirm the literal compiles as intended (a verbatim literal may have
        // been mangled during extraction).
        textToSpeak = $"Ha ocurrido un error al obtener sus mensajes nuevos. 
¿Quiere entrar a una conversación, borrarla, no hacer nada o volver atrás?";
    }
    // Spanish speech recognition + TTS pipeline.
    speechReco = SpeechRecognizer.CreateSpeechRecognizer(this.ApplicationContext);
    speechReco.SetRecognitionListener(this);
    intentReco = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguagePreference, "es");
    intentReco.PutExtra(RecognizerIntent.ExtraCallingPackage, this.PackageName);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelWebSearch);
    intentReco.PutExtra(RecognizerIntent.ExtraMaxResults, 1);
    gestureDetector = new GestureDetector(this);
    toSpeech = new TextToSpeech(this, this);
}
/// <summary>
/// Dashboard screen: fullscreen clock/weather widgets, three RecyclerView lists,
/// TTS, speech recognition, and a face-recognition demo handler.
/// </summary>
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);
    // Fullscreen and keep the screen on.
    this.Window.SetFlags(WindowManagerFlags.KeepScreenOn, WindowManagerFlags.KeepScreenOn);
    this.Window.AddFlags(WindowManagerFlags.Fullscreen);
    SetContentView(Resource.Layout.activity_main);
    console = FindViewById <TextView>(Resource.Id.console);
    string a = DateTime.Now.ToLocalTime().ToString(); // NOTE(review): 'a' is never used in this method
    clock = FindViewById <TextClock>(Resource.Id.textView2);
    Temperature = FindViewById <TextView>(Resource.Id.textView3);
    TempImage = FindViewById <ImageView>(Resource.Id.imageView2);
    // Weather / environment widgets.
    Wind = FindViewById <TextView>(Resource.Id.windText);
    Pressure = FindViewById <TextView>(Resource.Id.pressureText);
    Humidity = FindViewById <TextView>(Resource.Id.humidityText);
    Water = FindViewById <TextView>(Resource.Id.waterText);
    Battery = FindViewById <TextView>(Resource.Id.batterText);
    brightness = FindViewById <TextView>(Resource.Id.brightnessText);
    SetBrightness(100);
    parsing = new DataParsing();
    // Background updaters (data refresh + battery) on a thread-pool task.
    Task.Run(() => { updateWithInterval(); BatteryManagement(); });
    // "To city" travel list.
    ToCityList = new MyList <TrafiListModel>();
    mRecyclerViewToCity = FindViewById <RecyclerView>(Resource.Id.recyclerviewToCity);
    mLayoutManagerToCity = new LinearLayoutManager(this);
    mRecyclerViewToCity.SetLayoutManager(mLayoutManagerToCity);
    mAdapterToCity = new RecyclerAdapterTrafi(ToCityList, mRecyclerViewToCity, this);
    ToCityList.Adapter = mAdapterToCity;
    mRecyclerViewToCity.SetAdapter(mAdapterToCity);
    // "To gym" travel list.
    ToGymList = new MyList <TrafiListModel>();
    mRecyclerViewToGym = FindViewById <RecyclerView>(Resource.Id.recyclerviewToGym);
    mLayoutManagerToGym = new LinearLayoutManager(this);
    mRecyclerViewToGym.SetLayoutManager(mLayoutManagerToGym);
    mAdapterToGym = new RecyclerAdapterTrafi(ToGymList, mRecyclerViewToGym, this);
    ToGymList.Adapter = mAdapterToGym;
    mRecyclerViewToGym.SetAdapter(mAdapterToGym);
    // General list.
    List = new MyList <LinkomanijosData>();
    mRecyclerView = FindViewById <RecyclerView>(Resource.Id.recyclerview);
    mLayoutManager = new LinearLayoutManager(this);
    mRecyclerView.SetLayoutManager(mLayoutManager);
    mAdapter = new RecyclerAdapter(List, mRecyclerView);
    List.Adapter = mAdapter;
    mRecyclerView.SetAdapter(mAdapter);
    // SPEECH: audio control, TTS and recognizer wiring.
    audioManager = (AudioManager)GetSystemService(Context.AudioService);
    textToSpeech = FindViewById <Button>(Resource.Id.button);
    SpeechText = FindViewById <TextView>(Resource.Id.textView27);
    SpeechToText = FindViewById <Button>(Resource.Id.button2);
    lisenTome = FindViewById <TextView>(Resource.Id.textView28);
    tts = new TextToSpeech(this, this);
    sr = SpeechRecognizer.CreateSpeechRecognizer(this);
    sr.SetRecognitionListener(this);
    textToSpeech.Click += TextToSpeech_Click;
    SpeechToText.Click += (sender, e) =>
    {
        // Mute the music stream while listening; musicOrigVol saves the current
        // volume — presumably restored after recognition, TODO confirm.
        musicOrigVol = audioManager.GetStreamVolume(Stream.Music);
        audioManager.SetStreamVolume(Stream.Music, 0, 0);
        Intent intent = new Intent(RecognizerIntent.ActionRecognizeSpeech);
        intent.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelFreeForm);
        intent.PutExtra(RecognizerIntent.ExtraCallingPackage, "this package");
        intent.PutExtra(RecognizerIntent.ExtraMaxResults, 5);
        sr.StartListening(intent);
    };
    //SpeechToText.Click += RecordVoice;
    // Face-recognition demo: recognizes a fixed on-disk photo.
    recognition = FindViewById <Button>(Resource.Id.button3);
    recognitionText = FindViewById <TextView>(Resource.Id.textView6);
    camereImg = FindViewById <ImageView>(Resource.Id.imageView);
    recognition.Click += async delegate
    {
        var classs = new FaceRecognition();
        // NOTE(review): hard-coded device path — confirm the photo exists at runtime.
        string path = @"/data/user/0/camerapictureTaking.camerapictureTaking/filesEimantas.jpg";
        recognitionText.Text = await classs.RecognitionFace("1", path);
        SpeechText.Text = "Authorization succeeded, Hello came back " + recognitionText.Text;
        //Speak();
    };
}
/// <summary>
/// Composes the TTS text reading a specific contact's unread messages aloud,
/// marks them as read, and rebuilds the Spanish speech pipeline.
/// </summary>
protected override void OnResume()
{
    base.OnResume();
    configRepository = new ConfigRepository(database);
    configuracion = configRepository.GetConfig();
    loginService = new LoginService();
    messageService = new MessageService();
    errorText = new ErrorText();
    try
    {
        client = loginService.Connect();
        if (client.IsUserAuthorized()) { usuario = client.Session.TLUser; }
    }
    catch (Exception ex)
    {
        // No connection: close the entire task stack.
        this.FinishAffinity();
    }
    try
    {
        // 'extra' is presumably the contact's phone number passed to this screen —
        // TODO confirm against the launching intent.
        _chats = messageRepository.GetMessagesByPhoneWithoutSeen(extra);
        contact = contactRepository.GetContactByPhone(extra);
        messageRepository.MarkMessagesAsRead(extra);
        // Total character count of the unread messages; long texts take another path.
        var total = _chats.Sum(x => x.Mensaje.Length);
        if (_chats.Count > 0)
        {
            textToSpeak = $"Los mensajes nuevos de {contact.FirstName} {contact.LastName} son: ";
            if (total < 3900)
            {
                // Short enough for one utterance: enumerate "1 ..., 2 ..., y N ...".
                for (int i = 0; i < _chats.Count; i++)
                {
                    int j = i + 1;
                    if (i == _chats.Count - 1) { textToSpeak += $" y {j.ToString()} {_chats[i].Mensaje}"; }
                    else { textToSpeak += $"{j.ToString()} {_chats[i].Mensaje}, "; }
                }
                textToSpeak += ". ¿Quiere responder?";
                accion = "responder";
            }
            else
            {
                // Too long for a single utterance; delegate to the dedicated reader.
                LeerMensajesSinLeer();
            }
        }
        else
        {
            textToSpeak = "¿Quiere leer desde una fecha, buscar por un mensaje, no hacer nada o volver atrás?";
            accion = "leer";
        }
    }
    catch (Exception ex)
    {
        textToSpeak = "Ha ocurrido un error al acceder a la base de datos. ¿Quiere leer desde una fecha, buscar por un mensaje, no hacer nada o volver atrás?";
        accion = "leer";
    }
    // Spanish speech recognition + TTS pipeline.
    speechReco = SpeechRecognizer.CreateSpeechRecognizer(this.ApplicationContext);
    speechReco.SetRecognitionListener(this);
    intentReco = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguagePreference, "es");
    intentReco.PutExtra(RecognizerIntent.ExtraCallingPackage, this.PackageName);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelWebSearch);
    intentReco.PutExtra(RecognizerIntent.ExtraMaxResults, 1);
    gestureDetector = new GestureDetector(this);
    toSpeech = new TextToSpeech(this, this);
}
/// <summary>
/// Builds the spoken list of blocked contacts, reconnects the client and
/// rebuilds the Spanish speech recognition + TTS pipeline.
/// </summary>
protected override void OnResume()
{
    base.OnResume();

    configRepo = new ConfigRepository(database);
    configuracion = configRepo.GetConfig();

    // Compose the blocked-contacts announcement.
    if (listaContactos.Count == 0)
    {
        textToSpeak = "No tienes contactos bloqueados. ";
    }
    else
    {
        textToSpeak = "Sus contactos bloqueados son: ";
        for (int i = 0; i < listaContactos.Count; i++)
        {
            if (listaContactos.Count == 1)
            {
                textToSpeak += $"{listaContactos[i]}. ";
            }
            else if (i == (listaContactos.Count - 1))
            {
                // The last of several names is prefixed with "y" (and).
                textToSpeak += $"y {listaContactos[i]}. ";
            }
            else
            {
                textToSpeak += $"{listaContactos[i]}, ";
            }
        }
    }
    textToSpeak += "¿Quiere bloquear un contacto, desbloquear un contacto, no hacer nada o volver atrás?";

    try
    {
        client = loginService.Connect();
        if (client.IsUserAuthorized())
        {
            usuario = client.Session.TLUser;
        }
    }
    catch (Exception)
    {
        // No connection: close the entire task stack.
        this.FinishAffinity();
    }

    speechReco = SpeechRecognizer.CreateSpeechRecognizer(this.ApplicationContext);
    speechReco.SetRecognitionListener(this);
    intentReco = new Intent(RecognizerIntent.ActionRecognizeSpeech);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguagePreference, "es");
    intentReco.PutExtra(RecognizerIntent.ExtraCallingPackage, this.PackageName);
    intentReco.PutExtra(RecognizerIntent.ExtraLanguageModel, RecognizerIntent.LanguageModelWebSearch);
    intentReco.PutExtra(RecognizerIntent.ExtraMaxResults, 1);
    gestureDetector = new GestureDetector(this);
    toSpeech = new TextToSpeech(this, this);
}