public MainPage()
{
    NavigationPage.SetHasNavigationBar(this, false);
    BindingContext = ARModel.GetInstance;

    InitializeComponent();

    // On UWP only: start continuous recognition and poll for new phrases on a
    // short timer (ListenPhrase keeps the timer alive by returning true).
    // Other platforms do not wire up speech recognition in this page.
    switch (Device.RuntimePlatform)
    {
        case Device.UWP:
            speech = CrossSpeechRecognition.Current;
            listener = speech.ListenUntilPause();
            Device.StartTimer(TimeSpan.FromMilliseconds(10), ListenPhrase);
            break;
    }
}
public AccessViewModel(IJobManager jobs,
                       INotificationManager notifications = null,
                       ISpeechRecognizer speech = null,
                       IGeofenceManager geofences = null,
                       IGpsManager gps = null,
                       ICentralManager bluetooth = null,
                       IBeaconManager beacons = null)
{
    // Jobs are mandatory; every other service is optional and is only listed
    // when it was registered with the container (i.e. arrives non-null).
    this.Append("Jobs", AccessState.Unknown, () => jobs.RequestAccess());

    if (notifications != null)
        this.Append("Notifications", AccessState.Unknown, () => notifications.RequestAccess());

    if (speech != null)
        this.Append("Speech", AccessState.Unknown, () => speech.RequestAccess().ToTask(CancellationToken.None));

    if (gps != null)
        this.Append("GPS", gps.Status, () => gps.RequestAccess(true));

    if (geofences != null)
        this.Append("Geofences", geofences.Status, () => geofences.RequestAccess());

    if (bluetooth != null)
        this.Append("BluetoothLE Central", bluetooth.Status, () => bluetooth.RequestAccess().ToTask(CancellationToken.None));

    if (beacons != null)
        this.Append("iBeacons", beacons.Status, () => beacons.RequestAccess(true));
}
private void InitRecognizer()
{
    // Resolve the recognizer implementation by naming convention:
    // AutoCaption.Recognizers.{Engine}SpeechRecognizer, e.g. engine "Azure"
    // maps to AutoCaption.Recognizers.AzureSpeechRecognizer.
    var engineName = _config.Recognition.Engine;
    var typeName = $"AutoCaption.Recognizers.{engineName}SpeechRecognizer";
    var recognizerType = Assembly.GetExecutingAssembly().GetType(typeName);

    var usable = recognizerType != null && typeof(ISpeechRecognizer).IsAssignableFrom(recognizerType);
    if (!usable)
        throw new ArgumentException($"No recognition engine found for configured engine \"{engineName}\".");

    // The implementation must expose a parameterless constructor.
    var ctor = recognizerType.GetConstructor(Type.EmptyTypes);
    if (ctor == null)
        throw new ArgumentException($"Recognition engine for configured engine \"{engineName}\" does not have a suitable constructor.");

    _speechRecognizer = (ISpeechRecognizer)ctor.Invoke(null);

    // Attach handlers before starting so no early events are missed.
    _speechRecognizer.SpeechCancelled += OnSpeechCancelled;
    _speechRecognizer.SpeechCompleted += OnSpeechCompleted;
    _speechRecognizer.SpeechPartial += OnSpeechPartial;
    _speechRecognizer.Start(_config.Recognition);
}
public MainViewModel(HeadlightViewModel headlightViewModel,
                     ConnectionViewModel connectionViewModel,
                     CameraViewModel cameraViewModel,
                     ISpeechRecognizer speechRecognizer)
{
    // Publish the child view-models through the shared static container
    // before touching the recognizer.
    ViewModelsContainer.HeadlightViewModel = headlightViewModel;
    ViewModelsContainer.ConnectionViewModel = connectionViewModel;
    ViewModelsContainer.CameraViewModel = cameraViewModel;

    // Keep a handle on the recognizer and warm it up immediately.
    _speechRecognizer = speechRecognizer;
    _speechRecognizer.InitializeRecognizer();
}
public ConversationViewModel()
{
    // Resolve the platform singletons for recognition and text-to-speech.
    this.speech = CrossSpeechRecognition.Current;
    this.tts = CrossTextToSpeech.Current;

    // The Start command drives the whole conversation flow.
    this.Start = ReactiveCommand.CreateFromTask(this.DoConversation);

    // Mirror the recognizer's listening state onto the bindable property.
    this.speech
        .WhenListeningStatusChanged()
        .Subscribe(listening => this.IsListening = listening);
}
public ConversationViewModel(ISpeechRecognizer speech, ITextToSpeech tts)
{
    this.speech = speech;
    this.tts = tts;

    // Kick off the conversation flow on demand.
    this.Start = new Command(() => this.DoConversation());

    // Surface the recognizer's listening state for data binding.
    speech
        .WhenListeningStatusChanged()
        .Subscribe(listening => this.IsListening = listening);
}
public MainViewModel(HeadlightViewModel headlightViewModel,
                     ConnectionViewModel connectionViewModel,
                     CameraViewModel cameraViewModel,
                     ISpeechRecognizer speechRecognizer)
{
    // Hand the child view-models to the shared static container first.
    ViewModelsContainer.HeadlightViewModel = headlightViewModel;
    ViewModelsContainer.ConnectionViewModel = connectionViewModel;
    ViewModelsContainer.CameraViewModel = cameraViewModel;

    // Then capture and initialize the speech recognizer eagerly.
    _speechRecognizer = speechRecognizer;
    _speechRecognizer.InitializeRecognizer();
}
public LoopbackRecognizer(string key, string region)
{
    // Capture whatever plays on the default output device ("what you hear")
    // and expose it as a readable wave stream.
    this.capture = new WasapiLoopbackCapture();
    this.stream = new WaveCaptureStream(this.capture);

    // Azure-backed recognition over that loopback stream.
    this.recognizer = new AzureSpeechRecognizer(key, region, this.stream);

    // Bubble the inner recognizer's events up as our own.
    this.recognizer.SpeechPredicted += (s, e) => this.SpeechPredicted?.Invoke(s, e);
    this.recognizer.SpeechRecognized += (s, e) => this.SpeechRecognized?.Invoke(s, e);
}
public LoopbackRecognizer()
{
    // Loopback-capture the default output device into a readable wave stream.
    this.capture = new WasapiLoopbackCapture();
    this.stream = new WaveCaptureStream(this.capture);

    // Offline/local recognition over that stream (no cloud credentials).
    this.recognizer = new LocalSpeechRecognizer(this.stream);

    // Forward the inner recognizer's events through this facade.
    this.recognizer.SpeechPredicted += (s, e) => this.SpeechPredicted?.Invoke(s, e);
    this.recognizer.SpeechRecognized += (s, e) => this.SpeechRecognized?.Invoke(s, e);
}
public void ReleaseRecognizer(ISpeechRecognizer recognizer)
{
    // Guard: this factory only manages the iOS-specific implementation;
    // anything else was not created here and cannot be released by us.
    // (Pattern check replaces the old `as` + null-check, whose cast result
    // was never used; also fixes the "reconizer" typo in the message.)
    if (!(recognizer is SpeechRecognizer))
    {
        throw new ArgumentOutOfRangeException(nameof(recognizer), "not an iOS speech recognizer");
    }
}
void Record_Released(object sender, System.EventArgs e)
{
    // Nothing to do if no recording session was ever started.
    if (recognizer == null)
        return;

    // Stop first, then detach the handler so no late results slip through.
    recognizer.Stop();
    recognizer.SpeechRecognized -= SpeechReceived;
    recognizer = null;
}
void Record_Pressed(object sender, EventArgs e)
{
    // Ignore repeated presses while a recognizer is already running.
    if (recognizer != null)
        return;

    // Create a fresh recognizer, hook up results, and start listening.
    recognizer = speechService.CreateRecognizer();
    recognizer.SpeechRecognized += SpeechReceived;
    recognizer.Start();
}
public AccessViewModel(IJobManager jobs,
                       INotificationManager notifications = null,
                       ISpeechRecognizer speech = null,
                       IGeofenceManager geofences = null,
                       IGpsManager gps = null,
                       ICentralManager bluetooth = null,
                       IBeaconManager beacons = null,
                       IPushManager push = null,
                       INfcManager nfc = null)
{
    // Jobs are always registered; each remaining service is optional and is
    // only appended when the container supplied an instance.
    this.Append("Jobs", AccessState.Unknown, () => jobs.RequestAccess());

    if (notifications != null)
        this.Append("Notifications", AccessState.Unknown, () => notifications.RequestAccess());

    if (speech != null)
        this.Append("Speech", AccessState.Unknown, () => speech.RequestAccess());

    if (gps != null)
        this.Append("GPS (Background)", gps.GetCurrentStatus(true), () => gps.RequestAccess(true));

    if (geofences != null)
        this.Append("Geofences", geofences.Status, () => geofences.RequestAccess());

    if (bluetooth != null)
        this.Append("BluetoothLE Central", bluetooth.Status, () => bluetooth.RequestAccess().ToTask(CancellationToken.None));

    if (beacons != null)
        this.Append("iBeacons (Monitoring)", beacons.GetCurrentStatus(true), () => beacons.RequestAccess(true));

    // Push returns a composite result; only its Status portion is reported.
    if (push != null)
        this.Append("Push", AccessState.Unknown, async () => (await push.RequestAccess()).Status);

    if (nfc != null)
        this.Append("NFC", AccessState.Unknown, () => nfc.RequestAccess().ToTask(CancellationToken.None));
}
public ConversationViewModel()
{
    // Platform singleton for speech recognition.
    this.speech = CrossSpeechRecognition.Current;

    // The Start command runs the async conversation flow.
    this.Start = ReactiveCommand.CreateFromTask(this.DoConversation);

    // Reflect the recognizer's listening state on the UI thread.
    this.speech
        .WhenListeningStatusChanged()
        .ObserveOn(RxApp.MainThreadScheduler)
        .Subscribe(listening => this.IsListening = listening);
}
/// <summary>
/// Creates a new CodeBook using a speech recognizer.
/// </summary>
/// <param name="speechRecognizer">The speech recognizer used by the CodeBook to authenticate.</param>
/// <param name="authenticationThreshold">The distance upper bound used to authenticate.</param>
public CodeBook(ISpeechRecognizer speechRecognizer, double authenticationThreshold)
{
    // Start with no enrolled voice identities.
    this.identities = new Dictionary <string, VoiceIdentity>();

    // The recognizer performs the actual matching work.
    this.speechRecognizer = speechRecognizer;

    // NOTE(review): the field name "threhold" is misspelled; renaming it would
    // touch other members of this class not visible here, so it is kept as-is.
    this.threhold = authenticationThreshold;
}
public ChatBot(ILogger <ChatBot> logger,
               SalesDialogBotAccessors accessors,
               IOrderDialogSet salesDialogSet,
               INaturalLanguageEngine naturalLanguageEngine,
               IChatter chatter,
               ILanguageDialogSet languageDialogSet,
               IViewOrdersDialogSet viewOrdersDialogSet,
               ISpeechRecognizer speechRecognizer)
{
    // Pure dependency capture, stored in declaration order of the parameters.
    Logger = logger;
    Accessors = accessors;
    SalesDialogSet = salesDialogSet;
    NaturalLanguageEngine = naturalLanguageEngine;
    Chatter = chatter;
    LanguageDialogSet = languageDialogSet;
    ViewOrdersDialogSet = viewOrdersDialogSet;
    SpeechRecognizer = speechRecognizer;
}
private static void SpeechRecognizer_Recognized(ISpeechRecognizer sender, RecognizedEventArgs e)
{
    Debug.WriteLine(e.Result.Text);

    // Only exact phrase matches have registered triggers; anything else is ignored.
    if (!_triggers.TryGetValue(e.Result.Text, out var matched))
        return;

    foreach (var trigger in matched)
    {
        // Fire-and-forget: each trigger's actions must run on its own UI dispatcher.
        _ = trigger.Dispatcher.RunAsync(
            CoreDispatcherPriority.Normal,
            () => Interaction.ExecuteActions(trigger.AssociatedObject, trigger.Actions, e));
    }
}
static async Task Main(string[] args)
{
    Console.WriteLine("Please put in \"key,region\" for Azure Cognitive Services [leave blank for local]:");

    // BUG FIX: ReadLine() returns null at end-of-stream (e.g. redirected
    // stdin), which made .Trim() throw. Treat null the same as blank → local.
    var config = Console.ReadLine()?.Trim() ?? string.Empty;

    using (var recognizer = CreateRecognizer(config))
    {
        // Tracks in-flight recognitions by result id so partial (predicted)
        // text can be overwritten in place once the final result arrives.
        var recognitions = new Dictionary<string, ConsoleText>();
        ISpeechRecognizer inst = recognizer;

        inst.SpeechRecognized += (snd, evt) =>
        {
            var tmp = $"\r[{evt.Result.Offset:hh\\:mm\\:ss}]: {evt.Result.Text}\r\n";
            if (recognitions.TryGetValue(evt.ResultId, out var text))
            {
                UpdateDisplay(tmp, text);
                recognitions.Remove(evt.ResultId);
            }
        };

        inst.SpeechPredicted += (snd, evt) =>
        {
            string tmp = $"\r[{evt.Result.Offset:hh\\:mm\\:ss}]: {evt.Result.Text}\r\n";
            if (!recognitions.TryGetValue(evt.ResultId, out var text))
            {
                // First partial for this id: remember where on screen it starts.
                text = new ConsoleText
                {
                    Text = tmp,
                    Location = new Location { Column = Console.CursorLeft, Row = Console.CursorTop }
                };
            }

            UpdateDisplay(tmp, text);
            recognitions[evt.ResultId] = text;
        };

        await inst.StartAsync();

        // Block until the user presses Enter (the old while(true){ReadLine();break;}
        // loop executed exactly once and was equivalent to this single call).
        Console.ReadLine();

        await inst.StopAsync();
    }
}
/// <summary>
/// Initializes the optional speech-recognition module and the push-to-talk
/// input provider. Marshals itself onto the UI thread when required.
/// </summary>
private async Task SetupInputAsync()
{
    // WinForms control affinity: re-dispatch on the UI thread if needed.
    // (async lambda is fire-and-forget here by design — BeginInvoke cannot await.)
    if (InvokeRequired)
    {
        BeginInvoke((Action)(async() => await SetupInputAsync()));
        return;
    }

    DisableInput();

    // Take the first available speech recognizer module, if any was loaded.
    this.speech = Modules.SpeechRecognizers.FirstOrDefault();
    if (this.speech != null)
    {
        try
        {
            await this.speech.OpenAsync();
            this.speech.Update(this.gablarski.Channels, this.gablarski.Users);
            this.speech.CommandStateChanged += OnCommandStateChanged;
        }
        catch (Exception ex)
        {
            // Speech is optional: report the failure and continue without it.
            MessageBox.Show("Error initializing speech recognition: " + ex, "Speech Recognition", MessageBoxButtons.OK);
            this.speech = null;
        }
    }

    // Resolve the input provider named in settings; fall back to the first
    // loaded module when the setting is blank or the type cannot be loaded.
    Type settingType;
    if (Settings.InputProvider.IsNullOrWhitespace() || (settingType = Type.GetType(Settings.InputProvider)) == null)
    {
        this.inputProvider = Modules.Input.FirstOrDefault();
    }
    else
    {
        this.inputProvider = (IInputProvider)Activator.CreateInstance(settingType);
    }

    if (this.inputProvider == null)
    {
        // No input provider at all → push-to-talk cannot function.
        Settings.UsePushToTalk = false;
    }
    else
    {
        this.inputProvider.CommandStateChanged += OnCommandStateChanged;
        await this.inputProvider.AttachAsync(Handle);
        // Only bindings that belong to this provider type apply.
        this.inputProvider.SetBindings(Settings.CommandBindings.Where(b => b.Provider.GetType().GetSimpleName() == this.inputProvider.GetType().GetSimpleName()));
    }
}
/// <summary>
/// Releases managed resources. The order is deliberate: configuration is
/// persisted first, audio capture is stopped before the recognizer goes away,
/// and the Skia/OpenTK graphics stack is torn down from drawing objects down
/// to the underlying GL context and window.
/// </summary>
/// <param name="disposing">True when called from Dispose(); only managed state is handled here.</param>
protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        // Persist the current configuration exactly once on shutdown.
        if (_config != null)
        {
            Toml.WriteFile(_config, "config.toml", _tomlSettings);
            _config = null;
        }

        // Stop capturing before disposing so no callbacks land on dead objects.
        _waveIn?.StopRecording();
        _waveIn?.Dispose();
        _waveIn = null;

        _speechRecognizer?.Dispose();
        _speechRecognizer = null;

        // Skia drawing objects.
        _skFont?.Dispose();
        _skFont = null;
        _skStrokePaint?.Dispose();
        _skStrokePaint = null;
        _skFillPaint?.Dispose();
        _skFillPaint = null;

        // Skia surface / render target / context, then the OpenTK GL
        // context and finally the window that owns it.
        _skScreenSurface?.Dispose();
        _skScreenSurface = null;
        _skScreenRenderTarget?.Dispose();
        _skScreenRenderTarget = null;
        _skContext?.Dispose();
        _skContext = null;
        _skInterface?.Dispose();
        _skInterface = null;
        _tkContext?.Dispose();
        _tkContext = null;
        _tkWindow?.Dispose();
        _tkWindow = null;
    }
}
public DictationViewModel(ISpeechRecognizer speech, IUserDialogs dialogs)
{
    // Non-null while a dictation session is active; captured by the command.
    IDisposable subscription = null;

    // Keep the button caption in sync with the recognizer state.
    speech
        .WhenListeningStatusChanged()
        .ObserveOn(RxApp.MainThreadScheduler)
        .Subscribe(listening => this.ListenText = listening
            ? "Stop Listening"
            : "Start Dictation");

    this.ToggleListen = ReactiveCommand.Create(() =>
    {
        if (subscription != null)
        {
            // Currently listening: tear the session down.
            subscription.Dispose();
            subscription = null;
            return;
        }

        if (this.UseContinuous)
        {
            // Continuous mode appends each utterance to the running text.
            subscription = speech
                .ContinuousDictation()
                .ObserveOn(RxApp.MainThreadScheduler)
                .Subscribe(
                    phrase => this.Text += " " + phrase,
                    ex => dialogs.Alert(ex.ToString())
                );
        }
        else
        {
            // One-shot mode replaces the text with the final utterance.
            subscription = speech
                .ListenUntilPause()
                .ObserveOn(RxApp.MainThreadScheduler)
                .Subscribe(
                    phrase => this.Text = phrase,
                    ex => dialogs.Alert(ex.ToString())
                );
        }
    });
}
/// <summary>
/// Asks the user a yes/no question by voice (and optionally an on-screen
/// dialog); resolves with true when the positive keyword is chosen.
/// </summary>
/// <param name="question">The question spoken (and shown) to the user.</param>
/// <param name="positive">Keyword/button that resolves the task to true.</param>
/// <param name="negative">Keyword/button that resolves the task to false.</param>
/// <param name="showDialog">Whether to also show a confirm dialog.</param>
/// <param name="cancelToken">Optional token that cancels the pending task.</param>
public async Task<bool> Confirm(string question, string positive, string negative, bool showDialog, CancellationToken? cancelToken)
{
    var tcs = new TaskCompletionSource<bool>();
    var cancelSrc = new CancellationTokenSource();
    // NOTE(review): `dialog` is never assigned — dialogs.Confirm below does not
    // return a handle — so dialog?.Dispose() is currently a no-op. Left in
    // place pending confirmation of the IUserDialogs API.
    IDisposable dialog = null;
    IDisposable speech = null;

    if (showDialog)
    {
        this.dialogs.Confirm(new ConfirmConfig
        {
            Message = question,
            OkText = positive,
            CancelText = negative,
            OnAction = dr =>
            {
                tcs.TrySetResult(dr);
                // BUG FIX: the dialog can be answered before the speech
                // subscription below is created — guard against null.
                speech?.Dispose();
                cancelSrc.Cancel();
            }
        });
    }

    // Speak the question; cancelled early if the dialog is answered first.
    await this.tts.Speak(question, cancelToken: cancelSrc.Token);

    // Listen for the first occurrence of either answer keyword.
    speech = this.speech
        .ListenForFirstKeyword(positive, negative)
        .Subscribe(text =>
        {
            var r = text.Equals(positive, StringComparison.CurrentCultureIgnoreCase);
            dialog?.Dispose();
            tcs.TrySetResult(r);
        });

    cancelToken?.Register(() =>
    {
        dialog?.Dispose();
        speech?.Dispose();
        tcs.TrySetCanceled();
    });

    return await tcs.Task;
}
/// <summary>
/// Optimal observable for taking command (yes/no/maybe/go away/etc)
/// </summary>
/// <param name="speechRecognizer">The recognizer supplying the dictation stream.</param>
/// <param name="keywords">Candidate words; the first one heard wins.</param>
/// <returns>An observable that emits the matched word once, then completes.</returns>
public static IObservable <string> ListenForFirstKeyword(this ISpeechRecognizer speechRecognizer, params string[] keywords)
    => speechRecognizer
        .ContinuousDictation()
        .Select(utterance =>
        {
            // Scan the utterance word by word; emit the first word that
            // matches any keyword (case-insensitive), else null.
            foreach (var word in utterance.Split(' '))
            {
                if (Array.Exists(keywords, k => word.Equals(k, StringComparison.OrdinalIgnoreCase)))
                    return word;
            }
            return null;
        })
        .Where(match => match != null)
        .Take(1);
public DictationViewModel(ISpeechRecognizer speech)
{
    // Non-null while continuous dictation is running; captured by the command.
    IDisposable subscription = null;

    // Keep the button caption in sync with the recognizer state.
    speech
        .WhenListeningStatusChanged()
        .Subscribe(listening => this.ListenText = listening
            ? "Stop Listening"
            : "Start Dictation");

    this.ToggleListen = new Command(async () =>
    {
        // Bail out early when the engine itself is unusable.
        if (speech.Status != SpeechRecognizerStatus.Available)
        {
            this.ListenText = "Problem with speech recognition engine - " + speech.Status;
            return;
        }

        // Microphone permission is required before any dictation can start.
        var granted = await speech.RequestPermission();
        if (!granted)
        {
            this.ListenText = "Invalid Permissions";
            return;
        }

        // Toggle: start continuous dictation, or stop the active session.
        if (subscription == null)
        {
            subscription = speech
                .ContinuousDictation()
                .Subscribe(phrase => this.Text += " " + phrase);
        }
        else
        {
            subscription.Dispose();
            subscription = null;
        }
    });
}
public DictationViewModel(ISpeechRecognizer speech, IDialogs dialogs)
{
    // Track the recognizer's listening state on the main thread.
    speech
        .WhenListeningStatusChanged()
        .SubOnMainThread(listening => this.IsListening = listening);

    this.ToggleListen = ReactiveCommand.Create(() =>
    {
        // Already listening: deactivation disposes everything registered
        // against DeactivateWith, which stops the session.
        if (this.IsListening)
        {
            this.Deactivate();
            return;
        }

        if (this.UseContinuous)
        {
            // Continuous mode: append every recognized utterance.
            speech
                .ContinuousDictation()
                .SubOnMainThread(
                    phrase => this.Text += " " + phrase,
                    ex => dialogs.Alert(ex.ToString())
                )
                .DisposedBy(this.DeactivateWith);
        }
        else
        {
            // One-shot mode: replace the text with the final utterance.
            speech
                .ListenUntilPause()
                .SubOnMainThread(
                    phrase => this.Text = phrase,
                    ex => dialogs.Alert(ex.ToString())
                )
                .DisposedBy(this.DeactivateWith);
        }
    });
}
public ChatViewModel(ITextToSpeech tts, ISpeechRecognizer speech, ISpeechDialogs dialogs)
{
    this.tts = tts;

    // Expose the recognizer's listening state for binding.
    speech
        .WhenListeningStatusChanged()
        .Subscribe(listening => this.IsListening = listening);

    this.Start = new Command(async () =>
    {
        // Engine unavailable: say so and give up.
        if (speech.Status != SpeechRecognizerStatus.Available)
        {
            await tts.Speak("Problem with speech recognition engine - " + speech.Status);
            return;
        }

        // Microphone permission is a hard requirement.
        var granted = await speech.RequestPermission();
        if (!granted)
        {
            await tts.Speak("Hey Dummy! Ya you! You didn't enable permissions for the microphone");
            return;
        }

        // Ask for the user's name by voice and echo it back.
        var answer = await dialogs.Prompt("Hello, please tell me your name?");
        await tts.Speak($"Hello {answer}");
    });
}
public SpeechDialogsImpl(ISpeechRecognizer sr, ITextToSpeech tts, IUserDialogs dialogs)
{
    // Pure dependency capture; this type only composes the three services.
    this.dialogs = dialogs;
    this.tts = tts;
    this.speech = sr;
}
public SearchPageViewModel(ISpeechRecognizer speechRecognizer, IPageDialogService pageDialogService)
{
    // Capture collaborators, then expose the listen action as a command.
    _pageDialogService = pageDialogService;
    _speechRecognizer = speechRecognizer;
    ListenCommand = new DelegateCommand(ListenCommandExecuted);
}
public VoiceRecognizer(IMicrophoneControl microphone, ISpeechRecognizer speechRecognizer, ISsh sshClient)
{
    // Plain dependency capture; all real work happens in later calls.
    _sshClient = sshClient;
    _speechRecognizer = speechRecognizer;
    _microphone = microphone;
}
public void ReleaseRecognizer(ISpeechRecognizer recognizer)
{
    // Intentionally a no-op on this platform: nothing visible here holds
    // native resources that need explicit release. NOTE(review): confirm
    // against the factory interface contract that no cleanup is expected.
}
/// <summary>
/// Initializes the optional speech-recognition module and the push-to-talk
/// input provider. Marshals itself onto the UI thread when required.
/// </summary>
private async Task SetupInputAsync()
{
    // WinForms control affinity: re-dispatch on the UI thread if needed.
    // (async lambda is fire-and-forget here by design — BeginInvoke cannot await.)
    if (InvokeRequired)
    {
        BeginInvoke ((Action)(async () => await SetupInputAsync()));
        return;
    }

    DisableInput();

    // Take the first available speech recognizer module, if any was loaded.
    this.speech = Modules.SpeechRecognizers.FirstOrDefault();
    if (this.speech != null)
    {
        try
        {
            await this.speech.OpenAsync();
            this.speech.Update (this.gablarski.Channels, this.gablarski.Users);
            this.speech.CommandStateChanged += OnCommandStateChanged;
        }
        catch (Exception ex)
        {
            // Speech is optional: report the failure and continue without it.
            MessageBox.Show ("Error initializing speech recognition: " + ex, "Speech Recognition", MessageBoxButtons.OK);
            this.speech = null;
        }
    }

    // Resolve the input provider named in settings; fall back to the first
    // loaded module when the setting is blank or the type cannot be loaded.
    Type settingType;
    if (Settings.InputProvider.IsNullOrWhitespace() || (settingType = Type.GetType (Settings.InputProvider)) == null)
        this.inputProvider = Modules.Input.FirstOrDefault();
    else
        this.inputProvider = (IInputProvider)Activator.CreateInstance (settingType);

    if (this.inputProvider == null)
        // No input provider at all → push-to-talk cannot function.
        Settings.UsePushToTalk = false;
    else
    {
        this.inputProvider.CommandStateChanged += OnCommandStateChanged;
        await this.inputProvider.AttachAsync (Handle);
        // Only bindings that belong to this provider type apply.
        this.inputProvider.SetBindings (Settings.CommandBindings.Where (b => b.Provider.GetType().GetSimpleName() == this.inputProvider.GetType().GetSimpleName()));
    }
}
private void Awake()
{
    // Unity lifecycle hook: cache the recognizer component attached to this
    // same GameObject. NOTE(review): GetComponent returns null when no such
    // component is attached — confirm callers tolerate that.
    _speechRecognizer = GetComponent <ISpeechRecognizer>();
}