/// <summary>
/// Requests speech-recognition authorization from the user and records the
/// outcome in <c>SpeechEnabled</c>.
/// </summary>
/// <remarks>
/// The authorization callback is not guaranteed to run on the main thread;
/// observers of <c>SpeechEnabled</c> that touch UI must marshal accordingly.
/// </remarks>
public static void RequestAuthorization()
{
    SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
    {
        // Take action based on status
        switch (status)
        {
            case SFSpeechRecognizerAuthorizationStatus.Authorized:
                // User has approved speech recognition
                SpeechEnabled = true;
                break;
            case SFSpeechRecognizerAuthorizationStatus.Denied:
                // User has declined speech recognition
                SpeechEnabled = false;
                break;
            case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                // Waiting on approval
                SpeechEnabled = false;
                break;
            case SFSpeechRecognizerAuthorizationStatus.Restricted:
                // BUG FIX: the device is not permitted to use speech
                // recognition, so it must be disabled (was 'true').
                SpeechEnabled = false;
                break;
        }
    });
}
/// <summary>
/// Hooks up the recognizer delegate and requests speech-recognition
/// authorization, reflecting the outcome in the record button's state.
/// </summary>
/// <param name="animated">Whether the appearance transition was animated.</param>
public override void ViewDidAppear(bool animated)
{
    // BUG FIX: overridden UIKit lifecycle methods must call through to base.
    base.ViewDidAppear(animated);

    speechRecognizer.Delegate = this;
    SFSpeechRecognizer.RequestAuthorization(authStatus =>
    {
        // The callback may not be called on the main thread. Add an
        // operation to the main queue to update the record button's state.
        NSOperationQueue.MainQueue.AddOperation(() =>
        {
            switch (authStatus)
            {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:
                    recordButton.Enabled = true;
                    break;
                case SFSpeechRecognizerAuthorizationStatus.Denied:
                    recordButton.Enabled = false;
                    recordButton.SetTitle("User denied access to speech recognition", UIControlState.Disabled);
                    break;
                case SFSpeechRecognizerAuthorizationStatus.Restricted:
                    recordButton.Enabled = false;
                    recordButton.SetTitle("Speech recognition restricted on this device", UIControlState.Disabled);
                    break;
                case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                    recordButton.Enabled = false;
                    recordButton.SetTitle("Speech recognition not yet authorized", UIControlState.Disabled);
                    break;
            }
        });
    });
}
// Starts speech recognition.
public void StartRecognizing()
{
    RecognizedText = string.Empty;
    IsRecognizing = true;

    // Ask the user for permission to use speech recognition.
    SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
    {
        switch (status)
        {
            case SFSpeechRecognizerAuthorizationStatus.Authorized:
                // Permission granted: create the required instances, then run
                // the main recognition routine. Even without passing a locale
                // to the SFSpeechRecognizer constructor, Japanese is
                // recognized without problems when the device's default
                // language is Japanese.
                audioEngine = new AVAudioEngine();
                speechRecognizer = new SFSpeechRecognizer();
                recognitionRequest = new SFSpeechAudioBufferRecognitionRequest();
                startRecognitionSession();
                break;
            default:
                // Permission was not granted: end processing here.
                return;
        }
    }
    );
}
/// <summary>
/// Requests speech-recognition authorization and returns a tuple of
/// (authorized, error); the error item is null when authorized.
/// </summary>
private async Task<Tuple<bool, CMPSpeechError>> CheckAuthorizationAsync()
{
    Tuple<bool, CMPSpeechError> authorizationStatus = null;
    // Note: the original wrapped this in Task.Run, which added nothing —
    // RequestAuthorization is already asynchronous.
    try
    {
        SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
        {
            if (status != SFSpeechRecognizerAuthorizationStatus.Authorized)
            {
                var genericError = PrepareAuthorizationError(status);
                authorizationStatus = new Tuple<bool, CMPSpeechError>(false, genericError);
            }
            else
            {
                authorizationStatus = new Tuple<bool, CMPSpeechError>(true, null);
            }
            // BUG FIX: release on every path. Previously a non-authorized
            // status returned without releasing the semaphore, deadlocking
            // the WaitAsync below forever.
            _speechSemaphore.Release();
        });
    }
    catch (Exception exception)
    {
        // BUG FIX: 'Diagonostics' typo prevented compilation.
        System.Diagnostics.Debug.WriteLine(exception.Message);
        _speechSemaphore.Release();
    }

    // Wait until the authorization callback (or the catch block) signals.
    await _speechSemaphore.WaitAsync();
    return authorizationStatus;
}
/// <summary>
/// Requests speech-recognition access. Returns NotSupported below iOS 10,
/// the mapped native status when the user has already decided, and otherwise
/// prompts the user and awaits the answer.
/// </summary>
public override async Task<AccessState> RequestAccess()
{
    var status = AccessState.Available;
    if (!UIDevice.CurrentDevice.CheckSystemVersion(10, 0))
    {
        // The Speech framework requires iOS 10+.
        status = AccessState.NotSupported;
    }
    else
    {
        var nativeStatus = SFSpeechRecognizer.AuthorizationStatus;
        if (nativeStatus != SFSpeechRecognizerAuthorizationStatus.NotDetermined)
        {
            // Already decided; no need to show the prompt again.
            status = FromNative(nativeStatus);
        }
        else
        {
            // RunContinuationsAsynchronously keeps the awaiting continuation
            // off the (unknown) authorization callback thread; TrySetResult
            // guards against any double invocation faulting the TCS.
            var tcs = new TaskCompletionSource<AccessState>(TaskCreationOptions.RunContinuationsAsynchronously);
            SFSpeechRecognizer.RequestAuthorization(x => tcs.TrySetResult(FromNative(x)));
            status = await tcs.Task.ConfigureAwait(false);
        }
    }

    return status;
}
//#Bonsai Snippets - Declarations
// Wires up the food-list service and repository, then prompts for
// speech-recognition authorization up front.
public override void SetRepositories()
{
    _Service = new FoodListFromSpeachService<FoodListFromSpeachViewModel>(
        (U, C, A) => ExecuteQueryWithReturnTypeAndNetworkAccessAsync<FoodListFromSpeachViewModel>(U, C, A));
    _Repository = new FoodListFromSpeachRepository<FoodListFromSpeachViewModel>(_MasterRepo, _Service);
    //#Bonsai Snippets - ServicedRepos

    SFSpeechRecognizer.RequestAuthorization(status =>
    {
        // No status-specific handling is performed yet; the switch is a
        // placeholder for future per-status logic.
        switch (status)
        {
            case SFSpeechRecognizerAuthorizationStatus.Authorized:
            case SFSpeechRecognizerAuthorizationStatus.Denied:
            case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
            case SFSpeechRecognizerAuthorizationStatus.Restricted:
                // Intentionally no action per status (yet).
                break;
        }
    });
}
/// <summary>
/// Starts speech-to-text recording, requesting authorization first when it
/// has not yet been granted. Shows an alert when permission is refused.
/// </summary>
public void RecordSpeachToText()
{
    if (SFSpeechRecognizer.AuthorizationStatus == SFSpeechRecognizerAuthorizationStatus.Authorized)
    {
        StartSpeechRecognizer();
    }
    else
    {
        SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
        {
            // BUG FIX: the authorization callback is not guaranteed to run on
            // the main thread, and UIKit (alert presentation, recognizer UI)
            // must only be touched from the main queue.
            NSOperationQueue.MainQueue.AddOperation(() =>
            {
                if (status == SFSpeechRecognizerAuthorizationStatus.Authorized)
                {
                    StartSpeechRecognizer();
                }
                else // No Permission to recognize Speech
                {
                    var alert = UIAlertController.Create("No Permission",
                        "Permission for Audio Recording denied",
                        UIAlertControllerStyle.Alert);
                    alert.AddAction(UIAlertAction.Create("Ok", UIAlertActionStyle.Cancel, null));
                    UIApplication.SharedApplication.KeyWindow
                        .RootViewController.PresentViewController(alert, true, null);
                }
            });
        });
    }
}
/* Voice recognition reference:
 * https://docs.microsoft.com/en-us/xamarin/ios/platform/speech?tabs=vsmac
 */
public UserAssessment()
{
    InitializeComponent();

    /* Show the initial passage of text to be read aloud. */
    Passage.Text = textToRead;

    /* Ask the user to authorize speech recognition so the microphone can be
     * used for live transcription. No per-status action is taken here;
     * transcription simply remains unavailable unless authorized. */
    SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
    {
        switch (status)
        {
            case SFSpeechRecognizerAuthorizationStatus.Authorized:
                // Approved by the user.
                break;
            case SFSpeechRecognizerAuthorizationStatus.Denied:
                // Declined by the user.
                break;
            case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                // Decision still pending.
                break;
            case SFSpeechRecognizerAuthorizationStatus.Restricted:
                // Disallowed on this device.
                break;
        }
    });
}
// Prompts the user for speech-recognition authorization; on approval the
// recording pipeline is initialized and a session is started immediately.
void AskPermission()
{
    SFSpeechRecognizer.RequestAuthorization(status =>
    {
        // Only an Authorized status triggers any work; Denied, Restricted
        // and NotDetermined all leave the app in its non-recording state.
        if (status == SFSpeechRecognizerAuthorizationStatus.Authorized)
        {
            InitializeProperties();
            StartRecordingSession();
        }
    });
}
/// <summary>
/// Requests speech-recognition authorization and returns a task that
/// completes once the user has responded. Only completion matters; the
/// result value is always null.
/// </summary>
public Task RequestAccess()
{
    // RunContinuationsAsynchronously avoids running awaiter continuations
    // inline on the authorization callback thread; TrySetResult is safe
    // against any double invocation.
    var tcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
    SFSpeechRecognizer.RequestAuthorization(_ => tcs.TrySetResult(null));
    return tcs.Task;
}
// Configures the speaker (record) button: requests speech-recognition
// authorization, enables the button only when authorized, and wires the tap
// handler that toggles recording.
private void initSpeakerButton()
{
    // Disabled until authorization succeeds.
    SpeakerButton.Enabled = false;
    //Click once to start recording, click twice to end recording
    SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus auth) =>
    {
        bool buttonIsEnabled = false;
        switch (auth)
        {
            case SFSpeechRecognizerAuthorizationStatus.Authorized:
                buttonIsEnabled = true;
                // Tap the microphone input so captured audio buffers feed
                // the recognition request.
                // NOTE(review): this assumes audioEngine and
                // recognitionRequest are already constructed when the
                // callback fires — confirm the initialization order.
                var node = audioEngine.InputNode;
                var recordingFormat = node.GetBusOutputFormat(0);
                node.InstallTapOnBus(0, 1024, recordingFormat, (AVAudioPcmBuffer buffer, AVAudioTime when) =>
                {
                    recognitionRequest.Append(buffer);
                });
                break;
            case SFSpeechRecognizerAuthorizationStatus.Denied:
                buttonIsEnabled = false;
                break;
            case SFSpeechRecognizerAuthorizationStatus.Restricted:
                buttonIsEnabled = false;
                break;
            case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                buttonIsEnabled = false;
                break;
        }
        // The callback may arrive off the main thread; marshal the UI update.
        InvokeOnMainThread(() =>
        {
            SpeakerButton.Enabled = buttonIsEnabled;
        });
    });
    //Event triggered when the button is pressed
    SpeakerButton.TouchUpInside += delegate
    {
        if (audioEngine.Running == true)
        {
            // Currently recording: stop and clear the highlight.
            StopRecording();
            SpeakerButton.Highlighted = false;
        }
        else
        {
            // Idle: begin a new recording session and show listening state.
            StartRecording();
            resetTexts();
            YouAskedLabel.Text = "Listening...";
            SpeakerButton.Highlighted = true;
        }
    };
}
// Sets up the speech-to-text pipeline objects and immediately requests
// speech-recognition authorization.
public SpeechToText()
{
    _audioEngine = new AVAudioEngine();
    _speechRecognizer = new SFSpeechRecognizer();
    _speechRecognitionRequest = new SFSpeechAudioBufferRecognitionRequest();
    // NOTE(review): SFSpeechRecognitionTask instances are normally obtained
    // from the recognizer (GetRecognitionTask), not constructed directly —
    // confirm this placeholder instance is never used as a live task.
    _speechRecognitionTask = new SFSpeechRecognitionTask();
    SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
    {
        // We're going to assume that you've selected to authorise the request, otherwise,
        // you're wasting your own time.
        // NOTE(review): the status is ignored here, so recognition will
        // silently fail if the user denies authorization.
    });
}
// View lifecycle: wires up the recognize button, asks for speech
// authorization, and configures the search integrations.
public override void ViewDidLoad()
{
    base.ViewDidLoad();

    // Make this controller reachable from the app delegate.
    ThisApp.Controller = this;

    // Recognition stays disabled until authorization is handled.
    RecognizeButton.Enabled = false;
    RecognizeButton.TouchUpInside += RecognizeButtonTouchUpInside;

    SFSpeechRecognizer.RequestAuthorization(HandleVoiceAuthorization);

    SetupUserActionSearch();
    SetupCoreSpotlightSearch();
}
// View setup: requests speech-recognition authorization (enabling the
// command button and installing a microphone tap on success) and starts VLC
// playback of the configured video URL in the top half of the view.
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    // Perform any additional setup after loading the view, typically from a nib.
    // Disabled until speech recognition is authorized.
    commandBtn.Enabled = false;
    SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus auth) =>
    {
        bool buttonIsEnabled = false;
        switch (auth)
        {
            case SFSpeechRecognizerAuthorizationStatus.Authorized:
                buttonIsEnabled = true;
                // Feed microphone buffers into the recognition request.
                // NOTE(review): assumes audioEngine and recognitionRequest
                // are already initialized when this callback fires — confirm.
                var node = audioEngine.InputNode;
                var recordingFormat = node.GetBusOutputFormat(0);
                node.InstallTapOnBus(0, 1024, recordingFormat, (AVAudioPcmBuffer buffer, AVAudioTime when) =>
                {
                    recognitionRequest.Append(buffer);
                });
                break;
            case SFSpeechRecognizerAuthorizationStatus.Denied:
                buttonIsEnabled = false;
                break;
            case SFSpeechRecognizerAuthorizationStatus.Restricted:
                buttonIsEnabled = false;
                break;
            case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                buttonIsEnabled = false;
                break;
        }
        // Authorization callback may be off the main thread; marshal the UI update.
        InvokeOnMainThread(() =>
        {
            commandBtn.Enabled = buttonIsEnabled;
        });
    });
    // LibVLC playback setup: the video occupies the top half of the screen.
    _libVLC = new LibVLC();
    _mediaPlayer = new LibVLCSharp.Shared.MediaPlayer(_libVLC);
    _videoView = new VideoView { MediaPlayer = _mediaPlayer };
    _videoView.Frame = new CoreGraphics.CGRect(0, 0, this.View.Bounds.Size.Width, this.View.Bounds.Size.Height / 2);
    View.AddSubview(_videoView);
    _videoView.MediaPlayer.Play(new Media(_libVLC, VIDEO_URL, FromType.FromLocation));
}
// Requests speech-recognition permission and records the outcome in
// 'request'. NOTE(review): the name typo ("Pemission") is kept — renaming
// would break callers.
public void RequestPemission()
{
    request = PermissionRequest.Pending;
    SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
    {
        switch (status)
        {
            case SFSpeechRecognizerAuthorizationStatus.Authorized:
                request = PermissionRequest.Allowed;
                break;
            case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                request = PermissionRequest.Pending;
                break;
            // NOTE(review): Denied and Restricted are not handled, so a
            // refusal leaves 'request' stuck at Pending forever — confirm
            // whether PermissionRequest has a member these should map to.
        }
    });
}
// Requests the speech-recognition permission, short-circuiting when the
// status is already known. Returns Unknown below iOS 10 (the Speech
// framework is unavailable there) and Granted unconditionally on tvOS.
internal static Task<PermissionStatus> RequestSpeechPermission()
{
    // Already resolved earlier — no need to prompt again.
    if (SpeechPermissionStatus != PermissionStatus.Unknown)
        return Task.FromResult(SpeechPermissionStatus);

    if (!UIDevice.CurrentDevice.CheckSystemVersion(10, 0))
        return Task.FromResult(PermissionStatus.Unknown);

#if __IOS__
    var completion = new TaskCompletionSource<PermissionStatus>();
    SFSpeechRecognizer.RequestAuthorization(status =>
    {
        // Map the native authorization status onto the shared enum.
        switch (status)
        {
            case SFSpeechRecognizerAuthorizationStatus.Authorized:
                completion.TrySetResult(PermissionStatus.Granted);
                break;
            case SFSpeechRecognizerAuthorizationStatus.Denied:
                completion.TrySetResult(PermissionStatus.Denied);
                break;
            case SFSpeechRecognizerAuthorizationStatus.Restricted:
                completion.TrySetResult(PermissionStatus.Restricted);
                break;
            default:
                completion.TrySetResult(PermissionStatus.Unknown);
                break;
        }
    });
    return completion.Task;
#elif __TVOS__
    return Task.FromResult(PermissionStatus.Granted);
#endif
}
// Requests speech-recognition authorization. On success a MessagingCenter
// "Authorized" message is broadcast; any other status throws.
private void AskForSpeechPermission()
{
    SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
    {
        switch (status)
        {
            case SFSpeechRecognizerAuthorizationStatus.Authorized:
                MessagingCenter.Send<ISpeechToText>(this, "Authorized");
                break;
            // NOTE(review): these throws happen inside the authorization
            // callback, which runs on an arbitrary (non-caller) thread — no
            // caller can catch them, so they will crash or be lost. Consider
            // broadcasting a failure message instead. Also note the
            // NotDetermined message ("not available") looks mismatched.
            case SFSpeechRecognizerAuthorizationStatus.Denied:
                throw new Exception("Audio permission denied");
            case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                throw new Exception("Audio permission not available");
            case SFSpeechRecognizerAuthorizationStatus.Restricted:
                throw new Exception("Audio permission denied");
        }
    });
}
// Asks the user for speech-recognition authorization; _isAuthorized is set
// only when the request is granted (all other outcomes leave it untouched).
private void AskForSpeechPermission()
{
    SFSpeechRecognizer.RequestAuthorization(status =>
    {
        if (status == SFSpeechRecognizerAuthorizationStatus.Authorized)
        {
            _isAuthorized = true;
        }
        // Denied / NotDetermined / Restricted: deliberately no state change.
    });
}
// Emits the speech-recognition access state, prompting the user only when
// the decision has not yet been made. Requires iOS 10+.
public override IObservable<AccessState> RequestAccess() => Observable.Create<AccessState>(ob =>
{
    if (!UIDevice.CurrentDevice.CheckSystemVersion(10, 0))
    {
        // Speech framework unavailable on this OS version.
        ob.Respond(AccessState.NotSupported);
        return Disposable.Empty;
    }

    var current = SFSpeechRecognizer.AuthorizationStatus;
    if (current == SFSpeechRecognizerAuthorizationStatus.NotDetermined)
    {
        // Not yet decided: show the system prompt and relay the answer.
        SFSpeechRecognizer.RequestAuthorization(result => ob.Respond(FromNative(result)));
    }
    else
    {
        // Already decided: respond immediately with the cached status.
        ob.Respond(FromNative(current));
    }

    return Disposable.Empty;
});
/// <summary>
/// Requests speech-recognition authorization and starts recording on
/// approval. The returned task completes when recording has started and
/// faults when authorization is denied.
/// </summary>
/// <exception cref="Exception">Thrown synchronously below iOS 10.</exception>
static Task DoStart()
{
    if (Device.OS.IsBeforeiOS(10))
    {
        throw new Exception("This feature is not supported in this device. Please upgrade your iOS.");
    }

    // BUG FIX: the original threw inside the authorization callback, where no
    // caller can observe the exception, and returned an already-completed
    // task before authorization resolved. Surface the outcome through the
    // returned task instead.
    var tcs = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
    SFSpeechRecognizer.RequestAuthorization(status =>
    {
        if (status == SFSpeechRecognizerAuthorizationStatus.Authorized)
        {
            StartRecording();
            tcs.TrySetResult(null);
        }
        else
        {
            Stop();
            tcs.TrySetException(new Exception("Speech recognition authorization request was denied."));
        }
    });
    return tcs.Task;
}
/// <summary>
/// Synchronously requests speech-recognition permission. Returns true when
/// authorized; otherwise throws AccessViolationException (exception type
/// kept for caller compatibility).
/// </summary>
/// <remarks>
/// Blocks until the system authorization callback fires — do not call on a
/// thread the callback could be dispatched to (e.g. the main thread) or it
/// may deadlock.
/// </remarks>
private bool AskForSpeechPermission()
{
    // BUG FIX: RequestAuthorization is asynchronous, but the original
    // inspected the status immediately after the call, so it always saw the
    // 'Denied' default and threw regardless of the user's actual answer.
    // Block until the callback delivers the real status.
    var authorizationStatus = SFSpeechRecognizerAuthorizationStatus.Denied;
    using (var done = new ManualResetEventSlim(false))
    {
        SFSpeechRecognizer.RequestAuthorization(status =>
        {
            authorizationStatus = status;
            done.Set();
        });
        done.Wait();
    }

    switch (authorizationStatus)
    {
        case SFSpeechRecognizerAuthorizationStatus.Authorized:
            return true;
        case SFSpeechRecognizerAuthorizationStatus.Denied:
            throw new AccessViolationException("User denied access to speech recognition");
        case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
            // BUG FIX: NotDetermined/Restricted messages were swapped.
            throw new AccessViolationException("Speech recognition not yet authorized");
        case SFSpeechRecognizerAuthorizationStatus.Restricted:
            throw new AccessViolationException("Speech recognition restricted on this device");
    }

    return false;
}
/// <summary>
/// This method will show a popup for accepting or declining speech recognition.
/// </summary>
public static void RequestAuthorization()
{
    // The outcome is intentionally ignored; callers query the authorization
    // status separately when they need it.
    SFSpeechRecognizer.RequestAuthorization(_ => { });
}
// Requests speech-recognition authorization as soon as the audio service is
// constructed; the resulting status is routed to SpeechRecognizerAuthChanged.
public iOSAudioService()
{
    SFSpeechRecognizer.RequestAuthorization(status => SpeechRecognizerAuthChanged(status));
}