Example no. 1
        // Starts speech recognition.
        public void StartRecognizing()
        {
            RecognizedText = string.Empty;
            IsRecognizing  = true;

            // Ask the user for permission to use speech recognition.
            SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
            {
                switch (status)
                {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:
                    // If the user granted speech recognition, create the required instances and then run the actual recognition.
                    // Even if no locale is passed to the SFSpeechRecognizer constructor,
                    // Japanese is recognized without problems as long as the device's default language is Japanese.
                    audioEngine        = new AVAudioEngine();
                    speechRecognizer   = new SFSpeechRecognizer();
                    recognitionRequest = new SFSpeechAudioBufferRecognitionRequest();
                    startRecognitionSession();
                    break;

                default:
                    // If the user did not grant speech recognition, stop here.
                    return;
                }
            });
        }
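
startRecognitionSession() is called above but not shown. A minimal sketch of what it might do with the audioEngine, speechRecognizer and recognitionRequest fields created in the Authorized branch (everything below is an assumption, not the original implementation):

        // Hypothetical body for startRecognitionSession(), sketched from the fields above.
        private void startRecognitionSession()
        {
            // Feed microphone buffers into the recognition request.
            var node            = audioEngine.InputNode;
            var recordingFormat = node.GetBusOutputFormat(0);
            node.InstallTapOnBus(0, 1024, recordingFormat, (AVAudioPcmBuffer buffer, AVAudioTime when) =>
            {
                recognitionRequest.Append(buffer);
            });

            recognitionRequest.ShouldReportPartialResults = true;

            audioEngine.Prepare();
            audioEngine.StartAndReturnError(out NSError error);
            if (error != null)
            {
                IsRecognizing = false;
                return;
            }

            // Keep the returned task in a field if you need to cancel it later.
            speechRecognizer.GetRecognitionTask(recognitionRequest, (SFSpeechRecognitionResult result, NSError err) =>
            {
                if (err != null || result == null)
                {
                    IsRecognizing = false;
                    return;
                }

                // Surface partial and final transcriptions as they arrive.
                RecognizedText = result.BestTranscription.FormattedString;
            });
        }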
Example no. 2
        private async Task <Tuple <bool, CMPSpeechError> > CheckAuthorizationAsync()
        {
            Tuple <bool, CMPSpeechError> authorizationStatus = null;
            await Task.Run(() =>
            {
                try
                {
                    SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
                    {
                        if (status != SFSpeechRecognizerAuthorizationStatus.Authorized)
                        {
                            var genericError    = PrepareAuthorizationError(status);
                            authorizationStatus = new Tuple <bool, CMPSpeechError>(false, genericError);
                        }
                        else
                        {
                            authorizationStatus = new Tuple <bool, CMPSpeechError>(true, null);
                        }

                        // Release in every path so the WaitAsync below cannot deadlock.
                        _speechSemaphore.Release();
                    });
                }
                catch (Exception exception)
                {
                    System.Diagnostics.Debug.WriteLine(exception.Message);
                    _speechSemaphore.Release();
                }
            });

            await _speechSemaphore.WaitAsync();

            return(authorizationStatus);
        }
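
A hypothetical call site (inside an async method) just to show how the Tuple result is consumed:

            var authorization = await CheckAuthorizationAsync();
            if (!authorization.Item1)
            {
                // Item2 carries the CMPSpeechError explaining why authorization failed.
                System.Diagnostics.Debug.WriteLine(authorization.Item2);
            }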
Example no. 3
 public void RecordSpeachToText()
 {
     if (SFSpeechRecognizer.AuthorizationStatus == SFSpeechRecognizerAuthorizationStatus.Authorized)
     {
         StartSpeechRecognizer();
     }
     else
     {
         SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
         {
             if (status == SFSpeechRecognizerAuthorizationStatus.Authorized)
             {
                 StartSpeechRecognizer();
             }
             else                     // No permission to recognize speech
             {
                 // Present the alert on the main queue; the authorization callback may run on a background thread.
                 NSOperationQueue.MainQueue.AddOperation(() =>
                 {
                     var alert = UIAlertController.Create("No Permission",
                                                          "Permission for Audio Recording denied", UIAlertControllerStyle.Alert);
                     alert.AddAction(UIAlertAction.Create("Ok", UIAlertActionStyle.Cancel, null));
                     UIApplication.SharedApplication.KeyWindow
                     .RootViewController.PresentViewController(alert, true, null);
                 });
             }
         });
     }
 }
        void AskPermission()
        {
            // Request user authorization
            SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) => {
                // Take action based on status
                switch (status)
                {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:
                    InitializeProperties();
                    StartRecordingSession();
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Denied:
                    // User has declined speech recognition

                    break;

                case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                    // Waiting on approval

                    break;

                case SFSpeechRecognizerAuthorizationStatus.Restricted:
                    // Speech recognition is restricted on this device

                    break;
                }
            });
        }
Example no. 5
        public override void ViewDidAppear(bool animated)
        {
            speechRecognizer.Delegate = this;
            SFSpeechRecognizer.RequestAuthorization(authStatus => {
                // The callback may not be called on the main thread. Add an
                // operation to the main queue to update the record button's state.
                NSOperationQueue.MainQueue.AddOperation(() => {
                    switch (authStatus)
                    {
                    case SFSpeechRecognizerAuthorizationStatus.Authorized:
                        recordButton.Enabled = true;
                        break;

                    case SFSpeechRecognizerAuthorizationStatus.Denied:
                        recordButton.Enabled = false;
                        recordButton.SetTitle("User denied access to speech recognition", UIControlState.Disabled);
                        break;

                    case SFSpeechRecognizerAuthorizationStatus.Restricted:
                        recordButton.Enabled = false;
                        recordButton.SetTitle("Speech recognition restricted on this device", UIControlState.Disabled);
                        break;

                    case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                        recordButton.Enabled = false;
                        recordButton.SetTitle("Speech recognition not yet authorized", UIControlState.Disabled);
                        break;
                    }
                });
            });
        }
        //#Bonsai Snippets - Declarations
        public override void SetRepositories()
        {
            _Service = new FoodListFromSpeachService <FoodListFromSpeachViewModel>((U, C, A) =>
                                                                                   ExecuteQueryWithReturnTypeAndNetworkAccessAsync <FoodListFromSpeachViewModel>(U, C, A));
            _Repository = new FoodListFromSpeachRepository <FoodListFromSpeachViewModel>(_MasterRepo, _Service);
            //#Bonsai Snippets - ServicedRepos
            SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) => {
                // Take action based on status
                switch (status)
                {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:

                    break;

                case SFSpeechRecognizerAuthorizationStatus.Denied:

                    break;

                case SFSpeechRecognizerAuthorizationStatus.NotDetermined:

                    break;

                case SFSpeechRecognizerAuthorizationStatus.Restricted:

                    break;
                }
            });
        }
Example no. 7
        /* Voice recognition reference:
         * https://docs.microsoft.com/en-us/xamarin/ios/platform/speech?tabs=vsmac
         */
        public UserAssessment()
        {
            InitializeComponent();

            /* Load the initial passage of text. */
            Passage.Text = textToRead;

            /* Request user authorization to enable the microphone for live transcription. */
            SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) => {
                switch (status)
                {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:
                    // User has approved speech recognition
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Denied:
                    // User has declined speech recognition
                    break;

                case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                    // Waiting on approval
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Restricted:
                    // Speech recognition is restricted on this device
                    break;
                }
            });
        }
Example no. 8
        public static void RequestAuthorization()
        {
            SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) => {
                // Take action based on status
                switch (status)
                {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:
                    // User has approved speech recognition
                    SpeechEnabled = true;
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Denied:
                    // User has declined speech recognition
                    SpeechEnabled = false;
                    break;

                case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                    // Waiting on approval
                    SpeechEnabled = false;
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Restricted:
                    // Speech recognition is restricted on this device
                    SpeechEnabled = false;
                    break;
                }
            });
        }
Example no. 9
        public void RecognizeSpeech(NSUrl url)
        {
            var recognizer = new SFSpeechRecognizer(new NSLocale("en_US"));

            // Is the default language supported?
            if (recognizer == null)
            {
                return;
            }

            // Create recognition task and start recognition
            var request = new SFSpeechUrlRecognitionRequest(url);

            recognizer.GetRecognitionTask(request, (SFSpeechRecognitionResult result, NSError err) =>
            {
                // Was there an error?
                if (err != null)
                {
                    // Present the alert on the main thread; the recognition callback may run on a background queue.
                    InvokeOnMainThread(() =>
                    {
                        var alertViewController = UIAlertController.Create("Error",
                                                                           $"An error recognizing speech occurred: {err.LocalizedDescription}",
                                                                           UIAlertControllerStyle.Alert);
                        PresentViewController(alertViewController, true, null);
                    });
                }
                else
                {
                    if (result.Final)
                    {
                        InvokeOnMainThread(() =>
                        {
                            SpeechToTextView.Text = result.BestTranscription.FormattedString;
                        });
                    }
                }
            });
        }
Example no. 10
        public override async Task <AccessState> RequestAccess()
        {
            var status = AccessState.Available;

            if (!UIDevice.CurrentDevice.CheckSystemVersion(10, 0))
            {
                status = AccessState.NotSupported;
            }

            else
            {
                var nativeStatus = SFSpeechRecognizer.AuthorizationStatus;
                if (nativeStatus != SFSpeechRecognizerAuthorizationStatus.NotDetermined)
                {
                    status = FromNative(nativeStatus);
                }

                else
                {
                    var tcs = new TaskCompletionSource <AccessState>();
                    SFSpeechRecognizer.RequestAuthorization(x => tcs.SetResult(FromNative(x)));
                    status = await tcs.Task.ConfigureAwait(false);
                }
            }
            return(status);
        }
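
FromNative is referenced but not included in this excerpt. A plausible mapping sketch, assuming AccessState also defines Denied, Restricted and Unknown members alongside the Available and NotSupported values used above:

        static AccessState FromNative(SFSpeechRecognizerAuthorizationStatus status)
        {
            switch (status)
            {
            case SFSpeechRecognizerAuthorizationStatus.Authorized:
                return AccessState.Available;

            case SFSpeechRecognizerAuthorizationStatus.Denied:
                return AccessState.Denied;          // assumed enum member

            case SFSpeechRecognizerAuthorizationStatus.Restricted:
                return AccessState.Restricted;      // assumed enum member

            default:
                return AccessState.Unknown;         // assumed enum member
            }
        }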
        public Task RequestAccess()
        {
            var tcs = new TaskCompletionSource <object>();

            SFSpeechRecognizer.RequestAuthorization(_ => {
                tcs.SetResult(null);
            });

            return(tcs.Task);
        }
Example no. 12
        private void ResetSpeechToText()
        {
            _speechRecognizer  = new SFSpeechRecognizer();
            _speechAudioEngine = new AVAudioEngine();

            _speechRecognitionRequest = new SFSpeechAudioBufferRecognitionRequest();
            _speechRecognitionRequest.ShouldReportPartialResults = true;
            _speechRecognitionRequest.TaskHint          = SFSpeechRecognitionTaskHint.Search;
            _speechRecognitionRequest.ContextualStrings = new string[] { "for", "the", "a", "an" };

            _speechRecognitionTask = null;
        }
 /// <inheritdoc />
 public VoiceToTextServiceImpl()
 {
     try
     {
         _audioEngine      = new AVAudioEngine();
         _speechRecognizer = new SFSpeechRecognizer();
     }
     catch (Exception ex)
     {
         System.Diagnostics.Debug.WriteLine(ex);
     }
 }
        private void initSpeakerButton()
        {
            SpeakerButton.Enabled = false; // Tap once to start recording, tap again to stop

            SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus auth) =>
            {
                bool buttonIsEnabled = false;
                switch (auth)
                {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:
                    buttonIsEnabled     = true;
                    var node            = audioEngine.InputNode;
                    var recordingFormat = node.GetBusOutputFormat(0);
                    node.InstallTapOnBus(0, 1024, recordingFormat, (AVAudioPcmBuffer buffer, AVAudioTime when) =>
                    {
                        recognitionRequest.Append(buffer);
                    });
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Denied:
                    buttonIsEnabled = false;
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Restricted:
                    buttonIsEnabled = false;
                    break;

                case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                    buttonIsEnabled = false;
                    break;
                }

                InvokeOnMainThread(() => { SpeakerButton.Enabled = buttonIsEnabled; });
            });

            //Event triggered when the button is pressed
            SpeakerButton.TouchUpInside += delegate
            {
                if (audioEngine.Running == true)
                {
                    StopRecording();
                    SpeakerButton.Highlighted = false;
                }
                else
                {
                    StartRecording();
                    resetTexts();
                    YouAskedLabel.Text        = "Listening...";
                    SpeakerButton.Highlighted = true;
                }
            };
        }
Example no. 15
 public void AvailabilityDidChange(SFSpeechRecognizer speechRecognizer, bool available)
 {
     if (available)
     {
         recordButton.Enabled = true;
         recordButton.SetTitle("Start Recording", UIControlState.Normal);
     }
     else
     {
         recordButton.Enabled = false;
         recordButton.SetTitle("Recognition not available", UIControlState.Disabled);
     }
 }
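
AvailabilityDidChange is only invoked if the class is registered as the recognizer's delegate (speechRecognizer.Delegate = this, as in Example no. 5). One common wiring, sketched under the assumption that the containing class is a view controller adopting ISFSpeechRecognizerDelegate (SpeechViewController is a hypothetical name):

 public partial class SpeechViewController : UIViewController, ISFSpeechRecognizerDelegate
 {
     SFSpeechRecognizer speechRecognizer;

     public override void ViewDidLoad()
     {
         base.ViewDidLoad();

         speechRecognizer          = new SFSpeechRecognizer();
         speechRecognizer.Delegate = this; // route availability callbacks to this class
     }

     // Optional protocol members need an Export attribute when the interface is adopted.
     [Export("speechRecognizer:availabilityDidChange:")]
     public void AvailabilityDidChange(SFSpeechRecognizer speechRecognizer, bool available)
     {
         // ... enable or disable the record button as shown above
     }
 }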
Example no. 16
        public SpeechToText()
        {
            _audioEngine              = new AVAudioEngine();
            _speechRecognizer         = new SFSpeechRecognizer();
            _speechRecognitionRequest = new SFSpeechAudioBufferRecognitionRequest();
            _speechRecognitionTask    = new SFSpeechRecognitionTask();

            SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
            {
                // For brevity this sample assumes the user grants authorization;
                // a real app should also handle the Denied, Restricted and NotDetermined statuses.
            });
        }
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();

            // Register with app delegate
            ThisApp.Controller = this;

            RecognizeButton.TouchUpInside += RecognizeButtonTouchUpInside;
            RecognizeButton.Enabled        = false;
            SFSpeechRecognizer.RequestAuthorization(HandleVoiceAuthorization);

            SetupUserActionSearch();

            SetupCoreSpotlightSearch();
        }
Example no. 18
        private void StartSpeechRecognizer()
        {
            if (!recording)
            {
                speechRecognizer = new SFSpeechRecognizer();
                node             = audioEngine.InputNode;
                var recordingFormat = node.GetBusOutputFormat(0);
                liveSpeechRequest = new SFSpeechAudioBufferRecognitionRequest();

                node.InstallTapOnBus(0, 1024, recordingFormat,
                                     (AVAudioPcmBuffer buffer, AVAudioTime when) =>
                {
                    liveSpeechRequest.Append(buffer);
                });
                recording = true;

                audioEngine.Prepare();
                audioEngine.StartAndReturnError(out NSError error);
                if (error != null)
                {
                    return;
                }

                Timer timer = new Timer(2000);
                timer.Start();
                timer.Elapsed  += EndRecognition;
                RecognitionTask = speechRecognizer.GetRecognitionTask(liveSpeechRequest,
                                                                      (SFSpeechRecognitionResult result, NSError err) =>
                {
                    if (err != null)
                    {
                        Recorded?.Invoke("");
                        return;
                    }
                    else
                    {
                        lastSpokenString = result.BestTranscription.FormattedString;
                        timer.Stop();
                        timer.Interval = 2000;
                        timer.Start();
                    }
                });
            }
            else
            {
                Recorded?.Invoke("");
            }
        }
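
EndRecognition, which the timer above invokes after two seconds of silence, is not shown. A hypothetical handler consistent with the fields used in StartSpeechRecognizer (the System.Timers.Timer signature and the teardown steps are assumptions):

        private void EndRecognition(object sender, System.Timers.ElapsedEventArgs e)
        {
            ((System.Timers.Timer)sender).Stop();

            // Tear down the live session and report the last transcription.
            node?.RemoveTapOnBus(0);
            audioEngine?.Stop();
            liveSpeechRequest?.EndAudio();
            RecognitionTask?.Cancel();
            recording = false;

            Recorded?.Invoke(lastSpokenString);
        }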
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();
            // Perform any additional setup after loading the view, typically from a nib.

            commandBtn.Enabled = false;

            SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus auth) =>
            {
                bool buttonIsEnabled = false;
                switch (auth)
                {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:
                    buttonIsEnabled     = true;
                    var node            = audioEngine.InputNode;
                    var recordingFormat = node.GetBusOutputFormat(0);
                    node.InstallTapOnBus(0, 1024, recordingFormat, (AVAudioPcmBuffer buffer, AVAudioTime when) =>
                    {
                        recognitionRequest.Append(buffer);
                    });
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Denied:
                    buttonIsEnabled = false;
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Restricted:
                    buttonIsEnabled = false;
                    break;

                case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                    buttonIsEnabled = false;
                    break;
                }

                InvokeOnMainThread(() => { commandBtn.Enabled = buttonIsEnabled; });
            });
            _libVLC      = new LibVLC();
            _mediaPlayer = new LibVLCSharp.Shared.MediaPlayer(_libVLC);

            _videoView = new VideoView {
                MediaPlayer = _mediaPlayer
            };

            _videoView.Frame = new CoreGraphics.CGRect(0, 0, this.View.Bounds.Size.Width, this.View.Bounds.Size.Height / 2);
            View.AddSubview(_videoView);
            _videoView.MediaPlayer.Play(new Media(_libVLC, VIDEO_URL, FromType.FromLocation));
        }
        public void RequestPemission()
        {
            request = PermissionRequest.Pending;
            SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) => {
                switch (status)
                {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:
                    request = PermissionRequest.Allowed;
                    break;

                case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                    request = PermissionRequest.Pending;
                    break;
                }
            });
        }
Example no. 21
            static void StopInstances()
            {
                AudioEngine?.InputNode?.RemoveTapOnBus(0);
                AudioEngine?.Stop();
                AudioEngine?.Dispose();
                AudioEngine = null;

                LiveSpeechRequest?.EndAudio();
                LiveSpeechRequest?.Dispose();
                LiveSpeechRequest = null;

                SpeechRecognizer?.Dispose();
                SpeechRecognizer = null;

                //Timer?.Dispose();
                //Timer = null;
            }
Example no. 22
        public string RecognizeVoice(string path, Editor editor)
        {
            SFSpeechRecognizer recognizer = new SFSpeechRecognizer(NSLocale.CurrentLocale);

            // Is the default language supported?
            if (recognizer == null)
            {
                // No, return to caller
                return("recognizer is null");
            }

            // Is recognition available?
            if (!recognizer.Available)
            {
                // No, return to caller
                return("recognizer is not available");
            }
            isTranslatingDone = false;
            NSUrl url = new NSUrl(path, false);
            SFSpeechUrlRecognitionRequest request = new SFSpeechUrlRecognitionRequest(url);
            SFSpeechRecognitionTask       sptask  = recognizer.GetRecognitionTask(request, (SFSpeechRecognitionResult result, NSError err) =>
            {
                if (err != null)
                {
                    resultString      = err.Description;
                    isTranslatingDone = true;
                    editor.Text       = resultString;
                }
                else
                {
                    if (result != null)
                    {
                        if (result.Final)
                        {
                            resultString      = result.BestTranscription.FormattedString;
                            isTranslatingDone = true;
                            // call back??
                            editor.Text = resultString;
                        }
                    }
                }
            });

            return("cannot recognize");
        }
        public async override void ViewDidLoad()
        {
            base.ViewDidLoad();

            Title = "Translate";

            this.AutomaticallyAdjustsScrollViewInsets = false;

            speechRecognizer          = new SFSpeechRecognizer(new NSLocale("en-US"));
            speechRecognizer.Delegate = this;

            englishText.Text    = "";
            translatedText.Text = "";

            await AskPermissions();

            askQuestion.TouchUpInside += AskQuestion_TouchUpInside;
        }
Example no. 24
        internal static Task <PermissionStatus> RequestSpeechPermission()
        {
            if (SpeechPermissionStatus != PermissionStatus.Unknown)
            {
                return(Task.FromResult(SpeechPermissionStatus));
            }


            if (!UIDevice.CurrentDevice.CheckSystemVersion(10, 0))
            {
                return(Task.FromResult(PermissionStatus.Unknown));
            }

#if __IOS__
            var tcs = new TaskCompletionSource <PermissionStatus>();

            SFSpeechRecognizer.RequestAuthorization(status =>
            {
                switch (status)
                {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:
                    tcs.TrySetResult(PermissionStatus.Granted);
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Denied:
                    tcs.TrySetResult(PermissionStatus.Denied);
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Restricted:
                    tcs.TrySetResult(PermissionStatus.Restricted);
                    break;

                default:
                    tcs.TrySetResult(PermissionStatus.Unknown);
                    break;
                }
            });
            return(tcs.Task);
#elif __TVOS__
            return(Task.FromResult(PermissionStatus.Granted));
#endif
        }
Example no. 25
        private void AskForSpeechPermission()
        {
            SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
            {
                switch (status)
                {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:
                    MessagingCenter.Send <ISpeechToText>(this, "Authorized");
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Denied:
                    throw new Exception("Audio permission denied");

                case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                    throw new Exception("Audio permission not available");

                case SFSpeechRecognizerAuthorizationStatus.Restricted:
                    throw new Exception("Audio permission denied");
                }
            });
        }
Example no. 26
        private void AskForSpeechPermission()
        {
            SFSpeechRecognizer.RequestAuthorization((SFSpeechRecognizerAuthorizationStatus status) =>
            {
                switch (status)
                {
                case SFSpeechRecognizerAuthorizationStatus.Authorized:
                    _isAuthorized = true;
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Denied:
                    break;

                case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                    break;

                case SFSpeechRecognizerAuthorizationStatus.Restricted:
                    break;
                }
            });
        }
Example no. 27
        public override IObservable <AccessState> RequestAccess() => Observable.Create <AccessState>(ob =>
        {
            if (!UIDevice.CurrentDevice.CheckSystemVersion(10, 0))
            {
                ob.Respond(AccessState.NotSupported);
            }

            else
            {
                var status = SFSpeechRecognizer.AuthorizationStatus;
                if (status != SFSpeechRecognizerAuthorizationStatus.NotDetermined)
                {
                    ob.Respond(FromNative(status));
                }

                else
                {
                    SFSpeechRecognizer.RequestAuthorization(x => ob.Respond(FromNative(x)));
                }
            }
            return(Disposable.Empty);
        });
Example no. 28
            static Task DoStart()
            {
                if (Device.OS.IsBeforeiOS(10))
                {
                    throw new Exception("This feature is not supported in this device. Please upgrade your iOS.");
                }

                SFSpeechRecognizer.RequestAuthorization(status =>
                {
                    if (status == SFSpeechRecognizerAuthorizationStatus.Authorized)
                    {
                        StartRecording();
                    }
                    else
                    {
                        Stop();
                        throw new Exception("Speech recognition authorization request was denied.");
                    }
                });

                return(Task.CompletedTask);
            }
        private bool AskForSpeechPermission()
        {
            var authorizationStatus = SFSpeechRecognizerAuthorizationStatus.Denied;

            SFSpeechRecognizer.RequestAuthorization(status => { authorizationStatus = status; });

            switch (authorizationStatus)
            {
            case SFSpeechRecognizerAuthorizationStatus.Authorized:
                return(true);

            case SFSpeechRecognizerAuthorizationStatus.Denied:
                throw new AccessViolationException("User denied access to speech recognition");

            case SFSpeechRecognizerAuthorizationStatus.NotDetermined:
                throw new AccessViolationException("Speech recognition not yet authorized");

            case SFSpeechRecognizerAuthorizationStatus.Restricted:
                throw new AccessViolationException("Speech recognition restricted on this device");
            }

            return(false);
        }
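
Because RequestAuthorization invokes its callback asynchronously, the synchronous variant above can read authorizationStatus before the user has responded. A sketch of an awaitable alternative (AskForSpeechPermissionAsync is a hypothetical name, not part of the original code):

        private Task<bool> AskForSpeechPermissionAsync()
        {
            var tcs = new TaskCompletionSource<bool>();

            SFSpeechRecognizer.RequestAuthorization(status =>
            {
                // Complete the task only once the user has actually responded.
                tcs.TrySetResult(status == SFSpeechRecognizerAuthorizationStatus.Authorized);
            });

            return tcs.Task;
        }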
Example no. 30
            static void StartRecording()
            {
                lock (Lock)
                {
                    if (SpeechRecognizer == null)
                    {
                        SpeechRecognizer  = new SFSpeechRecognizer();
                        LiveSpeechRequest = new SFSpeechAudioBufferRecognitionRequest();
                    }

                    var audioSession = AVAudioSession.SharedInstance();

                    audioSession.SetCategory(AVAudioSessionCategory.PlayAndRecord);
                    audioSession.SetMode(AVAudioSession.ModeDefault, out NSError error);
                    audioSession.OverrideOutputAudioPort(AVAudioSessionPortOverride.Speaker, out NSError speakerError);
                    audioSession.SetActive(true);

                    if (LogErrorAndStop(error) || LogErrorAndStop(speakerError))
                    {
                        return;
                    }

                    AudioEngine = new AVAudioEngine();
                    var node = AudioEngine.InputNode;

                    LiveSpeechRequest.ShouldReportPartialResults = true;

                    RecognitionTask = SpeechRecognizer.GetRecognitionTask(LiveSpeechRequest, (SFSpeechRecognitionResult result, NSError err) =>
                    {
                        if (LogErrorAndStop(err))
                        {
                            return;
                        }

                        var currentText = result.BestTranscription.FormattedString;

                        if (currentText.HasValue())
                        {
                            Listeners?.Invoke(currentText, result.Final);
                        }

                        if (IsContinuous)
                        {
                            Timer = new System.Timers.Timer(20000)
                            {
                                Enabled = true
                            };
                            Timer.Elapsed += (s, ev) =>
                            {
                                StopInstances();
                                StartRecording();
                            };

                            Timer.Start();
                        }
                    });

                    var recordingFormat = node.GetBusOutputFormat(0);
                    node.InstallTapOnBus(0, 1024, recordingFormat, (AVAudioPcmBuffer buffer, AVAudioTime when) =>
                    {
                        LiveSpeechRequest.Append(buffer);
                    });

                    if (AudioEngine == null)
                    {
                        Stop();
                        return;
                    }

                    AudioEngine?.Prepare();
                    AudioEngine?.StartAndReturnError(out error);

                    if (LogErrorAndStop(error))
                    {
                        return;
                    }
                }
            }
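
LogErrorAndStop is referenced above but not defined in this excerpt. A hypothetical helper consistent with how it is called: it returns true when an error occurred, after logging it and tearing the session down via the StopInstances method from Example no. 21:

            static bool LogErrorAndStop(NSError error)
            {
                if (error == null)
                {
                    return false;
                }

                System.Diagnostics.Debug.WriteLine(error.LocalizedDescription);
                StopInstances();
                return true;
            }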
Example no. 31
		public void AvailabilityDidChange (SFSpeechRecognizer speechRecognizer, bool available)
		{
			if (available) {
				recordButton.Enabled = true;
				recordButton.SetTitle ("Start Recording", UIControlState.Normal);
			} else {
				recordButton.Enabled = false;
				recordButton.SetTitle ("Recognition not available", UIControlState.Disabled);
			}
		}