コード例 #1
0
        /// <summary>
        /// Parses one line of output from the recognizer process, raising
        /// <c>Listening</c> once the engine reports it is ready and
        /// <c>Recognized</c> for each completed pass-1 sentence.
        /// </summary>
        /// <param name="line">A single line of process output.</param>
        public void OnProcessOutput(string line)
        {
            // Trim once instead of re-trimming in every branch.
            var trimmed = line.Trim();

            if (!IsListening && trimmed.Contains("So, the first input will not be recognized."))
            {
                IsListening = true;
                Listening?.Invoke();
            }
            else if (trimmed.StartsWith("sentence1: <s> "))
            {
                IsPass1Recognizing = false;
                IsPass1Complete    = true;
                // Strip the "sentence1: <s> " prefix and "</s>" end marker emitted by the engine.
                Pass1Text          = line.Replace("sentence1: <s> ", "").Replace("</s>", "").Trim();
                Debug("Recognized text: {0}", Pass1Text);
                Recognized?.Invoke(Pass1Text);
            }
        }
コード例 #2
0
        /// <summary>
        /// Captures the configured screen region, OCRs it with Tesseract and,
        /// when the text parses as a number, publishes the value via
        /// <c>Recognized</c>; otherwise raises <c>Unrecognized</c>. Optionally
        /// speaks the result.
        /// </summary>
        private void Update()
        {
            // Release the previous frame before grabbing a new one.
            Capture?.Dispose();
            Capture = BitmapFactory.CreateScreenCapture(CaptureX, CaptureY, CaptureWidth, CaptureHeight, CaptureGrayScale, CaptureScale);

            // NOTE(review): PageSegMode is not a [Flags] enum — OR-ing SingleChar and
            // SingleLine produces an unrelated numeric mode value. Confirm which single
            // segmentation mode is intended and pass it alone.
            using (var page = TesseractEngine.Process(Capture, PageSegMode.SingleChar | PageSegMode.SingleLine))
            {
                decimal value;
                if (decimal.TryParse(page.GetText(), NumberStyles.Number, CultureInfo.InvariantCulture, out value))
                {
                    Value = value;
                    // Invariant culture keeps the log timestamp format machine-stable (CA1305).
                    LogBuilder.AppendLine(string.Format(CultureInfo.InvariantCulture, "[{0}] : {1}", DateTime.Now, value));
                    SpeechMessage = new Prompt(string.Format("{0}\n{1}", Title, value));

                    Recognized?.Invoke(this, value, Capture);
                }
                else
                {
                    LogBuilder.AppendLine(string.Format(CultureInfo.InvariantCulture, "[{0}] : {1}", DateTime.Now, Resources.Unrecognized));
                    SpeechMessage = new Prompt(string.Format("{0}\n{1}", Title, Resources.Unrecognized));
                    Unrecognized?.Invoke(this, Capture);
                }

                if (NotifyEnabled)
                {
                    SpeechSynthesizer.SpeakAsync(SpeechMessage);
                }
            }
        }
コード例 #3
0
 /// <summary>
 /// Wires the native event-detected callback so each native result is
 /// surfaced through the <c>Recognized</c> event.
 /// </summary>
 private void RegisterEvent()
 {
     _eventDetectedCallback = delegate(IntPtr trigger, IntPtr source, int streamId, IntPtr result, IntPtr userData)
     {
         Recognized?.Invoke(this, CreatePersonRecognizedEventArgs(result));
     };
 }
コード例 #4
0
 /// <summary>
 /// Per-frame update: raises <c>Recognized</c> whenever files were dropped
 /// onto the window this frame.
 /// </summary>
 internal protected override void Update(float delta)
 {
     var dropped = Input.DroppedFiles;
     if (dropped.Count == 0)
     {
         return;
     }
     Recognized?.Invoke(dropped);
 }
コード例 #5
0
ファイル: Speech.cs プロジェクト: BriefRobotics/kuka
 /// <summary>
 /// Selects a female synthesizer voice, loads the supplied grammar from the
 /// default audio device and starts continuous recognition, forwarding each
 /// result through <c>Recognized</c>.
 /// </summary>
 /// <param name="grammar">Grammar to recognize against.</param>
 public Speech(GrammarBuilder grammar)
 {
     synth.SelectVoiceByHints(VoiceGender.Female);
     reco.SetInputToDefaultAudioDevice();
     reco.LoadGrammar(new Grammar(grammar));
     reco.RecognizeAsync(RecognizeMode.Multiple);
     reco.SpeechRecognized += (sender, args) => Recognized?.Invoke(this, args.Result);
 }
コード例 #6
0
        /// <summary>
        /// Android recognition callback: forwards the top-ranked match, if any,
        /// through the <c>Recognized</c> event.
        /// </summary>
        /// <param name="results">Bundle delivered by the platform recognizer.</param>
        public void OnResults(Bundle results)
        {
            var matches = results.GetStringArrayList(SpeechRecognizer.ResultsRecognition);

            // The null-conditional covers both the missing-list and empty-list cases.
            if (matches?.Count > 0)
            {
                Recognized?.Invoke(this, matches[0]);
            }
        }
コード例 #7
0
 /// <summary>
 /// Combines the hash codes of the identity-bearing components of this state.
 /// </summary>
 private int ComputeHashCode()
 {
     var dottedRuleHash = DottedRule.GetHashCode();
     var originHash     = Origin.GetHashCode();
     var recognizedHash = Recognized.GetHashCode();
     var reductionHash  = Reduction.GetHashCode();
     var indexHash      = Index.GetHashCode();
     return HashCode.Compute(dottedRuleHash, originHash, recognizedHash, reductionHash, indexHash);
 }
コード例 #8
0
 /// <summary>
 /// Per-frame update: raises <c>Recognized</c> for dropped files and reports
 /// whether the event fired this frame.
 /// </summary>
 /// <returns><c>true</c> when files were dropped and the event was raised.</returns>
 internal protected override bool OnUpdate()
 {
     var dropped = Input.DroppedFiles;
     if (dropped.Count == 0)
     {
         return false;
     }
     Recognized?.Invoke(dropped);
     return true;
 }
コード例 #9
0
 /// <summary>
 /// Creates a dictation recognizer on the default audio device and starts
 /// continuous recognition, surfacing final results via <c>Recognized</c>
 /// and interim guesses via <c>Hypothesized</c>.
 /// </summary>
 public Recognizer()
 {
     _engine = new SpeechRecognitionEngine();
     _engine.SpeechRecognized   += (sender, args) => Recognized?.Invoke(args.Result);
     _engine.SpeechHypothesized += (sender, args) => Hypothesized?.Invoke(args.Result);
     _engine.LoadGrammar(new DictationGrammar());
     _engine.SetInputToDefaultAudioDevice();
     _engine.RecognizeAsync(RecognizeMode.Multiple);
 }
コード例 #10
0
ファイル: PersonRecognizer.cs プロジェクト: tscholb/TizenFX
 /// <summary>
 /// Native event hook: translates the unmanaged recognition result into a
 /// managed event-args object and raises <c>Recognized</c>. Exceptions from
 /// subscribers are logged rather than allowed to propagate back into the
 /// native callback.
 /// </summary>
 internal override void OnEventDetected(IntPtr trigger, IntPtr source, int streamId,
                                        IntPtr result, IntPtr _)
 {
     try
     {
         Recognized?.Invoke(this, CreatePersonRecognizedEventArgs(result));
     }
     catch (Exception e)
     {
         MultimediaLog.Error(MediaVisionLog.Tag, "Failed to invoke Recognized event.", e);
     }
 }
コード例 #11
0
 /// <summary>
 /// Handles a speech-recognition result: when confidence is high enough,
 /// raises <c>Recognized</c> and executes the registered command whose text
 /// matches the recognized phrase.
 /// </summary>
 private void sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
 {
     // Ignore low-confidence results to reduce false activations.
     if (e.Result.Confidence > 0.7)
     {
         var text = e.Result.Text;
         Recognized?.Invoke(text);

         // FirstOrDefault(predicate) avoids the intermediate Where() enumerator.
         var command = CommandList.Commands.FirstOrDefault(c => c.Command.Equals(text));
         if (command != null)
         {
             command.Execute();
         }
     }
 }
コード例 #12
0
        /// <summary>
        /// Value equality for transition states.
        /// </summary>
        /// <param name="obj">Candidate object to compare against.</param>
        /// <returns><c>true</c> when <paramref name="obj"/> is an equivalent <c>TransitionState</c>.</returns>
        public override bool Equals(object obj)
        {
            if (obj is null)
            {
                return(false);
            }
            if (!(obj is TransitionState transitionState))
            {
                return(false);
            }

            // NOTE(review): the remaining components are compared only indirectly via
            // GetHashCode() equality, so a hash collision could report two distinct
            // states as equal — confirm this is acceptable for how states are deduplicated.
            return(GetHashCode() == transitionState.GetHashCode() &&
                   Recognized.Equals(transitionState.Recognized) &&
                   Index == transitionState.Index);
        }
コード例 #13
0
        /// <summary>
        /// Value equality for transition states.
        /// </summary>
        /// <param name="obj">Candidate object to compare against.</param>
        /// <returns><c>true</c> when <paramref name="obj"/> is an equivalent <c>TransitionState</c>.</returns>
        public override bool Equals(object obj)
        {
            // A single type pattern replaces the null check plus "as" cast:
            // "is" is false for null, so both early-outs collapse into one.
            if (!(obj is TransitionState transitionState))
            {
                return(false);
            }

            // NOTE(review): the remaining components are compared only indirectly via
            // GetHashCode() equality, so a hash collision could report two distinct
            // states as equal — confirm this is acceptable for how states are deduplicated.
            return(GetHashCode() == transitionState.GetHashCode() &&
                   Recognized.Equals(transitionState.Recognized) &&
                   Index == transitionState.Index);
        }
コード例 #14
0
 /// <summary>
 /// Replaces any loaded grammars with <paramref name="grammar"/> and starts
 /// continuous recognition, forwarding results via <c>Recognized</c>.
 /// Failures are reported to the console rather than thrown.
 /// </summary>
 /// <param name="grammar">Grammar to recognize against.</param>
 public void SetGrammar(GrammarBuilder grammar)
 {
     try
     {
         // NOTE(review): the original also built a combined grammar (supplied grammar
         // + dictation) but never loaded it; that dead code has been removed. If a
         // dictation fallback was intended, load the combined grammar here instead.
         reco.SetInputToDefaultAudioDevice();
         reco.UnloadAllGrammars();
         reco.LoadGrammar(new Grammar(grammar));
         reco.RecognizeAsync(RecognizeMode.Multiple);
         // NOTE(review): each call adds another SpeechRecognized handler, so calling
         // SetGrammar twice makes Recognized fire twice per result — consider
         // subscribing once (e.g. in the constructor) instead.
         reco.SpeechRecognized += (_, e) => Recognized?.Invoke(this, e.Result);
     }
     catch (Exception ex)
     {
         Console.WriteLine($"ERROR: {ex.Message}");
     }
 }
コード例 #15
0
        /// <summary>
        /// Hash code combining the message's set fields; fields still at their
        /// default value do not contribute (protobuf-generated-code convention).
        /// </summary>
        public override int GetHashCode()
        {
            int hash = 1;

            if (ClientId != 0)
            {
                hash ^= ClientId.GetHashCode();
            }
            if (Recognized.Length != 0)
            {
                hash ^= Recognized.GetHashCode();
            }
            // Presence is tested on the backing field; the hash uses the property.
            if (timestamp_ != null)
            {
                hash ^= Timestamp.GetHashCode();
            }
            return(hash);
        }
コード例 #16
0
        /// <summary>
        /// Runs the face recognizer on a detected face and, when a known label is
        /// predicted, raises <c>Recognized</c> with the person's display name (or
        /// the raw label text when no matching person record exists).
        /// </summary>
        /// <param name="detectedFace">Grayscale face crop to classify.</param>
        /// <returns><c>true</c> when a label was recognized; otherwise <c>false</c>.</returns>
        private bool RecognizeFamiliarPerson(Image <Gray, byte> detectedFace)
        {
            var result = _faceRecognizer.Predict(detectedFace);

            // A label of -1 means the recognizer found no match.
            if (result.Label == -1)
            {
                return(false);
            }

            var human       = ServicesWorker.GetInstance <HumanService>().GetHumanFromId(result.Label);
            var displayName = human != null ? human.Name : result.Label.ToString();
            Recognized?.Invoke(displayName, result.Distance);
            return(true);
        }
コード例 #17
0
ファイル: SecondForm.cs プロジェクト: Chistogov/classSharp
        /// <summary>
        /// Saves the checked symptom tags for the current picture, marks it as
        /// recognized and advances to the next picture. Redirects to the login
        /// form when no user is signed in.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            if (current_picture != null)
            {
                if (StaticInfo.user != null)
                {
                    List <Symptom> symptoms = db.Symptoms.ToList();
                    // Resolve the user once instead of re-loading the whole Users
                    // table for every checked box (was O(boxes * users)).
                    var user = db.Users.ToList().FirstOrDefault(p => p.User_name == StaticInfo.user.User_name);
                    foreach (var control in groupBox1.Controls)
                    {
                        // One pattern match replaces the repeated "as CheckBox" casts.
                        if (control is CheckBox checkBox && checkBox.Checked)
                        {
                            Recognized rec = new Recognized();
                            rec.Picture = current_picture;
                            rec.Date    = DateTime.Now;
                            rec.Symptom = symptoms.FirstOrDefault(p => p.Symptom_name == checkBox.Text);
                            rec.User    = user;
                            db.Recognized_.Add(rec);
                            db.SaveChanges();
                        }
                    }

                    current_picture.Recognized = true;
                    db.SaveChanges();
                }
                else
                {
                    LoginForm form = new LoginForm();
                    form.Show();
                    return;
                }
            }
            if (current_picture != null)
            {
                fileName_old.Add(current_picture.Pic_name);
            }
            statusLabel.Text = fileName_old.Count() + "/" + filesNames.Count();
            getNextPic();
        }
コード例 #18
0
        /// <summary>
        /// Saves the checked symptom tags for the current picture, marks it as
        /// recognized (clearing any skipped flag) and advances to the next
        /// picture. Redirects to the login form when no user is signed in.
        /// </summary>
        private void button1_Click_1(object sender, EventArgs e)
        {
            if (current_picture != null)
            {
                if (StaticInfo.user != null)
                {
                    Text = "Классификатор. Клинка \"Ухо, Горло, Нос\" / " + StaticInfo.user.User_name;
                    List <Symptom> symptoms = db.Symptoms.ToList();
                    // Resolve the user once instead of re-loading the whole Users
                    // table for every checked box (was O(boxes * users)).
                    var user = db.Users.ToList().FirstOrDefault(p => p.User_name == StaticInfo.user.User_name);
                    foreach (var control in groupBox1.Controls)
                    {
                        // One pattern match replaces the repeated "as CheckBox" casts.
                        if (control is CheckBox checkBox && checkBox.Checked)
                        {
                            Recognized rec = new Recognized();
                            rec.Picture = current_picture;
                            rec.Date    = DateTime.Now;
                            rec.Symptom = symptoms.FirstOrDefault(p => p.Symptom_name == checkBox.Text);
                            rec.User    = user;
                            db.Recognized_.Add(rec);
                            db.SaveChanges();
                        }
                    }

                    current_picture.Recognized = true;
                    current_picture.Skipped    = false;
                    db.SaveChanges();
                }
                else
                {
                    LoginForm form = new LoginForm();
                    form.Show();
                    return;
                }
            }
            refreshTags();
            getNextPic();
        }
コード例 #19
0
        /// <summary>
        /// Called when Azure's Recognizing or Recognized events have been invoked.
        /// Passes the event on to <see cref="Recognized"/>
        /// </summary>
        /// <param name="e"></param>
        /// <param name="final"></param>
        /// <summary>
        /// Called when Azure's Recognizing or Recognized events have been invoked.
        /// Passes the event on to <see cref="Recognized"/>
        /// </summary>
        /// <param name="e">Azure speech-recognition event data.</param>
        /// <param name="final"><c>true</c> for a final (Recognized) result, <c>false</c> for an interim (Recognizing) one.</param>
        protected void OnSpeechEvent(SpeechRecognitionEventArgs e, bool final)
        {
            var language = AutoDetectSourceLanguageResult.FromResult(e.Result);

            string strEvent = final ? "Recognized" : "Recognizing";

            Logger.LogTrace($"{strEvent} ({language.Language}): {e.Result.Text}");

            if (string.IsNullOrWhiteSpace(e.Result.Text))
            {
                //this happens occasionally
                return;
            }

            var recognizedEvent = ServiceProvider.GetService(typeof(SpeechRecognizedEvent)) as SpeechRecognizedEvent;

            // GetService returns null when the service is not registered; without this
            // guard the property assignments below throw NullReferenceException.
            if (recognizedEvent == null)
            {
                Logger.LogTrace("SpeechRecognizedEvent service is not registered; dropping event.");
                return;
            }

            recognizedEvent.Preliminary = !final;
            recognizedEvent.ResultID    = e.Result.OffsetInTicks.ToString();
            recognizedEvent.Text        = e.Result.Text;
            recognizedEvent.Language    = language.Language;

            Recognized?.Invoke(this, recognizedEvent);
        }
コード例 #20
0
 /// <summary>
 /// Marks the gesture as recognized by raising the <c>Recognized</c> event
 /// with empty event arguments.
 /// </summary>
 protected virtual void Recognize() => Recognized?.Invoke(this, EventArgs.Empty);
        /// <summary>
        /// Main connect/receive/send loop for the AmiVoice WebSocket session.
        /// Repeatedly: (re)connects when needed, drains <c>receiveQueue</c> and
        /// dispatches server messages by their one-letter command prefix
        /// (p/e/S/E/C/U/A), then forwards queued audio from <c>sendQueue</c>.
        /// On exit it sends the end command, drains the disconnection reply and
        /// closes the socket. Errors are surfaced through <c>ErrorOccured</c>.
        /// </summary>
        /// <param name="ct">Cancels the whole message loop.</param>
        protected async Task MessageLoop(CancellationToken ct)
        {
            ProvidingState = ProvidingStateType.Initialized;

            try
            {
                // Connects and reports failure through ErrorOccured; returns false on failure.
                Func <Task <bool> > connectAction = async() =>
                {
                    var connectionResult = await Connect(ct);

                    if (connectionResult.isSuccess == false)
                    {
                        LastErrorString = connectionResult.message;
                        ErrorOccured?.Invoke(this, LastErrorString);
                        return(false);
                    }
                    Trace?.Invoke(this, "Connection complete.");
                    return(true);
                };

                sendQueue.Clear();

                CancellationTokenSource receiveTokenSource = new CancellationTokenSource();
                CancellationToken       receiveToken       = receiveTokenSource.Token;
                Task?receiveTask = null;

                // Server messages can be padded with spaces/NUL bytes; trim them off.
                char[] charsToTrim = { ' ', '\x00' };
                while (!ct.IsCancellationRequested)
                {
                    // (Re)connect on first entry, or after the receive task has run to completion.
                    if (ProvidingState == ProvidingStateType.Initialized || (receiveTask != null && receiveTask.Status == TaskStatus.RanToCompletion))
                    {
                        Trace?.Invoke(this, "Try to connect.");
                        if (receiveTask != null && receiveTask.Status == TaskStatus.Running)
                        {
                            receiveTokenSource?.Cancel();
                            await receiveTask;
                        }

                        var connectResult = await connectAction();

                        if (!connectResult)
                        {
                            break;
                        }

                        receiveTask?.Dispose();
                        receiveTokenSource = new CancellationTokenSource();
                        receiveToken       = receiveTokenSource.Token;
                        receiveTask        = Task.Run(() => ReceiveLoop(receiveToken), receiveToken);
                    }

                    string receiveData;
                    if (receiveQueue.TryDequeue(out receiveData))
                    {
                        receiveData = receiveData.Trim(charsToTrim);
                        //Debug.WriteLine(String.Format("Recieve: {0}", receiveData));

                        // Disconnection due to session timeout and the like.
                        // In practice the server may force-close the socket instead, so this path is not always taken.
                        if (receiveData.StartsWith("p") && receiveData.Length > 3)
                        {
                            Debug.WriteLine("Timeout occured.");
                            Trace?.Invoke(this, receiveData.Substring(1).Trim());
                            RecognizingState = RecognizingStateType.NotRecognizing;
                            DetectingState   = DetectingStateType.NotDetecting;
                            ProvidingState   = ProvidingStateType.Initialized;
                            continue;
                        }

                        // Error handling: an "e" message with a payload aborts the loop.
                        if ((receiveData.StartsWith("e") && receiveData.Length > 3))
                        {
                            LastErrorString = receiveData;
                            ErrorOccured?.Invoke(this, receiveData.Substring(1));
                            ProvidingState   = ProvidingStateType.Error;
                            RecognizingState = RecognizingStateType.NotRecognizing;
                            DetectingState   = DetectingStateType.NotDetecting;
                            break;
                        }

                        // Start of a detected speech segment.
                        if (receiveData.StartsWith("S"))
                        {
                            uint startMiliSec;
                            if (uint.TryParse(receiveData.Substring(2), out startMiliSec))
                            {
                                //Debug.WriteLine(String.Format("S: {0}", receiveData));
                                VoiceStart?.Invoke(this, startMiliSec);
                            }
                            DetectingState = DetectingStateType.Detecting;
                        }

                        // End of a detected speech segment.
                        if (receiveData.StartsWith("E"))
                        {
                            uint endMiliSec;
                            if (uint.TryParse(receiveData.Substring(2), out endMiliSec))
                            {
                                //Debug.WriteLine(String.Format("E: {0}", receiveData));
                                VoiceEnd?.Invoke(this, endMiliSec);
                            }
                            DetectingState = DetectingStateType.NotDetecting;
                        }

                        // Recognition processing started.
                        if (receiveData.StartsWith("C"))
                        {
                            RecognizingState = RecognizingStateType.Recognizing;
                            RecognizeStarting?.Invoke(this, true);
                        }

                        // Recognition result returned ("U" = interim, "A" = final), JSON payload.
                        if (receiveData.StartsWith("U") || receiveData.StartsWith("A"))
                        {
                            try
                            {
                                //Debug.WriteLine(receiveData.Substring(2).Trim());
                                var result = JsonSerializer.Deserialize <SpeechRecognitionEventArgs>(receiveData.Substring(2), jsonSerializerOptions);
                                if (receiveData.StartsWith("U"))
                                {
                                    // Intermediate recognition result.
                                    Recognizing?.Invoke(this, result);
                                }
                                else if (receiveData.StartsWith("A"))
                                {
                                    // Final recognition result.
                                    Recognized?.Invoke(this, result);
                                    RecognizingState = RecognizingStateType.NotRecognizing;
                                }
                            }
                            catch (JsonException ex)
                            {
                                Debug.WriteLine(ex.Message);
                            }
                        }
                    }

                    // Check whether the receive task terminated abnormally.
                    if (receiveTask != null && receiveTask.Status == TaskStatus.Faulted)
                    {
                        Debug.WriteLine(String.Format("Loop:ReceiveWSException: {0}", receiveTask.Exception.InnerException.Message));
                        ErrorOccured?.Invoke(this, String.Format("ReceiveTaskException"));
                        break;
                    }

                    // Send queued audio data (prefixed with the audio command byte).
                    byte[] sendData;
                    if (wsAmiVoice.State == WebSocketState.Open && sendQueue.TryDequeue(out sendData))
                    {
                        sendData = prefixC.Concat(sendData).ToArray();
                        if (sendData.Length == 0)
                        {
                            continue;
                        }
                        try
                        {
                            await wsAmiVoice.SendAsync(sendData, WebSocketMessageType.Binary, true, CancellationToken.None);
                        }
                        catch (Exception ex) when(ex is WebSocketException || ex is IOException)
                        {
                            var sendErrString = String.Format("Send:WebSocketException: {0}", ex.Message);

                            Trace?.Invoke(this, sendErrString);
                        }
                    }
                }

                // Shutdown: tell the server the session is over.
                byte[] endArray = new byte[] { (byte)CommandType.End };
                await wsAmiVoice.SendAsync(endArray, WebSocketMessageType.Text, true, CancellationToken.None);

                string disconnectionStr = "";
                while (wsAmiVoice.State == WebSocketState.Open && receiveTask != null && receiveTask.Status == TaskStatus.Running)
                {
                    if (!receiveQueue.TryDequeue(out disconnectionStr))
                    {
                        continue;
                    }

                    // A bare "e" acknowledges a clean disconnect; "e<payload>" is an error.
                    if (disconnectionStr.StartsWith("e") && disconnectionStr.Length == 1)
                    {
                        receiveTokenSource?.Cancel();
                        receiveTask.Wait(1000);
                        ProvidingState   = ProvidingStateType.Initialized;
                        RecognizingState = RecognizingStateType.NotRecognizing;
                        DetectingState   = DetectingStateType.NotDetecting;
                        RecognizeStopped?.Invoke(this, true);
                    }
                    else if (disconnectionStr.StartsWith("e"))
                    {
                        RecognizingState = RecognizingStateType.NotRecognizing;
                        DetectingState   = DetectingStateType.NotDetecting;
                        ProvidingState   = ProvidingStateType.Error;
                        LastErrorString  = disconnectionStr.Substring(2);
                        ErrorOccured?.Invoke(this, LastErrorString);
                    }
                }

                // Make sure the receive task is stopped before leaving the try block.
                if (receiveTokenSource != null && !receiveTokenSource.IsCancellationRequested)
                {
                    receiveTokenSource.Cancel();
                    if (receiveTask != null && receiveTask.Status == TaskStatus.Running)
                    {
                        receiveTask.Wait(3000);
                    }
                }
            }
            catch (WebSocketException ex)
            {
                ErrorOccured?.Invoke(this, String.Format("Loop:WebSocketException: {0}", ex.Message));
            }
            finally
            {
                // Best-effort close; the socket may already be gone.
                if (wsAmiVoice.State == WebSocketState.Open)
                {
                    try
                    {
                        await wsAmiVoice.CloseAsync(WebSocketCloseStatus.NormalClosure, "", CancellationToken.None);
                    }
                    catch (WebSocketException ex)
                    {
                        Debug.WriteLine(String.Format("Close:WebSocketException: {0} - {1}", ex.Message, wsAmiVoice.State.ToString()));
                    }
                }
            }

            Trace?.Invoke(this, "Disconnected.");
        }
コード例 #22
0
        /// <summary>
        /// Starts a single recognition pass on the UWP speech recognizer and
        /// raises <c>Recognized</c> with the result, unless text-to-speech is
        /// currently speaking (in which case listening is skipped).
        /// NOTE(review): this is async void (required by the interface shape);
        /// the exceptions rethrown at the bottom cannot be observed by callers —
        /// confirm that is intended.
        /// </summary>
        async void ISpeechRecognition.StartListening()
        {
            _inactive = false;

            // Start recognition.
            try
            {
                if (_speechModule.TextToSpeech.IsSpeaking)
                {
                    // Don't listen while we are speaking, or we would recognize ourselves.
                    _speechModule.LanguageModel.AI.Engine.Debugger.Log(
                        Galatea.Diagnostics.DebuggerLogLevel.Diagnostic,
                        "TTS is speaking; Listening paused...");
                }
                else
                {
                    //// Get out of this f*****g loop
                    //if (_isListening) return;

                    //_isListening = true;

                    // Start Listening
                    int ruleId = -1;
                    SpeechRecognitionStatus status = SpeechRecognitionStatus.Empty;
                    SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeAsync();

                    // If successful, display the recognition result.
                    if (speechRecognitionResult.Status == SpeechRecognitionResultStatus.Success)
                    {
                        // NOTE(review): ruleId/status are updated only when the recognized
                        // text is EMPTY; a successful non-empty result keeps ruleId = -1 and
                        // status = Empty. This looks inverted — confirm the intended logic.
                        if (string.IsNullOrEmpty(speechRecognitionResult.Text))
                        {
                            ruleId = 0;
                            status = speechRecognitionResult.Status.Convert();
                        }
                    }
                    else
                    {
                        //resultTextBlock.Visibility = Visibility.Visible;
                        //resultTextBlock.Text = string.Format("Speech Recognition Failed, Status: {0}", speechRecognitionResult.Status.ToString());
                    }

                    // Fire Event
                    Recognized?.Invoke(this, new SpeechRecognizedEventArgs(ruleId, speechRecognitionResult.Text, null, status));

                    //_isListening = false;
                }
            }
            catch (TaskCanceledException exception)
            {
                // TaskCanceledException will be thrown if you exit the scenario while the recognizer is actively
                // processing speech. Since this happens here when we navigate out of the scenario, don't try to
                // show a message dialog for this exception.
                System.Diagnostics.Debug.WriteLine("TaskCanceledException caught while recognition in progress (can be ignored):");
                System.Diagnostics.Debug.WriteLine(exception.ToString());
            }
            catch (System.InvalidOperationException exception)
            {
                // No idea why it keeps throwing this Exception
                _speechModule.LanguageModel.AI.Engine.Debugger.Log(Galatea.Diagnostics.DebuggerLogLevel.Error, exception.Message);
                _speechModule.LanguageModel.AI.Engine.Debugger.Log(Galatea.Diagnostics.DebuggerLogLevel.StackTrace, exception.StackTrace);
            }
            catch (Exception exception)
            {
                string msg;
                // Handle the speech privacy policy error.
                if ((uint)exception.HResult == HResultPrivacyStatementDeclined)
                {
                    msg = Galatea.Globalization.RoboticsResources.SpeechRecognition_PrivacySettings_NotAccepted;
                    throw new TeaSpeechException(msg, exception);
                }
                //else
                //{
                //    msg = exception.Message;
                //}
                //var messageDialog = new Windows.UI.Popups.MessageDialog(msg, "Exception");
                //await messageDialog.ShowAsync();

                throw;
            }
        }
コード例 #23
0
 /// <summary>
 /// Raises the <c>Recognized</c> event with the supplied arguments.
 /// </summary>
 /// <param name="args">Event data passed to subscribers.</param>
 protected virtual void OnRecognized(RecognizedEventArgs args) => Recognized?.Invoke(this, args);