} // end SetProgressText
#endregion
#endregion

#region Event Raisers
#region RaiseCompletedEvent
/// <summary>
/// Completed event for the Process indicating that the task is complete, which raises
/// the completed event for an object of this class to indicate completion of the task.
/// The call is marshalled via <c>Invoke</c> onto the thread that owns this control.
/// </summary>
/// <param name="sender">Originator of the underlying process-completed event.</param>
/// <param name="e">Indicates the success rate of the data export.</param>
void RaiseCompletedEvent(object sender, ResultsEventArgs e)
{
    // BUG FIX: snapshot the delegate before invoking. Reading the field twice
    // (null check, then Invoke) lets another thread unsubscribe in between and
    // raise a NullReferenceException.
    TaskCompleteEventHandler handler = Completed;
    if (handler != null)
    {
        Invoke(new TaskCompleteEventHandler(handler), new object[] { sender, e });
    } // end if
} // end RaiseCompletedEvent
} // end ShowFinishedPanel
#endregion

#region LogResults
/// <summary>
/// Logs the results to the DataExporter-RFS system log.
/// </summary>
/// <param name="sender">Originator of the completed event; unused.</param>
/// <param name="e">Carries the overall success flag and the summary text.</param>
private void LogResults(object sender, ResultsEventArgs e)
{
    Logging log = new Logging(LogType.System, "DataExporter-RFS");
    // BUG FIX: the original concatenation had no separator and logged
    // "…data exportwas Successful!" / "…data exportFailed!". A trailing
    // space on the first literal restores the intended message.
    log.WriteLine(0, "RFSmart Version 3 data export " + (e.Result ? "was Successful!" : "Failed!"));
    log.WriteLine(0, Environment.NewLine + e.Summary);
} // end LogResults
/// <summary>
/// Handles a speech-recognition result: scores the best hypothesis against the
/// confidence array, updates the accuracy bar, and records a correct/incorrect
/// attempt before re-arming the microphone UI.
/// </summary>
/// <param name="sender">The speech recognizer raising the event.</param>
/// <param name="e">Bundle carrying the recognized strings and confidence scores.</param>
protected void SpeechRecognizerOnResults(object sender, ResultsEventArgs e)
{
    var recognitionResults = e.Results.GetStringArrayList(SpeechRecognizer.ResultsRecognition);
    var confidenceScores = e.Results.GetFloatArray(SpeechRecognizer.ConfidenceScores);

    // BUG FIX: only recognitionResults was null-checked. GetFloatArray can also
    // return null (key absent from the bundle), which crashed string.Join and
    // the confidenceScores[bestResultIndex] lookup below.
    if (recognitionResults == null || confidenceScores == null)
    {
        return;
    }

    Console.WriteLine(string.Join(" ", recognitionResults));
    Console.WriteLine(string.Join(" ", confidenceScores));

    int bestResultIndex = GetConfidenceResultIndex(recognitionResults, confidenceScores);
    if (bestResultIndex != -1 && confidenceScores[bestResultIndex] > 0.0)
    {
        this.AccuracyProgressBar.Progress = (int)(confidenceScores[bestResultIndex] * 100);
        this.CorrentAttempt();
    }
    else
    {
        this.AccuracyProgressBar.Progress = 0;
        this.IncorrectAttempt();
    }

    this.DisplayStartMicrophone();
}
/// <summary>
/// Forwards recognized phrases to <c>SpeechRecognitionResult</c> subscribers.
/// </summary>
/// <param name="sender">The speech recognizer raising the event.</param>
/// <param name="e">Bundle carrying the recognized strings.</param>
private void _speechRecognizer_Results(object sender, ResultsEventArgs e)
{
    try
    {
        var results = e.Results.GetStringArrayList(SpeechRecognizer.ResultsRecognition);
        // BUG FIX: GetStringArrayList may return null when the bundle has no
        // results; the original relied on the catch below to swallow the
        // resulting NullReferenceException from .ToList(). Handle it explicitly.
        if (results == null)
        {
            return;
        }
        SpeechRecognitionResult?.Invoke(this, new SpeechRecognitionEventArgs(results.ToList()));
    }
    catch (Exception)
    {
        // Deliberate best-effort: a throwing subscriber must not crash the
        // recognizer callback. NOTE(review): consider logging the exception.
    }
}
/// <summary>
/// Receives a batch of recognition results, hands the phrases to the string
/// processor, and kicks off the next processing cycle.
/// </summary>
/// <param name="sender">The speech recognizer raising the event.</param>
/// <param name="e">Bundle carrying the recognized strings.</param>
private void Recognizer_Results(object sender, ResultsEventArgs e)
{
    var recognizedPhrases = e.Results.GetStringArrayList(SpeechRecognizer.ResultsRecognition);
    ProcessStrings(recognizedPhrases);
    Console.WriteLine("Result received.");
    BeginProcessing();
}
/// <summary>
/// Relays recognition matches to the bound listener, logs the match count,
/// and restarts listening for the next utterance.
/// </summary>
/// <param name="sender">The speech recognizer raising the event.</param>
/// <param name="resultsEventArgs">Bundle carrying the recognized strings.</param>
private void SpeechRecognizerOnResults(object sender, ResultsEventArgs resultsEventArgs)
{
    var matches = resultsEventArgs.Results.GetStringArrayList(SpeechRecognizer.ResultsRecognition);
    // BUG FIX: GetStringArrayList can return null (key absent), which made
    // matches.Count throw a NullReferenceException and skipped StartListening().
    if (matches != null)
    {
        _binder.SpeechRecognitionListener?.OnSpeechRecognitionResults(matches);
        Log.Info("MyApp", $">>> {DateTime.Now.TimeOfDay} >>> Speech results: {matches.Count}");
    }
    // Always re-arm the recognizer, even when this callback carried no matches.
    StartListening();
}
} // end ShowProgressPanel
#endregion

#region ConstructResults
/// <summary>
/// Completed event handler, which is called when the data export is complete. This sets
/// the active view of the wizard to allow the user to proceed.
/// </summary>
/// <param name="sender">Originator of the completed event; unused.</param>
/// <param name="e">Carries the export success flag and summary text.</param>
private void ConstructResults(object sender, ResultsEventArgs e)
{
    // The export is done: restore the default cursor.
    Cursor = System.Windows.Forms.Cursors.Default;

    // Move the wizard onto the finished view.
    SetActiveView(true, false);
    FinishedPnl.ConstructResults(e.Result, e.Summary);

    // Disposing the environment panel closes its DB connections.
    EnvironmentPnl.Dispose();
} // end ConstructResults
/// <summary>
/// Recognition-results callback: converts the recognized phrases (and, where
/// available, their confidence scores) into an AIRequest and sends it, or fires
/// an empty AIResponse when nothing was recognized.
/// </summary>
/// <param name="sender">The speech recognizer raising the event.</param>
/// <param name="e">Bundle carrying the recognized strings and confidence scores.</param>
void SpeechRecognizer_Results(object sender, ResultsEventArgs e)
{
    // Only act while a session is in flight; the flag is cleared immediately
    // so any duplicate/late callback for the same session is ignored.
    if (recognitionActive)
    {
        recognitionActive = false;
        var recognitionResults = e.Results.GetStringArrayList(SpeechRecognizer.ResultsRecognition);
        float[] rates = null;
        // Confidence scores are only requested on Ice Cream Sandwich (API 14) and later.
        if (Build.VERSION.SdkInt >= BuildVersionCodes.IceCreamSandwich)
        {
            rates = e.Results.GetFloatArray(SpeechRecognizer.ConfidenceScores);
        }
        if (recognitionResults == null || recognitionResults.Count == 0)
        {
            // empty response — nothing was recognized, notify with a blank AIResponse
            FireOnResult(new AIResponse());
        }
        else
        {
            var aiRequest = new AIRequest();
            if (rates != null)
            {
                // Scores available: forward every hypothesis with its confidence.
                aiRequest.Query = recognitionResults.ToArray();
                aiRequest.Confidence = rates;
            }
            else
            {
                // No scores: forward only the top hypothesis.
                aiRequest.Query = new [] { recognitionResults[0] };
            }
            // Merge any caller-supplied extras into the outgoing request.
            if (requestExtras != null)
            {
                requestExtras.CopyTo(aiRequest);
            }
            SendRequest(aiRequest);
            ClearRecognizer();
        }
    }
}
/// <summary>
/// Recognition-results callback: normalizes the best hypothesis (trimmed,
/// lower-cased, accents stripped) and dispatches it as a command, or plays the
/// "not understood" response when nothing was recognized.
/// async void is acceptable only because this is a top-level event handler.
/// </summary>
/// <param name="sender">The speech recognizer raising the event.</param>
/// <param name="e">Bundle carrying the recognized strings.</param>
private async void MSpeechRecognizer_Results(object sender, ResultsEventArgs e)
{
    var matches = e.Results.GetStringArrayList(SpeechRecognizer.ResultsRecognition);

    // BUG FIX: GetStringArrayList can return null (key absent), which crashed
    // matches.Count. Treat null the same as "nothing recognized".
    if (matches == null || matches.Count == 0)
    {
        await IDoNotUnderstandAsync();
        return;
    }

    // BUG FIX: ToLowerInvariant() instead of culture-sensitive ToLower(), so
    // command matching is stable regardless of device locale (e.g. Turkish 'I').
    await ProcessCommand(WithoutAccents(matches[0].ToLowerInvariant().Trim()));
}
/// <summary>
/// Handles the end of the async GIS query by forwarding the result payload
/// to whichever component requested it.
/// </summary>
/// <param name="sender">The completed query task; unused.</param>
/// <param name="e">Result payload handed to the requesting component.</param>
void OnFinishedGisQuery(object sender, ResultsEventArgs e) => OnFinishedQuery(e);
/// <summary>
/// Raises the finished-query event with the supplied result payload.
/// </summary>
/// <param name="e">Result payload forwarded to subscribers.</param>
protected virtual void OnFinishedQuery(ResultsEventArgs e)
{
    // BUG FIX: ?.Invoke reads the delegate field exactly once, so a subscriber
    // unsubscribing between a separate null check and the call can no longer
    // cause a NullReferenceException.
    finishedQuery?.Invoke(this, e);
}
/// <summary>
/// Recognition-results callback: traces the event and forwards the raw result
/// bundle downstream together with the final-results flag.
/// </summary>
/// <param name="sender">The recognizer raising the event; unused.</param>
/// <param name="e">Carries the result bundle to forward.</param>
public void OnResults(object sender, ResultsEventArgs e)
{
    Debug.WriteLine("Speech Results");
    var resultBundle = e.Results;
    this.SendResults(resultBundle, this.FinalResults);
}
/// <summary>
/// Selection handler: credits the current operation when the selected result
/// matches the expected one, then advances to the next operation.
/// </summary>
/// <param name="source">The results control that raised the selection.</param>
/// <param name="e">Carries the selected result value.</param>
void results_OnSelect(Results source, ResultsEventArgs e)
{
    // Ignore selections once every operation has been answered.
    if (currentOperation >= OPERATIONS)
    {
        return;
    }

    // Only a selection matching the expected result counts.
    if (e.Result != temporalOperations[currentOperation].Result)
    {
        return;
    }

    operations[currentOperation].AddResult();
    NextOperation();
}
/// <summary>
/// Raises the finished-attribute-query (fill) event with the supplied results.
/// </summary>
/// <param name="e">Result payload forwarded to subscribers.</param>
protected virtual void OnFinishedAttributeQuery(ResultsEventArgs e)
{
    // BUG FIX: ?.Invoke reads the delegate field exactly once, eliminating the
    // race where a subscriber unsubscribes between the null check and the call.
    finishedFill?.Invoke(this, e);
}
/// <summary>
/// Query task is complete: wraps the returned feature set and the caller's
/// integer id (round-tripped through UserState) in a ResultsEventArgs and
/// raises the finished-attribute-query event. Any failure is reported to the
/// user via the custom message box instead of propagating.
/// </summary>
/// <param name="sender">The completed query task; unused.</param>
/// <param name="args">Carries the feature set and the caller-supplied UserState.</param>
public void AttributeQueryTask_ExecuteCompleted(object sender, QueryEventArgs args)
{
    try
    {
        FeatureSet features = args.FeatureSet;
        // UserState carries the id the caller attached when starting the query.
        int queryId = (int)args.UserState;
        OnFinishedAttributeQuery(new ResultsEventArgs(features, queryId));
    }
    catch (Exception ex)
    {
        messageBoxCustom.Show(
            String.Format("AttributeQueryTask_ExecuteCompleted-{0}", ex.Message),
            GisTexts.SevereError,
            MessageBoxCustomEnum.MessageBoxButtonCustom.Ok);
    }
}