/// <summary>
/// Session stopped event handler. Logs the event, then completes the
/// TaskCompletionSource with 0 so the awaiting caller stops recognition.
/// </summary>
private static void SessionStoppedEventHandler(SessionEventArgs e, RecognizerType rt, TaskCompletionSource<int> source)
{
    var message = string.Format(
        CultureInfo.InvariantCulture,
        "Speech recognition: Session stopped event: {0}.",
        e);
    Console.WriteLine(message);
    source.TrySetResult(0);
}
/// <summary>
/// Creates the underlying SAPI recognizer COM object (in-proc or shared, per
/// <paramref name="type"/>) and wraps it in a proxy suited to the current
/// thread's COM apartment state.
/// </summary>
/// <param name="type">Selects the in-proc or shared SAPI recognizer.</param>
internal SapiRecognizer(RecognizerType type)
{
    ISpRecognizer recognizer;
    try
    {
        if (type == RecognizerType.InProc)
        {
            recognizer = (ISpRecognizer) new SpInprocRecognizer();
        }
        else
        {
            recognizer = (ISpRecognizer) new SpSharedRecognizer();
        }

        // ISpRecognizer2 is only implemented by newer SAPI runtimes, so its
        // presence is used here to distinguish SAPI 5.3 from the older 5.1.
        _isSap53 = recognizer is ISpRecognizer2;
    }
    catch (COMException e)
    {
        // Translate the raw COM failure into the managed exception callers expect.
        throw RecognizerBase.ExceptionFromSapiCreateRecognizerError(e);
    }

    // Back out if the recognizer we have is SAPI 5.1 and we are on an STA thread:
    // release this instance and let the proxy recreate it on a dedicated MTA
    // thread instead (presumably because SAPI 5.1 cannot be driven from an STA
    // thread — TODO confirm against SAPI documentation).
    if (!IsSapi53 && System.Threading.Thread.CurrentThread.GetApartmentState() == System.Threading.ApartmentState.STA)
    {
        // must be recreated on a different thread
        Marshal.ReleaseComObject(recognizer);
        _proxy = new SapiProxy.MTAThread(type);
    }
    else
    {
        _proxy = new SapiProxy.PassThrough(recognizer);
    }
}
/// <summary>
/// Maps a <see cref="RecognizerType"/> value to the tag string that identifies
/// the corresponding recognizer implementation. Unlisted values map to
/// "JointRelationRecognizer".
/// </summary>
public static string getRecognizerTag(RecognizerType type) => type switch
{
    RecognizerType.JointOrientation => "JointOrientationRecognizer",
    RecognizerType.LinearMovement => "LinearMovementRecognizer",
    RecognizerType.AngularMovement => "AngularMovementRecognizer",
    RecognizerType.FingerCount => "FingerCountRecognizer",
    RecognizerType.TemplateRecording => "TemplateRecognizer",
    RecognizerType.Combination => "CombinationRecognizer",
    _ => "JointRelationRecognizer",
};
/// <summary>
/// Logs the final recognition result to the console and writes it to the
/// transcript stream, prefixed with the speaker name when one is set.
/// </summary>
/// <param name="e">Event data carrying the final recognition result.</param>
/// <param name="rt">Recognizer type (unused; kept for handler-signature symmetry).</param>
private void RecognizedEventHandler(SpeechRecognitionEventArgs e, RecognizerType rt)
{
    // Enum interpolation calls ToString implicitly; the explicit call was redundant.
    Console.WriteLine($" --- Final result received. Reason: {e.Result.Reason}. --- ");
    if (!string.IsNullOrEmpty(e.Result.Text))
    {
        var outputWithPerson = $"{_person}: {e.Result.Text}";
        Console.WriteLine(outputWithPerson);
        // Only include the speaker prefix in the transcript when a person is known.
        // (Consistency fix: use the `string` keyword, matching the rest of the file.)
        _streamWriter.WriteLine(string.IsNullOrEmpty(_person) ? e.Result.Text : outputWithPerson);
    }
}
/// <summary>
/// Subscribes to recognition events, starts continuous recognition, waits until
/// a handler completes <paramref name="source"/> (final result, cancellation, or
/// session stop), then stops recognition and unsubscribes all handlers.
/// </summary>
/// <param name="recognizer">Recognizer object</param>
/// <param name="recognizerType">Type of Recognizer</param>
/// <param name="source">Task completion source completed by the stop/cancel handlers</param>
public async Task RunRecognizer(SpeechRecognizer recognizer, RecognizerType recognizerType, TaskCompletionSource<int> source)
{
    // Subscribe to events.
    // ReSharper disable ImplicitlyCapturedClosure
    void RecognizingHandler(object sender, SpeechRecognitionEventArgs e) => RecognizingEventHandler(e, recognizerType);
    void RecognizedHandler(object sender, SpeechRecognitionEventArgs e) => RecognizedEventHandler(e, recognizerType);
    void CanceledHandler(object sender, SpeechRecognitionCanceledEventArgs e) => CanceledEventHandler(e, recognizerType, source);
    void SessionStartedHandler(object sender, SessionEventArgs e) => SessionStartedEventHandler(e, recognizerType);
    void SessionStoppedHandler(object sender, SessionEventArgs e) => SessionStoppedEventHandler(e, recognizerType, source);
    void SpeechStartDetectedHandler(object sender, RecognitionEventArgs e) => SpeechDetectedEventHandler(e, recognizerType, "start");
    void SpeechEndDetectedHandler(object sender, RecognitionEventArgs e) => SpeechDetectedEventHandler(e, recognizerType, "end");
    // ReSharper restore ImplicitlyCapturedClosure

    recognizer.Recognizing += RecognizingHandler;
    recognizer.Recognized += RecognizedHandler;
    recognizer.Canceled += CanceledHandler;
    recognizer.SessionStarted += SessionStartedHandler;
    recognizer.SessionStopped += SessionStoppedHandler;
    // FIX: these two handlers were declared and unsubscribed (twice each) but
    // never subscribed, so speech start/end detection was never logged.
    recognizer.SpeechStartDetected += SpeechStartDetectedHandler;
    recognizer.SpeechEndDetected += SpeechEndDetectedHandler;

    // Start recognition, wait until a handler completes the source, then stop.
    await recognizer.StartContinuousRecognitionAsync().ConfigureAwait(false);
    await source.Task.ConfigureAwait(false);
    await recognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);

    // Unsubscribe each handler exactly once (the duplicate removals are gone).
    recognizer.Recognizing -= RecognizingHandler;
    recognizer.Recognized -= RecognizedHandler;
    recognizer.Canceled -= CanceledHandler;
    recognizer.SessionStarted -= SessionStartedHandler;
    recognizer.SessionStopped -= SessionStoppedHandler;
    recognizer.SpeechStartDetected -= SpeechStartDetectedHandler;
    recognizer.SpeechEndDetected -= SpeechEndDetectedHandler;
}
/// <summary>
/// Initializes the recognizer from a configuration dictionary. Recognized keys:
/// "host", "access_key", "access_secret", "timeout" (seconds, stored as
/// milliseconds), and "rec_type". Missing keys leave the corresponding field at
/// its default value.
/// </summary>
/// <param name="config">Configuration key/value pairs.</param>
public AcrCloudRecognizer(IDictionary<string, object> config)
{
    // TryGetValue avoids the ContainsKey + indexer double lookup of the old code.
    if (config.TryGetValue("host", out var host))
    {
        _host = (string)host;
    }
    if (config.TryGetValue("access_key", out var accessKey))
    {
        _accessKey = (string)accessKey;
    }
    if (config.TryGetValue("access_secret", out var accessSecret))
    {
        _accessSecret = (string)accessSecret;
    }
    if (config.TryGetValue("timeout", out var timeout))
    {
        // Configured in seconds; the field holds milliseconds.
        _timeout = 1000 * (int)timeout;
    }
    if (config.TryGetValue("rec_type", out var recType))
    {
        _recType = (RecognizerType)recType;
    }
}
/// <summary>
/// Creates the underlying SAPI recognizer COM object (in-proc when
/// <paramref name="type"/> equals the zero enum value, otherwise shared) and
/// wraps it in a proxy suited to the current thread's COM apartment state.
/// </summary>
/// <param name="type">Selects the in-proc or shared SAPI recognizer.</param>
internal SapiRecognizer(RecognizerType type)
{
    ISpRecognizer spRecognizer;
    try
    {
        // NOTE(review): decompiled comparison — `type != 0` presumably means
        // "not InProc"; verify against the RecognizerType enum ordering.
        spRecognizer = ((type != 0) ? ((ISpRecognizer) new SpSharedRecognizer()) : ((ISpRecognizer) new SpInprocRecognizer()));
        // ISpRecognizer2 is only implemented by newer SAPI runtimes; its
        // presence flags the SAPI 5.3 code paths.
        _isSap53 = (spRecognizer is ISpRecognizer2);
    }
    catch (COMException e)
    {
        // Translate the raw COM failure into the managed exception callers expect.
        throw RecognizerBase.ExceptionFromSapiCreateRecognizerError(e);
    }
    // On a pre-5.3 runtime running on an STA thread, release this instance and
    // let the proxy recreate the recognizer on a dedicated MTA thread instead.
    if (!IsSapi53 && Thread.CurrentThread.GetApartmentState() == ApartmentState.STA)
    {
        Marshal.ReleaseComObject(spRecognizer);
        _proxy = new SapiProxy.MTAThread(type);
    }
    else
    {
        _proxy = new SapiProxy.PassThrough(spRecognizer);
    }
}
/// <summary>
/// Loads a trained face recogniser from the specified file, choosing the
/// recogniser type from the file extension (".LBPH", ".FFR", or ".EFR"), then
/// loads the saved label names from Labels.xml in the same directory.
/// </summary>
/// <param name="filename">Path to the saved recogniser data.</param>
/// <exception cref="ArgumentException">
/// Thrown when the extension is not a supported recogniser format.
/// </exception>
public void LoadEigenRecogniser(string filename)
{
    // The recogniser type is encoded in the file extension.
    string ext = Path.GetExtension(filename);
    switch (ext)
    {
        case ".LBPH":
            _recognizerType = RecognizerType.LBPHFaceRecognizer;
            recognizer = new LBPHFaceRecognizer(1, 8, 8, 8, 100);//50
            break;
        case ".FFR":
            _recognizerType = RecognizerType.FisherFaceRecognizer;
            recognizer = new FisherFaceRecognizer(0, 3500);//4000
            break;
        case ".EFR":
            _recognizerType = RecognizerType.EigenFaceRecognizer;
            recognizer = new EigenFaceRecognizer(80, double.PositiveInfinity);
            break;
        default:
            // FIX: an unknown extension previously fell through and crashed
            // with a NullReferenceException on recognizer.Load below.
            throw new ArgumentException($"Unsupported recogniser file extension: {ext}", nameof(filename));
    }

    recognizer.Load(filename);

    // Now load the labels saved alongside the recogniser data.
    string direct = Path.GetDirectoryName(filename);
    _namesList.Clear();
    string labelsPath = Path.Combine(direct, "Labels.xml");
    if (File.Exists(labelsPath))
    {
        // FIX: `using` guarantees the stream is closed even when reading throws
        // (the old code leaked the FileStream on failure and ignored the return
        // value of Stream.Read). The XmlReader can read the stream directly,
        // so the intermediate byte[]/MemoryStream copy is unnecessary.
        using (FileStream filestream = File.OpenRead(labelsPath))
        using (XmlReader xmlreader = XmlTextReader.Create(filestream))
        {
            while (xmlreader.Read())
            {
                if (xmlreader.IsStartElement() && xmlreader.Name == "NAME" && xmlreader.Read())
                {
                    _namesList.Add(xmlreader.Value.Trim());
                }
            }
        }
        _contTrain = _numLabels;
    }
    IsTrained = true;
}
/// <summary>
/// Handles recognition-canceled events: completes the TaskCompletionSource with 0
/// (triggering recognition stop) and logs the cancellation reason and details.
/// </summary>
private static void CanceledEventHandler(SpeechRecognitionCanceledEventArgs e, RecognizerType rt, TaskCompletionSource<int> source)
{
    source.TrySetResult(0);
    Console.WriteLine("--- recognition canceled ---");
    Console.WriteLine($"CancellationReason: {e.Reason}. ErrorDetails: {e.ErrorDetails}.");
}
/// <summary>
/// Logs intermediate (partial) recognition results as they arrive.
/// </summary>
private static void RecognizingEventHandler(SpeechRecognitionEventArgs e, RecognizerType rt)
{
    Console.WriteLine($"Intermediate result: {e.Result.Text} ");
}
/// <summary>
/// Logs speech boundary detection events.
/// </summary>
/// <param name="eventType">"start" or "end", describing which boundary was detected.</param>
private static void SpeechDetectedEventHandler(RecognitionEventArgs e, RecognizerType rt, string eventType)
{
    var message = string.Format(
        CultureInfo.InvariantCulture,
        "Speech recognition: Speech {0} detected event: {1}.",
        eventType,
        e);
    Console.WriteLine(message);
}
/// <summary>
/// Logs session started events.
/// </summary>
private static void SessionStartedEventHandler(SessionEventArgs e, RecognizerType rt)
{
    var message = string.Format(
        CultureInfo.InvariantCulture,
        "Speech recognition: Session started event: {0}.",
        e);
    Console.WriteLine(message);
}