/// <summary>
/// Stops any in-progress recognition and releases all speech resources:
/// unloads grammars, detaches engine event handlers, disposes the engine
/// and the recognition connector, and clears the accumulated transcript.
/// </summary>
public void Shutdown()
{
    // Stop recognition first so the engine is idle before teardown.
    if (_isActive)
    {
        StopSpeechRecognition();
    }

    if (_speechRecognitionEngine != null)
    {
        _speechRecognitionEngine.UnloadAllGrammars();
        _grammars.Clear();
        _pendingLoadSpeechGrammarCounter = 0;

        // Detach handlers so the engine cannot call back into this
        // instance after shutdown.
        _speechRecognitionEngine.SpeechDetected -= (SpeechRecognitionEngine_SpeechDetected);
        _speechRecognitionEngine.RecognizeCompleted -= (SpeechRecognitionEngine_RecognizeCompleted);
        _speechRecognitionEngine.LoadGrammarCompleted -= (SpeechRecognitionEngine_LoadGrammarCompleted);

        // Fix: SpeechRecognitionEngine implements IDisposable and was
        // previously leaked on shutdown; dispose it and drop the reference,
        // mirroring the connector cleanup below.
        _speechRecognitionEngine.Dispose();
        _speechRecognitionEngine = null;
    }

    if (_speechRecognitionConnector != null)
    {
        _speechRecognitionConnector.Dispose();
        _speechRecognitionConnector = null;
    }

    _speechTranscript.Clear();
    _transcriptRecorder = null;
}
/// <summary>
/// Terminates this media recorder: tears down the call and notifies the
/// owning session. Idempotent — subsequent calls are no-ops.
/// </summary>
public override void Shutdown()
{
    // Bail out early if a previous Shutdown already ran.
    if (_state == TranscriptRecorderState.Terminated)
    {
        return;
    }
    _state = TranscriptRecorderState.Terminated;

    TerminateCall();

    // Tell the session we are done, then release the back-reference.
    _transcriptRecorder.OnMediaTranscriptRecorderTerminated(this);
    _transcriptRecorder = null;
}
/// <summary>
/// Creates a conference recorder for the given session/conversation. If the
/// conversation is already joined to a conference, the recorder becomes
/// Active immediately and starts listening to conference events.
/// </summary>
/// <param name="transcriptRecorder">Owning session. Must not be null.</param>
/// <param name="conversation">Conversation to record. Must not be null.</param>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
public ConferenceTranscriptRecorder(TranscriptRecorderSession transcriptRecorder, Conversation conversation)
{
    // Consistency fix: validate arguments like the sibling recorder
    // constructors (ConversationTranscriptRecorder, AVTranscriptRecorder,
    // IMTranscriptRecorder) instead of failing later with a
    // NullReferenceException.
    if (transcriptRecorder == null)
    {
        throw new ArgumentNullException("transcriptRecorder");
    }
    if (conversation == null)
    {
        throw new ArgumentNullException("conversation");
    }

    _transcriptRecorder = transcriptRecorder;
    _conversation = conversation;

    // TODO: TranscriptRecorderSession should check if new conversation is joined to a conference and do full
    // Begin/EndJoin with an End async method in ConferenceTranscriptRecorder (as is done for Invite or Escalate)
    if (_conversation.ConferenceSession != null)
    {
        // Already joined: adopt the session, mark active, release any
        // waiter blocked on the join, and hook conference events.
        _conference = _conversation.ConferenceSession;
        _state = TranscriptRecorderState.Active;
        _waitForInvitedConferenceJoined.Set();
        RegisterConferenceEvents();
    }
}
/// <summary>
/// Creates a conversation recorder attached to the given session and begins
/// listening for conversation events.
/// </summary>
/// <param name="transcriptRecorder">Owning session. Must not be null.</param>
/// <param name="conversation">Conversation to record. Must not be null.</param>
/// <param name="isSubConversation">True when this conversation is a child of
/// another recorded conversation.</param>
/// <exception cref="ArgumentNullException">Either required argument is null.</exception>
public ConversationTranscriptRecorder(TranscriptRecorderSession transcriptRecorder, Conversation conversation, bool isSubConversation = false)
{
    // Guard clauses: reject null dependencies up front.
    if (transcriptRecorder == null)
    {
        throw new ArgumentNullException("transcriptRecorder");
    }
    if (conversation == null)
    {
        throw new ArgumentNullException("conversation");
    }

    _transcriptRecorder = transcriptRecorder;
    _conversation = conversation;
    _isSubConversation = isSubConversation;

    RegisterConversationEvents();
}
/// <summary>
/// Terminates the conference recorder: detaches conference event handlers
/// (when a conference was joined) and notifies the owning session.
/// Idempotent — subsequent calls are no-ops.
/// </summary>
public override void Shutdown()
{
    // Nothing to do when already terminated.
    if (_state == TranscriptRecorderState.Terminated)
    {
        return;
    }
    _state = TranscriptRecorderState.Terminated;

    // TODO: Shutdown message

    if (_conference != null)
    {
        UnregisterConferenceEvents();
    }

    _transcriptRecorder.OnMediaTranscriptRecorderTerminated(this);
    _transcriptRecorder = null;
}
/// <summary>
/// Terminates the conversation recorder. Sub-conversations are handed back
/// to the session for removal; top-level conversations are reported as
/// terminated and then torn down. Idempotent.
/// </summary>
public override void Shutdown()
{
    // Skip if a previous Shutdown already completed.
    if (_state == TranscriptRecorderState.Terminated)
    {
        return;
    }
    _state = TranscriptRecorderState.Terminated;

    if (this.IsSubConversation)
    {
        // The session owns sub-conversation lifetime; just ask it to drop us.
        _transcriptRecorder.OnSubConversationRemoved(this.Conversation, this);
    }
    else
    {
        // Top-level: notify the session, then terminate the conversation.
        _transcriptRecorder.OnMediaTranscriptRecorderTerminated(this);
        TerminateConversation();
    }
    _transcriptRecorder = null;
}
/// <summary>
/// Creates an audio/video recorder bound to the given session, wiring up an
/// internal speech recognizer and storing the optional caller-supplied
/// AV event handlers for later subscription.
/// </summary>
/// <param name="transcriptRecorder">Owning session. Must not be null.</param>
/// <exception cref="ArgumentNullException">transcriptRecorder is null.</exception>
public AVTranscriptRecorder(TranscriptRecorderSession transcriptRecorder,
    EventHandler<ConversationChangedEventArgs> audioVideoCallConversationChangedEventHandler = null,
    EventHandler<CallStateChangedEventArgs> audioVideoCallStateChangedEventHandler = null,
    EventHandler<AudioVideoFlowConfigurationRequestedEventArgs> audioVideoFlowConfigurationRequestedEventHandler = null,
    EventHandler<MediaFlowStateChangedEventArgs> audioVideoFlowStateChangedEventHandler = null)
{
    // Reject a null session up front.
    if (transcriptRecorder == null)
    {
        throw new ArgumentNullException("transcriptRecorder");
    }

    _transcriptRecorder = transcriptRecorder;

    // The recognizer transcribes audio on behalf of the same session.
    _speechRecognizer = new SpeechRecognizer(transcriptRecorder);

    // Optional handlers; null entries simply mean "no extra subscriber".
    _audioVideoCallConversationChangedEventHandler = audioVideoCallConversationChangedEventHandler;
    _audioVideoCallStateChangedEventHandler = audioVideoCallStateChangedEventHandler;
    _audioVideoFlowConfigurationRequestedEventHandler = audioVideoFlowConfigurationRequestedEventHandler;
    _audioVideoFlowStateChangedEventHandler = audioVideoFlowStateChangedEventHandler;
}
/// <summary>
/// Terminates the AV recorder: ends the call, shuts down the speech
/// recognizer, and notifies the owning session. Idempotent.
/// </summary>
public override void Shutdown()
{
    // Already terminated — nothing more to do.
    if (_state == TranscriptRecorderState.Terminated)
    {
        return;
    }
    _state = TranscriptRecorderState.Terminated;

    this.TerminateCall();

    // Release the recognizer before notifying the session.
    if (_speechRecognizer != null)
    {
        _speechRecognizer.Shutdown();
        _speechRecognizer = null;
    }

    _transcriptRecorder.OnMediaTranscriptRecorderTerminated(this);
    _transcriptRecorder = null;
}
/// <summary>
/// Builds an audio/video recorder for the supplied session. A dedicated
/// SpeechRecognizer is created for transcription, and each optional event
/// handler argument is captured for subscription when calls are attached.
/// </summary>
/// <param name="transcriptRecorder">Owning session. Must not be null.</param>
/// <exception cref="ArgumentNullException">transcriptRecorder is null.</exception>
public AVTranscriptRecorder(TranscriptRecorderSession transcriptRecorder,
    EventHandler<ConversationChangedEventArgs> audioVideoCallConversationChangedEventHandler = null,
    EventHandler<CallStateChangedEventArgs> audioVideoCallStateChangedEventHandler = null,
    EventHandler<AudioVideoFlowConfigurationRequestedEventArgs> audioVideoFlowConfigurationRequestedEventHandler = null,
    EventHandler<MediaFlowStateChangedEventArgs> audioVideoFlowStateChangedEventHandler = null)
{
    if (transcriptRecorder == null)
    {
        throw new ArgumentNullException("transcriptRecorder");
    }

    _transcriptRecorder = transcriptRecorder;
    _speechRecognizer = new SpeechRecognizer(transcriptRecorder);

    // Stash the caller's handlers (any may be null).
    _audioVideoCallConversationChangedEventHandler = audioVideoCallConversationChangedEventHandler;
    _audioVideoCallStateChangedEventHandler = audioVideoCallStateChangedEventHandler;
    _audioVideoFlowConfigurationRequestedEventHandler = audioVideoFlowConfigurationRequestedEventHandler;
    _audioVideoFlowStateChangedEventHandler = audioVideoFlowStateChangedEventHandler;
}
/// <summary>
/// Creates an instant-messaging recorder bound to the given session and
/// captures the optional caller-supplied IM call/flow event handlers for
/// later subscription.
/// </summary>
/// <param name="transcriptRecorder">Owning session. Must not be null.</param>
/// <exception cref="ArgumentNullException">transcriptRecorder is null.</exception>
public IMTranscriptRecorder(TranscriptRecorderSession transcriptRecorder,
    EventHandler<CallStateChangedEventArgs> imCallStateChangedEventHandler = null,
    EventHandler<InstantMessagingFlowConfigurationRequestedEventArgs> imFlowConfigurationRequestedEventHandler = null,
    EventHandler<MediaFlowStateChangedEventArgs> imFlowStateChangedEventHandler = null,
    EventHandler<InstantMessageReceivedEventArgs> imFlowMessageReceivedEventHandler = null,
    EventHandler<ConversationChangedEventArgs> imCallConversationChangedEventHandler = null)
{
    // Reject a null session up front.
    if (transcriptRecorder == null)
    {
        throw new ArgumentNullException("transcriptRecorder");
    }

    _transcriptRecorder = transcriptRecorder;

    // Optional handlers; null entries simply mean "no extra subscriber".
    _imCallStateChangedEventHandler = imCallStateChangedEventHandler;
    _imFlowConfigurationRequestedEventHandler = imFlowConfigurationRequestedEventHandler;
    _imFlowStateChangedEventHandler = imFlowStateChangedEventHandler;
    _imFlowMessageReceivedEventHandler = imFlowMessageReceivedEventHandler;
    _imCallConversationChangedEventHandler = imCallConversationChangedEventHandler;
}
/// <summary>
/// Builds an instant-messaging recorder for the supplied session. Each
/// optional event handler argument is stored for subscription when an IM
/// call is attached.
/// </summary>
/// <param name="transcriptRecorder">Owning session. Must not be null.</param>
/// <exception cref="ArgumentNullException">transcriptRecorder is null.</exception>
public IMTranscriptRecorder(TranscriptRecorderSession transcriptRecorder,
    EventHandler<CallStateChangedEventArgs> imCallStateChangedEventHandler = null,
    EventHandler<InstantMessagingFlowConfigurationRequestedEventArgs> imFlowConfigurationRequestedEventHandler = null,
    EventHandler<MediaFlowStateChangedEventArgs> imFlowStateChangedEventHandler = null,
    EventHandler<InstantMessageReceivedEventArgs> imFlowMessageReceivedEventHandler = null,
    EventHandler<ConversationChangedEventArgs> imCallConversationChangedEventHandler = null)
{
    if (transcriptRecorder == null)
    {
        throw new ArgumentNullException("transcriptRecorder");
    }

    _transcriptRecorder = transcriptRecorder;

    // Stash the caller's handlers (any may be null).
    _imCallStateChangedEventHandler = imCallStateChangedEventHandler;
    _imFlowConfigurationRequestedEventHandler = imFlowConfigurationRequestedEventHandler;
    _imFlowStateChangedEventHandler = imFlowStateChangedEventHandler;
    _imFlowMessageReceivedEventHandler = imFlowMessageReceivedEventHandler;
    _imCallConversationChangedEventHandler = imCallConversationChangedEventHandler;
}
/// <summary>
/// Tears down the instant-messaging call: detaches flow and call event
/// handlers, begins asynchronous call termination, removes any
/// sub-conversation from the session, and resets the wait handles used to
/// track call acceptance and flow activation.
/// </summary>
public void TerminateCall()
{
    if (_instantMessagingFlow != null)
    {
        _instantMessagingFlow.StateChanged -= this.InstantMessagingFlow_StateChanged;
        _instantMessagingFlow.MessageReceived -= this.InstantMessagingFlow_MessageReceived;
        _instantMessagingFlow = null;
    }

    if (_instantMessagingCall != null)
    {
        // Start async termination; the call instance is passed as state so
        // the callback can complete it after we drop our reference.
        _instantMessagingCall.BeginTerminate(InstantMessagingCallTerminated, _instantMessagingCall);
        _instantMessagingCall.StateChanged -= this.InstantMessagingCall_StateChanged;
        _instantMessagingCall.InstantMessagingFlowConfigurationRequested -= this.InstantMessagingCall_FlowConfigurationRequested;
        _instantMessagingCall.ConversationChanged -= this.InstantMessagingCall_ConversationChanged;
        _instantMessagingCall = null;
    }
    else
    {
        // No call to terminate — release any waiter immediately.
        _waitForIMCallTerminated.Set();
    }

    // Bug fix: this block appeared twice in the original. The first copy set
    // _transcriptRecorder = null, so the second copy dereferenced null via
    // _transcriptRecorder.OnSubConversationRemoved and threw a
    // NullReferenceException whenever a sub-conversation existed. Notify the
    // session exactly once and clear only the sub-conversation reference;
    // _transcriptRecorder is released in Shutdown(), not here.
    if (_subConversation != null)
    {
        _transcriptRecorder.OnSubConversationRemoved(_subConversation, this);
        _subConversation = null;
    }

    _waitForIMCallAccepted.Reset();
    _waitForIMFlowStateChangedToActiveCompleted.Reset();
}
// Constructs the speech recognizer for a transcript session:
//  1. Creates a SpeechRecognitionConnector and reads the recognition locale
//     from app settings (falls back to DefaultLocale when unset).
//  2. Creates a Microsoft.Speech SpeechRecognitionEngine; on failure, logs
//     and retries with the default locale. (Locale-specific engine selection
//     is currently commented out — the engine is always built with the
//     default constructor, so _currentSRLocale is effectively unused here.)
//  3. Subscribes SpeechDetected / RecognizeCompleted / LoadGrammarCompleted.
//  4. Builds the grammar list: an optional local "en-US.cfgpp" rule file
//     (note: the path is hard-coded, not derived from _currentSRLocale) plus
//     a small built-in word list, then kicks off async grammar loading.
// NOTE(review): the string literals below contain raw line breaks — this
// looks like extraction garbling of the original file; left byte-identical.
public SpeechRecognizer(TranscriptRecorderSession transcriptRecorder) { _transcriptRecorder = transcriptRecorder; _speechTranscript = new List <Microsoft.Speech.Recognition.RecognitionResult>(); _isActive = false; _isRecognizing = false; // Create a speech recognition connector _speechRecognitionConnector = new SpeechRecognitionConnector(); _currentSRLocale = ConfigurationManager.AppSettings[SpeechRecogLocaleKey]; if (String.IsNullOrEmpty(_currentSRLocale)) { NonBlockingConsole.WriteLine("No locale specified, using default locale for speech recognition: " + DefaultLocale); _currentSRLocale = DefaultLocale; } // Create speech recognition engine and start recognizing by attaching connector to engine try { _speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(); /* * System.Globalization.CultureInfo localeCultureInfo = new System.Globalization.CultureInfo(_currentSRLocale); * foreach (RecognizerInfo r in Microsoft.Speech.Recognition.SpeechRecognitionEngine.InstalledRecognizers()) * { * if (r.Culture.Equals(localeCultureInfo)) * { * _speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(r); * break; * } * } * if (_speechRecognitionEngine == null) * { * _speechRecognitionEngine = new SpeechRecognitionEngine(); * } */ //_speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(new System.Globalization.CultureInfo(_currentSRLocale)); } catch (Exception e) { NonBlockingConsole.WriteLine("Error: Unable to load SpeechRecognition locale: " + _currentSRLocale + ". 
Exception: " + e.ToString()); // Use default locale NonBlockingConsole.WriteLine("Falling back to default locale for SpeechRecognitionEngine: " + DefaultLocale); _currentSRLocale = DefaultLocale; _speechRecognitionEngine = new SpeechRecognitionEngine(); //_speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(new System.Globalization.CultureInfo(_currentSRLocale)); } _speechRecognitionEngine.SpeechDetected += new EventHandler <Microsoft.Speech.Recognition.SpeechDetectedEventArgs>(SpeechRecognitionEngine_SpeechDetected); _speechRecognitionEngine.RecognizeCompleted += new EventHandler <Microsoft.Speech.Recognition.RecognizeCompletedEventArgs>(SpeechRecognitionEngine_RecognizeCompleted); _speechRecognitionEngine.LoadGrammarCompleted += new EventHandler <Microsoft.Speech.Recognition.LoadGrammarCompletedEventArgs>(SpeechRecognitionEngine_LoadGrammarCompleted); _grammars = new List <Microsoft.Speech.Recognition.Grammar>(); // TODO: Add default installed speech recognizer grammar // TODO: Might already be done via compiling with Recognition Settings File? // Add default locale language grammar file (if it exists) String localLanguageGrammarFilePath = Path.Combine(Environment.CurrentDirectory, @"en-US.cfgpp"); if (File.Exists(localLanguageGrammarFilePath)) { NonBlockingConsole.WriteLine("SpeechRecognizer(). 
Adding locale language file at path: " + localLanguageGrammarFilePath); GrammarBuilder builder = new GrammarBuilder(); builder.AppendRuleReference(localLanguageGrammarFilePath); Grammar localeLanguageGrammar = new Grammar(builder); localeLanguageGrammar.Name = "Local language grammar"; //localeLanguageGrammar.Priority = 1; _grammars.Add(localeLanguageGrammar); } string[] recognizedString = { "hello", "bye", "yes", "no", "help", "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "exit" }; Choices numberChoices = new Choices(recognizedString); Grammar basicGrammar = new Grammar(new GrammarBuilder(numberChoices)); basicGrammar.Name = "Basic Grammar"; //basicGrammar.Priority = 2; _grammars.Add(basicGrammar); LoadSpeechGrammarAsync(); }
// Constructs the speech recognizer for a transcript session (duplicate copy
// of the constructor above; kept in sync):
//  1. Creates a SpeechRecognitionConnector and reads the recognition locale
//     from app settings (falls back to DefaultLocale when unset).
//  2. Creates a Microsoft.Speech SpeechRecognitionEngine; on failure, logs
//     and retries with the default locale. (Locale-specific engine selection
//     is commented out — the engine always uses the default constructor.)
//  3. Subscribes SpeechDetected / RecognizeCompleted / LoadGrammarCompleted.
//  4. Builds the grammar list: an optional local "en-US.cfgpp" rule file
//     (hard-coded path, not derived from _currentSRLocale) plus a small
//     built-in word list, then starts async grammar loading.
// NOTE(review): the string literals below contain raw line breaks — this
// looks like extraction garbling of the original file; left byte-identical.
public SpeechRecognizer(TranscriptRecorderSession transcriptRecorder) { _transcriptRecorder = transcriptRecorder; _speechTranscript = new List<Microsoft.Speech.Recognition.RecognitionResult>(); _isActive = false; _isRecognizing = false; // Create a speech recognition connector _speechRecognitionConnector = new SpeechRecognitionConnector(); _currentSRLocale = ConfigurationManager.AppSettings[SpeechRecogLocaleKey]; if (String.IsNullOrEmpty(_currentSRLocale)) { NonBlockingConsole.WriteLine("No locale specified, using default locale for speech recognition: " + DefaultLocale); _currentSRLocale = DefaultLocale; } // Create speech recognition engine and start recognizing by attaching connector to engine try { _speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(); /* System.Globalization.CultureInfo localeCultureInfo = new System.Globalization.CultureInfo(_currentSRLocale); foreach (RecognizerInfo r in Microsoft.Speech.Recognition.SpeechRecognitionEngine.InstalledRecognizers()) { if (r.Culture.Equals(localeCultureInfo)) { _speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(r); break; } } if (_speechRecognitionEngine == null) { _speechRecognitionEngine = new SpeechRecognitionEngine(); } */ //_speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(new System.Globalization.CultureInfo(_currentSRLocale)); } catch (Exception e) { NonBlockingConsole.WriteLine("Error: Unable to load SpeechRecognition locale: " + _currentSRLocale + ". 
Exception: " + e.ToString()); // Use default locale NonBlockingConsole.WriteLine("Falling back to default locale for SpeechRecognitionEngine: " + DefaultLocale); _currentSRLocale = DefaultLocale; _speechRecognitionEngine = new SpeechRecognitionEngine(); //_speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine(new System.Globalization.CultureInfo(_currentSRLocale)); } _speechRecognitionEngine.SpeechDetected += new EventHandler<Microsoft.Speech.Recognition.SpeechDetectedEventArgs>(SpeechRecognitionEngine_SpeechDetected); _speechRecognitionEngine.RecognizeCompleted += new EventHandler<Microsoft.Speech.Recognition.RecognizeCompletedEventArgs>(SpeechRecognitionEngine_RecognizeCompleted); _speechRecognitionEngine.LoadGrammarCompleted += new EventHandler<Microsoft.Speech.Recognition.LoadGrammarCompletedEventArgs>(SpeechRecognitionEngine_LoadGrammarCompleted); _grammars = new List<Microsoft.Speech.Recognition.Grammar>(); // TODO: Add default installed speech recognizer grammar // TODO: Might already be done via compiling with Recognition Settings File? // Add default locale language grammar file (if it exists) String localLanguageGrammarFilePath = Path.Combine(Environment.CurrentDirectory, @"en-US.cfgpp"); if (File.Exists(localLanguageGrammarFilePath)) { NonBlockingConsole.WriteLine("SpeechRecognizer(). 
Adding locale language file at path: " + localLanguageGrammarFilePath); GrammarBuilder builder = new GrammarBuilder(); builder.AppendRuleReference(localLanguageGrammarFilePath); Grammar localeLanguageGrammar = new Grammar(builder); localeLanguageGrammar.Name = "Local language grammar"; //localeLanguageGrammar.Priority = 1; _grammars.Add(localeLanguageGrammar); } string[] recognizedString = { "hello", "bye", "yes", "no", "help", "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "exit" }; Choices numberChoices = new Choices(recognizedString); Grammar basicGrammar = new Grammar(new GrammarBuilder(numberChoices)); basicGrammar.Name = "Basic Grammar"; //basicGrammar.Priority = 2; _grammars.Add(basicGrammar); LoadSpeechGrammarAsync(); }