/// <summary>
/// Wires handlers for the emulated CPU's port I/O events (inb/outb/outw), forwarding
/// each access to the corresponding My* handler. On failure the offending port/value
/// pair is logged before the exception is rethrown.
/// </summary>
public void SubscribeToCpuPortEvents()
{
    Implementation.runInb += (sender, args) =>
    {
        try
        {
            // NOTE(review): argument order (value, port) differs from MyOutb/MyOutw —
            // presumably MyInb's signature is (value, port); confirm against its declaration.
            MyInb(args.value, args.port);
        }
        catch
        {
            NonBlockingConsole.WriteLine($"inb, port: {args.port}, value: {args.value}");
            throw; // rethrow preserving the original stack trace
        }
    };
    Implementation.runOutb += (sender, args) =>
    {
        try
        {
            MyOutb(args.port, args.value);
        }
        catch
        {
            NonBlockingConsole.WriteLine($"outb, port: {args.port}, value: {args.value}");
            throw;
        }
    };
    Implementation.runOutw += (sender, args) =>
    {
        try
        {
            MyOutw(args.port, args.value);
        }
        catch
        {
            // FIX: this handler previously logged "outb" (copy/paste error) and wrote via
            // Console.WriteLine, unlike the other handlers which use NonBlockingConsole.
            NonBlockingConsole.WriteLine($"outw, port: {args.port}, value: {args.value}");
            throw;
        }
    };
}
/// <summary>
/// Computes the MD5 hash of <paramref name="filename"/> and records it in
/// <paramref name="hashToOriginalFilenames"/>. If the hash is already present, the file
/// is a duplicate of an earlier one and is sent to the recycle bin.
/// </summary>
/// <param name="hashToOriginalFilenames">Map of file hash to the first filename seen with that hash.</param>
/// <param name="filename">Full path of the file to hash and dedupe.</param>
private static void HashAndDedupeFile(ref Dictionary <IntHash, string> hashToOriginalFilenames, string filename)
{
    IntHash md5Hash;
    // FIX: dispose both the stream and the MD5 instance; the original leaked the
    // HashAlgorithm and left the stream open if ComputeHash threw.
    using (var file = File.OpenRead(filename))
    using (var md5 = MD5.Create())
    {
        md5Hash = new IntHash(md5.ComputeHash(file));
    }

    if (!hashToOriginalFilenames.ContainsKey(md5Hash))
    {
        hashToOriginalFilenames[md5Hash] = filename;
    }
    else // collision!
    {
        NonBlockingConsole.WriteLine(String.Format(
            "Dupe detected! {0} hashed as a duplicate of {1} with an MD5 of {2}. Deleting duplicate...",
            filename, hashToOriginalFilenames[md5Hash], md5Hash));
        // Send to recycle bin (recoverable) rather than hard-deleting.
        FileSystem.DeleteFile(
            filename,
            UIOption.OnlyErrorDialogs,
            RecycleOption.SendToRecycleBin,
            UICancelOption.ThrowException);
    }
}
// Flow configuration requested indicates that there is a flow present to begin media
// operations with (it is no longer null), and it is ready to be configured.
public void AudioVideoCall_FlowConfigurationRequested(object sender, AudioVideoFlowConfigurationRequestedEventArgs e)
{
    NonBlockingConsole.WriteLine("AV Flow Configuration Requested.");
    // Cache the now non-null flow for later media operations.
    _audioVideoFlow = e.Flow;

    // Record the event in the session transcript.
    Message m = new Message("AV Flow Configuration Requested.",
        MessageType.Audio, _transcriptRecorder.Conversation.Id);
    _transcriptRecorder.OnMessageReceived(m);

    //Now that the flow is non-null, bind the event handler for State Changed.
    // When the flow goes active, (as indicated by the state changed event) the program will perform media related actions..
    _audioVideoFlow.StateChanged += new EventHandler <MediaFlowStateChangedEventArgs>(AudioVideoFlow_StateChanged);

    // call top level event handler, if one is registered
    if (_audioVideoFlowConfigurationRequestedEventHandler != null)
    {
        _audioVideoFlowConfigurationRequestedEventHandler(sender, e);
    }
}
// Records conversation state transitions (console + transcript) and signals the
// join/terminate wait handles as the conversation reaches those states.
void Conversation_StateChanged(object sender, StateChangedEventArgs <ConversationState> e)
{
    Conversation conv = sender as Conversation;
    NonBlockingConsole.WriteLine("Conversation {0} state changed from " + e.PreviousState + " to " + e.State,
        conv.LocalParticipant.UserAtHost);

    // Mirror the transition into the transcript.
    Message m = new Message("Conversation state changed from " + e.PreviousState.ToString() + " to " + e.State.ToString(),
        MessageType.ConversationInfo, _conversation.Id);
    _transcriptRecorder.OnMessageReceived(m);

    if (e.State == ConversationState.Established || e.State == ConversationState.Conferenced)
    {
        // Conversation is up (possibly escalated to a conference) — unblock waiters.
        _waitForConversationJoined.Set();
    }
    else if (e.State == ConversationState.Terminating || e.State == ConversationState.Terminated)
    {
        // Conversation is going away — unblock waiters and tear down this recorder.
        _waitForConversationTerminated.Set();
        this.Shutdown();
    }
}
/// <summary>
/// Async callback: completes the termination of an audio/video call and signals
/// the termination wait handle so the initiating thread can continue.
/// </summary>
/// <param name="ar">Async result whose state is the <c>AudioVideoCall</c> being terminated.</param>
private void AudioVideoCallTerminated(IAsyncResult ar)
{
    var call = ar.AsyncState as AudioVideoCall;
    try
    {
        // Complete the pending BeginTerminate operation.
        call.EndTerminate(ar);
    }
    catch (Exception e)
    {
        NonBlockingConsole.WriteLine("Error terminating AV call: " + e.ToString());
        // TODO: Error message
    }
    finally
    {
        // Synchronize completion with the caller regardless of outcome.
        _waitForAudioVideoCallTerminated.Set();
    }
}
/// <summary>
/// Entry point: builds and runs the web host, mapping exceptions to exit codes
/// (-2 for cancellation, -1 for any other failure) and flushing console output last.
/// </summary>
/// <param name="args">Command-line arguments forwarded to the host builder.</param>
/// <returns>Process exit code.</returns>
public static int Main(string[] args)
{
    try
    {
        CreateWebHostBuilder(args).Build().Run();
        return ExitCode;
    }
    catch (System.OperationCanceledException ex)
    {
        NonBlockingConsole.WriteLine(ex.Message);
        return -2;
    }
    catch (Exception ex)
    {
        NonBlockingConsole.WriteLine(ex);
        return -1;
    }
    finally
    {
        // Ensure every queued console message is written before the process exits.
        NonBlockingConsole.AllWritten.Wait();
    }
}
/// <summary>
/// Runs the plugin against <paramref name="testFile"/>, captures the reporter's console
/// output, and asserts the expected error count for each known test file.
/// </summary>
/// <param name="testFile">Name of the SQL test file under the test directory.</param>
public void TestTrigger(string testFile)
{
    var currentConsoleOut = Console.Out;
    string path = Path.Combine(_testDirectoryPath, testFile);
    var ruleExceptions = new List <IRuleException>();
    var reporter = new ConsoleReporter();

    var startTime = DateTime.Now;
    using (TextReader textReader = File.OpenText(path))
    {
        var pluginContext = new PluginContext(path, ruleExceptions, textReader);
        IPlugin plugin = new MyPlugin();
        plugin.PerformAction(pluginContext, reporter);
    }
    var endtime = DateTime.Now;
    // FIX: was "startTime - endtime", which produces a negative TimeSpan for the report.
    var duration = endtime - startTime;

    using (var consoleOutput = new ConsoleOutput())
    {
        reporter.ReportResults(duration, 3);
        NonBlockingConsole.Consumer();

        // The summary line we care about ends with "Errors." (e.g. "1 Errors.").
        var consoleOutputValues = consoleOutput.GetOuput().Split('\n');
        var result = consoleOutputValues?.Where(x => x.Trim().EndsWith("Errors."))
                     .FirstOrDefault();
        switch (testFile)
        {
            case "Bank_tr_Test.sql":
                Assert.AreEqual("1 Errors.", result);
                break;
            case "Bank_tri_Test.sql":
            case "Bank_tri_Test1.sql":
                Assert.AreEqual("0 Errors.", result);
                break;
        }
    }

    // Verify the console redirection was fully undone.
    Assert.AreEqual(currentConsoleOut, Console.Out);
}
/// <summary>
/// Validates the command-line arguments: exactly two are required (watch folder and
/// file pattern) and the folder must exist. Exits the program on any violation.
/// </summary>
/// <param name="args">Raw command-line arguments.</param>
private static void SafetyCheck(string[] args)
{
    if (args.Length != 2)
    {
        NonBlockingConsole.WriteLine(
            "FileWatcher requires two arguments to run:" +
            "\n\t1) The path to the folder you wish to watch" +
            "\n\t2) A pattern indicating which file type to watch for." +
            "\n" +
            "\nExample: FileWatcher.exe \"c:\\file folder\" *.txt");
        QuitProgram(true);
    }

    var fileLocation = args[0];
    if (Directory.Exists(fileLocation))
    {
        return; // All good.
    }

    NonBlockingConsole.WriteLine(
        $"Folder location provided ({fileLocation}) either does not exist or you don't have permissions to access it.");
    QuitProgram(true);
}
/// <summary>
/// Entry point: runs the linter, then drains the non-blocking console queue before
/// exiting so no output is lost.
/// </summary>
/// <param name="args">Command-line arguments forwarded to the application.</param>
public static void Main(string[] args)
{
    try
    {
        NonBlockingConsole.WriteLine("running tsqllint");
        var application = new Application(args, new ConsoleReporter());
        application.Run();
        // Wait for the console queue to empty. FIX: the original loop busy-waited,
        // spinning a full core; yield briefly between polls instead.
        Task.Run(() =>
        {
            while (NonBlockingConsole.messageQueue.Count > 0)
            {
                Task.Delay(1).Wait();
            }
        }).Wait();
    }
    catch (Exception exception)
    {
        // Plain Console on purpose: the non-blocking queue may be the thing that failed.
        Console.WriteLine("TSQLLint encountered a problem.");
        Console.WriteLine(exception);
    }
}
// Standard dispose pattern: stops the SOCKS5 server and tears down every active client
// tunnel. Guarded by disposedValue so repeated calls are no-ops.
protected virtual void Dispose(bool disposing)
{
    if (!disposedValue)
    {
        if (disposing)
        {
            // Best-effort shutdown: failures during teardown are deliberately ignored.
            try { Socks5Server?.Stop(); } catch { }             //Terminating, we dont care.
            try { Socks5Server?.Server?.Dispose(); } catch { }  //Terminating, we dont care.

            // Snapshot the collection so we can remove from Clients while iterating.
            var clients = Clients.ToArray(); //Stop any active clients.
            foreach (ClientTunnel cTun in clients)
            {
                NonBlockingConsole.WriteLine($"Stopping client {cTun.ClientLocalEndPont}");
                cTun?.Stop();
                cTun?.Dispose();
                Clients.Remove(cTun);
            }
        }
        disposedValue = true;
    }
}
// Just to record the state transitions in the console, and mirror the participant
// property change into the session transcript.
void ConferenceSession_ParticipantEndpointPropertiesChanged(object sender,
    ParticipantEndpointPropertiesChangedEventArgs <ConferenceParticipantEndpointProperties> e)
{
    ConferenceSession confSession = sender as ConferenceSession;
    NonBlockingConsole.WriteLine(
        "{0} is notified of ConferenceSession participant property change for user: {1}. Role:{2}, CanManageLobby:{3}, InLobby:{4}",
        confSession.Conversation.LocalParticipant.UserAtHost,
        e.ParticipantEndpoint.Participant.UserAtHost,
        e.Properties.Role.ToString(),
        e.Properties.CanManageLobby.ToString(),
        e.Properties.IsInLobby.ToString());
    NonBlockingConsole.WriteLine("");

    // NOTE(review): e.ChangedPropertyNames.ToString() likely yields the collection's type
    // name rather than its contents — confirm whether a join of the names was intended.
    Message m = new Message("Conference participant properties changed. Changed properties: "
        + e.ChangedPropertyNames.ToString() + ". New property values: " + e.Properties.ToString() + ".",
        e.ParticipantEndpoint.Participant.DisplayName,
        e.ParticipantEndpoint.Participant.UserAtHost,
        e.ParticipantEndpoint.Participant.Uri,
        MessageType.ConferenceInfo,
        _conversation.Id,
        confSession.ConferenceUri,
        MessageDirection.Incoming);
    _transcriptRecorder.OnMessageReceived(m);
}
/// <summary>
/// Async callback: finishes terminating an instant-messaging call, detaches the call's
/// event handlers, and signals the termination wait handle.
/// </summary>
/// <param name="ar">Async result whose state is the <c>InstantMessagingCall</c> being terminated.</param>
private void InstantMessagingCallTerminated(IAsyncResult ar)
{
    var imCall = ar.AsyncState as InstantMessagingCall;
    try
    {
        // Complete the pending BeginTerminate operation.
        imCall.EndTerminate(ar);

        // The call is gone — detach handlers so they cannot fire afterwards.
        imCall.StateChanged -= InstantMessagingCall_StateChanged;
        imCall.InstantMessagingFlowConfigurationRequested -= this.InstantMessagingCall_FlowConfigurationRequested;
    }
    catch (Exception e)
    {
        NonBlockingConsole.WriteLine(e.ToString());
    }
    finally
    {
        // Synchronize completion with the initiating thread regardless of outcome.
        _waitForIMCallTerminated.Set();
    }
}
// Callback that handles when the state of an AudioVideoFlow changes.
// Logs/transcribes the transition, starts or stops speech recognition as the flow goes
// Active/Terminated, and forwards the event to the registered top-level handler.
private void AudioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
{
    NonBlockingConsole.WriteLine("AV flow state changed from " + e.PreviousState + " to " + e.State);

    if (_transcriptRecorder != null)
    {
        Message m = new Message("AudioVideoFlow changed from " + e.PreviousState + " to " + e.State + ".",
            MessageType.Audio, _transcriptRecorder.Conversation.Id);
        _transcriptRecorder.OnMessageReceived(m);
    }

    //When flow is active, media operations can begin
    if (e.State == MediaFlowState.Active)
    {
        // Flow-related media operations normally begin here.
        _waitForAudioVideoFlowStateChangedToActiveCompleted.Set();
        // Attach speech recognition only once (guard on IsActive).
        if ((_speechRecognizer != null) && !_speechRecognizer.IsActive)
        {
            _speechRecognizer.AttachAndStartSpeechRecognition(_audioVideoFlow);
        }
    }
    else if (e.State == MediaFlowState.Terminated)
    {
        // Flow is gone — stop recognition if it is still running.
        if ((_speechRecognizer != null) && _speechRecognizer.IsActive)
        {
            _speechRecognizer.StopSpeechRecognition();
        }
    }

    // call top level event handler
    if (_audioVideoFlowStateChangedEventHandler != null)
    {
        _audioVideoFlowStateChangedEventHandler(sender, e);
    }
}
/// <summary>
/// Ends the escalation to conference, logging failures and always signaling the
/// escalation-completed wait handle.
/// </summary>
/// <param name="argument">Async result whose state is the <c>Conversation</c> being escalated.</param>
/// <remarks>Exceptions are captured, logged, and not rethrown.</remarks>
private void EndEscalateConversation(IAsyncResult argument)
{
    Conversation conversation = argument.AsyncState as Conversation;
    Exception exception = null;
    try
    {
        conversation.EndEscalateToConference(argument);
        NonBlockingConsole.WriteLine("Conversation was escalated into conference");
    }
    catch (OperationFailureException operationFailureException)
    {
        // OperationFailureException: Indicates failure to connect the call to the remote party.
        // It is left to the application to perform real error handling here.
        NonBlockingConsole.WriteLine(operationFailureException.ToString());
        exception = operationFailureException;
    }
    catch (RealTimeException realTimeException)
    {
        // RealTimeException may be thrown on media or link-layer failures.
        // It is left to the application to perform real error handling here.
        NonBlockingConsole.WriteLine(realTimeException.ToString());
        exception = realTimeException;
    }
    finally
    {
        _state = TranscriptRecorderState.Active;
        _waitForConferenceEscalationCompleted.Set(); //Again, just to sync the completion of the code.
        if (exception != null)
        {
            string originator = string.Format("Error when escalating to conference.");
            NonBlockingConsole.WriteLine(originator);
        }
    }
}
/// <summary>
/// Hashes every file in <paramref name="folderPath"/> (deduping along the way via
/// <c>HashFiles</c>) and then renames each surviving file to its hash name.
/// </summary>
/// <param name="folderPath">Folder whose files are hashed and renamed.</param>
private static void RenameFilesinFolderWithHash(string folderPath)
{
    var filenames = Directory.EnumerateFiles(folderPath);
    //probably faster to maintain this to check for hash collisions than checking the directory every time
    NonBlockingConsole.WriteLine(String.Format("Working on folder {0}...", folderPath));
    var hashToOriginalFilenames = HashFiles(new List <string>(filenames));
    NonBlockingConsole.WriteLine("No dupes in " + folderPath + ", renaming files...");

    // Build the reverse lookup (filename -> hash) used to detect rename collisions.
    var filenamesToFileHash = new Dictionary <string, IntHash>();
    foreach (var entry in hashToOriginalFilenames)
    {
        filenamesToFileHash[entry.Value] = entry.Key;
    }

    // Rename each file to its hash name.
    foreach (var entry in hashToOriginalFilenames)
    {
        RenameFileToHashname(entry.Value, entry.Key, filenamesToFileHash);
    }

    NonBlockingConsole.WriteLine("Done with " + folderPath);
}
/// <summary>
/// Handles IM flow state transitions: logs and transcribes the change, releases the
/// "flow active" wait handle, and forwards the event to the top-level handler.
/// </summary>
private void InstantMessagingFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
{
    NonBlockingConsole.WriteLine("IM flow state changed from " + e.PreviousState + " to " + e.State);

    var transition = new Message("InstantMessagingFlow changed from " + e.PreviousState + " to " + e.State + ".",
        MessageType.InstantMessage, _transcriptRecorder.Conversation.Id);
    _transcriptRecorder.OnMessageReceived(transition);

    // Once the flow is active, media operations (here, sending an IM) may begin.
    if (e.State == MediaFlowState.Active)
    {
        _waitForIMFlowStateChangedToActiveCompleted.Set();
    }

    // Forward to the top-level handler, if one is registered.
    _imFlowStateChangedEventHandler?.Invoke(sender, e);
}
// Called once a conference invitation has been accepted: records the event in the
// transcript and begins joining the invited conference.
public void ConferenceInviteAccepted(IAsyncResult result)
{
    try
    {
        ConferenceInvitation invite = result.AsyncState as ConferenceInvitation;

        // ConferenceInvite already accepted in TranscriptRecorder.ConferenceInvitation_AcceptCompleted()
        Message m = new Message("ConferenceSession.ConferenceInviteAccepted()",
            MessageType.ConferenceInfo, _conversation.Id, invite.ConferenceUri);
        _transcriptRecorder.OnMessageReceived(m);

        // Join the conference; EndJoinInvitedConference completes the operation.
        ConferenceJoinOptions cjo = new ConferenceJoinOptions();
        //cjo.JoinAsTrustedApplication = false;
        _conversation.ConferenceSession.BeginJoin(cjo, EndJoinInvitedConference, invite);
    }
    catch (RealTimeException ex)
    {
        NonBlockingConsole.WriteLine("invite.EndAccept failed. Exception: {0}", ex.ToString());
    }
    catch (InvalidOperationException ex)
    {
        NonBlockingConsole.WriteLine("m_conversation.ConferenceSession.BeginJoin failed. Exception: {0}", ex.ToString());
    }
}
// Emits a line break token to the console output.
// NOTE(review): this writes the literal two characters "@r", not "\r" or "\n".
// That looks like it may be a mangled escape sequence — confirm against the consumer
// of this output whether "@r" is an intentional control token before changing it.
public void NewLine() { NonBlockingConsole.Write("@r"); }
// ASP.NET Core pipeline configuration. Registers address namespaces, optionally runs a
// one-shot "redecode" pass (then stops the host), wires the HTTP pipeline, and finally
// starts the main program on a background task.
public void Configure(IApplicationBuilder app, IWebHostEnvironment env, IHostApplicationLifetime applicationLifetime)
{
    // TODO Implement IDefinitionGroupArea with Begin, End. And remove AddressNameConverter.AddNamespace.
    AddressNameConverter.AddNamespace(new Interval <Address, Address.Comparer>(0x1016_5d52, 0x1019_c3ce), "sys");
    foreach (var interval in RawProgramMain.MveForceEndIntervals)
    {
        AddressNameConverter.AddNamespace(interval, "mve");
    }

    // Redecode: when the "redecode" config value is set, run the decode pass and exit
    // instead of serving requests.
    var redecodeCount = Configuration.GetValue <int?>("redecode");
    if (redecodeCount != null)
    {
        NonBlockingConsole.WriteLine("Start Redecode.");
        var methodsInfo = app.ApplicationServices.GetRequiredService <MethodInfoCollection>();
        var definitionCollection = app.ApplicationServices.GetRequiredService <DefinitionCollection>();
        var redecode = new Redecode(ConfigurationDto.BinToCSharp, methodsInfo, definitionCollection);
        redecode.LimitFiles = redecodeCount.Value;
        redecode.Start(GetType().Assembly);
        applicationLifetime.StopApplication();
        return;
    }

    // Asp: standard pipeline — detailed errors in development only.
    if (env.IsDevelopment())
    {
        app.UseDeveloperExceptionPage();
    }
    else
    {
        app.UseExceptionHandler("/Error");
    }
    app.UseStaticFiles();
    app.UseRouting();
    app.UseEndpoints(
        endpoints =>
        {
            endpoints.MapHub <MainHub>("/signalr");
            endpoints.MapControllers();
            endpoints.MapRazorPages();
        });

    // Start M.A.X. on a background task; when it finishes (or fails) the host is stopped
    // with the corresponding exit code.
    var rawProgramMain = app.ApplicationServices.GetRequiredService <RawProgramMain>();
    Task.Run(
        () =>
        {
            try
            {
                rawProgramMain.Start();
            }
            catch (OperationCanceledException ex)
            {
                NonBlockingConsole.WriteLine(ex.Message);
                ExitCode = -2;
            }
            catch (Exception ex)
            {
                NonBlockingConsole.WriteLine(ex);
                ExitCode = -1;
            }
            applicationLifetime.StopApplication();
        });
}
/// <summary>
/// Writes a timestamped, formatted message to the console in the given color.
/// </summary>
/// <param name="consoleColor">The color of the console to write the text with.</param>
/// <param name="format">The template string.</param>
/// <param name="args">The arguments for the template string.</param>
private void WriteLine(ConsoleColor consoleColor, string format, params object[] args)
{
    // Prefix the sortable timestamp before applying the caller's format arguments.
    var line = string.Format($"{DateTime.Now:s}: {format}", args);
    NonBlockingConsole.WriteLine(line, consoleColor);
}
/// <summary>
/// Writes a timestamped message to the console in the given color.
/// </summary>
/// <param name="message">The message.</param>
/// <param name="consoleColor">The color of the console to write the text with.</param>
private void WriteLine(string message, ConsoleColor consoleColor)
{
    var stamped = $"{DateTime.Now:s}: {message}";
    NonBlockingConsole.WriteLine(stamped, consoleColor);
}
// Just to record the state transitions in the console: logs each changed conference
// property with its new value and mirrors it into the transcript.
void ConferenceSession_PropertiesChanged(object sender, PropertiesChangedEventArgs <ConferenceSessionProperties> e)
{
    ConferenceSession confSession = sender as ConferenceSession;
    foreach (string property in e.ChangedPropertyNames)
    {
        // FIX: reset per iteration — previously a property not handled by the switch
        // would log the stale value from the preceding iteration.
        string propertyValue = null;

        // Record all ConferenceSession property changes.
        switch (property)
        {
            case "AccessLevel":
                propertyValue = e.Properties.AccessLevel.ToString();
                break;
            case "AutomaticLeaderAssignment":
                propertyValue = e.Properties.AutomaticLeaderAssignment.ToString();
                break;
            case "ConferenceUri":
                propertyValue = e.Properties.ConferenceUri;
                break;
            case "Disclaimer":
                propertyValue = e.Properties.Disclaimer;
                break;
            case "DisclaimerTitle":
                propertyValue = e.Properties.DisclaimerTitle;
                break;
            case "HostingNetwork":
                propertyValue = e.Properties.HostingNetwork.ToString();
                break;
            case "LobbyBypass":
                propertyValue = e.Properties.LobbyBypass.ToString();
                break;
            case "Organizer":
                propertyValue = e.Properties.Organizer.UserAtHost;
                break;
            case "ParticipantData":
                propertyValue = e.Properties.ParticipantData;
                break;
            case "RecordingPolicy":
                propertyValue = e.Properties.RecordingPolicy.ToString();
                break;
            case "SchedulingTemplate":
                propertyValue = e.Properties.SchedulingTemplate.ToString();
                break;
            case "Subject":
                propertyValue = e.Properties.Subject;
                break;
        }

        // FIX: the format string previously contained a raw line break inside the
        // literal (invalid C#); use an escaped newline instead.
        NonBlockingConsole.WriteLine("{0} is notified of ConferenceSession property change.\n{1}: {2}",
            confSession.Conversation.LocalParticipant.UserAtHost, property, propertyValue);

        Message m = new Message("ConferenceSession property " + property + " changed to new value: " + propertyValue + ".",
            confSession.Conversation.LocalParticipant.DisplayName,
            confSession.Conversation.LocalParticipant.UserAtHost,
            confSession.Conversation.LocalParticipant.Uri,
            DateTime.Now,
            confSession.Conversation.Id,
            confSession.ConferenceUri,
            MessageType.ConferenceInfo,
            MessageDirection.Outgoing);
        _transcriptRecorder.OnMessageReceived(m);
    }
    NonBlockingConsole.WriteLine("");

    // TODO: If modalities added, establish calls on new modalities
}
/// <summary>
/// Occurs when the bot joined the invited conference: completes the join, registers
/// conference events, and establishes calls for the conference's available modalities.
/// </summary>
/// <param name="result">Async result whose state is the originating <c>ConferenceInvitation</c>.</param>
/// <remarks>Exceptions are captured, logged, and not rethrown.</remarks>
public void EndJoinInvitedConference(IAsyncResult result)
{
    ConferenceInvitation invite = result.AsyncState as ConferenceInvitation;
    Exception exception = null;
    List <String> activeMediaTypes = new List <string>();
    try
    {
        NonBlockingConsole.WriteLine("Joined the invited conference");
        activeMediaTypes = invite.AvailableMediaTypes.ToList();

        _conversation.ConferenceSession.EndJoin(result);
        _conference = _conversation.ConferenceSession;

        NonBlockingConsole.WriteLine(string.Format(
            "Conference Url: conf:{0}%3Fconversation-id={1}",
            _conversation.ConferenceSession.ConferenceUri,
            _conversation.ConferenceSession.Conversation.Id));

        RegisterConferenceEvents();

        // Raise event on TranscriptRecorderSession
        _transcriptRecorder.RaiseTranscriptRecorderSessionChanged(_conference);

        // Establish Calls for Conference's supported modalities
        if (activeMediaTypes.Contains(MediaType.Audio))
        {
            _transcriptRecorder.OnActiveMediaTypeCallToEstablish(_conversation, TranscriptRecorderType.AudioVideo);
        }
        if (activeMediaTypes.Contains(MediaType.Message))
        {
            _transcriptRecorder.OnActiveMediaTypeCallToEstablish(_conversation, TranscriptRecorderType.InstantMessage);
        }
        _waitForInvitedConferenceActiveMediaTypeCallEstablished.Set();
    }
    catch (ConferenceFailureException conferenceFailureException)
    {
        // ConferenceFailureException may be thrown on failures due to MCUs being absent or unsupported, or due to malformed parameters.
        // It is left to the application to perform real error handling here.
        NonBlockingConsole.WriteLine(conferenceFailureException.ToString());
        exception = conferenceFailureException;
    }
    catch (RealTimeException realTimeException)
    {
        // It is left to the application to perform real error handling here.
        NonBlockingConsole.WriteLine(realTimeException.ToString());
        exception = realTimeException;
    }
    finally
    {
        // Again, for sync. reasons.
        _state = TranscriptRecorderState.Active;
        _waitForInvitedConferenceJoined.Set();
        if (exception != null)
        {
            string originator = string.Format("Error when joining the invited conference: {0}", exception.ToString());
            NonBlockingConsole.WriteLine(originator);
        }
    }
}
// DNS proxy receive callback: completes the pending receive from the local client,
// forwards the query to the remote resolver, relays the response back to the client,
// and re-arms the receive unless stopping.
private void Receive(IAsyncResult ar)
{
    if (ar.AsyncState is Socket socket)
    {
        var _endPointReference = (EndPoint) new IPEndPoint(IPAddress.Any, 53);
        try
        {
            SocketFlags flags = SocketFlags.None;
            // Complete the receive; _endPointReference now holds the client's endpoint.
            var rec = socket.EndReceiveMessageFrom(ar, ref flags, ref _endPointReference, out IPPacketInformation packetInfo);

            // NOTE(review): assumes the datagram fits in 512 bytes (classic DNS/UDP limit);
            // Array.Copy would throw if rec > 512 — confirm EDNS is not in play.
            byte[] clientToUsBuffer = new byte[512];
            Array.Copy(LocalBuffer, clientToUsBuffer, rec);

            //If you wish to inject the request, do so inside Questions parsing.
            using (Request req = new Request(clientToUsBuffer))
            {
                foreach (var question in req.Questions)
                {
                    NonBlockingConsole.WriteLine($"Request: Type {question.QType} Class {question.QClass} Name {question.QName}");
                }
                RemoteSocket.Send(req.ByteArray);
            }

            // Read the resolver's answer (blocking receive on the remote socket).
            byte[] remoteToUsBuffer = new byte[512];
            RemoteSocket.Receive(remoteToUsBuffer);

            //If you wish to inject the response, do so inside Resource parsing.
            using (Response resp = new Response(remoteToUsBuffer))
            {
                foreach (var answer in resp.Answers)
                {
                    NonBlockingConsole.WriteLine($"Answer Response: Type {answer.Type} Class {answer.Class} Record {answer.RECORD} Name {answer.Name}");
                }
                foreach (var att in resp.Authorities)
                {
                    NonBlockingConsole.WriteLine($"Authorities Response: Type {att.Type} Class {att.Class} Record {att.RECORD} Name {att.Name}");
                }
                foreach (var att in resp.Additionals)
                {
                    NonBlockingConsole.WriteLine($"Additionals Response: Type {att.Type} Class {att.Class} Record {att.RECORD} Name {att.Name}");
                }

                // Send the response back to the original client on a throwaway UDP socket.
                using (Socket c = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp))
                {
                    c.Connect(_endPointReference);
                    c.Send(resp.ByteArray);
                }
            }
        }
        catch (Exception ex)
        {
            NonBlockingConsole.WriteLine(ex.Message);
        }

        // Re-arm the asynchronous receive unless we are shutting down.
        if (!Stop)
        {
            socket.BeginReceiveMessageFrom(LocalBuffer, 0, LocalBuffer.Length, 0, ref _endPointReference, Receive, socket);
        }
    }
}
/// <summary>
/// Writes the UTF-16 string for the given Unicode code point to the console.
/// </summary>
/// <param name="codePoint">Unicode code point to emit.</param>
public void Char(int codePoint)
{
    var text = char.ConvertFromUtf32(codePoint);
    NonBlockingConsole.Write(text);
}
/// <summary>
/// Builds the speech-recognition pipeline for a transcript session: resolves the locale
/// (falling back to <c>DefaultLocale</c>), creates the recognition engine and connector,
/// wires engine events, loads grammars, and kicks off async grammar loading.
/// </summary>
/// <param name="transcriptRecorder">Session the recognized speech is transcribed into.</param>
public SpeechRecognizer(TranscriptRecorderSession transcriptRecorder)
{
    _transcriptRecorder = transcriptRecorder;
    _speechTranscript = new List <Microsoft.Speech.Recognition.RecognitionResult>();
    _isActive = false;
    _isRecognizing = false;

    // Create a speech recognition connector
    _speechRecognitionConnector = new SpeechRecognitionConnector();

    _currentSRLocale = ConfigurationManager.AppSettings[SpeechRecogLocaleKey];
    if (String.IsNullOrEmpty(_currentSRLocale))
    {
        NonBlockingConsole.WriteLine("No locale specified, using default locale for speech recognition: " + DefaultLocale);
        _currentSRLocale = DefaultLocale;
    }

    // Create speech recognition engine and start recognizing by attaching connector to engine
    try
    {
        _speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine();
    }
    catch (Exception e)
    {
        // FIX: the log string previously contained a raw line break inside the literal
        // (invalid C#); use an escaped newline instead.
        NonBlockingConsole.WriteLine("Error: Unable to load SpeechRecognition locale: " + _currentSRLocale + ".\nException: " + e.ToString());
        // Use default locale
        NonBlockingConsole.WriteLine("Falling back to default locale for SpeechRecognitionEngine: " + DefaultLocale);
        _currentSRLocale = DefaultLocale;
        _speechRecognitionEngine = new SpeechRecognitionEngine();
    }

    _speechRecognitionEngine.SpeechDetected += new EventHandler <Microsoft.Speech.Recognition.SpeechDetectedEventArgs>(SpeechRecognitionEngine_SpeechDetected);
    _speechRecognitionEngine.RecognizeCompleted += new EventHandler <Microsoft.Speech.Recognition.RecognizeCompletedEventArgs>(SpeechRecognitionEngine_RecognizeCompleted);
    _speechRecognitionEngine.LoadGrammarCompleted += new EventHandler <Microsoft.Speech.Recognition.LoadGrammarCompletedEventArgs>(SpeechRecognitionEngine_LoadGrammarCompleted);

    _grammars = new List <Microsoft.Speech.Recognition.Grammar>();
    // TODO: Add default installed speech recognizer grammar
    // TODO: Might already be done via compiling with Recognition Settings File?

    // Add default locale language grammar file (if it exists)
    String localLanguageGrammarFilePath = Path.Combine(Environment.CurrentDirectory, @"en-US.cfgpp");
    if (File.Exists(localLanguageGrammarFilePath))
    {
        // FIX: second mangled literal — same raw-newline problem, escaped here.
        NonBlockingConsole.WriteLine("SpeechRecognizer().\nAdding locale language file at path: " + localLanguageGrammarFilePath);
        GrammarBuilder builder = new GrammarBuilder();
        builder.AppendRuleReference(localLanguageGrammarFilePath);
        Grammar localeLanguageGrammar = new Grammar(builder);
        localeLanguageGrammar.Name = "Local language grammar";
        _grammars.Add(localeLanguageGrammar);
    }

    // Basic command/digit vocabulary, always loaded.
    string[] recognizedString = { "hello", "bye", "yes", "no", "help", "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "exit" };
    Choices numberChoices = new Choices(recognizedString);
    Grammar basicGrammar = new Grammar(new GrammarBuilder(numberChoices));
    basicGrammar.Name = "Basic Grammar";
    _grammars.Add(basicGrammar);

    LoadSpeechGrammarAsync();
}
/// <summary>
/// Marks the start of a message by emitting an empty line to the console.
/// </summary>
public void MessageStart()
{
    NonBlockingConsole.WriteLine(string.Empty);
}
/// <summary>
/// Begin listening for client requests. Phase 1 runs the SOCKS5 handshake
/// (method negotiation, optional username/password auth, CONNECT data request);
/// phase 2 tunnels raw bytes between the client and the remote host until either
/// side stops or the 10-second inactivity timeout fires.
/// </summary>
public void Listen()
{
    using (NetworkStream clientStream = ClientTCPClient.GetStream())
    {
        //Authentication and Endpoint request process.
        while (IsRunning)
        {
            try
            {
                //How much bytes is our client sending.
                byte[] buffer = new byte[ClientTCPClient.Available];
                if (buffer.Length > 0)
                {
                    Inactivity.Restart();
                    //Read those bytes.
                    var read = clientStream.Read(buffer, 0, buffer.Length);
                    //Parse the header.
                    var message = ReadMessageType(this, buffer);
                    //Process MessageType
                    switch (message)
                    {
                        //Client requested with NoAuth method.
                        case MessageType.MethodRequest:
                        {
                            //Parse the client method request.
                            using (MethodRequest mReq = new MethodRequest(buffer))
                            {
                                if (mReq.Valid) //We successfully built the packet.
                                {
                                    AuthMethod = mReq.Method;
                                    if (AuthMethod == Method.NoAuthentication)
                                    {
                                        NonBlockingConsole.WriteLine($"Client {ClientTCPClient?.Client?.RemoteEndPoint} AUTHENTICATION complete.");
                                        Authenticated = true; //Flag as authenticated.
                                    }
                                    //Write response onto our client stream.
                                    using (MethodResponse response = new MethodResponse(mReq))
                                        clientStream.Write(response.Data, 0, response.Data.Length);
                                }
                                else
                                {
                                    throw new Exception($"Client {ClientTCPClient?.Client?.RemoteEndPoint} Invalid Method request.");
                                }
                                break;
                            }
                        }
                        //Client sent auth request, in this case, UserPass which is the only "secure" method supported.
                        case MessageType.AuthRequest:
                        {
                            //Parse the client auth request.
                            using (AuthenticationRequest authReq = new AuthenticationRequest(buffer))
                            {
                                if (authReq.Valid) //We successfully built the packet.
                                {
                                    //Create our response which will also validate login information.
                                    using (AuthenticationResponse authRes = new AuthenticationResponse(authReq))
                                    {
                                        if (authRes.Valid)
                                        {
                                            NonBlockingConsole.WriteLine($"Client {ClientTCPClient?.Client?.RemoteEndPoint} AUTHENTICATION complete.");
                                            Authenticated = true; //Flag as authenticated.
                                        }
                                        //Send handshake result.
                                        clientStream.Write(authRes.Data, 0, authRes.Data.Length);
                                    }
                                    if (!Authenticated) //Wrong authentication, throw and disconnect this client.
                                    {
                                        throw new Exception($"Client {ClientTCPClient?.Client?.RemoteEndPoint} Invalid authentication request.");
                                    }
                                }
                                else //Wrong authentication, throw and disconnect this client.
                                {
                                    throw new Exception($"Client {ClientTCPClient?.Client?.RemoteEndPoint} Invalid authentication request.");
                                }
                                break;
                            }
                        }
                        //After a successful handshake, the client tell us his request.
                        case MessageType.DataRequest:
                        {
                            //Parse the datarequest from the client.
                            using (var dReq = new DataRequest(buffer))
                            {
                                //Handle the command, so far we only support 0x01 CONNECT.
                                switch (dReq.Command)
                                {
                                    case Command.Connect:
                                        //Create the remote socket according to the request AddressFamily
                                        if (RemoteTCPClient == null)
                                        {
                                            RemoteTCPClient = new TcpClient(dReq.DestinationAddress.AddressFamily);
                                            RemoteTCPClient.ReceiveBufferSize = 512000; //512Kb
                                            RemoteTCPClient.SendBufferSize = 512000; //512Kb
                                        }
                                        NonBlockingConsole.WriteLine($"Client {ClientTCPClient?.Client?.RemoteEndPoint} CONNECT request to {dReq?.DestinationAddress}:{dReq?.Port}");
                                        //Try to connect to the remote endpoint the client is requesting.
                                        RemoteTCPClient.Connect(dReq.DestinationAddress, dReq.Port);
                                        Result result;
                                        if (RemoteTCPClient.Connected)
                                        {
                                            result = Result.Succeeded;
                                            NonBlockingConsole.WriteLine($"Client {ClientTCPClient?.Client?.RemoteEndPoint} GRANTED connection. Entering tunnel mode.");
                                        }
                                        else
                                        {
                                            result = Result.Network_Unreachable;
                                            NonBlockingConsole.WriteLine($"Client {ClientTCPClient?.Client?.RemoteEndPoint} host REJECTED the connection.");
                                        }
                                        //Write the result to our client stream.
                                        using (DataResponse dResponse = new DataResponse(result, dReq.AddressType, dReq.DestinationBytes, dReq.PortBytes))
                                            clientStream.Write(dResponse.Data, 0, dResponse.Data.Length);
                                        if (result != Result.Succeeded)
                                        {
                                            throw new Exception($"Client {ClientTCPClient?.Client?.RemoteEndPoint} Remote host reject the connection.");
                                        }
                                        break;
                                    default:
                                        throw new Exception($"Client {ClientTCPClient?.Client?.RemoteEndPoint} Unsuported message, disconnecting client.");
                                }
                            }
                            break;
                        }
                        case MessageType.Null:
                            throw new Exception("Invalid routing.");
                    }
                }
                buffer = null;
                //At this point, if we have stablished a remote connection we go on and break in order to enter tunnel mode.
                if (RemoteTCPClient != null && RemoteTCPClient.Connected)
                {
                    break;
                }
                //If this client has reported no activity in 10 seconds, kill it. We have no better way of knowing with TcpClients.
                if (Inactivity.Elapsed.TotalSeconds > 10)
                {
                    break;
                }
                Thread.Sleep(1);
            }
            catch (Exception ex)
            {
                //Something went wrong, exit loop, this will unstuck the calling thread and will call Dispose on this object.
                NonBlockingConsole.WriteLine($"[ERROR] {ex.Message}");
                break;
            }
        }
        try
        {
            if (Authenticated)
            {
                //The client already went through handshake and datarequest, at this point we are just passing data between client <-> remote
                using (NetworkStream remoteStream = RemoteTCPClient.GetStream())
                {
                    while (IsRunning && Authenticated)
                    {
                        //Inactivity timeout applies in tunnel mode too.
                        if (Inactivity.Elapsed.TotalSeconds > 10)
                        {
                            break;
                        }
                        try
                        {
                            //Pump client -> remote.
                            byte[] buffer = new byte[ClientTCPClient.Available];
                            if (buffer.Length > 0)
                            {
                                Inactivity.Restart();
                                var read = clientStream.Read(buffer, 0, buffer.Length);
                                remoteStream.Write(buffer, 0, buffer.Length);
                            }
                            //Pump remote -> client.
                            byte[] remoteBuff = new byte[RemoteTCPClient.Available];
                            if (remoteBuff.Length > 0)
                            {
                                Inactivity.Restart();
                                remoteStream.Read(remoteBuff, 0, remoteBuff.Length);
                                clientStream.Write(remoteBuff, 0, remoteBuff.Length);
                            }
                            Thread.Sleep(1);
                        }
                        catch (Exception ex)
                        {
                            //Something went wrong, exit loop, this will unstuck the calling thread and will call Dispose on this object.
                            NonBlockingConsole.WriteLine($"[ERROR] {ex.Message}");
                            break;
                        }
                    }
                }
            }
        }
        catch (Exception ex)
        {
            NonBlockingConsole.WriteLine($"[ERROR] {ex.Message}");
        }
    }
}