/// <summary>
/// Initializes the form and wires up the global keyboard hook.
/// </summary>
public Form1()
{
    InitializeComponent();

    // Subscribe both handlers before installing so no events are missed
    // once the hook goes live.
    KeyboardHook.KeyUp += KeyboardHook_KeyUp;
    KeyboardHook.KeyDown += KeyboardHook_KeyDown;
    KeyboardHook.InstallHook();
}
/// <summary>
/// Initializes the media-player form: starts the UI timer, enables file
/// drag-and-drop, sizes the player control, installs global keyboard and
/// mouse hooks, and registers this form as a message filter.
/// </summary>
public fMediaPlayer()
{
    InitializeComponent();
    timer1.Start();

    // Accept files dropped onto the form.
    AllowDrop = true;
    DragDrop += new DragEventHandler(Form1_DragDrop);
    DragEnter += new DragEventHandler(Form1_DragEnter);

    // Size the player, leaving room below it for the volume bar (+40 px margin).
    WMP.Size = DefaultSizeModeWithAudio;
    WMP.Height -= tbVolume.Height + 40;
    WMP.settings.autoStart = true;

    // Set styles for the buttons
    // SetStyle(MediaPlayer.styleImage);

    // HOOK SPACE: original author's note — "the object does not work as
    // expected somewhere" (translated from Russian).
    KeyboardHook.KeyboardUp += new KeyEventHandler(KeyboardHook_KeyboardClick);
    KeyboardHook.LocalHook = false;   // LocalHook = false → system-wide hook
    KeyboardHook.InstallHook();

    MouseHook.MouseUp += new MouseEventHandler(MouseHook_MouseClick);
    MouseHook.LocalHook = false;
    MouseHook.InstallHook();

    // Lets the form pre-filter Windows messages before dispatch.
    Application.AddMessageFilter(this);

    //loadHook();
    //lbMusicList.BackgroundImage = System.Drawing.Image.FromFile(@"D:\AirSendit\grafmen9999_5ae762e8-30ea-4f1b-a266-4faa64ce0f2b\anime-art\30306773747_5facb8627c_o.jpg");
}
/// <summary>
/// Wires the keyboard-hook event handler and installs the hook on form load.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    // Mouse hook wiring left disabled by the original author:
    //mouseHook.MouseEvent += MouseHook_MouseEvent;
    //mouseHook.InstallHook();

    keyBoardHook.KeyboardEvent += KeyBoardHook_KeyboardEvent;
    keyBoardHook.InstallHook();
}
/// <summary>
/// Creates the helper and installs the global keyboard and mouse hooks
/// with their event handlers when the form loads.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    _helper = new SwagHelper();

    // Keyboard hook wiring (method-group syntax is equivalent to explicit
    // delegate construction).
    KeyboardHook.KeyUp += KeyUpHandler;
    KeyboardHook.KeyDown += KeyDownHandler;
    KeyboardHook.InstallHook();

    // Mouse hook wiring.
    MouseHook.MouseMove += MouseMoveHandler;
    MouseHook.MouseEvent += MouseClickHandler;
    MouseHook.InstallHook();
}
/// <summary>
/// Installs the global keyboard hook and starts the background worker thread.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    // Hook wiring first, then install.
    KeyboardHook.KeyDown += KeyboardHook_KeyDown;
    KeyboardHook.KeyUp += KeyboardHook_KeyUp;
    KeyboardHook.InstallHook();

    // Spin up the worker; Thread(ThreadStart) is inferred from the method group.
    var worker = new Thread(ThreadProc);
    worker.Start();
}
/// <summary>
/// Installs the WindowsHookLib keyboard and mouse hooks and subscribes
/// their event handlers.
/// </summary>
private void InitWindowsHookLib()
{
    // Keyboard: install, then subscribe key handlers.
    _keyHook.InstallHook();
    _keyHook.KeyDown += KeyDown;
    _keyHook.KeyUp += KeyUp;

    // Mouse: install, then subscribe button and movement handlers.
    _mouseHook.InstallHook();
    _mouseHook.MouseDown += MouseDown;
    _mouseHook.MouseUp += MouseUp;
    _mouseHook.MouseMove += MouseMove;
}
/// <summary>
/// Attaches the input provider to listen for the given settings.
/// </summary>
/// <param name="window">Handle of the window to attach to. Not referenced in
/// this method body — presumably consumed elsewhere; verify against callers.</param>
/// <param name="settings">The settings provided by <see cref="IInputProvider.EndRecord()"/>.
/// May be null or empty, in which case no settings are parsed.</param>
public void Attach(IntPtr window, string settings)
{
    // Only parse when something was actually recorded.
    if (!string.IsNullOrEmpty(settings))
    {
        Parse(settings);
    }

    // Install the hook before subscribing; the original did the same.
    khook = new KeyboardHook();
    khook.InstallHook();
    khook.KeyboardEvent += OnKeyboardEvent;
}
/// <summary>
/// Initializes the IRC emulation control: installs the global keyboard hook,
/// resolves the emulator window handle, and restores saved settings from
/// settings.cfg when present.
/// </summary>
public IRCEmulationControl()
{
    //Adding keyboard event handlers and installing the hook
    KeyboardHook.KeyDown += new KeyboardHook.KeyDownEventHandler(KeyboardHook_KeyDown);
    KeyboardHook.KeyUp += new KeyboardHook.KeyUpEventHandler(KeyboardHook_KeyUp);
    KeyboardHook.InstallHook();

    InitializeComponent();
    getEmulatorHandle();

    // If a settings.cfg exists, let's load in the saved settings.
    if (File.Exists("settings.cfg"))
    {
        string keystring;
        // using-block guarantees the reader is closed even if ReadToEnd throws
        // (original leaked the stream on exceptions).
        using (StreamReader sr = new StreamReader("settings.cfg"))
        {
            keystring = sr.ReadToEnd();
        }

        // Values are comma-separated with a trailing comma, which is trimmed
        // off before splitting.
        string[] settings = keystring.Substring(0, keystring.LastIndexOf(',')).Split(',');

        // Only load the settings if the first 4 settings are present
        // (the original duplicated this assignment for counts of 4 and 5).
        if (settings.Length == 4 || settings.Length == 5)
        {
            txtNick.Text = settings[0];
            txtIRCChannel.Text = settings[1];
            txtIRCServer.Text = settings[2];
            txtPassword.Text = settings[3];
        }

        // An optional 5th value is the sleep delay; fall back to 100 ms on
        // malformed data (replaces the original silent catch-all).
        if (settings.Length == 5)
        {
            if (!int.TryParse(settings[4], out sleepDelay))
            {
                sleepDelay = 100;
            }
        }

        // TO DO: Load key bindings
        loadKeyConfig();
    }
}
/// <summary>
/// Hooks global keyboard input when the main display loads.
/// </summary>
private void MainDisplay_Load(object sender, EventArgs e)
{
    // Subscribe the handlers, then install the system-wide keyboard hook.
    KeyboardHook.KeyDown += KeyboardHook_KeyDown;
    KeyboardHook.KeyUp += KeyboardHook_KeyUp;
    KeyboardHook.InstallHook();

    // Mouse hook wiring kept disabled, as in the original:
    //MouseHook.MouseEvent += new MouseHook.MouseEventEventHandler(MouseHook_MouseEvent);
    //MouseHook.MouseMove += new MouseHook.MouseMoveEventHandler(MouseHook_MouseMove);
    //MouseHook.WheelEvent += new MouseHook.WheelEventEventHandler(MouseHook_WheelEvent);
    //MouseHook.InstallHook();
}
/// <summary>
/// Initializes the main form, its presenter, and the global mouse and
/// keyboard hooks.
/// </summary>
public MainForm()
{
    InitializeComponent();
    AllowTransparency = true;

    // Presenter owns the view logic; settings are constructed fresh here.
    presenter = new MainFormPresenter(this, new JediSettings());

    mouseHook = new MouseHook();
    mouseHook.MouseEvent += MouseHookMouseEvent;

    keyboardHook = new KeyboardHook();
    keyboardHook.KeyboardEvent += KeyboardHookKeyboardEvent;

    mgr.ShortcutActivated += mgr_OnShortcutActivated;

    // NOTE(review): only the keyboard hook is installed in this constructor;
    // mouseHook.InstallHook() is never called here — confirm the mouse hook
    // is installed elsewhere, or its MouseEvent subscription is dead.
    keyboardHook.InstallHook();
}
/// <summary>
/// Initializes the main window, installs the global keyboard hook, and
/// attempts to load a default configuration file if one exists.
/// </summary>
public MainWindow()
{
    InitializeComponent();

    KeyboardHook.KeyDown += KeyboardHook_KeyDown;
    KeyboardHook.InstallHook();

    // A "Default.mmm" next to the executable is loaded automatically.
    DefaultConfig = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "Default.mmm");
    if (File.Exists(DefaultConfig))
    {
        try
        {
            LoadFile(DefaultConfig);
            chkRemember.IsChecked = true;
        }
        catch (Exception)
        {
            // Best-effort: a corrupt or unreadable default config is silently
            // ignored so the app still starts. NOTE(review): consider logging
            // the failure rather than discarding it.
        }
    }
}
/// <summary>
/// Creates the bot controller: installs the keyboard hook, resolves the
/// emulator/control configuration, and starts the bot command thread.
/// </summary>
public botControl(string emRunning, string emControlSetting, string profile)
{
    Console.WriteLine("Bot created..\r");

    //Adding keyboard event handlers and installing the hook
    KeyboardHook.KeyDown += new KeyboardHook.KeyDownEventHandler(KeyboardHook_KeyDown);
    KeyboardHook.KeyUp += new KeyboardHook.KeyUpEventHandler(KeyboardHook_KeyUp);
    KeyboardHook.InstallHook();

    // Initalize random
    randnum = new Random();

    this.emulatorRunning = emRunning;
    this.emulatorControlSetting = emControlSetting;
    this.profileName = profile;

    // The NES-* pseudo-settings select whether the Start button is used,
    // then collapse to the plain "NES" control scheme.
    switch (this.emulatorControlSetting)
    {
        case "NES-NOSTART":
            useStartButton = false;
            this.emulatorControlSetting = "NES";
            break;

        case "NES-START":
            useStartButton = true;
            this.emulatorControlSetting = "NES";
            break;
    }

    // For a custom emulator, the control-setting argument carries its name.
    if (this.emulatorRunning == "custom")
    {
        this.customEmulatorName = emControlSetting;
    }

    Console.WriteLine("emulator running: " + this.emulatorRunning + "\r");
    Console.WriteLine("emulator control setting: " + this.emulatorControlSetting + "\r");

    buildEmuControlList();
    getEmulatorHandle();
    Console.WriteLine("Handle: " + handle);

    // Begin the control.
    botRun = new Thread(new ThreadStart(botCommandRun));
    botRun.Start();
}
/// <summary>
/// Toggles the coordinate-lock hotkey: when locked, saves and installs the
/// keyboard hook and freezes the hotkey-editing controls; when unlocked,
/// removes the hook and re-enables them.
/// </summary>
private void CoordLock_CheckedChanged(object sender, EventArgs e)
{
    bool locked = CoordLock.Checked;

    if (locked)
    {
        SaveHookHotkey();
        _keyboardHook.InstallHook(HookedKeyPress);
        Hotkeycode = Simulator.Transcoding(HKcombo.Text);
    }
    else
    {
        _keyboardHook.UninstallHook();
    }

    // The hotkey-editing controls are only usable while the lock is off.
    checkCtrl.Enabled = !locked;
    checkShift.Enabled = !locked;
    HKcombo.Enabled = !locked;
}
/// <summary>
/// Initializes the main form, its presenter, and the global input hooks,
/// and truncates the log file on startup when configured to do so.
/// </summary>
public MainForm()
{
    InitializeComponent();
    AllowTransparency = true;
    presenter = new MainFormPresenter(this, new JediSettings());

    mouseHook = new MouseHook();
    mouseHook.MouseEvent += MouseHookMouseEvent;

    keyboardHook = new KeyboardHook();
    keyboardHook.KeyboardEvent += KeyboardHookKeyboardEvent;

    mgr.ShortcutActivated += mgr_OnShortcutActivated;

    // NOTE(review): mouseHook.InstallHook() is never called in this
    // constructor — confirm the mouse hook is installed elsewhere.
    keyboardHook.InstallHook();

    // Truncate the log file when the user asked for a clean log each run.
    // File.WriteAllText creates-or-truncates in one call, replacing the
    // original StreamWriter open/Write("")/dispose dance.
    if (Settings.Default.CleanLogfile && !string.IsNullOrEmpty(Settings.Default.Logfile))
    {
        System.IO.File.WriteAllText(Settings.Default.Logfile, string.Empty);
    }
}
/// <summary>
/// Loads hotkey and delay configuration, wires the global keyboard hook,
/// and updates the labels to reflect the loaded values.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    //SET HOTKEYS
    KeyboardHook.KeyDown += KeyboardHook_KeyDown;
    KeyboardHook.KeyUp += KeyboardHook_KeyUp;
    KeyboardHook.InstallHook();

    //CFG
    cfg.doData();
    rkey = cfg.getHotkey();
    running.dell = cfg.getDelay();

    //SET LABEL HOTKEY
    var keysConverter = new KeysConverter();
    labelHotkey.Text = "Hotkey : " + keysConverter.ConvertToString(rkey);

    //SET LABEL DELAY
    labelDelay.Text = "Delay : " + running.dell.ToString() + "ms";

    running.setStatus(statusLabel, false);
}
/// <summary>
/// Builds and starts the speech recognition engine from the active profile's
/// trigger phrases, then installs the Push-to-Talk keyboard hooks.
/// </summary>
/// <returns>True when the engine is listening; false when there is no active
/// profile, no triggers were added, or the audio input device could not be
/// initialised.</returns>
public bool LoadListen()
{
    // Don't allocate anything if we have no phrases to hook.
    if (App.ActiveProfile == null)
    {
        return(false);
    }

    if (App.ActiveProfile.ProfileTriggers != null && App.ActiveProfile.ProfileTriggers.Count == 0)
    {
        Diagnostics.Log("LoadListen() called without a trigger added.");
        MessageBox.Show("At least one Trigger must be added!");
        return(false);
    }

    _synthesizer = App.ActiveProfile.Synthesizer;
    _synthesizer.SelectVoice(App.Settings.VoiceInfo);

    _speechRecognitionEngine = new SpeechRecognitionEngine(App.Settings.RecognizerInfo);

    // Grammar must match speech recognition language localization
    GrammarBuilder grammarPhrases = new GrammarBuilder
    {
        Culture = App.Settings.RecognizerInfo
    };

    List <string> glossary = new List <string>();

    // Add trigger phrases to glossary of voice recognition engine.
    if (App.ActiveProfile.ProfileTriggers != null)
    {
        glossary.AddRange(from trigger in App.ActiveProfile.ProfileTriggers
                          let phrase = (Phrase)trigger
                          select trigger.Value);
    }

    grammarPhrases.Append(new Choices(glossary.ToArray()));
    _speechRecognitionEngine.LoadGrammar(new Grammar(grammarPhrases));

    // event function hook
    _speechRecognitionEngine.SpeechRecognized += PhraseRecognized;
    _speechRecognitionEngine.SpeechRecognitionRejected += Recognizer_SpeechRecognitionRejected;

    try
    {
        _speechRecognitionEngine.SetInputToDefaultAudioDevice();
    }
    catch (InvalidOperationException e)
    {
        Diagnostics.Log(e, "No microphone was detected.");
        MessageBox.Show("No microphone was detected!", "Error", MessageBoxButton.OK, MessageBoxImage.Error);
        return(false);
    }
    catch (Exception e)
    {
        Diagnostics.Log(e, "An Unknown error occured when attempting to set default input device.");
        MessageBox.Show("An unknown error has occured, contact support if the problem persists.", "Error", MessageBoxButton.OK, MessageBoxImage.Error);
        return(false);
    }

    // Continuous (multiple-utterance) asynchronous recognition.
    _speechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);

    // subscribe to Push-to-Talk key hooks.
    KeyboardHook.KeyDown += _pushToTalkKeyDownHook;
    KeyboardHook.KeyUp += _pushToTalkKeyUpHook;
    KeyboardHook.InstallHook();

    // Any mode other than Hold/Toggle/Single disables Push-to-Talk gating:
    // the recognizer is treated as always active.
    if (App.Settings.PushToTalkMode != "Hold" &&
        App.Settings.PushToTalkMode != "Toggle" &&
        App.Settings.PushToTalkMode != "Single")
    {
        _pushToTalkActive = true;
    }

    // successfully established an instance of SAPI engine with well-formed grammar.
    IsListening = true;
    return(true);
}
//
//  public bool load_listen()
//
//  load_listen() establishes the speech recognition engine based on the command glossary stored within the
//  currently loaded Profile.  load_listen() may fail, returning Boolean FALSE, if a Profile's glossary does
//  not meet the engine's grammar requirements; load_listen() will also fail, returning Boolean FALSE, should
//  an exception occur that cannot be resolved within the method.  load_listen() will return Boolean TRUE upon
//  success.
//
public bool load_listen()
{
    // The synthesizer comes from the profile; the selected voice from settings.
    vi_syn = GAVPI.vi_profile.synth;
    vi_syn.SelectVoice(GAVPI.vi_settings.voice_info);

    vi_sre = new SpeechRecognitionEngine(GAVPI.vi_settings.recognizer_info);

    GrammarBuilder phrases_grammar = new GrammarBuilder();

    // Grammer must match speech recognition language localization
    phrases_grammar.Culture = GAVPI.vi_settings.recognizer_info;

    List <string> glossory = new List <string>();

    foreach (VI_Phrase trigger in GAVPI.vi_profile.Profile_Triggers)
    {
        glossory.Add(trigger.value);
    }

    // Without at least one trigger phrase there is no grammar to build.
    if (glossory.Count == 0)
    {
        MessageBox.Show("You need to add at least one Trigger");
        return(false);
    }

    phrases_grammar.Append(new Choices(glossory.ToArray()));

    vi_sre.LoadGrammar(new Grammar(phrases_grammar));

    //set event function
    vi_sre.SpeechRecognized += phraseRecognized;
    vi_sre.SpeechRecognitionRejected += _recognizer_SpeechRecognitionRejected;

    try
    {
        vi_sre.SetInputToDefaultAudioDevice();
    }
    catch (InvalidOperationException exception)
    {
        // For the time being, we're only catching failures to address an input device (typically a
        // microphone).
        MessageBox.Show("Have you connected a microphone to this computer?\n\n" +
                        "Please ensure that you have successfull connected and configured\n" +
                        "your microphone before trying again.",
                        "I cannot hear you!",
                        MessageBoxButtons.OK,
                        MessageBoxIcon.Exclamation,
                        MessageBoxDefaultButton.Button1);

        return(false);
    }

    // Continuous (multiple-utterance) asynchronous recognition.
    vi_sre.RecognizeAsync(RecognizeMode.Multiple);

    // TODO:
    // Push-to-Talk keyboard hook. Unimplemented.
    try
    {
        KeyboardHook.KeyDown += pushtotalk_keyDownHook;
        KeyboardHook.KeyUp += pushtotalk_keyUpHook;
        KeyboardHook.InstallHook();
    }
    catch (OverflowException exception)
    {
        // TODO:
        // InputManager library, which we rely upon, has issues with .Net 4.5 and throws an Overflow exception.
        // We'll catch it here and pretty much let it go for now (since Push-to-Talk isn't implemented yet)
        // with the intent of resolving it later.
    }

    // Any mode other than Hold/PressOnce leaves the recognizer always active.
    if (GAVPI.vi_settings.pushtotalk_mode != "Hold" &&
        GAVPI.vi_settings.pushtotalk_mode != "PressOnce")
    {
        pushtotalk_active = true;
    }

    // We have successfully establish an instance of a SAPI engine with a well-formed grammar.
    IsListening = true;

    return(true);
}
/// <summary>
/// Creates the keyboard hook and installs it with the key-press callback.
/// </summary>
private void InitHotKey(System.Windows.Window win)
{
    var hook = new KeyboardHook();
    _keyboardHook = hook;
    hook.InstallHook(OnKeyPress);
}
//
//  public bool load_listen()
//
//  load_listen() establishes the speech recognition engine based on the command glossary stored within the
//  currently loaded Profile.  load_listen() may fail, returning Boolean FALSE, if a Profile's glossary does
//  not meet the engine's grammar requirements; load_listen() will also fail, returning Boolean FALSE, should
//  an exception occur that cannot be resolved within the method.  load_listen() will return Boolean TRUE upon
//  success.
//
//  Optimizations : 04.28.15
//
public bool load_listen()
{
    // Don't allocate anything if we have no phrases to hook.
    if (GAVPI.Profile.Profile_Triggers != null && GAVPI.Profile.Profile_Triggers.Count == 0)
    {
        MessageBox.Show("You need to add at least one Trigger");
        return(false);
    }

    synth = GAVPI.Profile.synth;
    synth.SelectVoice(GAVPI.Settings.voice_info);

    speech_re = new SpeechRecognitionEngine(GAVPI.Settings.recognizer_info);

    GrammarBuilder phrases_grammar = new GrammarBuilder();

    // Grammer must match speech recognition language localization
    phrases_grammar.Culture = GAVPI.Settings.recognizer_info;

    List <string> glossory = new List <string>();

    // Add trigger phrases to glossory of voice recognition engine.
    foreach (Phrase trigger in GAVPI.Profile.Profile_Triggers)
    {
        glossory.Add(trigger.value);
    }

    phrases_grammar.Append(new Choices(glossory.ToArray()));

    speech_re.LoadGrammar(new Grammar(phrases_grammar));

    // event function hook
    speech_re.SpeechRecognized += phraseRecognized;
    speech_re.SpeechRecognitionRejected += _recognizer_SpeechRecognitionRejected;

    try
    {
        speech_re.SetInputToDefaultAudioDevice();
    }
    catch (InvalidOperationException exception)
    {
        // For the time being, we're only catching failures to address an input device (typically a
        // microphone).
        MessageBox.Show("Have you connected a microphone to this computer?\n\n" +
                        "Please ensure that you have successfull connected and configured\n" +
                        "your microphone before trying again.",
                        "I cannot hear you! (" + exception.Message + ")",
                        MessageBoxButtons.OK,
                        MessageBoxIcon.Exclamation,
                        MessageBoxDefaultButton.Button1);

        return(false);
    }

    // Continuous (multiple-utterance) asynchronous recognition.
    speech_re.RecognizeAsync(RecognizeMode.Multiple);

    // Install Push to talk key hooks.
    KeyboardHook.KeyDown += pushtotalk_keyDownHook;
    KeyboardHook.KeyUp += pushtotalk_keyUpHook;
    KeyboardHook.InstallHook();

    // Any mode other than Hold/Toggle/Single leaves the recognizer always active.
    if (GAVPI.Settings.pushtotalk_mode != "Hold" &&
        GAVPI.Settings.pushtotalk_mode != "Toggle" &&
        GAVPI.Settings.pushtotalk_mode != "Single")
    {
        pushtotalk_active = true;
    }

    // We have successfully establish an instance of a SAPI engine with a well-formed grammar.
    IsListening = true;
    return(true);
}
/// <summary>
/// Builds and starts the speech recognition engine from the active profile's
/// trigger phrases, then installs the Push-to-Talk key hooks.
/// </summary>
/// <returns>True when listening has started; false when there is no active
/// profile, no triggers were added, or audio input could not be initialised.</returns>
public bool LoadListen()
{
    // Don't allocate anything if we have no phrases to hook.
    if (App.ActiveProfile == null)
    {
        return(false);
    }

    if (App.ActiveProfile.ProfileTriggers != null && App.ActiveProfile.ProfileTriggers.Count == 0)
    {
        MessageBox.Show("You need to add at least one Trigger");
        return(false);
    }

    synthesizer = App.ActiveProfile.Synthesizer;
    synthesizer.SelectVoice(App.Settings.VoiceInfo);

    speechRecognitionEngine = new SpeechRecognitionEngine(App.Settings.RecognizerInfo);

    // Grammar must match speech recognition language localization
    GrammarBuilder grammarPhrases = new GrammarBuilder
    {
        Culture = App.Settings.RecognizerInfo
    };

    List <string> glossary = new List <string>();

    // Add trigger phrases to glossary of voice recognition engine.
    if (App.ActiveProfile.ProfileTriggers != null)
    {
        glossary.AddRange(from trigger in App.ActiveProfile.ProfileTriggers
                          let phrase = (Phrase)trigger
                          select trigger.Value);
    }

    grammarPhrases.Append(new Choices(glossary.ToArray()));
    speechRecognitionEngine.LoadGrammar(new Grammar(grammarPhrases));

    // event function hook
    speechRecognitionEngine.SpeechRecognized += PhraseRecognized;
    speechRecognitionEngine.SpeechRecognitionRejected += Recognizer_SpeechRecognitionRejected;

    try
    {
        speechRecognitionEngine.SetInputToDefaultAudioDevice();
    }
    catch (InvalidOperationException exception)
    {
        // For the time being, we're only catching failures to address an input device (typically a
        // microphone).
        // TODO: Show error message indicating a microphone was not detected.
        return(false);
    }
    catch (Exception)
    {
        // TODO: Show unknown error message here.
        return(false);
    }

    // Continuous (multiple-utterance) asynchronous recognition.
    speechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);

    // Install Push to talk key hooks.
    KeyboardHook.KeyDown += pushToTalkKeyDownHook;
    KeyboardHook.KeyUp += pushToTalkKeyUpHook;
    KeyboardHook.InstallHook();

    // Any mode other than Hold/Toggle/Single disables Push-to-Talk gating:
    // the recognizer stays active at all times.
    if (App.Settings.PushToTalkMode != "Hold" &&
        App.Settings.PushToTalkMode != "Toggle" &&
        App.Settings.PushToTalkMode != "Single")
    {
        pushToTalkActive = true;
    }

    // We have successfully establish an instance of a SAPI engine with a well-formed grammar.
    IsListening = true;
    return(true);
}
/// <summary>
/// Installs both global hooks when the Start button is clicked: mouse first,
/// then keyboard. NOTE(review): hook-chain ordering may be significant —
/// confirm before reordering these calls.
/// </summary>
private void start_Click(object sender, EventArgs e)
{
    _mouseHook.InstallHook();
    _keyboardHook.InstallHook();
}