Example #1
        public GestureDetector(KinectSensor kinectSensor, ComModule coms)
        {
            this.coms = coms;
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            this.vgbFrameReader = this.vgbFrameSource.OpenReader();

            if (this.vgbFrameReader != null)
            {
                this.vgbFrameReader.IsPaused      = true;
                this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
            }
            VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(gestureDatabasePath);

            if (database == null)
            {
                Console.WriteLine("No gesture database!");
                Environment.Exit(1);
            }

            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gesture-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "GESTURES");                  // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)
            mmic.Send(lce.NewContextRequest());

            this.vgbFrameSource.AddGestures(database.AvailableGestures);
            fpsCounter         = 0;
            gestureWasDetected = false;
        }
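The Reader_GestureFrameArrived handler wired above does not appear in this listing. Here is a minimal sketch of what it could look like, assuming the standard Kinect Visual Gesture Builder types and the same lce.ExtensionNotification/mmic.Send pattern that appears commented out in Example #3; the 0.8 confidence threshold and the uppercased gesture name in the payload are illustrative, not taken from the source.

        private void Reader_GestureFrameArrived(object sender, VisualGestureBuilderFrameArrivedEventArgs e)
        {
            using (VisualGestureBuilderFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null || frame.DiscreteGestureResults == null)
                {
                    return;
                }

                foreach (Gesture gesture in this.vgbFrameSource.Gestures)
                {
                    DiscreteGestureResult result;
                    if (gesture.GestureType == GestureType.Discrete &&
                        frame.DiscreteGestureResults.TryGetValue(gesture, out result) &&
                        result.Detected && result.Confidence > 0.8f)
                    {
                        // mirror the {"recognized":[...]} payload the speech modality sends
                        string json = "{ \"recognized\": [\"" + gesture.Name.ToUpper() + "\"] }";
                        mmic.Send(lce.ExtensionNotification("", "", result.Confidence, json));
                    }
                }
            }
        }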
Example #2
        public SpeechMod(System.Windows.Shapes.Ellipse circle, System.Windows.Threading.Dispatcher dispatcher)
        {
            this.circle     = circle;
            this.Dispatcher = dispatcher;

            //init LifeCycleEvents..
            lce = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "rootRule");
            sre.LoadGrammar(gr);

            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;

            // load speech synthesizer
            tts = new Tts();

            // introduce assistant
            Speak("Olá, eu sou o teu assistente de viagens. Tenho todo o gosto em ajudar-te a planear as tuas férias de sonho. Podes saber mais sobre mim dizendo: preciso de ajuda.", 12);
        }
Example #3
        public SpeechMod()
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                 //PORT TO FUSION - uncomment this line to work with fusion later
            //mmic = new MmiCommunication("localhost", 8000, "User1", "ASR"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "rootRule");
            sre.LoadGrammar(gr);

            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;

            // load speech synthesizer
            tts = new Tts();

            //onRecognized(new SpeechEventArg() { Text = "MUTE", Confidence = 100, Final = true, AssistantActive = assistantActive });

            // send command
            // format {"recognized":["SHAPE","COLOR"]}

            /*string json = "{ \"recognized\": [\"MUTE\"] }";
             * Console.WriteLine(json);
             * var exNot = lce.ExtensionNotification("","", 100, json);
             * mmic.Send(exNot);*/

            // introduce assistant
            //Speak("Olá, eu sou a Maria, a tua assistente pessoal. Tenho todo o gosto em ajudar-te com algumas tarefas no teu computador. Podes saber mais sobre mim dizendo: ajuda. Sou um pouco distraída, por isso sempre que quiseres chamar por mim diz: ó Maria!");
        }
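The commented-out block above documents the payload format. For reference, a minimal sketch of the Sre_SpeechRecognized handler these constructors subscribe to: the confidence gate and the iteration over e.Result.Semantics are assumptions, while the ExtensionNotification call mirrors the commented sample.

        private void Sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            // drop low-confidence recognitions (0.7 is an illustrative threshold)
            if (e.Result.Confidence < 0.7)
            {
                return;
            }

            // build the payload in the documented format: {"recognized":["SHAPE","COLOR"]}
            var tags = new List<string>();
            foreach (KeyValuePair<string, SemanticValue> semantic in e.Result.Semantics)
            {
                tags.Add("\"" + semantic.Value.Value + "\"");
            }
            string json = "{ \"recognized\": [" + string.Join(", ", tags) + "] }";
            Console.WriteLine(json);

            var exNot = lce.ExtensionNotification("", "", 100, json);
            mmic.Send(exNot);
        }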
Example #4
        public async Task RunBotAsync()
        {
            _client   = new DiscordSocketClient();
            _commands = new CommandService();
            var comModule = new Coms(_userNick);

            _mmiComms = comModule.GetMmic();
            _service  = new ServiceCollection()
                        .AddSingleton(_client)
                        .AddSingleton(_commands)
                        .BuildServiceProvider();
            _tts             = new Tts(comModule);
            _speechTemplates = new SpeechTemplates();

            _client.Log += Log;
            await RegisterCommandsAsync();

            _mmiComms.Message += MmiC_Message; // subscribe to the messages that come from the comModule
            _mmiComms.Start();


            await _client.LoginAsync(TokenType.Bot, _botToken);


            await _client.StartAsync();

            _tts.Speak("Olá eu sou o wally, o teu bot do Discord! Se desejares podes-me perguntar o que é que eu sou capaz de fazer e que comandos estão disponíveis. So tenho um requisito, diz o meu nome antes de qualquer comando.");

            Thread.Sleep(2000);

            comModule.SendGuildInfo(_client.Guilds);
            await Task.Delay(-1);
        }
Example #5
File: SpeechMod.cs Project: NunoArmas/IM
        public SpeechMod()
        {
            string sound_path = System.IO.Directory.GetCurrentDirectory() + @"\msg_sound.wav";

            ring = new MediaPlayer();
            ring.Open(new Uri(sound_path));
            ring.Volume = 0.10;

            //init LifeCycleEvents..
            lce = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());


            lena = new Tts();
            //lena.Speak("Bom dia. Eu sou a Lena.");
            // initiate connection to socket

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "rootRule");
            sre.LoadGrammar(gr);


            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;
        }
Example #6
        public MainWindow()
        {
            InitializeComponent();

            this.main = this;

            this.screenWidth  = System.Windows.SystemParameters.PrimaryScreenWidth;
            this.screenHeight = System.Windows.SystemParameters.PrimaryScreenHeight;

            this.Height = 55;
            this.Width  = this.maxWindowWidth;

            this.Top  = screenHeight - this.Height + 7;
            this.Left = screenWidth - this.Width + 7;

            this.Topmost = true;

            this.Show();

            this.sm = new Speaking();

            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();

            //UpdateKinectStatus("active");
        }
Example #7
        public MainWindow()
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("ASR", "IM", "speech-1", "acoustic", "command");
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");
            tts  = new TTS();

            mmic.Send(lce.NewContextRequest());
            InitializeComponent();

            var sensor = KinectSensor.GetDefault();

            if (sensor != null)
            {
                _close_hand_gesture.GestureRecognized               += Gesture_Recognized;
                _open_hand_gesture.GestureRecognized                += Gesture_Recognized;
                _swipe_right_gesture.GestureRecognized              += Gesture_Recognized;
                _swipe_left_gesture.GestureRecognized               += Gesture_Recognized;
                _swipe_down_gesture.GestureRecognized               += Gesture_Recognized;
                _swipe_up_gesture.GestureRecognized                 += Gesture_Recognized;
                _rotate_clockwise_gesture.GestureRecognized         += Gesture_Recognized;
                _rotate_counter_clockwise_gesture.GestureRecognized += Gesture_Recognized;
                _cross_gesture.GestureRecognized      += Gesture_Recognized;
                _close_ears_gesture.GestureRecognized += Gesture_Recognized;

                sensor.Open();
            }
            this.Loaded += OnLoaded;
        }
Example #8
        public MainWindow()
        {
            UserCredential credentials;

            using (var stream =
                       new FileStream("credentials.json", FileMode.Open, FileAccess.Read))
            {
                // The file token.json stores the user's access and refresh tokens, and is created
                // automatically when the authorization flow completes for the first time.
                string credPath = "token.json";
                credentials = GoogleWebAuthorizationBroker.AuthorizeAsync(
                    GoogleClientSecrets.Load(stream).Secrets,
                    Scopes,
                    "user",
                    CancellationToken.None,
                    new FileDataStore(credPath, true)).Result;
                Console.WriteLine("Credential file saved to: " + credPath);
            }

            // Create Google Calendar API service.
            service = new CalendarService(new BaseClientService.Initializer()
            {
                HttpClientInitializer = credentials,
                ApplicationName       = ApplicationName,
            });

            InitializeComponent();


            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();
        }
Example #9
        public MainWindow()
        {
            lce  = new LifeCycleEvents("TOUCH", "FUSION", "touch-1", "touch", "command");
            mmic = new MmiCommunication("localhost", 9876, "User1", "TOUCH");
            mmic.Send(lce.NewContextRequest());
            InitializeComponent();

            // only one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // open the sensor
            this.kinectSensor.Open();

            // set the initial status text
            this.UpdateKinectStatusText();

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            this.kinectBodyView = new KinectBodyView(this.kinectSensor);

            // initialize the GestureDetector object
            this.gestureResultView = new GestureResultView(false, 0, 0);
            this.gestureDetector   = new GestureDetector(this.kinectSensor, this.gestureResultView);

            // set data context objects for display in UI
            this.DataContext = this;
            this.kinectBodyViewbox.DataContext = this.kinectBodyView;

            //this.gestureResultGrid.DataContext = this.gestureResultView;
        }
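Example #9 opens a BodyFrameReader but never shows the handler that feeds tracking ids to the GestureDetector. A minimal sketch follows, assuming a bodies field and that GestureDetector exposes TrackingId and IsPaused wrappers over the VGB source and reader, as in Microsoft's DiscreteGestureBasics sample that this code resembles.

        private Body[] bodies; // assumed field for this sketch

        private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
        {
            using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                {
                    return;
                }

                if (this.bodies == null)
                {
                    this.bodies = new Body[bodyFrame.BodyCount];
                }
                bodyFrame.GetAndRefreshBodyData(this.bodies);

                // hand the first tracked body to the gesture detector and unpause it
                foreach (Body body in this.bodies)
                {
                    if (body != null && body.IsTracked)
                    {
                        this.gestureDetector.TrackingId = body.TrackingId;
                        this.gestureDetector.IsPaused   = false;
                        break;
                    }
                }
            }
        }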
Example #10
        public GestureMod()
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gesture-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 8000, "User2", "GESTURES");                  // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());
        }
Example #11
        public MainWindow()
        {
            worker = new SpotifyWorker();

            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();
        }
Example #12
        public MainWindow()
        {
            InitializeComponent();


            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();
        }
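Every GUI example here subscribes MmiC_Message to mmiC.Message, but the handler itself is never shown. A minimal sketch, assuming the event args expose the received MMI/EMMA document as e.Message, that the modalities' JSON travels inside a <command> element, and that Newtonsoft.Json and System.Xml.Linq are available; the OPEN case is a hypothetical action name.

        private void MmiC_Message(object sender, MmiEventArgs e)
        {
            // the interaction manager forwards an MMI/EMMA XML document
            var doc     = XDocument.Parse(e.Message);
            var command = doc.Descendants("command").FirstOrDefault();
            if (command == null)
            {
                return;
            }

            // the JSON payload sent by the modalities sits inside <command>
            dynamic json   = JsonConvert.DeserializeObject(command.Value);
            string  action = (string)json.recognized[0];

            switch (action)
            {
                case "OPEN": // hypothetical command
                    Dispatcher.Invoke(() => { /* update the UI on its own thread */ });
                    break;
            }
        }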
Example #13
 public MainWindow()
 {
     InitializeComponent();
     _t    = new Tts();
     _calc = new Calculator();
     _t.Speak(chooseRandomSpeech("greeting"));
     mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
     mmiC.Message += MmiC_Message;
     mmiC.Start();
 }
Example #14
        public MainWindow()
        {
            InitializeComponent();

            mpcHomeCinema = new MPCHomeCinema("http://localhost:13579");

            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();
        }
Example #15
 /// <summary>
 /// Initializes a new instance of the GuiClass class and sets initial property values
 /// </summary>
 /// <param name="bodyIndex">Body Index associated with the current gesture detector</param>
 /// <param name="isTracked">True, if the body is currently tracked</param>
 /// <param name="detected">True, if the gesture is currently detected for the associated body</param>
 /// <param name="confidence">Confidence value for detection of the 'Seated' gesture</param>
 public GuiClass(int bodyIndex, bool isTracked, bool detected, float confidence, LifeCycleEvents lce, MmiCommunication mmic)
 {
     this.BodyIndex   = bodyIndex;
     this.IsTracked   = isTracked;
     this.Detected    = detected;
     this.Confidence  = confidence;
     this.ImageSource = this.notTrackedImage;
     this.lce         = lce;
     this.mmic        = mmic;
 }
Example #16
        public Form1()
        {
            InitializeComponent();
            this.coord.GetLocationEvent();

            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            command       = new browserCommands();
            mmiC.Message += MmiC_Message;

            mmiC.Start();
        }
Example #17
        public MainWindow()
        {
            InitializeComponent();

            _client = new DiscordWebhookClient(643897438297391124, "-Jpr2aw_HQeS5iLVhC0TQzc7d9y4tTAp55aZZgfBvJvDuKjjmfxYGgcEEcLe3lnnAwyF");

            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();
            t = new Tts();
        }
Example #18
        public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gestures-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                        //PORT TO FUSION - uncomment this line to work with fusion later
            //mmic = new MmiCommunication("localhost", 8000, "User1", "GESTURES"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)
            mmic.Send(lce.NewContextRequest());



            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            if (gestureResultView == null)
            {
                throw new ArgumentNullException("gestureResultView");
            }

            this.GestureResultView = gestureResultView;

            // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
            this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

            // open the reader for the vgb frames
            this.vgbFrameReader = this.vgbFrameSource.OpenReader();
            if (this.vgbFrameReader != null)
            {
                this.vgbFrameReader.IsPaused      = true;
                this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
            }

            // load the gestures of interest from the gesture database
            using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
            {
                // we could load every gesture with vgbFrameSource.AddGestures(database.AvailableGestures),
                // but this program only tracks a few discrete gestures, so they are loaded by name
                foreach (Gesture gesture in database.AvailableGestures)
                {
                    if (gesture.Name.Equals(this.crouch_gesture) ||
                        gesture.Name.Equals(this.dab_gesture) ||
                        gesture.Name.Equals(this.hey_gesture) ||
                        gesture.Name.Equals(this.hold_gesture) ||
                        gesture.Name.Equals(this.reload_gesture))
                    {
                        this.vgbFrameSource.AddGesture(gesture);
                    }
                }
            }
        }
Example #19
        /// <summary>
        /// Initializes a new instance of the GestureResultView class and sets initial property values
        /// </summary>
        /// <param name="bodyIndex">Body Index associated with the current gesture detector</param>
        /// <param name="isTracked">True, if the body is currently tracked</param>
        /// <param name="detected">True, if the gesture is currently detected for the associated body</param>
        /// <param name="confidence">Confidence value for detection of the 'Seated' gesture</param>
        public GestureResultView(int bodyIndex, bool isTracked, bool detected, float confidence, LifeCycleEvents lce, MmiCommunication mmic, MainWindow main)
        {
            this.BodyIndex   = bodyIndex;
            this.IsTracked   = isTracked;
            this.Detected    = detected;
            this.Confidence  = confidence;
            this.ImageSource = this.notTrackedImage;
            this.lce         = lce;
            this.mmic        = mmic;
            this.main        = main;

            //main.SetState("deactive");
        }
Example #20
        public MainWindow()
        {
            InitializeComponent();


            lce_speechMod  = new LifeCycleEvents("ASR", "FUSION", "speech-2", "acoustic", "command");
            mmic_speechMod = new MmiCommunication("localhost", 8000, "User2", "ASR");
            mmic_speechMod.Send(lce_speechMod.NewContextRequest());

            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();
        }
Example #21
        public SecondMod()
        {
            //init LifeCycleEvents..


            // CHANGED FOR FUSION ---------------------------------------

            lce  = new LifeCycleEvents("TOUCH", "FUSION", "touch-1", "touch", "command");
            mmic = new MmiCommunication("localhost", 9876, "User1", "TOUCH");  //CHANGED To user1

            // END CHANGED FOR FUSION------------------------------------

            mmic.Send(lce.NewContextRequest());
        }
Example #22
 public MainWindow()
 {
     InitializeComponent();
     _t    = new Tts();
     _calc = new Calculator();
     _t.Speak(chooseRandomSpeech("greeting"));
     _beggining = true; _confirmation = false;
     _lastNum1  = ""; _lastNum2 = ""; _lastOp = "";
     confidence = 0;
     mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
     mmiC.Message += MmiC_Message;
     mmiC.Start();
 }
Example #23
File: Form2.cs Project: davidd1995/IM2
        public Form2()
        {
            InitializeComponent();
            t.Speak("Olá, bem vindo ao Web browser!");
            t.Speak("Vamos começar!");
            t.Speak("Em que posso ajudar?");
            webBrowser1.ScriptErrorsSuppressed = true;
            webBrowser1.GoHome();


            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();
        }
Example #24
        public MainWindow()
        {
            /* Start API controllers */
            //this.worker = new SpotifyWorker();
            this.local_worker = new SpotifyLocalWorker();
            this.web_worker   = new SpotifyWebWorker();

            /* Start Text to Speech Module - TTS */
            this.tts = new TTS();

            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();
        }
Example #25
        public MainWindow()
        {
            InitializeComponent();


            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();
            oPowerPoint   = new PowerPoint.Application();
            oPresentation = oPowerPoint.Presentations.Add();
            examplePresentation();
            openpowerpoint   = true;
            presentationMode = false;
        }
Example #26
        public SpeechMod(TextBox textBox)
        {
            specialCharacters = new List<Tuple<string, string>>();
            specialCharacters.Add(new Tuple<string, string>("é", "(e)"));
            specialCharacters.Add(new Tuple<string, string>("ê", "(e_acent)"));
            specialCharacters.Add(new Tuple<string, string>("í", "(i)"));
            specialCharacters.Add(new Tuple<string, string>("ç", "(c)"));
            specialCharacters.Add(new Tuple<string, string>("ã", "(a_till)"));
            specialCharacters.Add(new Tuple<string, string>("à", "(a_haver)"));
            specialCharacters.Add(new Tuple<string, string>("á", "(a_acent)"));
            specialCharacters.Add(new Tuple<string, string>("â", "(a_cir)"));
            specialCharacters.Add(new Tuple<string, string>("õ", "(o_till)"));
            specialCharacters.Add(new Tuple<string, string>("ó", "(o_acent)"));


            Console.WriteLine("OK...");
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                 //PORT TO FUSION - uncomment this line to work with fusion later
            //mmic = new MmiCommunication("localhost", 8000, "User1", "ASR"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());



            //load pt recognizer
            sre            = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            grMain         = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml");
            grMain.Name    = "Main Grammar";
            grMain.Enabled = true;

            grYesNo         = new Grammar(Environment.CurrentDirectory + "\\yesNoGrammar.grxml");
            grYesNo.Name    = "YesNo Grammar";
            grYesNo.Enabled = false;

            sre.LoadGrammar(grMain);
            sre.LoadGrammar(grYesNo);

            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;

            //server to receive commands from APP!!
            appServer = new AppServer(sre, textBox, resetGrammar);
            appServer.run();
        }
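The resetGrammar delegate handed to AppServer is not shown in the listing. Given the two grammars configured above, a plausible minimal implementation simply restores the default grammar state:

        // sketch only: assumes resetGrammar re-enables the main grammar
        // and disables the yes/no grammar declared in the constructor above
        private void resetGrammar()
        {
            grMain.Enabled  = true;
            grYesNo.Enabled = false;
        }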
Example #27
        // Initializes a new instance of the GestureDetector class along with the gesture frame source and reader
        public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gestures-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                        //PORT TO FUSION - uncomment this line to work with fusion later
            //mmic = new MmiCommunication("localhost", 8000, "User1", "GESTURES"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)
            mmic.Send(lce.NewContextRequest());
            count = 0;

            if (kinectSensor == null)
            {
                throw new ArgumentNullException("Kinect Sensor is null");
            }

            if (gestureResultView == null)
            {
                throw new ArgumentNullException("Gesture Result View is null");
            }

            GestureResultView = gestureResultView;

            // Create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
            vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            vgbFrameSource.TrackingIdLost += Source_TrackingIdLost;

            // Open the reader for the vgb frames
            vgbFrameReader = vgbFrameSource.OpenReader();
            if (vgbFrameReader != null)
            {
                vgbFrameReader.IsPaused      = true;
                vgbFrameReader.FrameArrived += Reader_GestureFrameArrived;
            }

            // Load gestures from database
            using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(gestureDatabase))
            {
                foreach (Gesture gesture in database.AvailableGestures)
                {
                    if (gesture.Name.Equals(stop) || gesture.Name.Equals(back) || gesture.Name.Equals(skip) ||
                        gesture.Name.Equals(vdown) || gesture.Name.Equals(vup))
                    {
                        vgbFrameSource.AddGesture(gesture);
                    }
                }
            }
        }
Example #28
        public MainWindow()
        {
            InitializeComponent();

            PresentationPage initialPage = new PresentationPage(this);

            this.Content = initialPage;

            speakFinish = true;

            AppDomain.CurrentDomain.ProcessExit += CurrentDomain_ProcessExit;
            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();

            dManager = new ModalitiesManager(this);
        }
Example #29
        public MainWindow()
        {
            //init LifeCycleEvents..
            lce = new LifeCycleEvents("KINECT", "FUSION", "gesture-1", "gesture", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User2", "KINECT");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            //this.kinect.IsAvailableChanged += this.Sensor_IsAvailableChanged;
            OnOpenSensor();
            InitializeComponent();
            OnLoadGestureFromDBd();
            OnOpenReaders();

            Closed += OnWindowClosing;
        }
Example #30
        public SpeechMod()
        {
            //Initialize LifeCycleEvents.
            LCE  = new LifeCycleEvents("ASR", "IM", "speech-1", "acoustic", "command");
            MMIC = new MmiCommunication("localhost", 8000, "User1", "ASR");

            MMIC.Send(LCE.NewContextRequest());

            //load pt recognizer
            SRE = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "basicCmd");
            SRE.LoadGrammar(gr);

            SRE.SetInputToDefaultAudioDevice();
            SRE.RecognizeAsync(RecognizeMode.Multiple);
            SRE.SpeechRecognized   += SRE_SpeechRecognized;
            SRE.SpeechHypothesized += SRE_SpeechHypothesized;
        }