Example #1
        public SpeechMod(System.Windows.Shapes.Ellipse circle, System.Windows.Threading.Dispatcher dispatcher)
        {
            this.circle     = circle;
            this.Dispatcher = dispatcher;

            //init LifeCycleEvents..
            lce = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "rootRule");
            sre.LoadGrammar(gr);

            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;

            // load speech synthesizer
            tts = new Tts();

            // introduce assistant
            Speak("Olá, eu sou o teu assistente de viagens. Tenho todo o gosto em ajudar-te a planear as tuas férias de sonho. Podes saber mais sobre mim dizendo: preciso de ajuda.", 12);
        }
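The constructor above registers a Sre_SpeechRecognized handler whose body is not shown in this example. A minimal sketch of such a handler, assuming the {"recognized":[...]} payload format from the commented-out snippet in Example #4 and the lce/mmic fields initialized above (the confidence threshold and the way the grammar semantics are flattened are assumptions, not the original author's code):

        // sketch only: needs using System.Collections.Generic and System.Speech.Recognition
        private void Sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            // ignore low-confidence recognitions (threshold is an assumption)
            if (e.Result.Confidence < 0.7)
                return;

            // flatten the semantic values produced by ptG.grxml into a list of quoted tags
            var recognized = new List<string>();
            foreach (var semantic in e.Result.Semantics)
            {
                recognized.Add("\"" + semantic.Value.Value + "\"");
            }

            // build the {"recognized":[...]} payload and forward it to the IM/FUSION module
            string json  = "{ \"recognized\": [" + string.Join(",", recognized) + "] }";
            var    exNot = lce.ExtensionNotification("", "", (int)(e.Result.Confidence * 100), json);
            mmic.Send(exNot);
        }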
Example #2
        public MainWindow()
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("ASR", "IM", "speech-1", "acoustic", "command");
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");
            tts  = new TTS();

            mmic.Send(lce.NewContextRequest());
            InitializeComponent();

            var sensor = KinectSensor.GetDefault();

            if (sensor != null)
            {
                _close_hand_gesture.GestureRecognized               += Gesture_Recognized;
                _open_hand_gesture.GestureRecognized                += Gesture_Recognized;
                _swipe_right_gesture.GestureRecognized              += Gesture_Recognized;
                _swipe_left_gesture.GestureRecognized               += Gesture_Recognized;
                _swipe_down_gesture.GestureRecognized               += Gesture_Recognized;
                _swipe_up_gesture.GestureRecognized                 += Gesture_Recognized;
                _rotate_clockwise_gesture.GestureRecognized         += Gesture_Recognized;
                _rotate_counter_clockwise_gesture.GestureRecognized += Gesture_Recognized;
                _cross_gesture.GestureRecognized      += Gesture_Recognized;
                _close_ears_gesture.GestureRecognized += Gesture_Recognized;

                sensor.Open();
            }
            this.Loaded += OnLoaded;
        }
Example #3
        public SpeechMod()
        {
            string sound_path = System.IO.Directory.GetCurrentDirectory() + @"\msg_sound.wav";

            ring = new MediaPlayer();
            ring.Open(new Uri(sound_path));
            ring.Volume = 0.10;

            //init LifeCycleEvents..
            lce = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());


            lena = new Tts();
            //lena.Speak("Bom dia. Eu sou a Lena.");
            //initiate connection to socket

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "rootRule");
            sre.LoadGrammar(gr);


            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;
        }
Example #4
        public SpeechMod()
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                 // PORT TO FUSION
            //mmic = new MmiCommunication("localhost", 8000, "User1", "ASR"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "rootRule");
            sre.LoadGrammar(gr);

            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;

            // load speech synthesizer
            tts = new Tts();

            //onRecognized(new SpeechEventArg() { Text = "MUTE", Confidence = 100, Final = true, AssistantActive = assistantActive });

            // send command
            // format {"recognized":["SHAPE","COLOR"]}

            /*string json = "{ \"recognized\": [\"MUTE\"] }";
             * Console.WriteLine(json);
             * var exNot = lce.ExtensionNotification("","", 100, json);
             * mmic.Send(exNot);*/

            // introduce assistant
            //Speak("Olá, eu sou a Maria, a tua assistente pessoal. Tenho todo o gosto em ajudar-te com algumas tarefas no teu computador. Podes saber mais sobre mim dizendo: ajuda. Sou um pouco distraída, por isso sempre que quiseres chamar por mim diz: ó Maria!");
        }
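The commented-out block above documents the message format this modality eventually sends. Reconstructed as live code (the same calls as in the comment, still a sketch rather than the project's final logic), sending a single MUTE command would look like:

            // send the MUTE command as an extension notification (format taken from the comment above)
            string json = "{ \"recognized\": [\"MUTE\"] }";
            Console.WriteLine(json);
            var exNot = lce.ExtensionNotification("", "", 100, json);
            mmic.Send(exNot);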
Example #5
        public GestureDetector(KinectSensor kinectSensor, ComModule coms)
        {
            this.coms = coms;
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            this.vgbFrameReader = this.vgbFrameSource.OpenReader();

            if (this.vgbFrameReader != null)
            {
                this.vgbFrameReader.IsPaused      = true;
                this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
            }
            VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(gestureDatabasePath);

            if (database == null)
            {
                Console.WriteLine("No gesture database!");
                Environment.Exit(1);
            }

            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gesture-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "GESTURES");                  // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)
            mmic.Send(lce.NewContextRequest());

            this.vgbFrameSource.AddGestures(database.AvailableGestures);
            fpsCounter         = 0;
            gestureWasDetected = false;
        }
Example #6
        public MainWindow()
        {
            lce  = new LifeCycleEvents("TOUCH", "FUSION", "touch-1", "touch", "command");
            mmic = new MmiCommunication("localhost", 9876, "User1", "TOUCH");
            mmic.Send(lce.NewContextRequest());
            InitializeComponent();

            // only one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // open the sensor
            this.kinectSensor.Open();

            // set the initial status text
            this.UpdateKinectStatusText();

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            this.kinectBodyView = new KinectBodyView(this.kinectSensor);

            // initialize the GestureDetector object
            this.gestureResultView = new GestureResultView(false, 0, 0);
            this.gestureDetector   = new GestureDetector(this.kinectSensor, this.gestureResultView);

            // set data context objects for display in UI
            this.DataContext = this;
            this.kinectBodyViewbox.DataContext = this.kinectBodyView;

            //this.gestureResultGrid.DataContext = this.gestureResultView;
        }
Example #7
    private static void CreateInstance()
    {
        GameObject go = new GameObject(nameof(LifeCycleEvents));

        go.hideFlags = HideFlags.NotEditable | HideFlags.DontSave;
        DontDestroyOnLoad(go);
        _instance = go.AddComponent <LifeCycleEvents>();
    }
Example #8
        public GestureMod()
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gesture-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 8000, "User2", "GESTURES");                  // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());
        }
 /// <summary>
 /// Initializes a new instance of the GuiClass class and sets initial property values
 /// </summary>
 /// <param name="bodyIndex">Body index associated with the current gesture detector</param>
 /// <param name="isTracked">True, if the body is currently tracked</param>
 /// <param name="detected">True, if the gesture is currently detected for the associated body</param>
 /// <param name="confidence">Confidence value for the detected gesture</param>
 /// <param name="lce">LifeCycleEvents instance used for MMI messages</param>
 /// <param name="mmic">MmiCommunication channel used to send events</param>
 public GuiClass(int bodyIndex, bool isTracked, bool detected, float confidence, LifeCycleEvents lce, MmiCommunication mmic)
 {
     this.BodyIndex   = bodyIndex;
     this.IsTracked   = isTracked;
     this.Detected    = detected;
     this.Confidence  = confidence;
     this.ImageSource = this.notTrackedImage;
     this.lce         = lce;
     this.mmic        = mmic;
 }
        public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gestures-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                        // PORT TO FUSION
            //mmic = new MmiCommunication("localhost", 8000, "User1", "GESTURES"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)
            mmic.Send(lce.NewContextRequest());



            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            if (gestureResultView == null)
            {
                throw new ArgumentNullException("gestureResultView");
            }

            this.GestureResultView = gestureResultView;

            // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
            this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

            // open the reader for the vgb frames
            this.vgbFrameReader = this.vgbFrameSource.OpenReader();
            if (this.vgbFrameReader != null)
            {
                this.vgbFrameReader.IsPaused      = true;
                this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
            }

            // load the required gestures from the gesture database
            using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
            {
                // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures),
                // but this program only tracks a handful of discrete gestures, so it loads them by name
                foreach (Gesture gesture in database.AvailableGestures)
                {
                    if (gesture.Name.Equals(this.crouch_gesture) ||
                        gesture.Name.Equals(this.dab_gesture) ||
                        gesture.Name.Equals(this.hey_gesture) ||
                        gesture.Name.Equals(this.hold_gesture) ||
                        gesture.Name.Equals(this.reload_gesture))
                    {
                        this.vgbFrameSource.AddGesture(gesture);
                    }
                }
            }
        }
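The gesture detectors above register Reader_GestureFrameArrived but never show its body. A plausible minimal handler, assuming the standard Visual Gesture Builder types and the same {"recognized":[...]} payload used by the speech modules (the confidence threshold and payload shape are assumptions):

        // sketch only: needs using Microsoft.Kinect.VisualGestureBuilder
        private void Reader_GestureFrameArrived(object sender, VisualGestureBuilderFrameArrivedEventArgs e)
        {
            using (VisualGestureBuilderFrame frame = e.FrameReference.AcquireFrame())
            {
                if (frame == null || frame.DiscreteGestureResults == null)
                    return;

                foreach (var entry in frame.DiscreteGestureResults)
                {
                    DiscreteGestureResult result = entry.Value;

                    // forward only confident detections to the fusion engine
                    if (result != null && result.Detected && result.Confidence > 0.8f)
                    {
                        string json  = "{ \"recognized\": [\"" + entry.Key.Name + "\"] }";
                        var    exNot = lce.ExtensionNotification("", "", (int)(result.Confidence * 100), json);
                        mmic.Send(exNot);
                    }
                }
            }
        }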
Example #11
        public MainWindow()
        {
            InitializeComponent();


            lce_speechMod  = new LifeCycleEvents("ASR", "FUSION", "speech-2", "acoustic", "command");
            mmic_speechMod = new MmiCommunication("localhost", 8000, "User2", "ASR");
            mmic_speechMod.Send(lce_speechMod.NewContextRequest());

            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();
        }
        /// <summary>
        /// Initializes a new instance of the GestureResultView class and sets initial property values
        /// </summary>
        /// <param name="bodyIndex">Body index associated with the current gesture detector</param>
        /// <param name="isTracked">True, if the body is currently tracked</param>
        /// <param name="detected">True, if the gesture is currently detected for the associated body</param>
        /// <param name="confidence">Confidence value for the detected gesture</param>
        /// <param name="lce">LifeCycleEvents instance used for MMI messages</param>
        /// <param name="mmic">MmiCommunication channel used to send events</param>
        /// <param name="main">Reference to the application's MainWindow</param>
        public GestureResultView(int bodyIndex, bool isTracked, bool detected, float confidence, LifeCycleEvents lce, MmiCommunication mmic, MainWindow main)
        {
            this.BodyIndex   = bodyIndex;
            this.IsTracked   = isTracked;
            this.Detected    = detected;
            this.Confidence  = confidence;
            this.ImageSource = this.notTrackedImage;
            this.lce         = lce;
            this.mmic        = mmic;
            this.main        = main;

            //main.SetState("deactive");
        }
Example #13
        public SecondMod()
        {
            //init LifeCycleEvents..


            // CHANGED FOR FUSION ---------------------------------------

            lce  = new LifeCycleEvents("TOUCH", "FUSION", "touch-1", "touch", "command");
            mmic = new MmiCommunication("localhost", 9876, "User1", "TOUCH");  //CHANGED To user1

            // END CHANGED FOR FUSION------------------------------------

            mmic.Send(lce.NewContextRequest());
        }
Example #14
        public SpeechMod(TextBox textBox)
        {
            specialCharacters = new List <Tuple <string, string> >();
            specialCharacters.Add(new Tuple <string, string>("é", "(e)"));
            specialCharacters.Add(new Tuple <string, string>("ê", "(e_acent)"));
            specialCharacters.Add(new Tuple <string, string>("í", "(i)"));
            specialCharacters.Add(new Tuple <string, string>("ç", "(c)"));
            specialCharacters.Add(new Tuple <string, string>("ã", "(a_till)"));
            specialCharacters.Add(new Tuple <string, string>("à", "(a_haver)"));
            specialCharacters.Add(new Tuple <string, string>("á", "(a_acent)"));
            specialCharacters.Add(new Tuple <string, string>("â", "(a_cir)"));
            specialCharacters.Add(new Tuple <string, string>("õ", "(o_till)"));
            specialCharacters.Add(new Tuple <string, string>("ó", "(o_acent)"));


            Console.WriteLine("OK...");
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                 // PORT TO FUSION
            //mmic = new MmiCommunication("localhost", 8000, "User1", "ASR"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());



            //load pt recognizer
            sre            = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            grMain         = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml");
            grMain.Name    = "Main Grammar";
            grMain.Enabled = true;

            grYesNo         = new Grammar(Environment.CurrentDirectory + "\\yesNoGrammar.grxml");
            grYesNo.Name    = "YesNo Grammar";
            grYesNo.Enabled = false;

            sre.LoadGrammar(grMain);
            sre.LoadGrammar(grYesNo);

            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;

            // server to receive commands from the app
            appServer = new AppServer(sre, textBox, resetGrammar);
            appServer.run();
        }
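The specialCharacters list above maps accented Portuguese characters to ASCII-safe tokens, but the example never shows where it is applied. A hypothetical helper (not in the original source) that uses it to sanitize recognized text before embedding it in a JSON payload could be:

        // hypothetical helper: replaces accented characters with the tokens from specialCharacters
        private string SanitizeForJson(string text)
        {
            foreach (Tuple<string, string> pair in specialCharacters)
            {
                text = text.Replace(pair.Item1, pair.Item2);
            }
            return text;
        }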
Example #15
        // Initializes a new instance of the GestureDetector class along with the gesture frame source and reader
        public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gestures-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                        // PORT TO FUSION
            //mmic = new MmiCommunication("localhost", 8000, "User1", "GESTURES"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)
            mmic.Send(lce.NewContextRequest());
            count = 0;

            if (kinectSensor == null)
            {
                throw new ArgumentNullException("Kinect Sensor is null");
            }

            if (gestureResultView == null)
            {
                throw new ArgumentNullException("Gesture Result View is null");
            }

            GestureResultView = gestureResultView;

            // Create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
            vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            vgbFrameSource.TrackingIdLost += Source_TrackingIdLost;

            // Open the reader for the vgb frames
            vgbFrameReader = vgbFrameSource.OpenReader();
            if (vgbFrameReader != null)
            {
                vgbFrameReader.IsPaused      = true;
                vgbFrameReader.FrameArrived += Reader_GestureFrameArrived;
            }

            // Load gestures from database
            using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(gestureDatabase))
            {
                foreach (Gesture gesture in database.AvailableGestures)
                {
                    if (gesture.Name.Equals(stop) || gesture.Name.Equals(back) || gesture.Name.Equals(skip) ||
                        gesture.Name.Equals(vdown) || gesture.Name.Equals(vup))
                    {
                        vgbFrameSource.AddGesture(gesture);
                    }
                }
            }
        }
Example #16
        public MainWindow()
        {
            //init LifeCycleEvents..
            lce = new LifeCycleEvents("KINECT", "FUSION", "gesture-1", "gesture", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User2", "KINECT");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            //this.kinect.IsAvailableChanged += this.Sensor_IsAvailableChanged;
            OnOpenSensor();
            InitializeComponent();
            OnLoadGestureFromDBd();
            OnOpenReaders();

            Closed += OnWindowClosing;
        }
Example #17
        public SpeechMod()
        {
            //Initialize LifeCycleEvents.
            LCE  = new LifeCycleEvents("ASR", "IM", "speech-1", "acoustic", "command");
            MMIC = new MmiCommunication("localhost", 8000, "User1", "ASR");

            MMIC.Send(LCE.NewContextRequest());

            //load pt recognizer
            SRE = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "basicCmd");
            SRE.LoadGrammar(gr);

            SRE.SetInputToDefaultAudioDevice();
            SRE.RecognizeAsync(RecognizeMode.Multiple);
            SRE.SpeechRecognized   += SRE_SpeechRecognized;
            SRE.SpeechHypothesized += SRE_SpeechHypothesized;
        }
Example #18
        public SpeechMod()
        {
            //init LifeCycleEvents..
            lce = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "rootRule");
            sre.LoadGrammar(gr);


            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized += Sre_SpeechRecognized;
        }
Example #19
        /// <summary>
        /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader
        /// </summary>
        /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
        /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
        public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
        {
            //LCE = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command");
            //MMIC = new MmiCommunication("localhost", 9876, "User1", "ASR");
            LCE  = new LifeCycleEvents("TOUCH", "FUSION", "touch-1", "touch", "command");
            MMIC = new MmiCommunication("localhost", 9876, "User1", "TOUCH");           //CHANGED TO USER1
            //LCE = new LifeCycleEvents("KINECT", "FUSION", "kinect-1", "kinect", "command");
            //MMIC = new MmiCommunication("localhost", 9876, "User1", "KINECT");
            MMIC.Send(LCE.NewContextRequest());

            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            if (gestureResultView == null)
            {
                throw new ArgumentNullException("gestureResultView");
            }

            this.GestureResultView = gestureResultView;

            // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
            this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

            // open the reader for the vgb frames
            this.vgbFrameReader = this.vgbFrameSource.OpenReader();
            if (this.vgbFrameReader != null)
            {
                this.vgbFrameReader.IsPaused      = true;
                this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
            }

            // load the gestures from the gesture database
            using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
            {
                this.vgbFrameSource.AddGestures(database.AvailableGestures); //load all gestures from the database
            }
        }
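Several of these detectors register Source_TrackingIdLost without showing it. In the standard Kinect gesture samples the handler simply pauses the reader once the tracked body disappears; a minimal version along those lines (assumed, since the original handler is not included) is:

        // sketch only: pause gesture processing until a new tracking id is assigned
        private void Source_TrackingIdLost(object sender, TrackingIdLostEventArgs e)
        {
            if (this.vgbFrameReader != null)
            {
                this.vgbFrameReader.IsPaused = true;
            }
        }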
Example #20
        /// <summary>
        /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader
        /// </summary>
        /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
        /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
        public GestureDetector(KinectSensor kinectSensor, MainWindow window)
        {
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            this.mainWindow    = window;
            this.gFrameHandler = new GestureFrameHandler(window);
            this.gFrameHandler.load("gestures.json");

            serverPipe = new AppServer(window);
            serverPipe.run();

            //init LifeCycleEvents..
            //new LifeCycleEvents("GESTURES", "FUSION", "gesture-1", "haptics", "command")
            lce  = new LifeCycleEvents("TOUCH", "FUSION", "touch-1", "touch", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "TOUCH");             // PORT TO FUSION
            //mmic = new MmiCommunication("localhost", 8000, "User1", "GESTURES"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
            this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

            // open the reader for the vgb frames
            this.vgbFrameReader = this.vgbFrameSource.OpenReader();
            if (this.vgbFrameReader != null)
            {
                this.vgbFrameReader.IsPaused      = true;
                this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
            }

            // load all the gestures from the gesture database
            using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(GestureNames.gestureDatabase))
            {
                vgbFrameSource.AddGestures(database.AvailableGestures);
            }
        }
Example #21
        /// <summary>
        /// Initializes a new instance of the GestureDetector class along with the gesture frame source and reader
        /// </summary>
        /// <param name="kinectSensor">Active sensor to initialize the VisualGestureBuilderFrameSource object with</param>
        /// <param name="gestureResultView">GestureResultView object to store gesture results of a single body to</param>
        public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView, MainWindow main)
        {
            this.GestureResultView = gestureResultView;
            this.main = main;
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            if (gestureResultView == null)
            {
                throw new ArgumentNullException("gestureResultView");
            }

            //Init lifeCycleEvents
            lce  = new LifeCycleEvents("ASR", "IM", "gestures-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");               // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)
            mmic.Send(lce.NewContextRequest());

            // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
            this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            //this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

            // open the reader for the vgb frames
            this.vgbFrameReader = this.vgbFrameSource.OpenReader();
            if (this.vgbFrameReader != null)
            {
                this.vgbFrameReader.IsPaused      = true;
                this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
            }

            // load the gestures from the gesture database
            using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
            {
                // load all available gestures from the database
                this.vgbFrameSource.AddGestures(database.AvailableGestures);
            }
        }
Example #22
        public MainWindow()
        {
            this.lastResult = 0;
            this.InitializeComponent();
            this.expression = "";
            this._calc      = new Calculator();
            this.gotResult  = false;

            this.lce  = new LifeCycleEvents("GESTURES", "FUSION", "gm-1", "gestures", "command");
            this.mmic = new MmiCommunication("localhost", 8000, "User1", "GESTURES");
            mmic.Send(lce.NewContextRequest());

            KinectRegion.SetKinectRegion(this, kinectRegion);
            App app = ((App)Application.Current);

            app.KinectRegion = kinectRegion;

            // Use the default sensor
            this.kinectRegion.KinectSensor = KinectSensor.GetDefault();
            this.kinectRegion.KinectSensor.Open();

            bodies = new Body[this.kinectRegion.KinectSensor.BodyFrameSource.BodyCount];

            gestureDatabase    = new VisualGestureBuilderDatabase(@"gestures.gbd");
            gestureFrameSource = new VisualGestureBuilderFrameSource(this.kinectRegion.KinectSensor, 0);

            foreach (var gesture in gestureDatabase.AvailableGestures)
            {
                this.gestureFrameSource.AddGesture(gesture);
            }

            gestureFrameSource.TrackingId    = 1;
            gestureFrameReader               = gestureFrameSource.OpenReader();
            gestureFrameReader.IsPaused      = true;
            gestureFrameReader.FrameArrived += gestureFrameReader_FrameArrived;

            multiFrameReader = this.kinectRegion.KinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Body);
            multiFrameReader.MultiSourceFrameArrived += multiFrameReader_MultiSourceFrameArrived;
        }
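Example #22 drives the gesture reader from a multi-source frame reader, but multiFrameReader_MultiSourceFrameArrived is omitted. A sketch of the usual pattern (assumed, not taken from the original project): acquire the body frame, find a tracked body, hand its tracking id to the gesture frame source, and unpause the reader.

        // sketch only: assigns the first tracked body to the gesture frame source
        private void multiFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiFrame = e.FrameReference.AcquireFrame();
            if (multiFrame == null)
                return;

            using (BodyFrame bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame == null)
                    return;

                // refresh the bodies array allocated in the constructor
                bodyFrame.GetAndRefreshBodyData(bodies);

                foreach (Body body in bodies)
                {
                    if (body != null && body.IsTracked)
                    {
                        // route this body's frames to the gesture source and start reading
                        gestureFrameSource.TrackingId = body.TrackingId;
                        gestureFrameReader.IsPaused   = false;
                        return;
                    }
                }

                // no tracked body: pause gesture processing
                gestureFrameReader.IsPaused = true;
            }
        }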
        public SpeechMod()
        {
            //init Text-To-Speech
            t = new Tts();
            //init LifeCycleEvents..
            lce = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "rootRule");
            sre.LoadGrammar(gr);
            //Grammar rt = new Grammar(Environment.CurrentDirectory + @"\..\..\Restaurants.grxml", "main");
            //sre.LoadGrammar(rt);
            grammarLoaded = false;


            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;

            sre.RecognizerUpdateReached +=
                new EventHandler <RecognizerUpdateReachedEventArgs>(recognizer_RecognizerUpdateReached);

            var fileSystemWatcher = new FileSystemWatcher();

            fileSystemWatcher.Changed += FileSystemWatcher_Changed;



            fileSystemWatcher.Path = Environment.CurrentDirectory + @"\..\..";

            fileSystemWatcher.EnableRaisingEvents = true;
        }
Example #24
        public SpeechMod(System.Windows.Shapes.Ellipse circle, System.Windows.Threading.Dispatcher dispatcher)
        {
            //init LifeCycleEvents..
            this.circle     = circle;
            this.Dispatcher = dispatcher;



            // CHANGED FOR FUSION ---------------------------------------

            lce  = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command");
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");
            mmic.Start();
            // END CHANGED FOR FUSION------------------------------------

            mmic.Send(lce.NewContextRequest());

            mmiC          = new MmiCommunication("localhost", 8000, "User2", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\grammarInitial.grxml", "rootRule");
            sre.LoadGrammar(gr);


            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;

            tts = new Tts();
            // introduce assistant
            Speak("Olá, eu sou o seu assistente do PowerPoint, em que lhe posso ser util?", 5);
        }
        public MainWindow()
        {
            main = this;

            InitializeComponent();

            options.Add(new TodoItem()
            {
                Title = "Pesquisar Voo para Paris", Color = "#ff00BCF2"
            });
            options.Add(new TodoItem()
            {
                Title = "Pesquisar Voo para Roma"
            });
            options.Add(new TodoItem()
            {
                Title = "Pesquisar Voo para Londres"
            });
            options.Add(new TodoItem()
            {
                Title = "Pesquisar Hotel para Paris"
            });
            options.Add(new TodoItem()
            {
                Title = "Pesquisar Hotel para Roma"
            });
            options.Add(new TodoItem()
            {
                Title = "Pesquisar Hotel para Londres"
            });

            lbTodoList.ItemsSource = options;

            // only one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // set IsAvailableChanged event notifier
            this.kinectSensor.IsAvailableChanged += this.Sensor_IsAvailableChanged;

            // open the sensor
            this.kinectSensor.Open();

            // set the status text
            this.StatusText = this.kinectSensor.IsAvailable ? Properties.Resources.RunningStatusText
                                                            : Properties.Resources.NoSensorStatusText;

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // set the BodyFramedArrived event notifier
            this.bodyFrameReader.FrameArrived += this.Reader_BodyFrameArrived;

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            this.kinectBodyView = new KinectBodyView(this.kinectSensor);

            // initialize the gesture detection objects for our gestures
            this.gestureDetectorList = new List <GestureDetector>();


            // set our data context objects for display in UI
            this.DataContext = this;
            this.kinectBodyViewbox.DataContext = this.kinectBodyView;

            // create a gesture detector for each body (6 bodies => 6 detectors) and create content controls to display results in the UI
            //int col0Row = 0;
            //int col1Row = 0;
            //int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
            //for (int i = 0; i < maxBodies; ++i)
            //{
            GestureResultView result   = new GestureResultView(0, false, false, 0.0f);
            GestureDetector   detector = new GestureDetector(this.kinectSensor, result, this.main, circle, this.Dispatcher);

            this.gestureDetectorList.Add(detector);

            // split gesture results across the first two columns of the content grid
            ContentControl contentControl = new ContentControl();

            contentControl.Content = this.gestureDetectorList[0].GestureResultView;

            //if (i % 2 == 0)
            //{
            // Gesture results for bodies: 0, 2, 4
            Grid.SetColumn(contentControl, 0);
            Grid.SetRow(contentControl, 2);
            //++col0Row;
            //}
            //else
            //{
            // Gesture results for bodies: 1, 3, 5
            // Grid.SetColumn(contentControl, 1);
            //Grid.SetRow(contentControl, col1Row);
            // ++col1Row;
            //}

            this.contentGrid.Children.Add(contentControl);
            //}

            //init LifeCycleEvents..
            lce = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());
        }