Example No. 1
        void CloseHandGesture_OnGestureDetected(string gesture)
        {
            string json_msg = "{ \"recognized\": [";

            json_msg += "\"" + "right-hand" + "\", ";
            json_msg += "\"" + "gesto" + "\":\"" + gesture + "\"";
            json_msg.Substring(0, json_msg.Length - 2);
            json_msg += "] }";

            var exNot = lce.ExtensionNotification("", "", 0.0f, json_msg);

            Console.WriteLine(exNot);
            mmic.Send(exNot);
        }
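A recurring pattern in these snippets is building the {"recognized":[...]} payload by string concatenation and then trimming a trailing ", " with Substring. A minimal sketch, assuming only the .NET base class library, of a hypothetical BuildRecognizedJson helper that joins the entries instead, so there is never a trailing separator to strip:

        // Hypothetical helper, not part of any example on this page.
        // Requires: using System.Linq;
        static string BuildRecognizedJson(params string[] values)
        {
            // Quote each entry and join with ", "; values are assumed not to contain quotes or backslashes.
            string items = string.Join(", ", values.Select(v => "\"" + v + "\""));
            return "{ \"recognized\": [" + items + "] }";
        }

        // Example: BuildRecognizedJson("right-hand", gesture) yields { "recognized": ["right-hand", "<gesture>"] }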
Example No. 2
        public MainWindow()
        {
            lce  = new LifeCycleEvents("TOUCH", "FUSION", "touch-1", "touch", "command");
            mmic = new MmiCommunication("localhost", 9876, "User1", "TOUCH");
            mmic.Send(lce.NewContextRequest());
            InitializeComponent();

            // only one sensor is currently supported
            this.kinectSensor = KinectSensor.GetDefault();

            // open the sensor
            this.kinectSensor.Open();

            // set the initial status text
            this.UpdateKinectStatusText();

            // open the reader for the body frames
            this.bodyFrameReader = this.kinectSensor.BodyFrameSource.OpenReader();

            // initialize the BodyViewer object for displaying tracked bodies in the UI
            this.kinectBodyView = new KinectBodyView(this.kinectSensor);

            // initialize the GestureDetector object
            this.gestureResultView = new GestureResultView(false, 0, 0);
            this.gestureDetector   = new GestureDetector(this.kinectSensor, this.gestureResultView);

            // set data context objects for display in UI
            this.DataContext = this;
            this.kinectBodyViewbox.DataContext = this.kinectBodyView;

            //this.gestureResultGrid.DataContext = this.gestureResultView;
        }
Example No. 3
        public void sendMessage(String message)
        {
            string json = "{ \"recognized\": [";

            json += "\"" + message + "\", ";
            json  = json.Substring(0, json.Length - 2);
            json += "] }";

            lastResult      = _calc.makeCalculation(expression);
            TextRegion.Text = lastResult.ToString();
            expression      = lastResult.ToString();
            _calc.resetValues();
            gotResult = true;

            if (TextRegion.Text.Contains(","))
            {
                gotResult  = false;
                lastResult = 0;
                expression = "";
            }

            var exNot = lce.ExtensionNotification("", "", 1, json);

            mmic.Send(exNot);
        }
Example No. 4
        private void Sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            onRecognized(new SpeechEventArg()
            {
                Text = e.Result.Text, Confidence = e.Result.Confidence, Final = true
            });

            if (e.Result.Confidence >= 0.6)
            {
                string[] tags = getTags(e.Result.Semantics);
                string   msg  = needsConfirmation(tags);

                if (msg != null)
                {
                    //Send data to server
                    if (!msg.Equals(""))
                    {
                        Console.WriteLine("Sending: " + msg);

                        var exNot = lce.ExtensionNotification(e.Result.Audio.StartTime + "", e.Result.Audio.StartTime.Add(e.Result.Audio.Duration) + "", e.Result.Confidence, msg);
                        mmic.Send(exNot);
                    }
                }
            }
        }
Example No. 5
        public SpeechMod()
        {
            string sound_path = System.IO.Directory.GetCurrentDirectory() + @"\msg_sound.wav";

            ring = new MediaPlayer();
            ring.Open(new Uri(sound_path));
            ring.Volume = 0.10;

            //init LifeCycleEvents..
            lce = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());


            lena = new Tts();
            //lena.Speak("Bom dia. Eu sou a Lena.");
            //initiate connection to socket

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "rootRule");
            sre.LoadGrammar(gr);


            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;
        }
Example No. 6
        public SpeechMod()
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                 //PORT TO FUSION - uncomment this line to work with fusion later
            //mmic = new MmiCommunication("localhost", 8000, "User1", "ASR"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "rootRule");
            sre.LoadGrammar(gr);

            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;

            // load speech synthesizer
            tts = new Tts();

            //onRecognized(new SpeechEventArg() { Text = "MUTE", Confidence = 100, Final = true, AssistantActive = assistantActive });

            // send command
            // format {"recognized":["SHAPE","COLOR"]}

            /*string json = "{ \"recognized\": [\"MUTE\"] }";
             * Console.WriteLine(json);
             * var exNot = lce.ExtensionNotification("","", 100, json);
             * mmic.Send(exNot);*/

            // introduce assistant
            //Speak("Olá, eu sou a Maria, a tua assistente pessoal. Tenho todo o gosto em ajudar-te com algumas tarefas no teu computador. Podes saber mais sobre mim dizendo: ajuda. Sou um pouco distraída, por isso sempre que quiseres chamar por mim diz: ó Maria!");
        }
Example No. 7
        private void SRE_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            onRecognized(new SpeechEventArg()
            {
                Text = e.Result.Text, Confidence = e.Result.Confidence, Final = true
            });

            //SEND JSON MESSAGE
            string json = "{";

            foreach (var resultSemantic in e.Result.Semantics)
            {
                json += "\"" + resultSemantic.Key + "\":\"" + resultSemantic.Value.Value + "\", ";
            }
            json  = json.Substring(0, json.Length - 2); // assign the result; Substring does not modify the string in place
            json += " }";

            var exNot = LCE.ExtensionNotification(e.Result.Audio.StartTime + "", e.Result.Audio.StartTime.Add(e.Result.Audio.Duration) + "", e.Result.Confidence, json);

            if (e.Result.Confidence > 0.85)
            {
                Console.WriteLine(exNot);
                MMIC.Send(exNot);
            }
        }
Example No. 8
        private void Sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            onRecognized(new SpeechEventArg()
            {
                Text = e.Result.Text, Confidence = e.Result.Confidence, Final = true
            });

            if (e.Result.Confidence < minimum_confidence_threshold)
            {
                return;
            }

            //SEND
            // IMPORTANT TO KEEP THE FORMAT {"recognized":["SHAPE","COLOR"]}
            string json = "{ \"recognized\": [";

            foreach (var resultSemantic in e.Result.Semantics)
            {
                json += "\"" + resultSemantic.Value.Value + "\", ";
            }
            json  = json.Substring(0, json.Length - 2);
            json += "] }";

            var exNot = lce.ExtensionNotification(e.Result.Audio.StartTime + "", e.Result.Audio.StartTime.Add(e.Result.Audio.Duration) + "", e.Result.Confidence, json);

            mmic.Send(exNot);
        }
Example No. 9
        private void SendMsg_Tts(string message, string type)
        {
            string json  = "{\"action\":\"" + type + "\",\"text_to_speak\":\"" + message + "\"}";
            var    exNot = lce_speechMod.ExtensionNotification("", "", 0, json);

            mmic_speechMod.Send(exNot);
        }
Example No. 10
        public GestureDetector(KinectSensor kinectSensor, ComModule coms)
        {
            this.coms = coms;
            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            this.vgbFrameReader = this.vgbFrameSource.OpenReader();

            if (this.vgbFrameReader != null)
            {
                this.vgbFrameReader.IsPaused      = true;
                this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
            }
            // a constructor never returns null; if the database file is missing it throws,
            // so check for the file explicitly before opening it
            if (!System.IO.File.Exists(gestureDatabasePath))
            {
                Console.WriteLine("No gesture database!");
                Environment.Exit(1);
            }
            VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(gestureDatabasePath);

            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gesture-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "GESTURES");                  // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)
            mmic.Send(lce.NewContextRequest());

            this.vgbFrameSource.AddGestures(database.AvailableGestures);
            fpsCounter         = 0;
            gestureWasDetected = false;
        }
Example No. 11
        public MainWindow()
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("ASR", "IM", "speech-1", "acoustic", "command");
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");
            tts  = new TTS();

            mmic.Send(lce.NewContextRequest());
            InitializeComponent();

            var sensor = KinectSensor.GetDefault();

            if (sensor != null)
            {
                _close_hand_gesture.GestureRecognized               += Gesture_Recognized;
                _open_hand_gesture.GestureRecognized                += Gesture_Recognized;
                _swipe_right_gesture.GestureRecognized              += Gesture_Recognized;
                _swipe_left_gesture.GestureRecognized               += Gesture_Recognized;
                _swipe_down_gesture.GestureRecognized               += Gesture_Recognized;
                _swipe_up_gesture.GestureRecognized                 += Gesture_Recognized;
                _rotate_clockwise_gesture.GestureRecognized         += Gesture_Recognized;
                _rotate_counter_clockwise_gesture.GestureRecognized += Gesture_Recognized;
                _cross_gesture.GestureRecognized      += Gesture_Recognized;
                _close_ears_gesture.GestureRecognized += Gesture_Recognized;

                sensor.Open();
            }
            this.Loaded += OnLoaded;
        }
Example No. 12
        public SpeechMod(System.Windows.Shapes.Ellipse circle, System.Windows.Threading.Dispatcher dispatcher)
        {
            this.circle     = circle;
            this.Dispatcher = dispatcher;

            //init LifeCycleEvents..
            lce = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User1", "ASR");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            //load pt recognizer
            sre = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "rootRule");
            sre.LoadGrammar(gr);

            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;

            // load speech synthesizer
            tts = new Tts();

            // introduce assistant
            Speak("Olá, eu sou o teu assistente de viagens. Tenho todo o gosto em ajudar-te a planear as tuas férias de sonho. Podes saber mais sobre mim dizendo: preciso de ajuda.", 12);
        }
Example No. 13
        public GestureMod()
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gesture-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 8000, "User2", "GESTURES");                  // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());
        }
Example No. 14
        private void SendCommand(string command)
        {
            //SEND
            // IMPORTANT TO KEEP THE FORMAT {"recognized":["SEARCH","FLIGHT"]}
            string json  = "{ \"recognized\":[\"" + command + "\",\"\"] }";
            var    exNot = lce.ExtensionNotification("", "", 100, json);

            mmic.Send(exNot);
        }
Example No. 15
        public void SendCommand(String command)
        {
            //SEND
            // IMPORTANT TO KEEP THE FORMAT {"recognized":["SHAPE","COLOR"]}
            string json = "{ \"recognized\":[\"" + command + "\"] }";

            var exNot = lce.ExtensionNotification("", "", 100, json);

            mmic.Send(exNot);
        }
Example No. 16
        //  NEW
        //  A TEIXEIRA , 16 MAY 2018

        internal void sendToFusion(string value)
        {
            //SEND
            string json = "{ \"recognized\": [";
            //foreach (var resultSemantic in e.Result.Semantics)

            String key = "color";

            json += "\"" + key + "\",\"" + value + "\", ";

            json = json.Substring(0, json.Length - 2);


            json += " ] }";

            //  start time, end time, confidence, json  TO BE COMPLETED
            var exNot = lce.ExtensionNotification("-1", "-1", 1.0f, json);


            mmic.Send(exNot);
        }
Example No. 17
        private void Sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            onRecognized(new SpeechEventArg()
            {
                Text = e.Result.Text, Confidence = e.Result.Confidence, Final = true
            });

            /*foreach (var resultSemantic in e.Result.Semantics)
             *  Console.WriteLine(resultSemantic.Key+":"+resultSemantic.Value.Value);*/

            string json  = "{ \"recognized\": [";
            bool   first = true;

            if (e.Result.Confidence <= 0.30)
            {
                return;
            }

            foreach (var resultSemantic in e.Result.Semantics)
            {
                if (!resultSemantic.Value.Value.ToString().Equals(""))
                {
                    json  = AddJsonTag(json, resultSemantic.Key, resultSemantic.Value.Value.ToString(), first);
                    first = false;
                }
            }
            if (first)
            {
                json = "{ ";
            }


            if (e.Result.Confidence > 0.30 && e.Result.Confidence <= 0.45)
            {
                json = AddJsonTag(json, "confidence", "low confidence", false);
            }
            else if (e.Result.Confidence > 0.45 && e.Result.Confidence < 0.8)
            {
                json = AddJsonTag(json, "confidence", "explicit confirmation", false);
            }
            else if (e.Result.Confidence >= 0.8)
            {
                json = AddJsonTag(json, "confidence", "implicit confirmation", false);
            }
            json  = AddJsonTag(json, "modality", "speech", false);
            json  = json.Substring(0, json.Length - 2);
            json += "}";
            Console.WriteLine(json);
            //Console.WriteLine("--------"+e.Result.Semantics["action"].Value+"-------");
            var exNot = lce.ExtensionNotification(e.Result.Audio.StartTime + "", e.Result.Audio.StartTime.Add(e.Result.Audio.Duration) + "", e.Result.Confidence, json);

            mmic.Send(exNot);
        }
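The branching above maps the recognizer's confidence onto a confirmation tag before the message is sent. A minimal sketch of that banding as a hypothetical pure helper, using the same thresholds as the example (the ConfidenceLabel name is an assumption):

        // Hypothetical helper mirroring the thresholds used above.
        static string ConfidenceLabel(float confidence)
        {
            if (confidence <= 0.30f) return null;                      // the handler returns without sending
            if (confidence <= 0.45f) return "low confidence";
            if (confidence <  0.80f) return "explicit confirmation";
            return "implicit confirmation";
        }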
Example No. 18
        private void sendMessage(string gesture)
        {
            string json = "{ \"recognized\": [";

            json += "\"" + gesture + "\", ";
            json  = json.Substring(0, json.Length - 2);
            json += "] }";

            var exNot = lce.ExtensionNotification("", "", 1, json);

            mmic.Send(exNot);
        }
        public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gestures-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                        //PORT TO FUSION - uncomment this line to work with fusion later
            //mmic = new MmiCommunication("localhost", 8000, "User1", "GESTURES"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)
            mmic.Send(lce.NewContextRequest());



            if (kinectSensor == null)
            {
                throw new ArgumentNullException("kinectSensor");
            }

            if (gestureResultView == null)
            {
                throw new ArgumentNullException("gestureResultView");
            }

            this.GestureResultView = gestureResultView;

            // create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
            this.vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            this.vgbFrameSource.TrackingIdLost += this.Source_TrackingIdLost;

            // open the reader for the vgb frames
            this.vgbFrameReader = this.vgbFrameSource.OpenReader();
            if (this.vgbFrameReader != null)
            {
                this.vgbFrameReader.IsPaused      = true;
                this.vgbFrameReader.FrameArrived += this.Reader_GestureFrameArrived;
            }

            // load the gestures of interest from the gesture database
            using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(this.gestureDatabase))
            {
                // we could load all available gestures in the database with a call to vgbFrameSource.AddGestures(database.AvailableGestures),
                // but for this program we only want to track a specific set of discrete gestures, so we load them by name
                foreach (Gesture gesture in database.AvailableGestures)
                {
                    if (gesture.Name.Equals(this.crouch_gesture) ||
                        gesture.Name.Equals(this.dab_gesture) ||
                        gesture.Name.Equals(this.hey_gesture) ||
                        gesture.Name.Equals(this.hold_gesture) ||
                        gesture.Name.Equals(this.reload_gesture))
                    {
                        this.vgbFrameSource.AddGesture(gesture);
                    }
                }
            }
        }
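The comment inside the using block above notes that every gesture in the database could be added at once with vgbFrameSource.AddGestures(database.AvailableGestures); loading by name keeps only a chosen subset. A minimal sketch of the same filtering with a HashSet lookup instead of chained Equals calls (LoadNamedGestures is a hypothetical helper; the Visual Gesture Builder types are the ones already used above):

        // Hypothetical helper: add only the gestures whose names appear in wantedNames.
        // Requires: using System.Collections.Generic; using Microsoft.Kinect.VisualGestureBuilder;
        static void LoadNamedGestures(VisualGestureBuilderFrameSource source,
                                      string databasePath,
                                      ISet<string> wantedNames)
        {
            using (var database = new VisualGestureBuilderDatabase(databasePath))
            {
                foreach (Gesture gesture in database.AvailableGestures)
                {
                    if (wantedNames.Contains(gesture.Name))
                    {
                        source.AddGesture(gesture);
                    }
                }
            }
        }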
Example No. 20
        public MainWindow()
        {
            InitializeComponent();


            lce_speechMod  = new LifeCycleEvents("ASR", "FUSION", "speech-2", "acoustic", "command");
            mmic_speechMod = new MmiCommunication("localhost", 8000, "User2", "ASR");
            mmic_speechMod.Send(lce_speechMod.NewContextRequest());

            mmiC          = new MmiCommunication("localhost", 8000, "User1", "GUI");
            mmiC.Message += MmiC_Message;
            mmiC.Start();
        }
Example No. 21
        private void GJson()
        {
            string gesture = "{";

            if (reset == false)
            {
                if (this.gestureResultView.Headphones < 0.2 &&
                    this.gestureResultView.Hands_air < 0.3 &&
                    this.gestureResultView.Kill < 0.2 &&
                    this.gestureResultView.Play_Pause < 0.2 &&
                    this.gestureResultView.Swipe_Left < 0.2 &&
                    this.gestureResultView.Swipe_Right < 0.2 &&
                    this.gestureResultView.Jarbas_Init < 0.2)
                {
                    reset = true;
                }
            }
            else
            {
                if (this.gestureResultView.Headphones > 0.5 ||
                    this.gestureResultView.Hands_air > 0.7 ||
                    this.gestureResultView.Kill > 0.5 ||
                    this.gestureResultView.Play_Pause > 0.5 ||
                    this.gestureResultView.Swipe_Left > 0.5 ||
                    this.gestureResultView.Swipe_Right > 0.4 ||
                    this.gestureResultView.Jarbas_Init > 0.5)
                {
                    gesture += "\"" + "Kill" + "\":\"" + this.gestureResultView.Kill + "\", ";
                    gesture += "\"" + "Hands_air" + "\":\"" + this.gestureResultView.Hands_air + "\", ";
                    gesture += "\"" + "Headphones" + "\":\"" + this.gestureResultView.Headphones + "\", ";
                    gesture += "\"" + "Swipe_Right" + "\":\"" + this.gestureResultView.Swipe_Right + "\", ";
                    gesture += "\"" + "Swipe_Left" + "\":\"" + this.gestureResultView.Swipe_Left + "\", ";
                    gesture += "\"" + "Play_Pause" + "\":\"" + this.gestureResultView.Play_Pause + "\", ";
                    gesture += "\"" + "Jarbas_init" + "\":\"" + this.gestureResultView.Jarbas_Init + "\", ";

                    gesture = gesture.Substring(0, gesture.Length - 2); // assign the result; Substring does not modify the string in place
                    gesture += "}";
                    var exNot = lce.ExtensionNotification("", "", 0.0f, gesture);
                    Console.WriteLine("Kill" + this.gestureResultView.Kill);
                    Console.WriteLine("Play_Pause" + this.gestureResultView.Play_Pause);
                    Console.WriteLine("Headphones" + this.gestureResultView.Headphones);
                    Console.WriteLine("Hands_air" + this.gestureResultView.Hands_air);
                    Console.WriteLine("Swipe_Left" + this.gestureResultView.Swipe_Left);
                    Console.WriteLine("Swipe_right" + this.gestureResultView.Swipe_Right);
                    Console.WriteLine("Jarbas_init" + this.gestureResultView.Jarbas_Init);
                    mmic.Send(exNot);
                    reset = false;
                }
            }
        }
Example No. 22
        public SecondMod()
        {
            //init LifeCycleEvents..


            // CHANGED FOR FUSION ---------------------------------------

            lce  = new LifeCycleEvents("TOUCH", "FUSION", "touch-1", "touch", "command");
            mmic = new MmiCommunication("localhost", 9876, "User1", "TOUCH");  //CHANGED To user1

            // END CHANGED FOR FUSION------------------------------------

            mmic.Send(lce.NewContextRequest());
        }
Example No. 23
        public void GestureRecognized(String action, float confidence)
        {
            //SEND
            // IMPORTANT TO KEEP THE FORMAT {"recognized":["SHAPE","COLOR"]}
            string json = "{ \"recognized\": [";

            json += "\"" + action + "\", ";
            json  = json.Substring(0, json.Length - 2);
            json += "] }";
            Console.WriteLine(json);

            var exNot = lce.ExtensionNotification("", "", confidence, json);

            mmic.Send(exNot);
        }
Example No. 24
        // Send JSON message indicating the parameters in use
        private void sendMessage(string gesture, double confidence)
        {
            string json = "{ \"recognized\": [";

            json += "\"" + gesture + "\", ";
            // Just using the first two commands. The rest is EMP
            for (int i = 1; i < 8; i++)
            {
                json += "\"" + "EMP" + "\", ";
            }
            json  = json.Substring(0, json.Length - 2);
            json += "] }";
            var exNot = lce.ExtensionNotification("", "", 1, json);

            mmic.Send(exNot);
        }
Example No. 25
        public SpeechMod(TextBox textBox)
        {
            specialCharacters = new List <Tuple <string, string> >();
            specialCharacters.Add(new Tuple <string, string>("é", "(e)"));
            specialCharacters.Add(new Tuple <string, string>("ê", "(e_acent)"));
            specialCharacters.Add(new Tuple <string, string>("í", "(i)"));
            specialCharacters.Add(new Tuple <string, string>("ç", "(c)"));
            specialCharacters.Add(new Tuple <string, string>("ã", "(a_till)"));
            specialCharacters.Add(new Tuple <string, string>("à", "(a_haver)"));
            specialCharacters.Add(new Tuple <string, string>("á", "(a_acent)"));
            specialCharacters.Add(new Tuple <string, string>("â", "(a_cir)"));
            specialCharacters.Add(new Tuple <string, string>("õ", "(o_till)"));
            specialCharacters.Add(new Tuple <string, string>("ó", "(o_acent)"));


            Console.WriteLine("OK...");
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("ASR", "FUSION", "speech-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                 //PORT TO FUSION - uncomment this line to work with fusion later
            //mmic = new MmiCommunication("localhost", 8000, "User1", "ASR"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());



            //load pt recognizer
            sre            = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            grMain         = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml");
            grMain.Name    = "Main Grammar";
            grMain.Enabled = true;

            grYesNo         = new Grammar(Environment.CurrentDirectory + "\\yesNoGrammar.grxml");
            grYesNo.Name    = "YesNo Grammar";
            grYesNo.Enabled = false;

            sre.LoadGrammar(grMain);
            sre.LoadGrammar(grYesNo);

            sre.SetInputToDefaultAudioDevice();
            sre.RecognizeAsync(RecognizeMode.Multiple);
            sre.SpeechRecognized   += Sre_SpeechRecognized;
            sre.SpeechHypothesized += Sre_SpeechHypothesized;

            //server to receive commands from APP!!
            appServer = new AppServer(sre, textBox, resetGrammar);
            appServer.run();
        }
Example No. 26
        // Initializes a new instance of the GestureDetector class along with the gesture frame source and reader
        public GestureDetector(KinectSensor kinectSensor, GestureResultView gestureResultView)
        {
            //init LifeCycleEvents..
            lce  = new LifeCycleEvents("GESTURES", "FUSION", "gestures-1", "acoustic", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            mmic = new MmiCommunication("localhost", 9876, "User1", "ASR");                        //PORT TO FUSION - uncomment this line to work with fusion later
            //mmic = new MmiCommunication("localhost", 8000, "User1", "GESTURES"); // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)
            mmic.Send(lce.NewContextRequest());
            count = 0;

            if (kinectSensor == null)
            {
                throw new ArgumentNullException("Kinect Sensor is null");
            }

            if (gestureResultView == null)
            {
                throw new ArgumentNullException("Gesture Result View is null");
            }

            GestureResultView = gestureResultView;

            // Create the vgb source. The associated body tracking ID will be set when a valid body frame arrives from the sensor.
            vgbFrameSource = new VisualGestureBuilderFrameSource(kinectSensor, 0);
            vgbFrameSource.TrackingIdLost += Source_TrackingIdLost;

            // Open the reader for the vgb frames
            vgbFrameReader = vgbFrameSource.OpenReader();
            if (vgbFrameReader != null)
            {
                vgbFrameReader.IsPaused      = true;
                vgbFrameReader.FrameArrived += Reader_GestureFrameArrived;
            }

            // Load gestures from database
            using (VisualGestureBuilderDatabase database = new VisualGestureBuilderDatabase(gestureDatabase))
            {
                foreach (Gesture gesture in database.AvailableGestures)
                {
                    if (gesture.Name.Equals(stop) || gesture.Name.Equals(back) || gesture.Name.Equals(skip) ||
                        gesture.Name.Equals(vdown) || gesture.Name.Equals(vup))
                    {
                        vgbFrameSource.AddGesture(gesture);
                    }
                }
            }
        }
Example No. 27
        public MainWindow()
        {
            //init LifeCycleEvents..
            lce = new LifeCycleEvents("KINECT", "FUSION", "gesture-1", "gesture", "command"); // LifeCycleEvents(string source, string target, string id, string medium, string mode)
            //mmic = new MmiCommunication("localhost",9876,"User1", "ASR");  //PORT TO FUSION - uncomment this line to work with fusion later
            mmic = new MmiCommunication("localhost", 8000, "User2", "KINECT");                // MmiCommunication(string IMhost, int portIM, string UserOD, string thisModalityName)

            mmic.Send(lce.NewContextRequest());

            //this.kinect.IsAvailableChanged += this.Sensor_IsAvailableChanged;
            OnOpenSensor();
            InitializeComponent();
            OnLoadGestureFromDBd();
            OnOpenReaders();

            Closed += OnWindowClosing;
        }
Example No. 28
        public void gestureSelection(string selection)
        {
            if (!serverPipe.IsSpeakRunning)
            {
                StringBuilder json = new StringBuilder("{ \"type\": \"NORMAL\",\"confidence\": \"GOOD\",\"recognized\": [" + "\"" + selection + "\"");
                switch (selection)
                {
                case "CANTEENS":
                    json.Append(",\"TYPE5\" ] }");
                    break;

                case "SAS":
                    string subtype = "SUBTYPE1";
                    string type    = "TYPE1";
                    json.Append(",\"" + type + "\"," + "\"" + subtype + "\"" + "] }");
                    break;

                case "SAC":
                    json.Append(",\"TYPE1\" ] }");
                    break;

                case "NEWS":
                    json.Append(",\"TYPE1\" ] }");
                    break;

                case "WEATHER":
                    json.Append(",\"TYPE1\",\"tomorrow\",\"tomorrow\",\"\" ] }");
                    break;

                case "HELP":
                    json.Append(" ] }");
                    break;
                }

                //gui color change
                mainWindow.resetDefaultColor();
                serverPipe.IsSpeakRunning = true; // set manually so the Kinect doesn't switch the tiles back to orange between frames
                mainWindow.changeColorTiles(selection, Brushes.Green);


                Console.WriteLine(json.ToString());
                var exNot = lce.ExtensionNotification(0 + "", 10 + "", 0, json.ToString());
                mmic.Send(exNot);
            }
        }
Example No. 29
        public SpeechMod()
        {
            //Initialize LifeCycleEvents.
            LCE  = new LifeCycleEvents("ASR", "IM", "speech-1", "acoustic", "command");
            MMIC = new MmiCommunication("localhost", 8000, "User1", "ASR");

            MMIC.Send(LCE.NewContextRequest());

            //load pt recognizer
            SRE = new SpeechRecognitionEngine(new System.Globalization.CultureInfo("pt-PT"));
            gr  = new Grammar(Environment.CurrentDirectory + "\\ptG.grxml", "basicCmd");
            SRE.LoadGrammar(gr);

            SRE.SetInputToDefaultAudioDevice();
            SRE.RecognizeAsync(RecognizeMode.Multiple);
            SRE.SpeechRecognized   += SRE_SpeechRecognized;
            SRE.SpeechHypothesized += SRE_SpeechHypothesized;
        }
Example No. 30
        private void Sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            onRecognized(new SpeechEventArg()
            {
                Text = e.Result.Text, Confidence = e.Result.Confidence, Final = true
            });

            // SEND
            // IMPORTANT TO KEEP THE FORMAT {"recognized":["SHAPE","COLOR"]}
            string json = "{ \"recognized\": [";

            foreach (var resultSemantic in e.Result.Semantics)
            {
                json += "\"" + resultSemantic.Value.Value + "\", ";
            }
            json  = json.Substring(0, json.Length - 2);
            json += "] }";

            var exNot = lce.ExtensionNotification(e.Result.Audio.StartTime + "", e.Result.Audio.StartTime.Add(e.Result.Audio.Duration) + "", e.Result.Confidence, json);

            if (e.Result.Confidence > 0.75)
            {
                Console.WriteLine(exNot);
                mmic.Send(exNot);
            }
            else
            {
                Console.WriteLine("Confiança do comando de voz inferior a 85%");
                Random random = new Random();
                int    i      = random.Next(0, 10);
                if (i <= 3)
                {
                    tts.Speak("Não percebi o comando. Repita se faz favor.");
                }
                else if (i >= 4 && i <= 7)
                {
                    tts.Speak("Importa-se de repetir o comando se faz favor?");
                }
                else
                {
                    tts.Speak("Repita se faz favor o comando, não percebi...");
                }
            }
        }
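The else branch above picks one of three re-prompt phrases from a random index. A minimal sketch, offered as an assumption rather than the original author's design, of drawing the phrase directly from an array so every index maps to exactly one prompt and no range checks are needed:

        // Hypothetical alternative to the if/else chain above.
        static readonly string[] RePrompts =
        {
            "Não percebi o comando. Repita se faz favor.",
            "Importa-se de repetir o comando se faz favor?",
            "Repita se faz favor o comando, não percebi..."
        };

        static string PickRePrompt(Random random)
        {
            return RePrompts[random.Next(RePrompts.Length)];
        }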