Example #1
0
 /// <summary>
 /// Registers a speech command and wires its keyword into the keyword manager.
 /// </summary>
 /// <param name="speechCommand">The command to register; a null command is ignored.</param>
 public void AddSpeechCommand(SpeechCommand speechCommand)
 {
     if (speechCommand == null)
     {
         return;
     }

     m_SpeechCommands.Add(speechCommand);
     KeywordManager.Instance.AddKeyword(speechCommand.KeyWord, speechCommand.OnWord, m_RawConfidence);
 }
        /// <summary>
        /// Evaluates the recognized-speech output against the configured speech
        /// commands and fires the trigger when the tracked best command matches
        /// the first command in <c>SpeechCommandsFind</c> with confidence above 30.
        /// </summary>
        /// <param name="trigger">The trigger being processed; must be an <see cref="EventTrigger"/>.</param>
        /// <returns>True when the best recognized command matches; otherwise false.</returns>
        public override bool Process(Trigger trigger)
        {
            trigger.ErrorDetected = false;

            // The speech sense option must be enabled before any command can fire.
            if (!SenseToolkitManager.Instance.IsSenseOptionSet(SenseOption.SenseOptionID.Speech))
            {
                trigger.ErrorDetected = true;
                Debug.LogError("Speech Module Not Set");
                return false;
            }

            // Push any pending command-list changes to the sense manager exactly once.
            if (UpdateCommands)
            {
                UpdateCommandsInSenseManager();
                Debug.Log("Updated Speech Commands");
                UpdateCommands = false;
            }

            // Only event triggers are valid for speech processing.
            if (!(trigger is EventTrigger))
            {
                trigger.ErrorDetected = true;
                return false;
            }

            if (SenseToolkitManager.Instance.Initialized && SenseToolkitManager.Instance.SpeechOutput != null)
            {
                var speechOutput = SenseToolkitManager.Instance.SpeechOutput;

                // Track the recognized command with the highest confidence so far.
                for (int i = 0; i < SpeechCommands.Length; i++)
                {
                    if (speechOutput.ContainsKey(SpeechCommands[i].Word))
                    {
                        // Hoist the indexer result so the dictionary is not read twice more.
                        var confidence = speechOutput[SpeechCommands[i].Word];
                        if (confidence > confidenceMax)
                        {
                            SpeechCommandsMax = SpeechCommands[i];
                            confidenceMax     = confidence;
                            Debug.Log("SpeechMac:" + SpeechCommandsMax.Word);
                        }
                    }
                }

                if (confidenceMax > 30)
                {
                    // Clamp so the same confidence does not re-fire on the next call.
                    confidenceMax = 30;

                    // BUG FIX: the original only null-checked SpeechCommandsMax for
                    // logging and then dereferenced it unconditionally, which could
                    // throw a NullReferenceException when confidenceMax exceeded 30
                    // without a best command having been set on this call.
                    if (SpeechCommandsMax != null)
                    {
                        Debug.Log("Speech:" + SpeechCommandsMax.Word);

                        if (SpeechCommandsFind[0].Word == SpeechCommandsMax.Word)
                        {
                            Debug.Log("Jaaniii" + SpeechCommandsMax.Word);
                            return true;
                        }
                    }
                }
            }

            return false;
        }
        /// <summary>
        /// Asynchronously reports whether a voice-activation speech command is present.
        /// </summary>
        /// <param name="activationSpeechCommand">The command extracted from the activation; may be null.</param>
        /// <returns>True when a non-null speech command was supplied; otherwise false.</returns>
        private static async Task<bool> ActivateForVoiceAsync(SpeechCommand activationSpeechCommand)
        {
            // Yield once so the method completes asynchronously, matching the original timing.
            await Task.Delay(1);

            return activationSpeechCommand != null;
        }
Example #4
0
    /// <summary>
    /// Unregisters a speech command and removes its keyword from the keyword manager.
    /// </summary>
    /// <param name="speechCommand">The command to remove; a null command is ignored.</param>
    public void RemoveSpeechCommand(SpeechCommand speechCommand)
    {
        if (speechCommand == null)
        {
            return;
        }

        // Remove is already a no-op when the item is absent, so the original
        // Contains-then-Remove pattern performed a redundant second scan.
        m_SpeechCommands.Remove(speechCommand);

        KeywordManager.Instance.RemoveKeyword(speechCommand.KeyWord, speechCommand.OnWord);
    }
Example #5
0
        /// <summary>
        /// Forwards a recognized phrase to CommandReceived subscribers, dropping
        /// results whose confidence is below 0.9.
        /// </summary>
        /// <param name="e">The recognition result (confidence and recognized text).</param>
        /// <param name="command">The command associated with the recognized phrase.</param>
        protected void RaiseEvent(SpeechRecognizedEventArgs e, SpeechCommand command)
        {
            Console.WriteLine(e.Result.Confidence + " " + e.Result.Text);

            // Low-confidence recognitions are discarded without raising the event.
            if (e.Result.Confidence < 0.9)
            {
                return;
            }

            // Null-conditional invoke captures the delegate once, equivalent to
            // the original local-copy + null-check pattern.
            this.CommandReceived?.Invoke(command);
        }
Example #6
0
        /// <summary>
        /// Invoked when the application is activated by an outside source, i.e. protocol or Cortana.
        /// </summary>
        /// <param name="args">
        /// Details about the activation.
        /// </param>
        protected override async void OnActivated(IActivatedEventArgs args)
        {
            ActivationArgs activationArgs = null;

            switch (args.Kind)
            {
            case ActivationKind.Protocol:
                // Pattern match replaces the original as-cast + null check.
                if (args is ProtocolActivatedEventArgs protocolArgs)
                {
                    activationArgs = new ActivationArgs(protocolArgs.Uri);
                }
                break;

            case ActivationKind.VoiceCommand:
                if (args is VoiceCommandActivatedEventArgs voiceArgs)
                {
                    var result       = voiceArgs.Result;
                    var voiceCommand = result.RulePath[0];

                    // Switch on the voice command string to determine function

                    activationArgs = new ActivationArgs(new SpeechCommand(result, new List <string>()));
                }
                break;
            }

            // Cold start (no frame yet) launches the application; otherwise the
            // activation procedure runs against the existing window content.
            if (Window.Current.Content is Frame)
            {
                await ActivationLauncher.RunActivationProcedureAsync(activationArgs);
            }
            else
            {
                await this.LaunchApplicationAsync(activationArgs);
            }

            base.OnActivated(args);
        }
Example #7
0
        /// <summary>
        /// Matches the longest suffix of the sentence whose words line up, in order,
        /// with the command's keyword groups. A match covering every keyword group is
        /// a full match; a shorter aligned suffix is a start match.
        /// </summary>
        /// <param name="sentence">The recognized words, in order.</param>
        /// <param name="command">The command whose keyword groups are matched against.</param>
        /// <returns>The match type, the matched words, and the command.</returns>
        public static CommandMatchResult Match(IEnumerable<string> sentence, SpeechCommand command)
        {
            var bestType = MatchType.NoMatch;
            IEnumerable<string> bestWords = new string[0];

            // Try every suffix of the sentence, longest skip (shortest suffix) first.
            for (int offset = sentence.Count() - 1; offset >= 0; offset--)
            {
                var tail = sentence.Skip(offset);

                // Count how many leading suffix words fall inside the corresponding keyword group.
                int matched = tail
                    .Zip(command.KeyWords, (word, keyGroup) => keyGroup.Contains(word))
                    .TakeWhile(isHit => isHit)
                    .Count();

                // Only keep a candidate that beats the best so far and covers the whole suffix.
                if (matched <= bestWords.Count() || matched != tail.Count())
                {
                    continue;
                }

                bestWords = tail.Take(matched);

                if (matched == command.KeyWords.Count)
                {
                    bestType = MatchType.FullMatch;
                    break;
                }

                bestType = MatchType.StartMatch;
            }

            return new CommandMatchResult(bestType, bestWords, command);
        }
Example #8
0
 /// <summary>
 /// Initializes a new instance of the <see cref="ActivationArgs"/> class for a speech activation.
 /// </summary>
 /// <param name="speechCommand">
 /// The speech command that triggered the activation; stored as-is (may be null).
 /// </param>
 public ActivationArgs(SpeechCommand speechCommand)
 {
     // Speech activations always map to the VoiceCommand activation kind.
     this.ActivationKind = ActivationKind.VoiceCommand;
     this.SpeechCommand  = speechCommand;
 }
        /// <summary>
        /// Creates the speech recognition engine, loads the grammar file, starts
        /// continuous recognition on the default audio device and launches the
        /// background speech-command task.
        /// </summary>
        /// <returns>
        /// True when the engine was created and recognition started; false when the
        /// grammar file is unset/missing or initialization threw an exception.
        /// </returns>
        public bool Initialize()
        {
            try
            {
                if (!string.IsNullOrEmpty(_grammarFile) && File.Exists(_grammarFile))
                {
                    _speechEngine = new SpeechRecognitionEngine();

                    Console.WriteLine("Speech Engine created");

                    // Load the grammar file
                    var g = new Grammar(_grammarFile);
                    _speechEngine.LoadGrammar(g);

                    Console.WriteLine("Loaded Grammar File : {0}", _grammarFile);

                    // Subscribe to events
                    _speechEngine.SpeechRecognized += SpeechRecognized;
                    _speechEngine.SpeechRecognitionRejected += SpeechRejected;

                    _speechEngine.SetInputToDefaultAudioDevice();
                    _speechEngine.RecognizeAsync(RecognizeMode.Multiple);

                    if (_voiceCommandPublisher != null)
                    {
                        _voiceCommandPublisher.Initialize();
                    }

                    _speechCommand = new SpeechCommand(_voiceCommandPublisher);
                    _speechCommandTask = Task.Factory.StartNew(() => _speechCommand.Run(100));

                    Console.WriteLine("Started recognizing");

                    // BUG FIX: the method previously returned false unconditionally,
                    // so callers checking the result could never observe success.
                    return true;
                }
            }
            catch (Exception ex)
            {
                Log.Error(ex.Message);
                Log.Error(ex.StackTrace);
            }

            return false;
        }
 /// <summary>
 /// Initializes a new instance of the <see cref="CommandMatchResult"/> class.
 /// </summary>
 /// <param name="type">How strongly the sentence matched the command.</param>
 /// <param name="sentence">
 /// The matched words; eagerly copied to an array so the stored result is
 /// stable even if the source enumerable is lazy. Must not be null.
 /// </param>
 /// <param name="command">The command that produced this match.</param>
 public CommandMatchResult(MatchType type, IEnumerable<string> sentence, SpeechCommand command)
 {
     MatchType = type;
     Sentence = sentence.ToArray();
     Command = command;
 }
        /// <summary>
        /// Opens the default Kinect sensor, wraps its audio beam in a convert stream,
        /// creates the Kinect speech recognition engine with the grammar file, starts
        /// continuous recognition and launches the background speech-command task.
        /// </summary>
        /// <returns>
        /// True when the sensor, recognizer and grammar were all set up and
        /// recognition started; false otherwise (missing grammar file, no sensor,
        /// no Kinect recognizer, or an exception during setup).
        /// </returns>
        public bool Initialize()
        {
            try
            {
                if (_kinectSensor == null && !string.IsNullOrEmpty(_grammarFile) && System.IO.File.Exists(_grammarFile))
                {
                    _kinectSensor = KinectSensor.GetDefault();
                    if (_kinectSensor != null)
                    {
                        // open the sensor
                        _kinectSensor.Open();

                        Console.WriteLine("Sensor Opened");

                        // grab the audio stream
                        IReadOnlyList<AudioBeam> audioBeamList = _kinectSensor.AudioSource.AudioBeams;
                        System.IO.Stream audioStream = audioBeamList[0].OpenInputStream();

                        // create the convert stream
                        _convertStream = new KinectAudioStream(audioStream);

                        Console.WriteLine("Stream created");

                        var ri = TryGetKinectRecognizer();

                        if (ri != null)
                        {
                            Console.WriteLine("Kinect recognizer exists");
                            // Create instance of the speech engine
                            _speechEngine = new SpeechRecognitionEngine(ri.Id);

                            Console.WriteLine("Speech Engine created");

                            // Load the grammar file
                            var g = new Grammar(_grammarFile);
                            _speechEngine.LoadGrammar(g);

                            Console.WriteLine("Loaded Grammar File : {0}", _grammarFile);

                            // Subscribe to events
                            _speechEngine.SpeechRecognized += SpeechRecognized;
                            _speechEngine.SpeechRecognitionRejected += SpeechRejected;

                            // let the convertStream know speech is going active
                            _convertStream.SpeechActive = true;

                            // For long recognition sessions (a few hours or more), it may be beneficial to turn off adaptation of the acoustic model. 
                            // This will prevent recognition accuracy from degrading over time.
                            ////speechEngine.UpdateRecognizerSetting("AdaptationOn", 0);

                            _speechEngine.SetInputToAudioStream(
                                _convertStream,
                                new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
                            _speechEngine.RecognizeAsync(RecognizeMode.Multiple);

                            if (_voiceCommandPublisher != null)
                            {
                                _voiceCommandPublisher.Initialize();
                            }

                            _speechCommand = new SpeechCommand(_voiceCommandPublisher);
                            _speechCommandTask = Task.Factory.StartNew(() => _speechCommand.Run(200));

                            Console.WriteLine("Started recognizing");

                            // BUG FIX: the method previously returned false unconditionally,
                            // so callers checking the result could never observe success.
                            return true;
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Log.Error(ex.Message);
                Log.Error(ex.StackTrace);
            }

            return false;
        }