Example #1
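A Xamarin.Android activity that wires a SpeechHelper (apparently wrapping the iFlytek speech SDK, judging by SpeechConstant, SpeechSynthesizer, and the numeric app ID) to a button: each click increments a counter and speaks the current contents of the EditText.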
        protected override void OnCreate(Bundle bundle)
        {
            base.OnCreate(bundle);

            // Set our view from the "main" layout resource
            SetContentView(Resource.Layout.Main);
            speech = new SpeechHelper(this, "52060579");


            EditText text = FindViewById <EditText>(Resource.Id.editText1);

            text.Text = "灯,等灯等灯";

            speech.SetParameter(SpeechConstant.ENGINE_TYPE, "local");
            speech.SetParameter(SpeechSynthesizer.VOICE_NAME, "xiaoyan");
            speech.SetParameter(SpeechSynthesizer.SPEED, "40");
            speech.SetParameter(SpeechSynthesizer.PITCH, "50");
            speech.SetParameter(SpeechSynthesizer.VOLUME, "100");


            // Get our button from the layout resource,
            // and attach an event to it
            Button button = FindViewById <Button>(Resource.Id.MyButton);

            button.Click += delegate
            {
                button.Text = string.Format("{0} clicks!", count++);

                EditText text2 = FindViewById <EditText>(Resource.Id.editText1);

                speech.StartSpeaking(text2.Text);

                string speaker = speech.ParseSpeaker();
            };
        }
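The handler references speech and count without declaring them; presumably they are fields of the activity, along the lines of this sketch (types inferred from usage):

        // Assumed activity fields (not shown in the example):
        SpeechHelper speech;   // text-to-speech wrapper created in OnCreate
        int count = 1;         // click counter displayed on the button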
Example #2
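Prompts the user to dictate through the platform's speech UI and appends any successfully recognized text to the current journal entry; exception messages are routed to the helper's DisplayMessage.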
        private async void PromptUserToSpeak()
        {
            string displayMessage = string.Empty;

            try
            {
                if (helper == null)
                {
                    helper = new SpeechHelper();
                    await helper.SetSpeechRecognizerPromptsAsync("Tell me more...", "");
                }

                SpeechRecognitionResult recognitionResult = await helper.ShowSpeechUIAsync();

                if (recognitionResult.Status == SpeechRecognitionResultStatus.Success)
                {
                    string resultText = recognitionResult.Text;
                    App.ViewModel.CurrentEntry.Details += resultText;
                }
            }
            catch (Exception ex)
            {
                displayMessage = ex.Message;
            }

            if (!string.IsNullOrEmpty(displayMessage))
            {
                await helper.DisplayMessage(displayMessage);
            }
        }
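Example #3
A speech-to-text service method that transcribes an audio file located relative to the application's content root (the ContentRootPath suggests ASP.NET Core hosting).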
        public async Task <string> STT(string inputFilePath)
        {
            var rootPath = _hostingEnvironment.ContentRootPath;
            var text     = await SpeechHelper.RecognizeFromAudioAsync(rootPath + "\\" + inputFilePath);

            return(text);
        }
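The method reads _hostingEnvironment.ContentRootPath, so the enclosing class presumably receives the hosting environment through constructor injection; a minimal sketch (the class name is an assumption):

using Microsoft.AspNetCore.Hosting;

public class SpeechService // hypothetical enclosing class
{
    private readonly IHostingEnvironment _hostingEnvironment;

    // ASP.NET Core supplies the hosting environment; its ContentRootPath
    // anchors the relative audio path used by STT above.
    public SpeechService(IHostingEnvironment hostingEnvironment)
    {
        _hostingEnvironment = hostingEnvironment;
    }
}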
Example #4
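A Windows Phone popup button handler: while the recognizer is initializing, listening, or recognizing, the tap cancels the in-flight operation and clears the textbox; once recognition has finished, it accepts the recognized text instead. Either way the popup is closed.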
        private void SpeechPopup_CancelButton_Click(object sender, RoutedEventArgs e)
        {
            switch (speechState)
            {
            case SpeechHelper.SpeechState.Initializing:
            case SpeechHelper.SpeechState.Listening:
            case SpeechHelper.SpeechState.Recognizing:
                // user tapped the cancel button

                // cancel the current operation / close the socket to the service
                SpeechHelper.Cancel(
                    new MainViewModel.NetworkOperationInProgressCallbackDelegate(SpeechPopup_NetworkOperationInProgressCallBack));

                // reset the text in the textbox
                QuickAddPopupTextBox.Text = "";
                break;

            case SpeechHelper.SpeechState.Finished:
                // user tapped the OK button

                // set the text in the popup textbox
                QuickAddPopupTextBox.Text = SpeechLabelText.Trim('\'');
                break;
            }

            SpeechPopup_Close();
        }
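Example #5
An Alexa skill intent handler: it loads meetup events and group data from S3, records the current activity, group, and event in session state, and responds with the next event after today (falling back to the first known event when none are upcoming).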
        public async Task <SkillResponse> Handle(AlexaRequestInformation information)
        {
            var request     = information.SkillRequest as APLSkillRequest;
            var id          = ((IntentRequest)request.Request).Intent.Slots[Consts.SlotEvent].Id();
            var currentDate = LocalDate.FromDateTime(DateTime.Now);

            var meetup = new TechMeetState {
                GroupName = id
            };
            var rawEvents = await meetup.GetEventsFromS3();

            var groupData = await meetup.GetGroupFromS3();

            var events = rawEvents.ToLocalEventTime();

            information.State.ClearSession();
            information.State.SetSession(SessionKeys.CurrentActivity, SkillActivities.Event);
            information.State.SetSession(SessionKeys.CurrentGroup, id);
            if (!events.Any())
            {
                return(SpeechHelper.NoEvent());
            }

            var eventToRecognise =
                (events.Any(l => l.Date > currentDate)
                    ? events.Where(e => e.Date > currentDate)
                    : events).First();

            information.State.SetSession(SessionKeys.CurrentEvent, eventToRecognise.Event.Id);
            return(await SpeechHelper.SingleEventResponse(request, eventToRecognise, currentDate, groupData, "I've got information on a meetup event. "));
        }
Example #6
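A variant of the previous handler that also parses a date range from the spoken slot value and filters to upcoming events inside that range, answering with a single-event response or a list depending on how many match.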
        public async Task <SkillResponse> Handle(AlexaRequestInformation information)
        {
            information.State.ClearSession();
            var intent = ((IntentRequest)information.SkillRequest.Request).Intent;

            var dates       = AmazonDateParser.Parse(intent.Slots[Consts.SlotDateRange].Value);
            var currentDate = LocalDate.FromDateTime(DateTime.Now);
            var id          = intent.Slots[Consts.SlotEvent].Id();

            var meetup = new TechMeetState {
                GroupName = id
            };

            var rawEvents = await meetup.GetEventsFromS3();

            var groupData = await meetup.GetGroupFromS3();

            information.State.SetSession(SessionKeys.CurrentActivity, SkillActivities.Event);
            information.State.SetSession(SessionKeys.CurrentGroup, id);

            var eventToRecognise = rawEvents.ToLocalEventTime()
                                   .Where(d => d.Date >= dates.From && d.Date <= dates.To).Where(d => d.Date >= currentDate).ToArray();

            if (!eventToRecognise.Any())
            {
                return(SpeechHelper.NoEvent(true));
            }

            if (eventToRecognise.Length == 1)
            {
                return(await SpeechHelper.SingleEventResponse((APLSkillRequest)information.SkillRequest, eventToRecognise.First(), currentDate, groupData, "I've got information on a meetup event. "));
            }

            return(SpeechHelper.RespondToEvent(eventToRecognise, currentDate, groupData.Name));
        }
Example #7
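A Bot Framework dialog that attaches an Adaptive Card and sets SSML speech; on the Cortana channel it apologizes instead, since neither card type is supported there. (_cardChoice is presumably populated as a side effect of CreateAdaptiveCard.)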
        public async Task DisplaySelectedCard(IDialogContext context, IAwaitable <string> result)
        {
            _cardChoice = "";
            var userChoice = await result;
            var message    = context.MakeMessage();

            var card       = CreateAdaptiveCard(userChoice);
            var attachment = new Attachment
            {
                ContentType = AdaptiveCard.ContentType,
                Content     = card
            };

            message.Attachments.Add(attachment);

            // If channel is Cortana
            if (context.Activity.ChannelId == "cortana")
            {
                message.Speak = SpeechHelper.Speak("Unfortunately, <break strength=\"weak\"/> both <emphasis level=\"strong\">Input Form card</emphasis> and <sub alias=\"Visual Studio Team Services\">VSTS</sub> card is not yet supported for me.");
            }
            else
            {
                message.Speak = (_cardChoice == VSTSCard ? SpeechHelper.Speak("You've selected Visual Studio Team Services Card") :
                                 SpeechHelper.Speak("You've selected Input Form Card"));
            }


            await context.PostAsync(message);

            context.Wait(ContinueAdaptiveCard);
        }
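Example #8
A MediaElement Loaded handler that lazily constructs the SpeechHelper around the element the first time it loads.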
 private void speechMediaElement_Loaded(object sender, RoutedEventArgs e)
 {
     if (speech == null)
     {
         speech = new SpeechHelper(speechMediaElement);
     }
 }
Example #9
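An NPC speech handler in the RunUO/ServUO style: when a player within two tiles says certain keywords, the NPC answers, bows, or picks a random news line via SpeechHelper.SayRandom.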
        public override void OnSpeech(SpeechEventArgs args)
        {
            if (m_Done)
            {
                return;
            }

            if (args.Mobile.InRange(this, 2)) // if a player is within 2 tiles of the NPC
            {
                string speech = args.Speech.ToLower();

                if (speech.IndexOf("name") >= 0)
                {
                    Direction = GetDirectionTo(args.Mobile.Location);
                    Say(String.Format("My name is {0}, in your service.", Name)); // the NPC tells the player its name
                    Animate(32, 5, 1, true, false, 0);                            // bow
                }
                else if (speech.IndexOf("hello") >= 0 || speech.IndexOf("hail") >= 0) // the player greets the NPC
                {
                    Say("Hail ");                                                     // the NPC greets back
                }
                else if (speech.IndexOf("buy") >= 0)                                  // the player asks to buy
                {
                    Say("I have nothing to sell but the rags I wear!");
                }
                else if (speech.IndexOf("news") >= 0)                                 // the player asks for news
                {
                    SpeechHelper.SayRandom(newssay, this);
                }
            }

            base.OnSpeech(args);
        }
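Example #10
A fire-and-forget text-to-speech wrapper that marshals the SpeechHelper.Speak call onto the UI thread through the UWP dispatcher.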
 /// <summary>
 /// Speaks text using text-to-speech
 /// </summary>
 /// <param name="text">Text to speak</param>
 private async void Speak(string text)
 {
     await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
     {
         SpeechHelper.Speak(text);
     });
 }
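Example #11
The text-to-speech counterpart of Example #3: it synthesizes the input text into a uniquely named .wav file under the Audio folder and returns the file path.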
        public async Task <string> TTS(string inputText)
        {
            var rootPath = _hostingEnvironment.ContentRootPath;
            var filePath = rootPath + "\\Audio\\" + Guid.NewGuid() + ".wav";
            await SpeechHelper.SynthesisToAudioAsync(inputText, filePath);

            return(filePath);
        }
Example #12
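The entry point for the speech popup from Example #4: it requires network connectivity and a signed-in account (offering to create one), switches the UI into the Initializing state, records timing traces, starts the connection to the speech service, and opens the popup.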
        // handle events associated with the Speech Popup
        private void SpeechButton_Click(object sender, RoutedEventArgs e)
        {
            // require a connection
            if (DeviceNetworkInformation.IsNetworkAvailable == false ||
                NetworkInterface.GetIsNetworkAvailable() == false)
            {
                MessageBox.Show("apologies - a network connection is required for this feature, and you appear to be disconnected :-(");
                return;
            }

            // require an account
            if (App.ViewModel.User == null)
            {
                MessageBoxResult result = MessageBox.Show(
                    "the speech feature requires an account.  create a free account now?",
                    "create account?",
                    MessageBoxButton.OKCancel);
                if (result == MessageBoxResult.Cancel)
                {
                    return;
                }

                // trace page navigation
                TraceHelper.StartMessage("ListPage: Navigate to Settings");

                // Navigate to the settings page
                NavigationService.Navigate(new Uri("/SettingsPage.xaml", UriKind.Relative));
                return;
            }

            // set the UI state to initializing state
            speechState = SpeechHelper.SpeechState.Initializing;
            SpeechSetUIState(speechState);

            // store debug / timing info
            speechStart       = DateTime.Now;
            speechDebugString = "";

            // store debug / timing info
            TimeSpan ts          = DateTime.Now - speechStart;
            string   stateString = SpeechHelper.SpeechStateString(speechState);
            string   traceString = String.Format("New state: {0}; Time: {1}; Message: {2}", stateString, ts.TotalSeconds, "Connecting Socket");

            TraceHelper.AddMessage(traceString);
            speechDebugString += traceString + "\n";

            // initialize the connection to the speech service
            SpeechHelper.Start(
                App.ViewModel.User,
                new SpeechHelper.SpeechStateCallbackDelegate(SpeechPopup_SpeechStateCallback),
                new MainViewModel.NetworkOperationInProgressCallbackDelegate(SpeechPopup_NetworkOperationInProgressCallBack));

            // open the popup
            SpeechPopup.IsOpen = true;
        }
Example #13
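A Xamarin.Forms sequence that alternates spoken prompts (SpeakNow) with fade-out/fade-in animations of the page; any failure along the way is swallowed by the catch block.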
        private async Task SpeakScreenAsync()
        {
            try
            {
                await SpeechHelper.SpeakNow(Constants.PrepareToInitializeRoute());

                await stPage.FadeTo(0, 1000, Easing.SinInOut);

                await Task.Delay(100);

                await stPage.FadeTo(1, 1000, Easing.SinInOut);

                await SpeechHelper.SpeakNow(Constants.QuestionSource());

                await Task.Delay(400);

                await stPage.FadeTo(0, 1000, Easing.SinInOut);

                await Task.Delay(100);

                await stPage.FadeTo(1, 1000, Easing.SinInOut);

                await SpeechHelper.SpeakNow(Constants.Undestand());

                await SpeechHelper.SpeakNow(Constants.QuestionTarget());

                await Task.Delay(400);

                await stPage.FadeTo(0, 1000, Easing.SinInOut);

                await Task.Delay(100);

                await stPage.FadeTo(1, 1000, Easing.SinInOut);

                await SpeechHelper.SpeakNow(Constants.Undestand());

                await SpeechHelper.SpeakNow(Constants.QuestionWeight());

                await Task.Delay(400);

                await stPage.FadeTo(0, 1000, Easing.SinInOut);

                await Task.Delay(100);

                await stPage.FadeTo(1, 1000, Easing.SinInOut);

                await SpeechHelper.SpeakNow(Constants.Undestand());

                await SpeechHelper.SpeakNow(Constants.QuestionHour());
            }
            catch (Exception)
            {
                // Any speech or animation failure is silently ignored here.
            }
        }
Example #14
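A BackgroundWorker handler that speaks an audio message selected by the form's title, logging any failure rather than letting it crash the worker.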
 private void bwBanCo_DoWork(object sender, DoWorkEventArgs e)
 {
     try
     {
         SpeechHelper.Do(string.Format(EnVangManagement.audioMessages[grbForm.Text], lblPrivateCode.Text));
     }
     catch (Exception ex)
     {
         LogError.WriteLogError("frmInfo_Shown Audio : ", ex);
     }
 }
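Example #15
An ASP.NET Core upload action: it passes the posted file to an Upload overload, transcribes the resulting MP3 with the helper, deletes the temporary file, and forwards the text for further handling.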
        public async Task <IActionResult> Upload()
        {
            var inputFile  = Request.Form.Files["fileUploader"];
            var outputType = Request.Form["OutputType"].ToString();
            var fileObj    = await Upload(inputFile);

            var text = await SpeechHelper.RecognizeFromAudioMP3Async(fileObj.FilePath);

            System.IO.File.Delete(fileObj.FilePath);
            return(await handle(text, outputType != "1"));
        }
Example #16
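The same lazy-initialization pattern as Example #8, with an else branch that turns off AutoPlay so the helper is not driven again after the user signs off.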
 private void speechMediaElement_Loaded(object sender, RoutedEventArgs e)
 {
     if (speech == null)
     {
         speech = new SpeechHelper(speechMediaElement);
     }
     else
     {
         // Prevents the media element from re-creating the SpeechHelper after the user signs off
         speechMediaElement.AutoPlay = false;
     }
 }
Example #17
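A Unity script (note transform.position) that refreshes speech-bubble icons so they point toward the accused character, based on the sign of the horizontal offset.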
    private void UpdateSpeech()
    {
        if (speech1IsDirection)
        {
            speechIcon1 = SpeechHelper.GetIcon_Direction(Accused.transform.position.x - transform.position.x);
        }

        if (speech2IsDirection)
        {
            speechIcon2 = SpeechHelper.GetIcon_Direction(Accused.transform.position.x - transform.position.x);
        }
    }
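GetIcon_Direction itself is not shown; a plausible shape for it, offered purely as a hypothetical sketch (assuming the icons are Sprite assets loaded elsewhere):

using UnityEngine;

public static class SpeechHelperSketch // hypothetical, not the project's class
{
    // Assumed to be loaded elsewhere (e.g., via Resources.Load).
    public static Sprite LeftArrow;
    public static Sprite RightArrow;

    // Pick a pointing icon from the sign of the horizontal offset to the target.
    public static Sprite GetIcon_Direction(float xOffset)
    {
        return xOffset < 0f ? LeftArrow : RightArrow;
    }
}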
Example #18
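A chat-bot command (the CommandContext/TriggerTypingAsync shape suggests DSharpPlus) that speaks the given text through the text-to-speech helper when voice is available, and otherwise declines.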
        public async Task SpeakGimmick(CommandContext context, params string[] args)
        {
            await context.TriggerTypingAsync();

            if (CanVoice && textToSpeechHelper != null)
            {
                await SpeechHelper.Speak(textToSpeechHelper, context, Replace(args), overrideLimit : true);

                return;
            }
            await context.RespondAsync("Some things don't need saying homie");
        }
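Example #19
UWP speech synthesis: SpeakAsync yields a SpeechSynthesisStream, which is handed to a MediaElement (the media field, presumably declared in XAML) and played.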
        private async void InitiateSpeech(string textToSpeech)
        {
            helper = new SpeechHelper();
            SpeechSynthesisStream synthesisStream = await helper.SpeakAsync(textToSpeech);

            if (synthesisStream != null)
            {
                this.media.AutoPlay = true;
                this.media.SetSource(synthesisStream, synthesisStream.ContentType);
                this.media.Play();
            }
        }
Example #20
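Identical in shape to Example #16; the XML documentation spells out the intent: initialize the helper once, and stop AutoPlay from re-greeting the user afterwards.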
 /// <summary>
 /// Triggered when media element used to play synthesized speech messages is loaded.
 /// Initializes SpeechHelper and greets user.
 /// </summary>
 private void speechMediaElement_Loaded(object sender, RoutedEventArgs e)
 {
     if (speech == null)
     {
         speech = new SpeechHelper(speechMediaElement);
     }
     else
     {
         // Prevents media element from re-greeting visitor
         speechMediaElement.AutoPlay = false;
     }
 }
Example #21
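The state-change callback used by the popup in Example #12: it mirrors the new state into the UI and appends a timestamped trace line to the debug string.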
        private void SpeechPopup_SpeechStateCallback(SpeechHelper.SpeechState state, string message)
        {
            speechState = state;
            SpeechSetUIState(speechState);

            // store debug / timing info
            TimeSpan ts          = DateTime.Now - speechStart;
            string   stateString = SpeechHelper.SpeechStateString(state);
            string   traceString = String.Format("New state: {0}; Time: {1}; Message: {2}", stateString, ts.TotalSeconds, message);

            TraceHelper.AddMessage(traceString);
            speechDebugString += traceString + "\n";
        }
Example #22
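Announces which camera panel (front or back) has become ready, speaking the message with TrySpeechAsync and remembering the last panel so the same state is not announced twice.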
        private async Task NotifyCameraPanelAsync()
        {
            // Avoid notifying when the camera panel has not changed.
            if (streamingService.CameraPanel != lastCameraPanel)
            {
                var message = streamingService.CameraPanel == CameraPanel.Front ? AppResources.FrontCameraReady : AppResources.BackCameraReady;
                StatusMessage = message;

                await SpeechHelper.TrySpeechAsync(message);

                lastCameraPanel = streamingService.CameraPanel;
            }
        }
Example #23
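A Bot Framework greeting dialog: it builds an SSML welcome message with SpeechHelper.Speak, attaches a greetings card, and waits for the user's reply.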
        private async Task MessageReceivedStart(IDialogContext context, IAwaitable <IMessageActivity> result)
        {
            // Creates a Card within a dialog
            var replyToConversation = context.MakeMessage();

            replyToConversation.Speak = SpeechHelper.Speak("<s>Hi, welcome to Visual Card Bot. What can I do <break strength=\"weak\"/> for you?</s>");

            // Converts the card into an Attachment object and add it to Attachments list
            replyToConversation.Attachments.Add(CreateGreetingsCard());
            await context.PostAsync(replyToConversation);

            context.Wait(MessageReceivedAsync);
        }
Example #24
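The Loaded-handler pattern from Example #20, extended to read an initial greeting aloud the first time the helper is created.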
 /// <summary>
 /// Triggered when media element used to play synthesized speech messages is loaded.
 /// Initializes SpeechHelper and greets user.
 /// </summary>
 private async void speechMediaElement_Loaded(object sender, RoutedEventArgs e)
 {
     if (speech == null)
     {
         speech = new SpeechHelper(speechMediaElement);
         await speech.Read(SpeechContants.InitialGreetingMessage);
     }
     else
     {
         // Prevents media element from re-greeting visitor
         speechMediaElement.AutoPlay = false;
     }
 }
Example #25
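A trading command handler: after verifying the login, it generates and inserts an order, then speaks a one-word confirmation.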
        private void DoSingleOrder(object obj)
        {
            if (!this.VerifyLogin())
            {
                return;
            }
            var    model   = obj as InstrumentModel;
            string groupID = Guid.NewGuid().ToString();
            var    order   = this.GenerateOrder(model, groupID);

            TradeAdapter.Instance.InsertOrder(order);
            SpeechHelper.Speak("Order");
        }
Example #26
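A fuller version of Example #2: the recognized phrase is mapped onto one of three voice commands (add an entry, start an eager entry, or view an entry) and the app navigates accordingly, falling back to an error message.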
        private async void PromptUserToSpeak()
        {
            string displayMessage = string.Empty;

            try
            {
                if (helper == null)
                {
                    helper = new SpeechHelper();
                    await helper.SetSpeechRecognizerPromptsAsync("What would you like to do?", "Ex: new journal entry, show last entry");
                }

                SpeechRecognitionResult recognitionResult = await helper.ShowSpeechUIAsync();

                if (recognitionResult.Status == SpeechRecognitionResultStatus.Success)
                {
                    string result = recognitionResult.Text;
                    string lower  = result.ToLower();

                    // Map the recognized phrase onto one of the voice commands.
                    string voiceCommand = (lower.Contains("new journal entry") || lower.Contains("add entry")) ? "AddEntry" :
                                          lower.Contains("dear diary") ? "EagerEntry" :
                                          ((lower.StartsWith("view") || lower.StartsWith("show")) && lower.Contains("entry")) ? "ViewEntry" : "";

                    switch (voiceCommand)
                    {
                    case "ViewEntry":
                        this.Frame.Navigate(typeof(ViewDiaryEntry), result);
                        break;

                    case "AddEntry":
                        this.Frame.Navigate(typeof(AddDiaryEntry), "");
                        break;

                    case "EagerEntry":
                        this.Frame.Navigate(typeof(AddDiaryEntry), result);
                        break;

                    default:
                        displayMessage = "Didn't understand that. Try again.";
                        break;
                    }
                }
            }
            catch (Exception ex)
            {
                displayMessage = ex.Message;
            }

            if (!string.IsNullOrEmpty(displayMessage))
            {
                await helper.DisplayMessage(displayMessage);
            }
        }
Example #27
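Another lazy-initialization Loaded handler; this one reads out a response chosen by a SanyaResponses source on first load.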
 private async void speechMediaElement_Loaded(object sender, RoutedEventArgs e)
 {
     if (speech == null)
     {
         sanyaResponses = new SanyaResponses();
         speech         = new SpeechHelper(speechElement);
         await speech.Read(sanyaResponses.returnResponse());
     }
     else
     {
         // Prevents media element from re-greeting visitor
         speechElement.AutoPlay = false;
     }
 }
Example #28
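Only the Loaded handler at the end of this example is live; everything above it is commented-out experimentation (a timer-driven Computer Vision capture-and-upload routine) left in the source.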
        //private async void OnTimerTick(object state)
        //{
        //    //GetPhotoFromCam();
        //}

        /*
         * async void GetPhotoFromCam()
         * {
         *  if (!IsWebCamReady) return;
         *
         *  var photo = await TakePhoto();
         *  //call computer vision
         *  if (photo == null) return;
         *
         *  var result = await ApiContainer.GetApi<ComputerVisionService>().GetImageAnalysis(photo);
         *  if (result != null)
         *  {
         *      var item = new TonyVisionObj();
         *      if (result.Adult != null)
         *      {
         *          item.adultContent = result.Adult.IsAdultContent.ToString();
         *          item.adultScore = result.Adult.AdultScore.ToString();
         *      }
         *      else
         *      {
         *          item.adultContent = "False";
         *          item.adultScore = "0";
         *      }
         *
         *      if (result.Faces != null && result.Faces.Length > 0)
         *      {
         *          int count = 0;
         *          item.facesCount = result.Faces.Count();
         *          foreach (var face in result.Faces)
         *          {
         *              count++;
         *              if (count > 1)
         *              {
         *                  item.facesDescription += ",";
         *              }
         *              item.facesDescription += $"[Face : {count}; Age : { face.Age }; Gender : {face.Gender}]";
         *
         *          }
         *      }
         *      else
         *          item.facesCount = 0;
         *
         *
         *
         *      if (result.Description != null)
         *      {
         *          var Speak = "";
         *          foreach (var caption in result.Description.Captions)
         *          {
         *              Speak += $"[Caption : {caption.Text }; Confidence : {caption.Confidence};],";
         *          }
         *          string tags = "[Tags : ";
         *          foreach (var tag in result.Description.Tags)
         *          {
         *              tags += tag + ", ";
         *          }
         *          Speak += tags + "]";
         *          item.description = Speak;
         *      }
         *
         *      if (result.Tags != null)
         *      {
         *
         *          foreach (var tag in result.Tags)
         *          {
         *              item.tags += "[ Name : " + tag.Name + "; Confidence : " + tag.Confidence + "; Hint : " + tag.Hint + "], ";
         *          }
         *      }
         *      var IsUpload = false;
         *      if (item.description != null)
         *      {
         *          if (item.description.ToLower().Contains("person") || item.description.ToLower().Contains("people"))
         *          {
         *              IsUpload = true;
         *          }
         *      }
         *      if (item.tags != null)
         *      {
         *          if (item.tags.ToLower().Contains("man") || item.tags.ToLower().Contains("woman"))
         *          {
         *              IsUpload = true;
         *          }
         *      }
         *      if (IsUpload)
         *      {
         *          var uploadRes = await BlobEngine.UploadFile(photo);
         *          Debug.WriteLine($"upload : {uploadRes}");
         *      }
         *      item.tanggal = DateTime.Now;
         *      var JsonObj = new StringContent(JsonConvert.SerializeObject(item), Encoding.UTF8, "application/json");
         *      var res = await httpClient.PostAsync(APPCONTANTS.ApiUrl, JsonObj);
         *      if (res.IsSuccessStatusCode)
         *      {
         *          Debug.WriteLine("vision captured");
         *
         *      }
         *
         *
         *  }
         * }*/
        /// <summary>
        /// Triggered when media element used to play synthesized speech messages is loaded.
        /// Initializes SpeechHelper and greets user.
        /// </summary>
        private async void speechMediaElement_Loaded(object sender, RoutedEventArgs e)
        {
            if (speech == null)
            {
                speech = new SpeechHelper(speechMediaElement);

                await speech.Read("tony is ready to serve");
            }
            else
            {
                // Prevents media element from re-greeting visitor
                speechMediaElement.AutoPlay = false;
            }
        }
Example #29
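A Unity accusation scene: it activates three speech icons, assigns drunk and loopy emotions, mirrors the large icon horizontally, and pulls a certainty icon and the drunk sprite from SpeechHelper.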
    public void DrunkenAccuse()
    {
        Icon_Big.gameObject.SetActive(true);
        Icon_Left.gameObject.SetActive(true);
        Icon_Right.gameObject.SetActive(true);

        Icon_Big.SetEmotion(SpeechEmotion.Loopy);
        Icon_Left.SetEmotion(SpeechEmotion.Drunk);
        Icon_Right.SetEmotion(SpeechEmotion.Drunk);

        Icon_Big.transform.parent.localScale = new Vector3(-1.0f, 1.0f, 1.0f);

        Icon_Left.SetIcon(SpeechHelper.GetIcon_Certainty(false));
        Icon_Right.SetIcon(SpeechHelper.DrunkSprite);
    }
Example #30
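Speaks (and displays) an initialization error unless the app has already initialized, appending the exception message when the corresponding setting allows it.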
        private async Task NotifyInitializationErrorAsync(Exception error = null)
        {
            // If the app is already initialized, skip the error notification.
            if (!initialized)
            {
                var errorMessage = AppResources.InitializationError;
                if (error != null && Settings.ShowExceptionOnError)
                {
                    errorMessage = $"{errorMessage} ({error.Message})";
                }

                StatusMessage = errorMessage;

                await SpeechHelper.TrySpeechAsync(errorMessage);
            }
        }
Example #31
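A WinForms demo that uses SpeechHelper as a command-and-control engine: it gathers converters and commands from the form via reflection, registers simple German voice commands ("abbrechen" = cancel, "los" = go), subscribes to the recognition life-cycle events to log and speak feedback, and registers plain phrases that mirror a list box.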
        public Form1()
        {
            InitializeComponent();

            //var clt = ;

            //var vlt = clt.DisplayName;

            helper = new SpeechHelper(CultureInfo.CurrentCulture.Name);
            //helper.SetInputCulture();
            helper.GatherConverters(typeof(Form1), this);

            //helper.OnParameterValueConvert = (p, value) =>
            //{
            //    int i;
            //    if(int.TryParse(value, out i))
            //    {
            //        return i;
            //    }
            //    return null;
            //};

            //grab some speech methods
            //helper.GatherCommands(Assembly.GetExecutingAssembly());
            helper.GatherCommands(typeof(Form1), this);

            helper.AddSimpleCommand("de-de", "abbrechen", "abc", () =>
            {
                if (helper.State == ExecutingState.ListeningForParameterValue)
                {
                    helper.Speak("ab jetzt wird wieder ausschau nach parametern gehalten");
                    helper.AbortListeningForCurrentParameterValue();
                }
                else if (helper.State == ExecutingState.ListeningForParameters)
                {
                    helper.Speak("ab jetzt werden wieder methoden überwacht");
                    helper.AbortListeningForParameters();

                }
            });

            //helper.ChangeSpeechGroup("groupKey", false)

            helper.OnBeforeSimpleCommandInvoked.Subscribe(p =>
            {
                if (checkBox1.Checked)
                    helper.Speak("simpler befehlt " + p.Text + " wird ausgeführt");
            });

            helper.OnAfterSimpleCommandInvoked.Subscribe(p =>
            {
                if (checkBox1.Checked)
                    helper.Speak("simpler befehl " + p.Text + " wurde ausgeführt");
            });

            //helper.ChangeCommand("", false);
            //helper.ChangeSimpleCommand("de-de", "abbrechen", false);

            //helper.ChangeSpeechGroup("", true);
            //helper.ChangeSimpleSpeechGroup("de-de", "abc", true);

            helper.AddSimpleCommand("de-de", "los", "", () =>
            {
                textBox1.Text += "los" + Environment.NewLine;

            });
            /*
               helper.AddSimpleCommand("de-de", "ja", () =>
               {
               helper.ChangeSimpleCommand("de-de", "los", false);
               });

               helper.AddSimpleCommand("de-de", "nein", () =>
               {
               helper.ChangeSimpleCommand("de-de", "los", true);
               });
            * */

            //build the speech recognition (words)
            helper.RebuildAllCommands();

            helper.OnTextRecognized.Subscribe(p =>
            {
                listBox1.Items.Add(p.Text);
                listBox1.SelectedIndex = listBox1.Items.Count - 1;
                //helper.Speak("ok");
            });

            helper.OnListeningParameters.Subscribe(tuple =>
            {
                textBox1.Text += tuple.RecognizedText + "(";
                if (checkBox1.Checked)
                    helper.Speak("jetzt die parameter");
            });

            helper.OnParameterRecognized.Subscribe(p =>
            {
                textBox1.Text += p.RecognizedParameterNameText + ": ";
                if (checkBox1.Checked)
                    helper.Speak("wert für " + p.SpeechParameterInfo.Parameter.ParameterInfo.Name + " wird erwartet");
            });

            helper.OnParameterFinished.Subscribe(p =>
            {
                textBox1.Text += p.SpeechParameterInfo.Value + ", ";
                if (checkBox1.Checked)
                    helper.Speak("parameter " + p.SpeechParameterInfo.Parameter.ParameterInfo.Name + " fertig");
            });

            helper.OnBeforeMethodInvoked.Subscribe(p =>
            {
                //p.Method.
                if (checkBox1.Checked)
                    helper.Speak("methode wird ausgeführt");
            });

            helper.OnLastParameterFinished.Subscribe(p =>
            {
                textBox1.Text += ")" + Environment.NewLine;
                if (checkBox1.Checked)
                    helper.Speak("methode " + p.RecognizedText + " wird jetzt ausgeführt");
            });

            items = new List<string>();

            var mmmMax = 11;
            for (int i = 0; i < mmmMax; i++)
            {
                items.Add("item " + (10 - i).ToString());
                listBox2.Items.Add(items[i]);

                if (i == mmmMax - 1)
                    helper.AddPlainPhrase(true, items[i]);
                else
                    helper.AddPlainPhrase(false, items[i]);
            }

            //de
            //helper.AddPlainPhrase(true, "rot","blau");

            //en
            helper.AddPlainPhrase(true, "red","blue");

            //helper.ChangeCommand("", false);
            var test = helper.AllCommands.Commands;

            //helper.AddCommand();
        }