// Use this for initialization
    void Start()
    {
        // Reference to the script that receives the recognized text
        textMesh   = GameObject.Find("Word");
        controller = textMesh.GetComponent<TextController>();

        // Initialize the speech recognition session
        session = PXCMSession.CreateInstance();
        source  = session.CreateAudioSource();

        PXCMAudioSource.DeviceInfo dinfo = null;

        source.QueryDeviceInfo(1, out dinfo);
        source.SetDevice(dinfo);
        Debug.Log(dinfo.name);

        session.CreateImpl<PXCMSpeechRecognition>(out sr);

        PXCMSpeechRecognition.ProfileInfo pinfo;
        sr.QueryProfile(out pinfo);
        pinfo.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_JP_JAPANESE;
        sr.SetProfile(pinfo);

        handler = new PXCMSpeechRecognition.Handler();
        handler.onRecognition = (x) => controller.SetText(x.scores[0].sentence);
        sr.SetDictation();
        sr.StartRec(source, handler);
    }
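The snippet above pushes the top hypothesis into a TextController component on the "Word" object, but that component is not shown. A minimal sketch, assuming it only needs a SetText(string) method and a TextMesh on the same object (onRecognition fires on a worker thread, so the value is buffered and applied in Update):

    // Hypothetical TextController assumed by the example above
    using UnityEngine;

    public class TextController : MonoBehaviour
    {
        private TextMesh textMesh;
        private volatile string pending;

        void Awake()
        {
            textMesh = GetComponent<TextMesh>();
        }

        // Called from the recognition callback thread; only buffer the value here
        public void SetText(string sentence)
        {
            pending = sentence;
        }

        void Update()
        {
            // Apply the buffered text on the main thread, where Unity objects may be used
            if (pending != null)
            {
                textMesh.text = pending;
                pending = null;
            }
        }
    }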
Example #2
    public void Start()
    {
        filePath = Application.dataPath + @"\LogData.txt";
        // Create the log file; dispose the writer so the file is not left locked
        File.CreateText(filePath).Dispose();

        // Create the session instance
        session = PXCMSession.CreateInstance();
        // Create the audio source for voice input
        source = session.CreateAudioSource();

        PXCMAudioSource.DeviceInfo dinfo = null;

        // Query the audio device and log its name
        source.QueryDeviceInfo(0, out dinfo);
        source.SetDevice(dinfo);
        Debug.Log(dinfo.name);

        // Create the speech recognition module
        session.CreateImpl<PXCMSpeechRecognition>(out sr);

        // Configure the recognition profile
        PXCMSpeechRecognition.ProfileInfo pinfo;
        sr.QueryProfile(out pinfo);
        pinfo.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_JP_JAPANESE;
        sr.SetProfile(pinfo);

        // Attach the callback method to the handler
        handler = new PXCMSpeechRecognition.Handler();
        handler.onRecognition = (x) => Dataoutput(x.scores[0].sentence, x.duration);
        sr.SetDictation();
    }
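This example logs results through a Dataoutput helper that is not shown, and unlike the other samples it never calls sr.StartRec(source, handler), so recognition would still have to be started elsewhere. A minimal sketch of the helper, assuming it simply appends the sentence and its duration to the log file created above:

    // Hypothetical Dataoutput helper (assumes using System; and using System.IO;)
    private void Dataoutput(string sentence, long duration)
    {
        string line = sentence + "\t" + duration;
        File.AppendAllText(filePath, line + Environment.NewLine);
        Debug.Log(line);
    }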
Example #3
        /// <summary>
        /// Configures the speech recognition.
        /// </summary>
        private bool ConfigureSpeechRecognition()
        {
            /* Create the AudioSource instance */
            source = session.CreateAudioSource();

            /* Set audio volume to 0.2 */
            source.SetVolume(0.2f);

            /* Set Audio Source */
            source.SetDevice(sourceDeviceInfo[_activeSource]);

            /* Set Module */
            PXCMSession.ImplDesc mdesc = new PXCMSession.ImplDesc();
            mdesc.iuid = modulesIuID[_activeModule];

            pxcmStatus sts = session.CreateImpl<PXCMSpeechRecognition>(out sr);

            if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                /* Configure */
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(_activeLanguage, out pinfo);
                sr.SetProfile(pinfo);

                /* Set Command/Control or Dictation */
                if (SpeechModuleMode == SpeechModuleModeType.CommandControl)
                {
                    string[] cmds = Commands;
                    if (cmds != null && cmds.GetLength(0) != 0)
                    {
                        // voice commands available, use them
                        sr.BuildGrammarFromStringList(1, cmds, null);
                        sr.SetGrammar(1);
                    }
                    else
                    {
                        Debug.Log("Speech Command List Empty!");
                        SetError(SpeechManagerErrorType.VoiceThreadError_CommandsListEmpty);

                        //Switch to dictation mode
                        //SpeechModuleMode = SpeechModuleModeType.Dictation;
                        //sr.SetDictation();
                    }
                }
                else
                {
                    sr.SetDictation();
                }
            }
            else
            {
                Debug.Log("VoiceThreadError - InitFailed - CreateImpl!");
                SetError(SpeechManagerErrorType.VoiceThreadError_InitFailed_CreateImpl);
                return false;
            }
            return true;
        }
    void initSession(PXCMSession session)
    {
        if (source == null)
        {
            Debug.Log("Source was null!  No audio device?");
            return;
        }

        // Set the audio volume (setVolume field, e.g. 0.2)
        source.SetVolume(setVolume);

        // Set Audio Source
        Debug.Log("Using device: " + device.name);
        source.SetDevice(device);

        // Set Module
        PXCMSession.ImplDesc mdesc = new PXCMSession.ImplDesc();
        mdesc.iuid = 0;

        pxcmStatus sts = session.CreateImpl<PXCMSpeechRecognition>(out sr);

        if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // Configure
            PXCMSpeechRecognition.ProfileInfo pinfo;
            // Language
            sr.QueryProfile(0, out pinfo);
            Debug.Log(pinfo.language);
            sr.SetProfile(pinfo);

            // Set Command/Control or Dictation
            sr.SetDictation();

            // Initialization
            Debug.Log("Init Started");
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;
            handler.onAlert       = OnAlert;

            sts = sr.StartRec(source, handler);

            if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Debug.Log("Voice Rec Started");
            }
            else
            {
                Debug.Log("Voice Rec Start Failed");
            }
        }
        else
        {
            Debug.Log("Voice Rec Session Failed");
        }
    }
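initSession wires up OnRecognition and OnAlert callbacks that are not part of the snippet. A minimal sketch, assuming they only log what happened:

    // Hypothetical callbacks referenced by initSession above
    void OnRecognition(PXCMSpeechRecognition.RecognitionData data)
    {
        // scores[0] holds the best hypothesis
        Debug.Log("Recognized: " + data.scores[0].sentence);
    }

    void OnAlert(PXCMSpeechRecognition.AlertData data)
    {
        Debug.Log("Alert: " + data.label);
    }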
        private void ConfigureRealSenseSpeech()
        {
            // Instantiate session and audio source objects
            session     = PXCMSession.CreateInstance();
            audioSource = session.CreateAudioSource();

            // Select the first audio device
            PXCMAudioSource.DeviceInfo deviceInfo;
            deviceInfo = new PXCMAudioSource.DeviceInfo();
            audioSource.QueryDeviceInfo(0, out deviceInfo);
            audioSource.SetDevice(deviceInfo);

            // Set the audio recording volume
            audioSource.SetVolume(0.2f);

            // Create a speech recognition instance
            session.CreateImpl<PXCMSpeechRecognition>(out speechRecognition);

            // Initialize the speech recognition module
            PXCMSpeechRecognition.ProfileInfo profileInfo;
            speechRecognition.QueryProfile(0, out profileInfo);
            profileInfo.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_US_ENGLISH;
            speechRecognition.SetProfile(profileInfo);

            // Build and set the active grammar
            pxcmStatus status = speechRecognition.BuildGrammarFromFile(1, PXCMSpeechRecognition.GrammarFileType.GFT_JSGF, "grammarsvm.jsgf");

            if (status == pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                speechRecognition.SetGrammar(1);
            }
            else
            {
                MessageBox.Show("Java Speech Grammar Format (JSGF) file not found!");
                this.Close();
            }

            // Display device information
            //lblDeviceInfo.Content = string.Format("[Device: {0}, Language Profile: {1}]", deviceInfo.name, profileInfo.language);

            // Set the speech recognition handler
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;
            speechRecognition.StartRec(audioSource, handler);  // use the audio source configured above
        }
Example #6
    public void Start()
    {
        session = PXCMSession.CreateInstance();
        source  = session.CreateAudioSource();

        PXCMAudioSource.DeviceInfo dinfo = null;

        source.QueryDeviceInfo(1, out dinfo);
        source.SetDevice(dinfo);

        session.CreateImpl<PXCMSpeechRecognition>(out sr);

        PXCMSpeechRecognition.ProfileInfo pinfo;
        sr.QueryProfile(out pinfo);
        pinfo.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_JP_JAPANESE;
        sr.SetProfile(pinfo);

        handler = new PXCMSpeechRecognition.Handler();
        handler.onRecognition = (x) => Debug.Log(x.scores[0].sentence);
        sr.SetDictation();
        sr.StartRec(source, handler);
    }
Example #7
    private bool ConfigureSpeechRecognition()
    {
        /* Create the AudioSource instance */
        source = session.CreateAudioSource();

        /* Set audio volume to 0.2 */
        source.SetVolume(0.2f);

        /* Set Audio Source */
        source.SetDevice(sourceDeviceInfo[_activeSource]);

        pxcmStatus sts = session.CreateImpl<PXCMSpeechRecognition>(out sr);

        if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            /* Configure */
            PXCMSpeechRecognition.ProfileInfo pinfo;
            sr.QueryProfile(_activeLanguage, out pinfo);
            sr.SetProfile(pinfo);

            /* Set Command/Control or Dictation */
            string[] cmds = new String[4] {
                "Create", "Save", "Load", "Run"
            };
            if (cmds != null && cmds.GetLength(0) != 0)
            {
                // voice commands available, use them
                sr.BuildGrammarFromStringList(1, cmds, null);
                sr.SetGrammar(1);
            }
        }
        else
        {
            Debug.Log("VoiceThreadError - InitFailed - CreateImpl!");
            return false;
        }
        return true;
    }
Example #8
        private PXCMAudioSource FindAudioSource()
        {
            PXCMAudioSource audioSource = _session.CreateAudioSource();

            audioSource.ScanDevices();
            int devicesCount = audioSource.QueryDeviceNum();
            var deviceIndex  = 0;

            PXCMAudioSource.DeviceInfo deviceInfo;
            for (int i = 0; i < devicesCount; i++)
            {
                audioSource.QueryDeviceInfo(i, out deviceInfo);
                if (deviceInfo.name.Contains("Array"))
                {
                    deviceIndex = i;
                    break;
                }
            }
            audioSource.QueryDeviceInfo(deviceIndex, out deviceInfo);
            audioSource.SetDevice(deviceInfo);
            audioSource.SetVolume(0.8f);
            return audioSource;
        }
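FindAudioSource only selects and returns the microphone array; it does not touch the recognizer. A hedged sketch of how the result might be consumed (the profile index, dictation mode, and console handler are assumptions, not part of the original):

        PXCMAudioSource audioSource = FindAudioSource();

        PXCMSpeechRecognition sr;
        _session.CreateImpl<PXCMSpeechRecognition>(out sr);

        // Use the first profile and dictation mode
        PXCMSpeechRecognition.ProfileInfo pinfo;
        sr.QueryProfile(0, out pinfo);
        sr.SetProfile(pinfo);
        sr.SetDictation();

        var handler = new PXCMSpeechRecognition.Handler();
        handler.onRecognition = data => Console.WriteLine(data.scores[0].sentence);
        sr.StartRec(audioSource, handler);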
Example #9
        public void DoIt(MainForm form1, PXCMSession session)
        {
            form = form1;

            /* Create the AudioSource instance */
            source = session.CreateAudioSource();

            if (source == null)
            {
                CleanUp();
                form.PrintStatus("Stopped");
                return;
            }

            /* Set audio volume to 0.2 */
            source.SetVolume(0.2f);

            /* Set Audio Source */
            source.SetDevice(form.GetCheckedSource());

            /* Set Module */
            PXCMSession.ImplDesc mdesc = new PXCMSession.ImplDesc();
            mdesc.iuid = form.GetCheckedModule();

            pxcmStatus sts = session.CreateImpl<PXCMSpeechRecognition>(out sr);

            if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                /* Configure */
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(form.GetCheckedLanguage(), out pinfo);
                sr.SetProfile(pinfo);

                /* Set Command/Control or Dictation */
                if (form.IsCommandControl())
                {
                    string[] cmds = form.GetCommands();
                    if (form.g_file != null && form.g_file.Length != 0)
                    {
                        if (form.g_file.EndsWith(".list"))
                        {
                            form.FillCommandListConsole(form.g_file);
                            cmds = form.GetCommands();
                            if (cmds.GetLength(0) == 0)
                            {
                                form.PrintStatus("Command List Load Errors");
                            }
                        }

                        // input Command/Control grammar file available, use it
                        if (!SetGrammarFromFile(form.g_file))
                        {
                            form.PrintStatus("Can not set Grammar From File.");
                            CleanUp();
                            return;
                        }
                    }
                    else if (cmds != null && cmds.GetLength(0) != 0)
                    {
                        // voice commands available, use them
                        sts = sr.BuildGrammarFromStringList(1, cmds, null);
                        sts = sr.SetGrammar(1);
                    }
                    else
                    {
                        form.PrintStatus("No Command List. Dictation instead.");
                        if (form.v_file != null && form.v_file.Length != 0)
                        {
                            SetVocabularyFromFile(form.v_file);
                        }
                        sts = sr.SetDictation();
                    }
                }
                else
                {
                    if (form.v_file != null && form.v_file.Length != 0)
                    {
                        SetVocabularyFromFile(form.v_file);
                    }
                    sts = sr.SetDictation();
                }

                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    form.PrintStatus("Can't start recognition.");
                    CleanUp();
                    return;
                }

                /* Initialization */
                form.PrintStatus("Init Started");
                form.PutLabel1Text("初期化中...");
                PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
                handler.onRecognition = OnRecognition;
                handler.onAlert       = OnAlert;

                sts = sr.StartRec(source, handler);
                if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    form.PrintStatus("Init OK");
                    //					form.PutLabel1Text("認識中...");
                    form.PutLabel1Text("マイクに向かって話してください");

                    /* Wait until the stop button is clicked */
                    while (!form.IsStop())
                    {
                        System.Threading.Thread.Sleep(5);
                    }

                    sr.StopRec();
                }
                else
                {
                    form.PrintStatus("Failed to initialize");
                }
            }
            else
            {
                form.PrintStatus("Init Failed");
            }

            CleanUp();
            form.PrintStatus("Stopped");
        }
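DoIt calls a CleanUp method on every exit path that is not included in the listing. A minimal sketch, assuming it only needs to release the recognition module and the audio source:

        // Hypothetical CleanUp: release the SDK objects created in DoIt
        private void CleanUp()
        {
            if (sr != null)
            {
                sr.Dispose();
                sr = null;
            }
            if (source != null)
            {
                source.Dispose();
                source = null;
            }
        }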
        // Initialize speech recognition
        private void InitializeSpeechRecognition()
        {
            pxcmStatus sts;
            var        session = senseManager.QuerySession();

            // Create the audio input device
            audioSource = session.CreateAudioSource();
            if (audioSource == null)
            {
                throw new Exception("音声入力デバイスの作成に失敗しました");
            }

            // Enumerate the audio input devices
            TextDesc.Text  = "";
            TextDesc.Text += "音声入力デバイス\n";

            PXCMAudioSource.DeviceInfo device = null;

            audioSource.ScanDevices();
            for (int i = 0;; ++i)
            {
                PXCMAudioSource.DeviceInfo dinfo;
                sts = audioSource.QueryDeviceInfo(i, out dinfo);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                // Display the audio input device name
                TextDesc.Text += "\t" + dinfo.name + "\n";

                // Use the first device
                if (i == 0)
                {
                    device = dinfo;
                }
            }

            // Set the audio input device
            sts = audioSource.SetDevice(device);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("音声入力デバイスの設定に失敗しました");
            }


            // Enumerate the speech recognition engines
            TextDesc.Text += "音声認識エンジン\n";

            PXCMSession.ImplDesc inDesc  = new PXCMSession.ImplDesc();
            PXCMSession.ImplDesc outDesc = null;
            PXCMSession.ImplDesc desc    = null;
            inDesc.cuids[0] = PXCMSpeechRecognition.CUID;

            for (int i = 0; ; ++i)
            {
                // Get the speech recognition engine
                sts = session.QueryImpl(inDesc, i, out outDesc);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                // Display the recognition engine name
                TextDesc.Text += "\t" + outDesc.friendlyName + "\n";

                // Use the first recognition engine
                if (i == 0)
                {
                    desc = outDesc;
                }
            }

            // Create the speech recognition engine object
            sts = session.CreateImpl<PXCMSpeechRecognition>(desc, out recognition);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("音声認識エンジンオブジェクトの作成に失敗しました");
            }

            // Enumerate the supported languages
            PXCMSpeechRecognition.ProfileInfo profile = null;

            for (int j = 0;; ++j)
            {
                // Get a profile supported by the recognition engine
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sts = recognition.QueryProfile(j, out pinfo);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                // Display the supported language
                TextDesc.Text += "\t\t" + LanguageToString(pinfo.language) + "\n";

                // Use the English engine (switch to Japanese once it is supported)
                if (pinfo.language == PXCMSpeechRecognition.LanguageType.LANGUAGE_US_ENGLISH)
                {
                    profile = pinfo;
                }
            }

            if (profile == null)
            {
                throw new Exception("選択した音声認識エンジンが見つかりませんでした");
            }

            // Set the language to use
            sts = recognition.SetProfile(profile);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("音声認識エンジンオブジェクトの設定に失敗しました");
            }

            // Set command mode
            SetCommandMode();

            // Create the recognition notification handler
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;

            // Start speech recognition
            sts = recognition.StartRec(audioSource, handler);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("音声認識の開始に失敗しました");
            }
        }
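The listing above prints each profile through a LanguageToString helper that is not shown. A minimal sketch, assuming it only has to turn the enum value into a readable label:

        // Hypothetical LanguageToString helper used when listing profiles
        private string LanguageToString(PXCMSpeechRecognition.LanguageType language)
        {
            switch (language)
            {
                case PXCMSpeechRecognition.LanguageType.LANGUAGE_US_ENGLISH:
                    return "English (US)";
                case PXCMSpeechRecognition.LanguageType.LANGUAGE_JP_JAPANESE:
                    return "Japanese";
                default:
                    return language.ToString();
            }
        }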
        public void DoIt(MainForm form1, PXCMSession session, PXCMAudioSource s)
        {
            Debug.Log("DoIt");
            form = form1;
            Debug.Log("DoIt:01");
            /* Create the AudioSource instance */
            //source = s;
            source = session.CreateAudioSource();
            Debug.Log("DoIt:02");
            if (source == null) {
                CleanUp();
                form.PrintStatus("Stopped");
                return;
            }
            Debug.Log("DoIt:03");
            /* Set audio volume to 0.2 */
            source.SetVolume(0.2f);
            Debug.Log("DoIt:04");
            /* Set Audio Source */
            source.SetDevice(form.GetCheckedSource());
            Debug.Log("DoIt:05");
            /* Set Module */
            PXCMSession.ImplDesc mdesc = new PXCMSession.ImplDesc();
            mdesc.iuid = form.GetCheckedModule();
            Debug.Log("DoIt:06");
            pxcmStatus sts = session.CreateImpl<PXCMSpeechRecognition>(out sr);
            Debug.Log("DoIt:07");

            if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Debug.Log("DoIt:10");
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(form.GetCheckedLanguage(), out pinfo);
                sr.SetProfile(pinfo);

                if (form.IsCommandControl())
                {
                    Debug.Log("DoIt:20");
                    string[] cmds = form.GetCommands();
                    if (form.g_file != null && form.g_file.Length != 0)
                    {
                        Debug.Log("DoIt:30");
                        if (form.g_file.EndsWith(".list")){
                            Debug.Log("DoIt:40");
                            form.FillCommandListConsole(form.g_file);
                            cmds = form.GetCommands();
                            if (cmds.GetLength(0) == 0)
                                form.PrintStatus("Command List Load Errors");
                        }

                        // input Command/Control grammar file available, use it
                        if (!SetGrammarFromFile(form.g_file))
                        {
                            Debug.Log("DoIt:41");
                            form.PrintStatus("Can not set Grammar From File.");
                            CleanUp();
                            return;
                        }
                    }
                    else if (cmds != null && cmds.GetLength(0) != 0)
                    {
                        Debug.Log("DoIt:31");
                        // voice commands available, use them
                        sts = sr.BuildGrammarFromStringList(1, cmds, null);
                        sts = sr.SetGrammar(1);
                    } else {
                        Debug.Log("DoIt:32");
                        form.PrintStatus("No Command List. Dictation instead.");
                        if (form.v_file != null && form.v_file.Length != 0) SetVocabularyFromFile(form.v_file);
                        sts = sr.SetDictation();
                    }
                }
                else
                {
                    Debug.Log("DoIt:21");
                    if (form.v_file != null && form.v_file.Length != 0) SetVocabularyFromFile(form.v_file);
                    sts = sr.SetDictation();
                }

                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    form.PrintStatus("Can't start recognition.");
                    CleanUp();
                    return;
                }

                form.PrintStatus("Init Started");
                PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
                handler.onRecognition = OnRecognition;
                handler.onAlert       = OnAlert;

                sts = sr.StartRec(source, handler);
                if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR) {
                    form.PrintStatus("Init OK");

                    // Wait until the stop button is clicked
                    while (!form.IsStop()) {
                        System.Threading.Thread.Sleep(5);
                    }

                    sr.StopRec();
                } else {
                    form.PrintStatus("Failed to initialize");
                }
            } else {
                form.PrintStatus("Init Failed");
            }

            Debug.Log("DoIt:98");
            CleanUp();
            form.PrintStatus("Stopped");
            Debug.Log("DoIt:99");
        }
Example #12
        public MainWindow()
        {
            InitializeComponent();

            #region Hand
            nodes = new PXCMHandData.JointData[][] { new PXCMHandData.JointData[0x20], new PXCMHandData.JointData[0x20] };
            xValues = new float[arraySize];
            yValues = new float[arraySize];
            zValues = new float[arraySize];
            #endregion Hand

            // Toggle the test mode for drone control ON/OFF
            TestModeCheck.IsChecked = true;

            genericItems = new ObservableCollection<GenericItem>();

            se = PXCMSession.CreateInstance();

            if (se != null)
            {
                //processingThread = new Thread(new ThreadStart(ProcessingHandThread));
                //senseManager = PXCMSenseManager.CreateInstance();
                //senseManager.EnableHand();
                //senseManager.Init();
                //ConfigureHandModule();
                //processingThread.Start();



                // session is a PXCMSession instance.
                audiosource = se.CreateAudioSource();
                // Scan and Enumerate audio devices
                audiosource.ScanDevices();

                PXCMAudioSource.DeviceInfo dinfo = null;

                for (int d = audiosource.QueryDeviceNum() - 1; d >= 0; d--)
                {
                    audiosource.QueryDeviceInfo(d, out dinfo);
                }
                audiosource.SetDevice(dinfo);

                se.CreateImpl<PXCMSpeechRecognition>(out sr);
              

                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(0, out pinfo);
                sr.SetProfile(pinfo);

                // sr is a PXCMSpeechRecognition instance.
                String[] cmds = new String[] { "Takeoff", "Land", "Rotate Left", "Rotate Right", "Advance",
                    "Back", "Up", "Down", "Left", "Right", "Stop" , "Dance"};
                int[] labels = new int[] { 1, 2, 4, 5, 8, 16, 32, 64, 128, 256, 512, 1024 };
                // Build the grammar.
                sr.BuildGrammarFromStringList(1, cmds, labels);
                // Set the active grammar.
                sr.SetGrammar(1);
                // Set handler

                RecognitionHandler = new PXCMSpeechRecognition.Handler();

                RecognitionHandler.onRecognition = OnRecognition;

                Legenda.Items.Add("------ Available Commands ------");
                foreach (var cmd in cmds)
                {
                    Legenda.Items.Add(cmd);
                }
            }
        }
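Example #12 builds its grammar with numeric labels (1, 2, 4, ...), so the OnRecognition handler, which is not shown, can dispatch on the label of the top hypothesis instead of comparing sentence strings. A hedged sketch (the drone calls are placeholders, not part of the original):

        // Hypothetical handler: scores[0].label carries the value passed in the labels array
        private void OnRecognition(PXCMSpeechRecognition.RecognitionData data)
        {
            int label = data.scores[0].label;
            Console.WriteLine("{0} (label {1})", data.scores[0].sentence, label);

            // Dispatch on the label, e.g. 1 = Takeoff, 2 = Land, 4 = Rotate Left, ...
            if (label == 1)
            {
                // drone.TakeOff();   // placeholder for the actual drone command
            }
            else if (label == 2)
            {
                // drone.Land();      // placeholder
            }
        }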
Example #13
        static void SetupRecognizer(PXCMSession session)
        {
            PXCMAudioSource.DeviceInfo dinfo = null;
            if (session != null)
            {
                #region Audio Source

                // session is a PXCMSession instance.
                source = session.CreateAudioSource();

                // Scan and Enumerate audio devices
                source.ScanDevices();

                for (int d = source.QueryDeviceNum() - 1; d >= 0; d--)
                {
                    source.QueryDeviceInfo(d, out dinfo);

                    // Select one and break out of the loop
                    break;
                }
                if (dinfo != null)
                {
                    // Set the active device
                    source.SetDevice(dinfo);
                }

                #endregion

                #region Recognizer Instance

                pxcmStatus sts = session.CreateImpl<PXCMSpeechRecognition>(out sr);

                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    return;
                }

                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(0, out pinfo);
                sr.SetProfile(pinfo);

                //sr.SetDictation();

                #endregion

                #region Grammar
                Perintah = new Dictionary<string, Module>();
                // sr is a PXCMSpeechRecognition instance.
                using (var data = new JONGOS_DBEntities())
                {
                    var listCommand = from c in data.Modules
                                      orderby c.ID
                                      select c;
                    foreach (var item in listCommand.Distinct())
                    {
                        Perintah.Add(item.VoiceCommand, item);
                    }
                }
                List<string> cmds = new List<string>();
                foreach (var cmd in Perintah.Keys)
                {
                    cmds.Add(cmd);
                }

                // Build the grammar.
                sr.BuildGrammarFromStringList(1, cmds.ToArray(), null);

                // Set the active grammar.
                sr.SetGrammar(1);
                #endregion

                #region recognition Event
                // Set handler
                PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
                handler.onRecognition = OnRecognition;
                //handler.onAlert = OnAlert;
                // sr is a PXCMSpeechRecognition instance
                pxcmStatus stsrec = sr.StartRec(source, handler);
                if (stsrec < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    Console.WriteLine("Recognizer error!");
                }


                #endregion
            }
        }
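SetupRecognizer registers OnRecognition but its body is not shown. Since the grammar strings are exactly the keys of the Perintah dictionary, a minimal sketch could look the recognized sentence back up and act on the matching Module (the dispatch itself is left as a placeholder):

        // Hypothetical handler: map the recognized sentence back to its Module entry
        static void OnRecognition(PXCMSpeechRecognition.RecognitionData data)
        {
            string sentence = data.scores[0].sentence;

            Module module;
            if (Perintah.TryGetValue(sentence, out module))
            {
                Console.WriteLine("Command recognized: " + sentence);
                // Launch or otherwise handle 'module' here
            }
        }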