Code example #1
        public void EnableRecognition(SupportedLanguage language)
        {
            _session = PXCMSession.CreateInstance();
            var audioSource = FindAudioSource();

            _session.CreateImpl(out _speechRecognition);
            for (int i = 0; ; i++)
            {
                PXCMSpeechRecognition.ProfileInfo profile;
                if (_speechRecognition.QueryProfile(i, out profile) != Errors.NoError)
                {
                    break;
                }
                var languageLabel             = profile.language.ToString();
                SupportedLanguage sdkLanguage = SupportedLanguageMapper.FromString(languageLabel);
                if (sdkLanguage != SupportedLanguage.NotSpecified)
                {
                    _recognitionProfiles.Add(sdkLanguage, profile);
                }
            }
            if (language == SupportedLanguage.NotSpecified)
            {
                language = _recognitionProfiles.Keys.First();
            }
            if (!_recognitionProfiles.ContainsKey(language))
            {
                throw new LanguageNotSupportedException(language);
            }
            _speechRecognition.SetProfile(_recognitionProfiles[language]);
            _speechRecognition.SetDictation();
            _speechRecognition.StartRec(audioSource, _speechRecognitionHandler);
        }
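
The method above starts recognition but the listing does not show how it is stopped. A minimal counterpart, reusing the same fields, might look like the sketch below; the name DisableRecognition and the cleanup order are illustrative additions, not part of the original source.

        // Hypothetical cleanup counterpart to EnableRecognition (not in the original source):
        // stop the recognizer and release the SDK objects created above.
        public void DisableRecognition()
        {
            if (_speechRecognition != null)
            {
                _speechRecognition.StopRec();    // stop delivering recognition events
                _speechRecognition.Dispose();
                _speechRecognition = null;
            }
            if (_session != null)
            {
                _session.Dispose();              // release the RealSense session
                _session = null;
            }
            _recognitionProfiles.Clear();
        }
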
Code example #2
    // Use this for initialization
    void Start()
    {
        // Reference to the script that the recognized text is passed to
        textMesh   = GameObject.Find("Word");
        controller = textMesh.GetComponent <TextController>();

        // Initialize the speech recognition session
        session = PXCMSession.CreateInstance();
        source  = session.CreateAudioSource();

        PXCMAudioSource.DeviceInfo dinfo = null;

        source.QueryDeviceInfo(1, out dinfo);
        source.SetDevice(dinfo);
        Debug.Log(dinfo.name);

        session.CreateImpl <PXCMSpeechRecognition>(out sr);

        PXCMSpeechRecognition.ProfileInfo pinfo;
        sr.QueryProfile(out pinfo);
        pinfo.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_JP_JAPANESE;
        sr.SetProfile(pinfo);

        handler = new PXCMSpeechRecognition.Handler();
        handler.onRecognition = (x) => controller.SetText(x.scores[0].sentence);
        sr.SetDictation();
        sr.StartRec(source, handler);
    }
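
The example relies on a TextController component on the "Word" object that is not shown. The sketch below is one possible minimal version; its member names are assumptions. Because onRecognition may be invoked off the Unity main thread, the text is only stored in the callback and applied in Update().

// Minimal sketch of the TextController component assumed above (not part of the original listing).
using UnityEngine;

public class TextController : MonoBehaviour
{
    private TextMesh textMesh;
    private volatile string pendingText;   // written by the recognition callback thread

    void Awake()
    {
        // The "Word" object is assumed to carry a TextMesh component.
        textMesh = GetComponent<TextMesh>();
    }

    // Called from handler.onRecognition; store the sentence and apply it on the main thread.
    public void SetText(string sentence)
    {
        pendingText = sentence;
    }

    void Update()
    {
        if (pendingText != null)
        {
            textMesh.text = pendingText;
            pendingText   = null;
        }
    }
}
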
Code example #3
    public void Start()
    {
        filePath = Application.dataPath + @"\LogData.txt";
        File.CreateText(filePath).Dispose();   // create the log file and release the handle so it can be written to later

        // Create the session instance
        session = PXCMSession.CreateInstance();
        // Create the audio input source
        source = session.CreateAudioSource();

        PXCMAudioSource.DeviceInfo dinfo = null;

        // Detect the device and log it
        source.QueryDeviceInfo(0, out dinfo);
        source.SetDevice(dinfo);
        Debug.Log(dinfo.name);

        // Create the speech recognition module
        session.CreateImpl <PXCMSpeechRecognition>(out sr);

        // Initial configuration of speech recognition
        PXCMSpeechRecognition.ProfileInfo pinfo;
        sr.QueryProfile(out pinfo);
        pinfo.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_JP_JAPANESE;
        sr.SetProfile(pinfo);

        // Pass the callback method to the handler
        handler = new PXCMSpeechRecognition.Handler();
        handler.onRecognition = (x) => Dataoutput(x.scores[0].sentence, x.duration);
        sr.SetDictation();
    }
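
The Dataoutput callback referenced above is not part of the listing. A possible implementation, assuming the duration is reported in milliseconds, is sketched here.

    // Hypothetical Dataoutput implementation (not in the original listing): append each
    // recognized sentence and its duration to the log file created in Start().
    private void Dataoutput(string sentence, long duration)
    {
        File.AppendAllText(filePath, sentence + "\t" + duration + " ms\n");
    }
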
Code example #4
        /// <summary>
        /// Configures the speech recognition.
        /// </summary>
        private bool ConfigureSpeechRecognition()
        {
            /* Create the AudioSource instance */
            source = session.CreateAudioSource();

            /* Set audio volume to 0.2 */
            source.SetVolume(0.2f);

            /* Set Audio Source */
            source.SetDevice(sourceDeviceInfo[_activeSource]);

            /* Set Module */
            PXCMSession.ImplDesc mdesc = new PXCMSession.ImplDesc();
            mdesc.iuid = modulesIuID[_activeModule];

            pxcmStatus sts = session.CreateImpl <PXCMSpeechRecognition>(out sr);

            if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                /* Configure */
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(_activeLanguage, out pinfo);
                sr.SetProfile(pinfo);

                /* Set Command/Control or Dictation */
                if (SpeechModuleMode == SpeechModuleModeType.CommandControl)
                {
                    string[] cmds = Commands;
                    if (cmds != null && cmds.GetLength(0) != 0)
                    {
                        // voice commands available, use them
                        sr.BuildGrammarFromStringList(1, cmds, null);
                        sr.SetGrammar(1);
                    }
                    else
                    {
                        Debug.Log("Speech Command List Empty!");
                        SetError(SpeechManagerErrorType.VoiceThreadError_CommandsListEmpty);

                        //Switch to dictation mode
                        //SpeechModuleMode = SpeechModuleModeType.Dictation;
                        //sr.SetDictation();
                    }
                }
                else
                {
                    sr.SetDictation();
                }
            }
            else
            {
                Debug.Log("VoiceThreadError - InitFailed - CreateImpl!");
                SetError(SpeechManagerErrorType.VoiceThreadError_InitFailed_CreateImpl);
                return(false);
            }
            return(true);
        }
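
ConfigureSpeechRecognition only prepares the module; recognition still has to be started by the caller. One possible call site, assuming an OnRecognition method exists elsewhere in the class, could look like this:

        // Sketch of a possible caller (not in the original source): configure, attach a
        // handler, and start recognition on the audio source that was set up above.
        private void StartSpeechRecognition()
        {
            if (!ConfigureSpeechRecognition())
            {
                return;
            }
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;   // OnRecognition is assumed to exist
            sr.StartRec(source, handler);
        }
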
Code example #5
    void initSession(PXCMSession session)
    {
        if (source == null)
        {
            Debug.Log("Source was null!  No audio device?");
            return;
        }

        // Set the audio input volume (uses the setVolume field)
        source.SetVolume(setVolume);

        // Set Audio Source
        Debug.Log("Using device: " + device.name);
        source.SetDevice(device);

        // Set Module
        PXCMSession.ImplDesc mdesc = new PXCMSession.ImplDesc();
        mdesc.iuid = 0;

        pxcmStatus sts = session.CreateImpl <PXCMSpeechRecognition>(out sr);

        if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // Configure
            PXCMSpeechRecognition.ProfileInfo pinfo;
            // Language
            sr.QueryProfile(0, out pinfo);
            Debug.Log(pinfo.language);
            sr.SetProfile(pinfo);

            // Set Command/Control or Dictation
            sr.SetDictation();

            // Initialization
            Debug.Log("Init Started");
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;
            handler.onAlert       = OnAlert;

            sts = sr.StartRec(source, handler);

            if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Debug.Log("Voice Rec Started");
            }
            else
            {
                Debug.Log("Voice Rec Start Failed");
            }
        }
        else
        {
            Debug.Log("Voice Rec Session Failed");
        }
    }
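
The OnRecognition and OnAlert callbacks wired up in initSession are not shown. The sketch below illustrates what minimal versions might look like; the log messages are illustrative, and the field accesses follow the SDK's RecognitionData and AlertData structures.

    // Hypothetical callbacks matching the handler assignments above.
    void OnRecognition(PXCMSpeechRecognition.RecognitionData data)
    {
        // scores[0] holds the best-scoring hypothesis
        Debug.Log("Recognized: " + data.scores[0].sentence);
    }

    void OnAlert(PXCMSpeechRecognition.AlertData data)
    {
        // Alerts report events such as speech begin/end or volume problems
        Debug.Log("Alert: " + data.label);
    }
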
Code example #6
        private void ConfigureRealSenseSpeech()
        {
            // Instantiate session and audio source objects
            session     = PXCMSession.CreateInstance();
            audioSource = session.CreateAudioSource();

            // Select the first audio device
            PXCMAudioSource.DeviceInfo deviceInfo;
            deviceInfo = new PXCMAudioSource.DeviceInfo();
            audioSource.QueryDeviceInfo(0, out deviceInfo);
            audioSource.SetDevice(deviceInfo);

            // Set the audio recording volume
            audioSource.SetVolume(0.2f);

            // Create a speech recognition instance
            session.CreateImpl <PXCMSpeechRecognition>(out speechRecognition);

            // Initialize the speech recognition module
            PXCMSpeechRecognition.ProfileInfo profileInfo;
            speechRecognition.QueryProfile(0, out profileInfo);
            profileInfo.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_US_ENGLISH;
            speechRecognition.SetProfile(profileInfo);

            // Build and set the active grammar
            pxcmStatus status = speechRecognition.BuildGrammarFromFile(1, PXCMSpeechRecognition.GrammarFileType.GFT_JSGF, "grammarsvm.jsgf");

            if (status == pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                speechRecognition.SetGrammar(1);
            }
            else
            {
                MessageBox.Show("Java Speech Grammar Format (JSGF) file not found!");
                this.Close();
            }

            // Display device information
            //lblDeviceInfo.Content = string.Format("[Device: {0}, Language Profile: {1}]", deviceInfo.name, profileInfo.language);

            // Set the speech recognition handler
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;
            speechRecognition.StartRec(audioSource, handler);   // start recognition on the audio source configured above
        }
Code example #7
    public void Start()
    {
        // Create the session and the audio input source
        session = PXCMSession.CreateInstance();
        source  = session.CreateAudioSource();

        PXCMAudioSource.DeviceInfo dinfo = null;

        // Select the audio device at index 1 and make it active
        source.QueryDeviceInfo(1, out dinfo);
        source.SetDevice(dinfo);

        // Create the speech recognition module
        session.CreateImpl <PXCMSpeechRecognition>(out sr);

        // Use the current profile with the language switched to Japanese
        PXCMSpeechRecognition.ProfileInfo pinfo;
        sr.QueryProfile(out pinfo);
        pinfo.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_JP_JAPANESE;
        sr.SetProfile(pinfo);

        // Log every recognized sentence, then start dictation
        handler = new PXCMSpeechRecognition.Handler();
        handler.onRecognition = (x) => Debug.Log(x.scores[0].sentence);
        sr.SetDictation();
        sr.StartRec(source, handler);
    }
Code example #8
    private bool ConfigureSpeechRecognition()
    {
        /* Create the AudioSource instance */
        source = session.CreateAudioSource();

        /* Set audio volume to 0.2 */
        source.SetVolume(0.2f);

        /* Set Audio Source */
        source.SetDevice(sourceDeviceInfo[_activeSource]);

        pxcmStatus sts = session.CreateImpl <PXCMSpeechRecognition>(out sr);

        if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            /* Configure */
            PXCMSpeechRecognition.ProfileInfo pinfo;
            sr.QueryProfile(_activeLanguage, out pinfo);
            sr.SetProfile(pinfo);

            /* Set Command/Control or Dictation */
            string[] cmds = new String[4] {
                "Create", "Save", "Load", "Run"
            };
            if (cmds != null && cmds.GetLength(0) != 0)
            {
                // voice commands available, use them
                sr.BuildGrammarFromStringList(1, cmds, null);
                sr.SetGrammar(1);
            }
        }
        else
        {
            Debug.Log("VoiceThreadError - InitFailed - CreateImpl!");
            return(false);
        }
        return(true);
    }
Code example #9
        public void DoIt(MainForm form1, PXCMSession session)
        {
            form = form1;

            /* Create the AudioSource instance */
            source = session.CreateAudioSource();

            if (source == null)
            {
                CleanUp();
                form.PrintStatus("Stopped");
                return;
            }

            /* Set audio volume to 0.2 */
            source.SetVolume(0.2f);

            /* Set Audio Source */
            source.SetDevice(form.GetCheckedSource());

            /* Set Module */
            PXCMSession.ImplDesc mdesc = new PXCMSession.ImplDesc();
            mdesc.iuid = form.GetCheckedModule();

            pxcmStatus sts = session.CreateImpl <PXCMSpeechRecognition>(out sr);

            if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                /* Configure */
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(form.GetCheckedLanguage(), out pinfo);
                sr.SetProfile(pinfo);

                /* Set Command/Control or Dictation */
                if (form.IsCommandControl())
                {
                    string[] cmds = form.GetCommands();
                    if (form.g_file != null && form.g_file.Length != 0)
                    {
                        if (form.g_file.EndsWith(".list"))
                        {
                            form.FillCommandListConsole(form.g_file);
                            cmds = form.GetCommands();
                            if (cmds.GetLength(0) == 0)
                            {
                                form.PrintStatus("Command List Load Errors");
                            }
                        }

                        // input Command/Control grammar file available, use it
                        if (!SetGrammarFromFile(form.g_file))
                        {
                            form.PrintStatus("Can not set Grammar From File.");
                            CleanUp();
                            return;
                        }
                    }
                    else if (cmds != null && cmds.GetLength(0) != 0)
                    {
                        // voice commands available, use them
                        sts = sr.BuildGrammarFromStringList(1, cmds, null);
                        sts = sr.SetGrammar(1);
                    }
                    else
                    {
                        form.PrintStatus("No Command List. Dictation instead.");
                        if (form.v_file != null && form.v_file.Length != 0)
                        {
                            SetVocabularyFromFile(form.v_file);
                        }
                        sts = sr.SetDictation();
                    }
                }
                else
                {
                    if (form.v_file != null && form.v_file.Length != 0)
                    {
                        SetVocabularyFromFile(form.v_file);
                    }
                    sts = sr.SetDictation();
                }

                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    form.PrintStatus("Can't start recognition.");
                    CleanUp();
                    return;
                }

                /* Initialization */
                form.PrintStatus("Init Started");
                form.PutLabel1Text("初期化中...");
                PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
                handler.onRecognition = OnRecognition;
                handler.onAlert       = OnAlert;

                sts = sr.StartRec(source, handler);
                if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    form.PrintStatus("Init OK");
                    //form.PutLabel1Text("Recognizing...");
                    form.PutLabel1Text("Please speak into the microphone");

                    /* Wait until the stop button is clicked */
                    while (!form.IsStop())
                    {
                        System.Threading.Thread.Sleep(5);
                    }

                    sr.StopRec();
                }
                else
                {
                    form.PrintStatus("Failed to initialize");
                }
            }
            else
            {
                form.PrintStatus("Init Failed");
            }

            CleanUp();
            form.PrintStatus("Stopped");
        }
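
The SetGrammarFromFile helper used above is not included in the listing. A simplified sketch, assuming a JSGF grammar file (other file types would need the matching GrammarFileType value), might be:

        // Simplified sketch of SetGrammarFromFile (the original helper is not shown):
        // build grammar #1 from the given file and make it the active grammar.
        private bool SetGrammarFromFile(string grammarFile)
        {
            pxcmStatus sts = sr.BuildGrammarFromFile(1,
                PXCMSpeechRecognition.GrammarFileType.GFT_JSGF, grammarFile);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                return false;
            }
            return sr.SetGrammar(1) >= pxcmStatus.PXCM_STATUS_NO_ERROR;
        }
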
Code example #10
        // Initialize speech recognition
        private void InitializeSpeechRecognition()
        {
            pxcmStatus sts;
            var        session = senseManager.QuerySession();

            // Create the audio input device
            audioSource = session.CreateAudioSource();
            if (audioSource == null)
            {
                throw new Exception("Failed to create the audio input device");
            }

            // Enumerate the audio input devices
            TextDesc.Text  = "";
            TextDesc.Text += "Audio input devices\n";

            PXCMAudioSource.DeviceInfo device = null;

            audioSource.ScanDevices();
            for (int i = 0;; ++i)
            {
                PXCMAudioSource.DeviceInfo dinfo;
                sts = audioSource.QueryDeviceInfo(i, out dinfo);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                // Display the audio input device name
                TextDesc.Text += "\t" + dinfo.name + "\n";

                // Use the first device
                if (i == 0)
                {
                    device = dinfo;
                }
            }

            // Set the audio input device
            sts = audioSource.SetDevice(device);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("Failed to set the audio input device");
            }


            // Enumerate the speech recognition engines
            TextDesc.Text += "Speech recognition engines\n";

            PXCMSession.ImplDesc inDesc  = new PXCMSession.ImplDesc();
            PXCMSession.ImplDesc outDesc = null;
            PXCMSession.ImplDesc desc    = null;
            inDesc.cuids[0] = PXCMSpeechRecognition.CUID;

            for (int i = 0; ; ++i)
            {
                // Get the speech recognition engine
                sts = session.QueryImpl(inDesc, i, out outDesc);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                // Display the speech recognition engine name
                TextDesc.Text += "\t" + outDesc.friendlyName + "\n";

                // Use the first speech recognition engine
                if (i == 0)
                {
                    desc = outDesc;
                }
            }

            // Create the speech recognition engine object
            sts = session.CreateImpl <PXCMSpeechRecognition>(desc, out recognition);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("Failed to create the speech recognition engine object");
            }

            // Enumerate the supported languages
            PXCMSpeechRecognition.ProfileInfo profile = null;

            for (int j = 0;; ++j)
            {
                // Get a profile held by the speech recognition engine
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sts = recognition.QueryProfile(j, out pinfo);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                // Display the supported language
                TextDesc.Text += "\t\t" + LanguageToString(pinfo.language) + "\n";

                // Use the English engine (change this to Japanese once Japanese is supported)
                if (pinfo.language == PXCMSpeechRecognition.LanguageType.LANGUAGE_US_ENGLISH)
                {
                    profile = pinfo;
                }
            }

            if (profile == null)
            {
                throw new Exception("選択した音声認識エンジンが見つかりませんでした");
            }

            // Set the language to use
            sts = recognition.SetProfile(profile);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("Failed to configure the speech recognition engine object");
            }

            // Set command mode
            SetCommandMode();

            // Create the speech recognition notification handler
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;

            // Start speech recognition
            sts = recognition.StartRec(audioSource, handler);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("音声認識の開始に失敗しました");
            }
        }
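
SetCommandMode, called near the end of InitializeSpeechRecognition, is not part of the listing. A minimal sketch using the string-list grammar API seen in the other examples could be the following; the command words are placeholders.

        // Hypothetical SetCommandMode (not in the original source): register a small set of
        // voice commands as grammar #1 and activate it.
        private void SetCommandMode()
        {
            string[] commands = { "start", "stop", "next", "back" };
            recognition.BuildGrammarFromStringList(1, commands, null);
            recognition.SetGrammar(1);
        }
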
Code example #11
        static void SetupRecognizer(PXCMSession session)
        {
            PXCMAudioSource.DeviceInfo dinfo = null;
            if (session != null)
            {
                #region Audio Source

                // session is a PXCMSession instance.
                source = session.CreateAudioSource();

                // Scan and Enumerate audio devices
                source.ScanDevices();

                for (int d = source.QueryDeviceNum() - 1; d >= 0; d--)
                {
                    source.QueryDeviceInfo(d, out dinfo);

                    // Select one and break out of the loop
                    break;
                }
                if (dinfo != null)
                {
                    // Set the active device
                    source.SetDevice(dinfo);
                }

                #endregion

                #region Recognizer Instance

                pxcmStatus sts = session.CreateImpl <PXCMSpeechRecognition>(out sr);

                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    return;
                }

                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(0, out pinfo);
                sr.SetProfile(pinfo);

                //sr.SetDictation();

                #endregion

                #region Grammar
                Perintah = new Dictionary <string, Module>();
                // sr is a PXCMSpeechRecognition instance.
                using (var data = new JONGOS_DBEntities())
                {
                    var listCommand = from c in data.Modules
                                      orderby c.ID
                                      select c;
                    foreach (var item in listCommand.Distinct())
                    {
                        Perintah.Add(item.VoiceCommand, item);
                    }
                }
                List <string> cmds = new List <string>();
                foreach (var cmd in Perintah.Keys)
                {
                    cmds.Add(cmd);
                }

                // Build the grammar.
                sr.BuildGrammarFromStringList(1, cmds.ToArray(), null);

                // Set the active grammar.
                sr.SetGrammar(1);
                #endregion

                #region recognition Event
                // Set handler
                PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
                handler.onRecognition = OnRecognition;
                //handler.onAlert = OnAlert;
                // sr is a PXCMSpeechRecognition instance
                pxcmStatus stsrec = sr.StartRec(source, handler);
                if (stsrec < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    Console.WriteLine("Recognizer error!");
                }


                #endregion
            }
        }
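
The OnRecognition handler registered above is not shown. A plausible sketch, reusing the Perintah dictionary built from the database, is given below; the console messages are illustrative.

        // Hypothetical recognition callback (not in the original listing): look the recognized
        // sentence up in the Perintah command dictionary and report the result.
        static void OnRecognition(PXCMSpeechRecognition.RecognitionData data)
        {
            string sentence = data.scores[0].sentence;
            Module module;
            if (Perintah.TryGetValue(sentence, out module))
            {
                Console.WriteLine("Command matched: " + sentence);
            }
            else
            {
                Console.WriteLine("Not a registered command: " + sentence);
            }
        }
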