// Unity entry point: wires the text output object to RealSense Japanese dictation.
    void Start()
    {
        // Locate the "Word" object and grab the script that receives recognized text.
        textMesh   = GameObject.Find("Word");
        controller = textMesh.GetComponent<TextController>();

        // Create the speech-recognition session and its audio input.
        session = PXCMSession.CreateInstance();
        source  = session.CreateAudioSource();

        // Bind capture to audio device index 1 and report its name.
        PXCMAudioSource.DeviceInfo deviceInfo = null;
        source.QueryDeviceInfo(1, out deviceInfo);
        source.SetDevice(deviceInfo);
        Debug.Log(deviceInfo.name);

        // Create the recognizer and switch its profile to Japanese.
        session.CreateImpl<PXCMSpeechRecognition>(out sr);

        PXCMSpeechRecognition.ProfileInfo profile;
        sr.QueryProfile(out profile);
        profile.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_JP_JAPANESE;
        sr.SetProfile(profile);

        // Forward the top-scoring sentence to the text controller, then start
        // free-form dictation.
        handler = new PXCMSpeechRecognition.Handler
        {
            onRecognition = x => controller.SetText(x.scores[0].sentence)
        };
        sr.SetDictation();
        sr.StartRec(source, handler);
    }
// ---- Example 2 ----
 /// <summary>
 /// Prepares an empty per-language profile cache and a handler whose
 /// recognition/alert callbacks are bound to this instance's methods.
 /// </summary>
 public SpeechRecognition() {
     _recognitionProfiles = new Dictionary<SupportedLanguage, PXCMSpeechRecognition.ProfileInfo>();

     _speechRecognitionHandler               = new PXCMSpeechRecognition.Handler();
     _speechRecognitionHandler.onRecognition = OnRecognition;
     _speechRecognitionHandler.onAlert       = OnAlert;
 }
// ---- Example 3 ----
    /// <summary>
    /// Unity entry point: creates the log file and configures RealSense speech
    /// recognition for Japanese dictation, routing each result to Dataoutput.
    /// NOTE(review): StartRec is never called here — presumably started
    /// elsewhere; confirm against the rest of the class.
    /// </summary>
    public void Start()
    {
        filePath = Application.dataPath + @"\LogData.txt";
        // Fix: File.CreateText returns an open StreamWriter; dispose it
        // immediately so the file handle is not leaked (the original never
        // closed it, which could keep LogData.txt locked).
        using (File.CreateText(filePath))
        {
        }

        // Create the session and the audio input source.
        session = PXCMSession.CreateInstance();
        source = session.CreateAudioSource();

        PXCMAudioSource.DeviceInfo dinfo = null;

        // Detect the first audio device, select it, and report its name.
        source.QueryDeviceInfo(0, out dinfo);
        source.SetDevice(dinfo);
        Debug.Log(dinfo.name);

        // Create the speech-recognition module.
        session.CreateImpl <PXCMSpeechRecognition>(out sr);

        // Initial recognizer configuration: Japanese language profile.
        PXCMSpeechRecognition.ProfileInfo pinfo;
        sr.QueryProfile(out pinfo);
        pinfo.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_JP_JAPANESE;
        sr.SetProfile(pinfo);

        // Route each recognition (top-scoring sentence + duration) to the logger.
        handler = new PXCMSpeechRecognition.Handler();
        handler.onRecognition = (x) => Dataoutput(x.scores[0].sentence, x.duration);
        sr.SetDictation();
    }
// ---- Example 4 ----
 /// <summary>
 /// Initializes the language-to-profile cache and wires the recognizer
 /// callbacks to this instance's recognition/alert handlers.
 /// </summary>
 public SpeechRecognition()
 {
     _recognitionProfiles = new Dictionary<SupportedLanguage, PXCMSpeechRecognition.ProfileInfo>();

     var handler = new PXCMSpeechRecognition.Handler();
     handler.onRecognition = OnRecognition;
     handler.onAlert = OnAlert;
     _speechRecognitionHandler = handler;
 }
// ---- Example 5 ----
        /// <summary>
        /// Runs on a different thread
        /// </summary>
        /// <remarks>
        /// Voice-recognition worker loop. Refuses to start when already running,
        /// otherwise configures the recognizer, starts recording, and polls the
        /// stop/reset flags every 50 ms. A reset stops and restarts recording in
        /// place; a failed restart records an error and ends the thread.
        /// NOTE(review): stop/reset/running appear to be set from another thread —
        /// confirm they are volatile or otherwise synchronized.
        /// </remarks>
        private void DoVoiceRecognition()
        {
            // Guard against re-entry: only one worker may run at a time.
            if (running)
            {
                Debug.Log("Failed to start voice recognition - already running");
                SetError(SpeechManagerErrorType.VoiceThreadError_AlreadyRunning);
                return;
            }

            running = true;

            if (ConfigureSpeechRecognition())
            {
                /* Initialization */
                PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
                handler.onRecognition = OnRecognition;
                handler.onAlert       = OnAlert;

                pxcmStatus sts = sr.StartRec(source, handler);
                if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    // Main polling loop: runs until an external stop request.
                    while (!stop)
                    {
                        if (reset)
                        {
                            // Reset: stop, reconfigure, and restart recording.
                            sr.StopRec();
                            ConfigureSpeechRecognition();
                            sts = sr.StartRec(source, handler);
                            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                            {
                                // Restart failed: record the error and shut the
                                // worker down completely.
                                Debug.Log("VoiceThreadError - ResetFailed - StartRec!");
                                SetError(SpeechManagerErrorType.VoiceThreadError_ResetFailed_StartRec);
                                stop = true;
                                CleanUp();
                                running = false;
                                return;
                            }

                            reset = false;
                        }

                        System.Threading.Thread.Sleep(50);
                    }

                    sr.StopRec();
                }
                else
                {
                    Debug.Log("VoiceThreadError - InitFailed - StartRec!");
                    SetError(SpeechManagerErrorType.VoiceThreadError_InitFailed_StartRec);
                }
            }

            CleanUp();

            running = false;
        }
    /// <summary>
    /// Configures the audio source and starts RealSense dictation, logging
    /// progress and failures via Debug.Log. Returns early if no audio source
    /// is available.
    /// </summary>
    /// <param name="session">RealSense session used to create the recognizer.</param>
    void initSession(PXCMSession session)
    {
        if (source == null)
        {
            Debug.Log("Source was null!  No audio device?");
            return;
        }

        // Apply the configured capture volume (field value, not a hard-coded
        // constant — the old "0.2" comment was stale).
        source.SetVolume(setVolume);

        // Select the audio input device chosen elsewhere.
        Debug.Log("Using device: " + device.name);
        source.SetDevice(device);

        // Create the speech-recognition module. (The original also built an
        // unused PXCMSession.ImplDesc here; it was never passed to CreateImpl,
        // so that dead code is removed.)
        pxcmStatus sts = session.CreateImpl <PXCMSpeechRecognition>(out sr);

        if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // Use the recognizer's first (default) language profile.
            PXCMSpeechRecognition.ProfileInfo pinfo;
            sr.QueryProfile(0, out pinfo);
            Debug.Log(pinfo.language);
            sr.SetProfile(pinfo);

            // Free-form dictation rather than a command/control grammar.
            sr.SetDictation();

            // Hook recognition/alert callbacks and start recording.
            Debug.Log("Init Started");
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;
            handler.onAlert       = OnAlert;

            sts = sr.StartRec(source, handler);

            if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Debug.Log("Voice Rec Started");
            }
            else
            {
                Debug.Log("Voice Rec Start Failed");
            }
        }
        else
        {
            Debug.Log("Voice Rec Session Failed");
        }
    }
        /// <summary>
        /// Creates the audio source and speech-recognition module, selects the
        /// requested input device and language profile, enables dictation mode,
        /// and starts recording.
        /// </summary>
        /// <exception cref="Exception">
        /// Thrown when module creation, dictation setup, or recording start fails.
        /// </exception>
        private void Initialize()
        {
            this.GetSessionAndSenseManager();

            this.audioSource = this.session.CreateAudioSource();

            pxcmStatus sts = this.session.CreateImpl <PXCMSpeechRecognition>(this.descs[0], out this.recognition);

            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                // Fixed message: this failure is about creating the speech
                // recognition module, not the audio source created above.
                throw new Exception("Could not create speech recognition module. " + sts.ToString());
            }

            // Select the audio input device whose name matches the request.
            for (int i = 0; i < deviceInfos.Count; i++)
            {
                PXCMAudioSource.DeviceInfo dinfo = deviceInfos[i];
                if (dinfo.name.Equals(FInAudioDevice[0]))
                {
                    this.audioSource.SetDevice(dinfo);
                }
            }

            // Select the profile whose language matches the request.
            for (int i = 0; i < profileInfos.Count; i++)
            {
                PXCMSpeechRecognition.ProfileInfo pinfo = profileInfos[i];
                if (pinfo.language.ToString().Equals(FInLanguage[0]))
                {
                    this.recognition.SetProfile(pinfo);
                }
            }

            // Free-form dictation rather than a command grammar.
            sts = this.recognition.SetDictation();
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("Could not set dictation mode. " + sts.ToString());
            }

            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;

            sts = this.recognition.StartRec(this.audioSource, handler);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("Could not start recording.");
            }

            this.initialized = true;
        }
// ---- Example 8 ----
        /// <summary>
        /// Console demo: creates a RealSense session, builds a three-command
        /// grammar ("One"/"Two"/"Three"), starts recognition on the selected
        /// audio device, and runs until a key is pressed, then shuts down.
        /// </summary>
        static void Main(string[] args)
        {
            PXCMSession session = PXCMSession.CreateInstance();

            Console.WriteLine("SDK Version {0}.{1}", session.QueryVersion().major, session.QueryVersion().minor);
            PXCMSpeechRecognition sr;
            pxcmStatus            status = session.CreateImpl <PXCMSpeechRecognition>(out sr);

            Console.WriteLine("STATUS : " + status);
            PXCMSpeechRecognition.ProfileInfo pinfo;
            sr.QueryProfile(0, out pinfo);
            sr.SetProfile(pinfo);

            String[] cmds = new String[3] {
                "One", "Two", "Three"
            };
            // Build the grammar.
            sr.BuildGrammarFromStringList(1, cmds, null);
            // Set the active grammar.
            sr.SetGrammar(1);

            PXCMAudioSource source;

            source = session.CreateAudioSource();
            source.ScanDevices();

            // Enumerate devices until QueryDeviceInfo reports an error.
            for (int i = 0; ; i++)
            {
                PXCMAudioSource.DeviceInfo dinfo;
                if (source.QueryDeviceInfo(i, out dinfo) < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }
                devices.Add(dinfo);
                Console.WriteLine("Device : " + dinfo.name);
            }


            source.SetDevice(GetCheckedSource());

            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;
            // sr is a PXCMSpeechRecognition instance
            status = sr.StartRec(source, handler);
            Console.WriteLine("AFTER start : " + status);

            // Fix: the original looped forever (`while (true)`), making the
            // Dispose below unreachable and never stopping recognition.
            // Poll until the user presses a key, then shut down cleanly.
            while (!Console.KeyAvailable)
            {
                System.Threading.Thread.Sleep(5);
            }

            sr.StopRec();
            session.Dispose();
        }
        /// <summary>
        /// Configures RealSense speech recognition: first audio device,
        /// US-English profile, and a JSGF command grammar, then starts
        /// recording with the configured audio source. Closes the window
        /// when the grammar file is missing.
        /// </summary>
        private void ConfigureRealSenseSpeech()
        {
            // Instantiate session and audio source objects
            session     = PXCMSession.CreateInstance();
            audioSource = session.CreateAudioSource();

            // Select the first audio device
            PXCMAudioSource.DeviceInfo deviceInfo;
            deviceInfo = new PXCMAudioSource.DeviceInfo();
            audioSource.QueryDeviceInfo(0, out deviceInfo);
            audioSource.SetDevice(deviceInfo);

            // Set the audio recording volume
            audioSource.SetVolume(0.2f);

            // Create a speech recognition instance
            session.CreateImpl <PXCMSpeechRecognition>(out speechRecognition);

            // Initialize the speech recognition module
            PXCMSpeechRecognition.ProfileInfo profileInfo;
            speechRecognition.QueryProfile(0, out profileInfo);
            profileInfo.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_US_ENGLISH;
            speechRecognition.SetProfile(profileInfo);

            // Build and set the active grammar
            pxcmStatus status = speechRecognition.BuildGrammarFromFile(1, PXCMSpeechRecognition.GrammarFileType.GFT_JSGF, "grammarsvm.jsgf");

            if (status == pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                speechRecognition.SetGrammar(1);
            }
            else
            {
                MessageBox.Show("Java Speech Grammar Format (JSGF) file not found!");
                this.Close();
            }

            // Set the speech recognition handler and start recording.
            // Fixed: pass the configured audioSource instead of null so the
            // device and volume selected above are actually used.
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;
            speechRecognition.StartRec(audioSource, handler);
        }
// ---- Example 10 ----
    /// <summary>
    /// Starts Japanese dictation on audio device #1 and logs each recognized
    /// sentence to the Unity console.
    /// </summary>
    public void Start()
    {
        session = PXCMSession.CreateInstance();
        source  = session.CreateAudioSource();

        // Bind capture to device index 1.
        PXCMAudioSource.DeviceInfo deviceInfo = null;
        source.QueryDeviceInfo(1, out deviceInfo);
        source.SetDevice(deviceInfo);

        // Build the recognizer and force the Japanese language profile.
        session.CreateImpl<PXCMSpeechRecognition>(out sr);

        PXCMSpeechRecognition.ProfileInfo profile;
        sr.QueryProfile(out profile);
        profile.language = PXCMSpeechRecognition.LanguageType.LANGUAGE_JP_JAPANESE;
        sr.SetProfile(profile);

        // Log the best-scoring sentence for every recognition event.
        handler = new PXCMSpeechRecognition.Handler
        {
            onRecognition = result => Debug.Log(result.scores[0].sentence)
        };
        sr.SetDictation();
        sr.StartRec(source, handler);
    }
// ---- Example 11 ----
        /// <summary>
        /// Initializes speech recognition: creates the audio source, enumerates
        /// input devices and recognition engines into the TextDesc UI element
        /// (selecting the first of each), picks the US-English profile, switches
        /// to command mode, and starts recording. Throws on any setup failure
        /// (exception messages are intentionally left in Japanese for the UI).
        /// </summary>
        private void InitializeSpeechRecognition()
        {
            pxcmStatus sts;
            var        session = senseManager.QuerySession();

            // Create the audio input device.
            audioSource = session.CreateAudioSource();
            if (audioSource == null)
            {
                throw new Exception("音声入力デバイスの作成に失敗しました");
            }

            // Enumerate the audio input devices into the UI.
            TextDesc.Text  = "";
            TextDesc.Text += "音声入力デバイス\n";

            PXCMAudioSource.DeviceInfo device = null;

            audioSource.ScanDevices();
            for (int i = 0;; ++i)
            {
                PXCMAudioSource.DeviceInfo dinfo;
                sts = audioSource.QueryDeviceInfo(i, out dinfo);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                // Display the audio input device name.
                TextDesc.Text += "\t" + dinfo.name + "\n";

                // Use the first device.
                if (i == 0)
                {
                    device = dinfo;
                }
            }

            // Set the audio input device.
            sts = audioSource.SetDevice(device);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("音声入力デバイスの設定に失敗しました");
            }


            // Enumerate the speech recognition engines.
            TextDesc.Text += "音声認識エンジン\n";

            PXCMSession.ImplDesc inDesc  = new PXCMSession.ImplDesc();
            PXCMSession.ImplDesc outDesc = null;
            PXCMSession.ImplDesc desc    = null;
            inDesc.cuids[0] = PXCMSpeechRecognition.CUID;

            for (int i = 0; ; ++i)
            {
                // Get the next speech recognition engine.
                sts = session.QueryImpl(inDesc, i, out outDesc);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                // Display the engine's friendly name.
                TextDesc.Text += "\t" + outDesc.friendlyName + "\n";

                // Use the first engine.
                if (i == 0)
                {
                    desc = outDesc;
                }
            }

            // Create the speech recognition engine object.
            sts = session.CreateImpl <PXCMSpeechRecognition>(desc, out recognition);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("音声認識エンジンオブジェクトの作成に失敗しました");
            }

            // Enumerate the supported languages.
            PXCMSpeechRecognition.ProfileInfo profile = null;

            for (int j = 0;; ++j)
            {
                // Get the next profile the engine supports.
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sts = recognition.QueryProfile(j, out pinfo);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                // Display the supported language.
                TextDesc.Text += "\t\t" + LanguageToString(pinfo.language) + "\n";

                // Use the English engine (switch to Japanese once supported).
                if (pinfo.language == PXCMSpeechRecognition.LanguageType.LANGUAGE_US_ENGLISH)
                {
                    profile = pinfo;
                }
            }

            if (profile == null)
            {
                throw new Exception("選択した音声認識エンジンが見つかりませんでした");
            }

            // Set the language profile to use.
            sts = recognition.SetProfile(profile);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("音声認識エンジンオブジェクトの設定に失敗しました");
            }

            // Switch to command (grammar) mode.
            SetCommandMode();

            // Create the recognition notification handler.
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;

            // Start speech recognition.
            sts = recognition.StartRec(audioSource, handler);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                throw new Exception("音声認識の開始に失敗しました");
            }
        }
        /// <summary>
        /// UI-driven recognition pipeline: creates the audio source, applies
        /// volume/device/language settings from the form, chooses
        /// command-and-control (grammar file or command list) or dictation,
        /// then records until the form's stop flag is set. Progress is reported
        /// through form.PrintStatus and "DoIt:NN" Debug.Log trace markers.
        /// </summary>
        /// <param name="form1">UI form supplying settings and receiving status text.</param>
        /// <param name="session">RealSense session used to create SDK objects.</param>
        /// <param name="s">Unused; a fresh audio source is created instead (see commented line).</param>
        public void DoIt(MainForm form1, PXCMSession session, PXCMAudioSource s)
        {
            Debug.Log("DoIt");
            form = form1;
            Debug.Log("DoIt:01");
            /* Create the AudioSource instance */
            //source = s;
            source =session.CreateAudioSource();
            Debug.Log("DoIt:02");
            if (source == null) {
                CleanUp();
                form.PrintStatus("Stopped");
                return;
            }
            Debug.Log("DoIt:03");
            /* Set audio volume to 0.2 */
            source.SetVolume(0.2f);
            Debug.Log("DoIt:04");
            /* Set Audio Source */
            source.SetDevice(form.GetCheckedSource());
            Debug.Log("DoIt:05");
            /* Set Module */
            // NOTE(review): mdesc is built but never passed to CreateImpl below —
            // confirm whether module selection is intentionally unused.
            PXCMSession.ImplDesc mdesc = new PXCMSession.ImplDesc();
            mdesc.iuid = form.GetCheckedModule();
            Debug.Log("DoIt:06");
            pxcmStatus sts = session.CreateImpl<PXCMSpeechRecognition>(out sr);
            Debug.Log("DoIt:07");

            if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Debug.Log("DoIt:10");
                // Language profile chosen in the UI.
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(form.GetCheckedLanguage(), out pinfo);
                sr.SetProfile(pinfo);

                if (form.IsCommandControl())
                {
                    Debug.Log("DoIt:20");
                    string[] cmds = form.GetCommands();
                    if (form.g_file != null && form.g_file.Length != 0)
                    {
                        Debug.Log("DoIt:30");
                        // A ".list" file refreshes the command console first.
                        if (form.g_file.EndsWith(".list")){
                            Debug.Log("DoIt:40");
                            form.FillCommandListConsole(form.g_file);
                            cmds = form.GetCommands();
                            if (cmds.GetLength(0) == 0)
                                form.PrintStatus("Command List Load Errors");
                        }

                        // input Command/Control grammar file available, use it
                        if (!SetGrammarFromFile(form.g_file))
                        {
                            Debug.Log("DoIt:41");
                            form.PrintStatus("Can not set Grammar From File.");
                            CleanUp();
                            return;
                        };
                    }
                    else if (cmds != null && cmds.GetLength(0) != 0)
                    {
                        Debug.Log("DoIt:31");
                        // voice commands available, use them
                        sts = sr.BuildGrammarFromStringList(1, cmds, null);
                        sts = sr.SetGrammar(1);
                    } else {
                        Debug.Log("DoIt:32");
                        // No commands at all: fall back to dictation, with an
                        // optional vocabulary file.
                        form.PrintStatus("No Command List. Dictation instead.");
                        if (form.v_file != null && form.v_file.Length != 0) SetVocabularyFromFile(form.v_file);
                        sts = sr.SetDictation();
                    }
                }
                else
                {
                    Debug.Log("DoIt:21");
                    // Dictation mode selected in the UI.
                    if (form.v_file != null && form.v_file.Length != 0) SetVocabularyFromFile(form.v_file);
                    sts = sr.SetDictation();
                }

                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    form.PrintStatus("Can't start recognition.");
                    CleanUp();
                    return;
                }

                form.PrintStatus("Init Started");
                PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
                handler.onRecognition=OnRecognition;
                handler.onAlert=OnAlert;

                sts=sr.StartRec(source, handler);
                if (sts>=pxcmStatus.PXCM_STATUS_NO_ERROR) {
                    form.PrintStatus("Init OK");

                    // Wait until the stop button is clicked
                    while (!form.IsStop()) {
                        System.Threading.Thread.Sleep(5);
                    }

                    sr.StopRec();
                } else {
                    form.PrintStatus("Failed to initialize");
                }
            } else {
                form.PrintStatus("Init Failed");
            }

            Debug.Log("DoIt:98");
            CleanUp();
            form.PrintStatus("Stopped");
            Debug.Log("DoIt:99");
        }
// ---- Example 13 ----
        /// <summary>
        /// Window constructor: allocates hand-tracking joint buffers, enables
        /// the drone-driving test mode, then configures RealSense speech
        /// recognition with a fixed drone-command grammar and fills the
        /// on-screen command legend.
        /// NOTE(review): StartRec is not called here — recognition is
        /// presumably started elsewhere; confirm.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            #region Hand
            // Joint buffers for two hands (0x20 joints each) plus coordinate arrays.
            nodes = new PXCMHandData.JointData[][] { new PXCMHandData.JointData[0x20], new PXCMHandData.JointData[0x20] };
            xValues = new float[arraySize];
            yValues = new float[arraySize];
            zValues = new float[arraySize];
            #endregion Hand

            // Turn the drone-driving test mode ON by default.
            TestModeCheck.IsChecked = true;

            genericItems = new ObservableCollection<GenericItem>();

            se = PXCMSession.CreateInstance();

            if (se != null)
            {
                //processingThread = new Thread(new ThreadStart(ProcessingHandThread));
                //senseManager = PXCMSenseManager.CreateInstance();
                //senseManager.EnableHand();
                //senseManager.Init();
                //ConfigureHandModule();
                //processingThread.Start();



                // session is a PXCMSession instance.
                audiosource = se.CreateAudioSource();
                // Scan and Enumerate audio devices
                audiosource.ScanDevices();

                PXCMAudioSource.DeviceInfo dinfo = null;

                // Counts down, so the final iteration leaves device index 0 selected.
                for (int d = audiosource.QueryDeviceNum() - 1; d >= 0; d--)
                {
                    audiosource.QueryDeviceInfo(d, out dinfo);
                }
                audiosource.SetDevice(dinfo);

                se.CreateImpl<PXCMSpeechRecognition>(out sr);


                // Use the default (first) language profile.
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(0, out pinfo);
                sr.SetProfile(pinfo);

                // sr is a PXCMSpeechRecognition instance.
                // Command grammar with bit-flag labels, one bit per command.
                // NOTE(review): label 5 breaks the powers-of-two pattern
                // (1,2,4,5,8,...) — confirm it is intended.
                String[] cmds = new String[] { "Takeoff", "Land", "Rotate Left", "Rotate Right", "Advance",
                    "Back", "Up", "Down", "Left", "Right", "Stop" , "Dance"};
                int[] labels = new int[] { 1, 2, 4, 5, 8, 16, 32, 64, 128, 256, 512, 1024 };
                // Build the grammar.
                sr.BuildGrammarFromStringList(1, cmds, labels);
                // Set the active grammar.
                sr.SetGrammar(1);
                // Set handler

                RecognitionHandler = new PXCMSpeechRecognition.Handler();

                RecognitionHandler.onRecognition = OnRecognition;

                Legenda.Items.Add("------ Available Commands ------");
                foreach (var cmd in cmds)
                {
                    Legenda.Items.Add(cmd);
                }
            }
        }
// ---- Example 14 ----
        /// <summary>
        /// Configures the recognizer with a command grammar loaded from the
        /// JONGOS database (Perintah map) and starts recording. Selects the
        /// highest-indexed audio device reported by the source. A null session
        /// is a no-op.
        /// </summary>
        /// <param name="session">RealSense session used to create SDK objects.</param>
        static void SetupRecognizer(PXCMSession session)
        {
            PXCMAudioSource.DeviceInfo dinfo = null;
            if (session != null)
            {
                #region Audio Source

                // session is a PXCMSession instance.
                source = session.CreateAudioSource();

                // Scan and Enumerate audio devices
                source.ScanDevices();

                // Starts at the last index and breaks immediately, so the
                // highest-indexed device is the one kept.
                for (int d = source.QueryDeviceNum() - 1; d >= 0; d--)
                {
                    source.QueryDeviceInfo(d, out dinfo);

                    // Select one and break out of the loop
                    break;
                }
                if (dinfo != null)
                {
                    // Set the active device
                    source.SetDevice(dinfo);
                }

                #endregion

                #region Recognizer Instance

                pxcmStatus sts = session.CreateImpl <PXCMSpeechRecognition>(out sr);

                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    // No recognizer available; bail out silently.
                    return;
                }

                // Use the default (first) language profile.
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(0, out pinfo);
                sr.SetProfile(pinfo);

                //sr.SetDictation();

                #endregion

                #region Grammar
                Perintah = new Dictionary <string, Module>();
                // sr is a PXCMSpeechRecognition instance.
                // Load each distinct module's voice command from the database,
                // keyed by command text.
                using (var data = new JONGOS_DBEntities())
                {
                    var listCommand = from c in data.Modules
                                      orderby c.ID
                                      select c;
                    foreach (var item in listCommand.Distinct())
                    {
                        Perintah.Add(item.VoiceCommand, item);
                    }
                }
                List <string> cmds = new List <string>();
                foreach (var cmd in Perintah.Keys)
                {
                    cmds.Add(cmd);
                }

                // Build the grammar.
                sr.BuildGrammarFromStringList(1, cmds.ToArray(), null);

                // Set the active grammar.
                sr.SetGrammar(1);
                #endregion

                #region recognition Event
                // Set handler
                PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
                handler.onRecognition = OnRecognition;
                //handler.onAlert = OnAlert;
                // sr is a PXCMSpeechRecognition instance
                pxcmStatus stsrec = sr.StartRec(source, handler);
                if (stsrec < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    Console.WriteLine("Recognizer error!");
                }


                #endregion
            }
        }
		/// <summary>
		/// Voice-recognition worker; runs on a background thread. Configures the
		/// recognizer, starts recording, and polls the stop/reset control flags
		/// every 50 ms until asked to stop. A reset restarts recording in place;
		/// a failed restart records an error and terminates the worker.
		/// </summary>
		private void DoVoiceRecognition()
		{
			// Guard against double entry.
			if (running)
			{
				Debug.Log("Failed to start voice recognition - already running");
				SetError(SpeechManagerErrorType.VoiceThreadError_AlreadyRunning);
				return;
			}

			running = true;

			if (ConfigureSpeechRecognition())
			{
				// Bind the recognition/alert callbacks.
				var handler = new PXCMSpeechRecognition.Handler();
				handler.onRecognition = OnRecognition;
				handler.onAlert = OnAlert;

				pxcmStatus status = sr.StartRec(source, handler);
				if (status >= pxcmStatus.PXCM_STATUS_NO_ERROR)
				{
					// Poll the control flags until an external stop request.
					while (!stop)
					{
						if (reset)
						{
							// Reset: stop, reconfigure, and restart recording.
							sr.StopRec();
							ConfigureSpeechRecognition();
							status = sr.StartRec(source, handler);
							if (status < pxcmStatus.PXCM_STATUS_NO_ERROR)
							{
								Debug.Log("VoiceThreadError - ResetFailed - StartRec!");
								SetError(SpeechManagerErrorType.VoiceThreadError_ResetFailed_StartRec);
								stop = true;
								CleanUp();
								running = false;
								return;
							}

							reset = false;
						}

						System.Threading.Thread.Sleep(50);
					}

					sr.StopRec();
				}
				else
				{
					Debug.Log("VoiceThreadError - InitFailed - StartRec!");
					SetError(SpeechManagerErrorType.VoiceThreadError_InitFailed_StartRec);
				}
			}

			CleanUp();

			running = false;
		}
// ---- Example 16 ----
    /// <summary>
    /// Configures the audio source (a specific Creative microphone array,
    /// matched by display name), selects the configured language profile,
    /// and starts RealSense dictation. Logs progress via Debug.Log.
    /// </summary>
    /// <param name="session">RealSense session used to create the recognizer.</param>
    void InitSession(PXCMSession session)
    {
        if (source == null)
        {
            Debug.Log("Source was null!  No audio device?");
            return;
        }

        // Apply the configured capture volume.
        source.SetVolume(setVolume);

        // Pick the specific microphone array by its display name.
        foreach (var device in SpeechRecognizer.devices)
        {
            if (device.name == "マイク配列 (Creative VF0800)")
            {
                source.SetDevice(device);
                Debug.Log("Using device: " + device.name);
            }
        }

        // Create the recognizer. (The original also built an unused
        // PXCMSession.ImplDesc here; it was never passed to CreateImpl, so
        // that dead code is removed.)
        var status = session.CreateImpl<PXCMSpeechRecognition>(out this.recognizer);

        if (status >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // Select the language profile at the configured index.
            PXCMSpeechRecognition.ProfileInfo pinfo;
            this.recognizer.QueryProfile(this.languageIndex, out pinfo);
            Debug.Log(pinfo.language);
            this.recognizer.SetProfile(pinfo);

            // Free-form dictation mode.
            this.recognizer.SetDictation();

            // Hook recognition/alert callbacks and start recording.
            Debug.Log("Init Started");
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;
            handler.onAlert = OnAlert;

            status = this.recognizer.StartRec(source, handler);

            if (status >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Debug.Log("Voice Rec Started");
            }
            else
            {
                Debug.Log("Voice Rec Start Failed");
            }
        }
        else
        {
            Debug.Log("Voice Rec Session Failed");
        }
    }
// ---- Example 17 ----
        /// <summary>
        /// Runs a full speech-recognition session driven by the form's settings:
        /// creates the audio source, configures grammar (command/control) or
        /// dictation, starts recognition, and blocks until the form's stop
        /// button is pressed. Status is reported back through form.PrintStatus.
        /// </summary>
        /// <param name="form1">UI form supplying device, language, grammar/vocabulary files and the stop flag.</param>
        /// <param name="session">SDK session used to create the audio source and the recognition module.</param>
        public void DoIt(MainForm form1, PXCMSession session)
        {
            form = form1;

            /* Create the AudioSource instance */
            source = session.CreateAudioSource();

            if (source == null)
            {
                CleanUp();
                form.PrintStatus("Stopped");
                return;
            }

            /* Set audio volume to 0.2 */
            source.SetVolume(0.2f);

            /* Set Audio Source to the device checked on the form */
            source.SetDevice(form.GetCheckedSource());

            // NOTE: the original sample built an ImplDesc from form.GetCheckedModule()
            // but never passed it to CreateImpl; that dead code has been removed.
            pxcmStatus sts = session.CreateImpl <PXCMSpeechRecognition>(out sr);

            if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                /* Configure the profile for the language selected on the form */
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sr.QueryProfile(form.GetCheckedLanguage(), out pinfo);
                sr.SetProfile(pinfo);

                /* Set Command/Control or Dictation */
                if (form.IsCommandControl())
                {
                    string[] cmds = form.GetCommands();
                    if (form.g_file != null && form.g_file.Length != 0)
                    {
                        if (form.g_file.EndsWith(".list"))
                        {
                            // A .list file replaces the console command list.
                            form.FillCommandListConsole(form.g_file);
                            cmds = form.GetCommands();
                            if (cmds.GetLength(0) == 0)
                            {
                                form.PrintStatus("Command List Load Errors");
                            }
                        }

                        // input Command/Control grammar file available, use it
                        if (!SetGrammarFromFile(form.g_file))
                        {
                            form.PrintStatus("Can not set Grammar From File.");
                            CleanUp();
                            return;
                        }
                    }
                    else if (cmds != null && cmds.GetLength(0) != 0)
                    {
                        // voice commands available, use them
                        // BUGFIX: build status was previously overwritten unchecked;
                        // only activate the grammar if it was built successfully.
                        sts = sr.BuildGrammarFromStringList(1, cmds, null);
                        if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            sts = sr.SetGrammar(1);
                        }
                    }
                    else
                    {
                        // No commands at all: fall back to dictation mode.
                        form.PrintStatus("No Command List. Dictation instead.");
                        if (form.v_file != null && form.v_file.Length != 0)
                        {
                            SetVocabularyFromFile(form.v_file);
                        }
                        sts = sr.SetDictation();
                    }
                }
                else
                {
                    // Dictation mode, with an optional custom vocabulary file.
                    if (form.v_file != null && form.v_file.Length != 0)
                    {
                        SetVocabularyFromFile(form.v_file);
                    }
                    sts = sr.SetDictation();
                }

                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    form.PrintStatus("Can't start recognition.");
                    CleanUp();
                    return;
                }

                /* Initialization: wire the recognition/alert callbacks and start capture */
                form.PrintStatus("Init Started");
                form.PutLabel1Text("初期化中...");
                PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
                handler.onRecognition = OnRecognition;
                handler.onAlert       = OnAlert;

                sts = sr.StartRec(source, handler);
                if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    form.PrintStatus("Init OK");
                    form.PutLabel1Text("マイクに向かって話してください");

                    /* Wait until the stop button is clicked */
                    while (!form.IsStop())
                    {
                        System.Threading.Thread.Sleep(5);
                    }

                    sr.StopRec();
                }
                else
                {
                    form.PrintStatus("Failed to initialize");
                }
            }
            else
            {
                form.PrintStatus("Init Failed");
            }

            CleanUp();
            form.PrintStatus("Stopped");
        }
Ejemplo n.º 18
0
    /// <summary>
    /// Initializes the RealSense speech-recognition pipeline in dictation mode
    /// and starts asynchronous recognition on the previously selected audio
    /// device. Progress and failures are reported only via Debug.Log; the
    /// method returns silently if no audio source is available.
    /// </summary>
    /// <param name="session">SDK session used to create the recognition module.</param>
    void initSession(PXCMSession session)
    {
        if (source == null)
        {
            Debug.Log("Source was null!  No audio device?");
            return;
        }

        // Apply the configured input volume (field value, not a hard-coded 0.2).
        source.SetVolume(setVolume);

        // Route audio from the selected capture device.
        Debug.Log("Using device: " + device.name);
        source.SetDevice(device);

        // NOTE: the original sample built an ImplDesc (iuid = 0) that was never
        // passed to CreateImpl; that dead code has been removed.
        pxcmStatus sts = session.CreateImpl<PXCMSpeechRecognition>(out sr);

        if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
        {
            // Configure: use the module's first (index 0) language profile.
            PXCMSpeechRecognition.ProfileInfo pinfo;
            sr.QueryProfile(0, out pinfo);
            Debug.Log(pinfo.language);
            sr.SetProfile(pinfo);

            // Free dictation rather than a command/control grammar.
            sr.SetDictation();

            // Wire the recognition/alert callbacks and start capturing.
            Debug.Log("Init Started");
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;
            handler.onAlert = OnAlert;

            sts = sr.StartRec(source, handler);

            if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Debug.Log("Voice Rec Started");
            }
            else
            {
                Debug.Log("Voice Rec Start Failed");
            }
        }
        else
        {
            Debug.Log("Voice Rec Session Failed");
        }
    }
        /// <summary>
        /// Initializes speech recognition: creates an audio source from the
        /// SenseManager session, picks the first enumerated input device and the
        /// first recognition engine, selects the US-English profile, switches to
        /// command mode, and starts recognition. Every enumerated device, engine
        /// and language name is appended to TextDesc for display.
        /// </summary>
        /// <exception cref="Exception">
        /// Thrown (with a Japanese message, matching the rest of this sample)
        /// when any SDK step fails or no device/profile is found.
        /// </exception>
        private void InitializeSpeechRecognition()
        {
            pxcmStatus sts;
            var session = senseManager.QuerySession();

            // Create the audio input device
            audioSource = session.CreateAudioSource();
            if ( audioSource == null ){
                throw new Exception( "音声入力デバイスの作成に失敗しました" );
            }

            // Enumerate the audio input devices
            TextDesc.Text = "";
            TextDesc.Text += "音声入力デバイス\n";

            PXCMAudioSource.DeviceInfo device = null;

            audioSource.ScanDevices();
            for ( int i = 0;; ++i ) {
                PXCMAudioSource.DeviceInfo dinfo;
                sts = audioSource.QueryDeviceInfo( i, out dinfo );
                if ( sts < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
                    break;
                }

                // Show the device name
                TextDesc.Text += "\t" + dinfo.name + "\n";

                // Use the first device
                if ( i == 0 ){
                    device = dinfo;
                }
            }

            // BUGFIX: guard against an empty device list (mirrors the
            // profile == null check below); previously a null device was
            // passed straight to SetDevice.
            if ( device == null ) {
                throw new Exception( "音声入力デバイスが見つかりませんでした" );
            }

            // Select the audio input device
            sts = audioSource.SetDevice( device );
            if ( sts < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
                throw new Exception( "音声入力デバイスの設定に失敗しました" );
            }

            // Enumerate the speech recognition engines
            TextDesc.Text += "音声認識エンジン\n";

            PXCMSession.ImplDesc inDesc = new PXCMSession.ImplDesc();
            PXCMSession.ImplDesc outDesc = null;
            PXCMSession.ImplDesc desc = null;
            inDesc.cuids[0] = PXCMSpeechRecognition.CUID;

            for ( int i = 0; ; ++i ) {
                // Query the next recognition engine
                sts = session.QueryImpl( inDesc, i, out outDesc );
                if ( sts < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
                    break;
                }

                // Show the engine name
                TextDesc.Text += "\t" + outDesc.friendlyName + "\n";

                // Use the first engine
                if( i== 0 ){
                    desc = outDesc;
                }
            }

            // Create the speech recognition engine object
            sts = session.CreateImpl<PXCMSpeechRecognition>( desc, out recognition );
            if ( sts < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
                throw new Exception( "音声認識エンジンオブジェクトの作成に失敗しました" );
            }

            // Enumerate the supported languages
            PXCMSpeechRecognition.ProfileInfo profile = null;

            for ( int j = 0;; ++j ) {
                // Query the next profile this engine provides
                PXCMSpeechRecognition.ProfileInfo pinfo;
                sts = recognition.QueryProfile( j, out pinfo );
                if ( sts < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
                    break;
                }

                // Show the language name
                TextDesc.Text += "\t\t" + LanguageToString( pinfo.language ) + "\n";

                // Use the English engine (switch to Japanese once supported)
                if ( pinfo.language == PXCMSpeechRecognition.LanguageType.LANGUAGE_US_ENGLISH ){
                    profile = pinfo;
                }
            }

            if ( profile == null ){
                throw new Exception( "選択した音声認識エンジンが見つかりませんでした" );
            }

            // Apply the selected language profile
            sts = recognition.SetProfile( profile );
            if ( sts < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
                throw new Exception( "音声認識エンジンオブジェクトの設定に失敗しました" );
            }

            // Switch the engine into command mode
            SetCommandMode();

            // Create the recognition notification handler
            PXCMSpeechRecognition.Handler handler = new PXCMSpeechRecognition.Handler();
            handler.onRecognition = OnRecognition;

            // Start recognition
            sts = recognition.StartRec( audioSource, handler );
            if ( sts < pxcmStatus.PXCM_STATUS_NO_ERROR ) {
                throw new Exception( "音声認識の開始に失敗しました" );
            }
        }