Example 1
    /// <summary>
    /// Performs a single speech-recognition pass over a pre-recorded audio file.
    /// </summary>
    /// <param name="ssid">Recognition session ID returned by QISRSessionBegin.</param>
    /// <returns>The final recognition status, or ISR_REC_NULL on failure.</returns>
    private static RecogStatus SingleSpeechRecognition(string ssid)
    {
        rec_result = "";
        byte[] audio_buffer = Utils.GetFileData(mt.voice_path + "/rec.wav");
        long   audio_size   = audio_buffer.Length;
        long   audio_count  = 0;

        ep_status = epStatus.MSP_EP_LOOKING_FOR_SPEECH;
        while (epStatus.MSP_EP_AFTER_SPEECH != ep_status)
        {
            audio_stat = audioStatus.MSP_AUDIO_SAMPLE_CONTINUE;
            long len = 10 * FRAME_LEN; // 16 kHz audio, 10 frames (~200 ms)
            if (audio_size < 2 * len)
            {
                len = (int)audio_size;
            }
            if (len <= 0)
            {
                break;
            }
            if (0 == audio_count)
            {
                audio_stat = audioStatus.MSP_AUDIO_SAMPLE_FIRST;
            }
            ret = MSC.QISRAudioWrite(ssid, audio_buffer.Skip((int)audio_count).Take((int)len).ToArray(), (uint)len, audio_stat, ref ep_status, ref recoStatus);
            if (ret != (int)ErrorCode.MSP_SUCCESS)
            {
                Debug.Log(string.Format("读取音频失败:{0}!", ret)); return(RecogStatus.ISR_REC_NULL);
            }
            audio_count += len;
            audio_size  -= len;
        }
        ret = MSC.QISRAudioWrite(ssid, null, 0, audioStatus.MSP_AUDIO_SAMPLE_LAST, ref ep_status, ref recoStatus);
        if (ret != (int)ErrorCode.MSP_SUCCESS)
        {
            Debug.Log(string.Format("识别音频失败:{0}!", ret)); return(RecogStatus.ISR_REC_NULL);
        }
        while (RecogStatus.ISR_REC_STATUS_SPEECH_COMPLETE != recoStatus)
        {
            IntPtr rslt = MSC.QISRGetResult(ssid, ref recoStatus, 0, ref ret);
            if (ret != (int)ErrorCode.MSP_SUCCESS)
            {
                Debug.Log(string.Format("音频无法识别:{0}!", ret)); return(RecogStatus.ISR_REC_NULL);
            }

            if (rslt != IntPtr.Zero)
            {
                rec_result = rec_result + Utils.Ptr2Str(rslt);
                if (rec_result.Length >= BUFFER_SIZE)
                {
                    Debug.Log("no enough buffer for rec_result");
                    return(RecogStatus.ISR_REC_NULL);
                }
            }
            Thread.Sleep(150); // avoid busy-polling the CPU
        }
        return(recoStatus);
    }
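Example 1 assumes a recognition session is already open and that ssid is its session ID. A minimal caller sketch, reusing QISRSessionBegin/QISRSessionEnd exactly as they appear in Example 3; the speech_param string shown here is only a typical configuration and is an assumption, not taken from the original code:

    // Hypothetical caller sketch; the parameter string is an assumption.
    int ret = 0;
    string speech_param = "sub=iat,domain=iat,language=zh_cn,sample_rate=16000,result_type=plain";
    string ssid = Utils.Ptr2Str(MSC.QISRSessionBegin(string.Empty, speech_param, ref ret));
    if (ret == (int)ErrorCode.MSP_SUCCESS)
    {
        RecogStatus status = SingleSpeechRecognition(ssid);
        Debug.Log("Final status: " + status + ", recognized text: " + rec_result);
        MSC.QISRSessionEnd(ssid, "normal end"); // always release the session
    }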
Example 2
    /// <summary>
    /// Voice wake-up (keyword spotting) method.
    /// </summary>
    /// <param name="sid">Voice wake-up session ID.</param>
    private static void VoiceArousal(string sid)
    {
        string file = mic.startRecording("hx");

        if (file == string.Empty)
        {
            return;
        }
        //byte[] audio_buffer = GetFileData(file);
        byte[] audio_buffer = Utils.GetFileData(Environment.CurrentDirectory + "/wav/rec.wav");
        int    audio_size   = audio_buffer.Length;
        int    audio_count  = 0;

        while (audio_stat != audioStatus.MSP_AUDIO_SAMPLE_LAST)
        {
            int len = 10 * FRAME_LEN; // 16 kHz audio, 10 frames (~200 ms)
            audio_stat = audioStatus.MSP_AUDIO_SAMPLE_CONTINUE;
            if (audio_size <= len)
            {
                len        = audio_size;
                audio_stat = audioStatus.MSP_AUDIO_SAMPLE_LAST; // last block
            }
            if (0 == audio_count)
            {
                audio_stat = audioStatus.MSP_AUDIO_SAMPLE_FIRST;
            }
            //Debug.Log(string.Format("{1} 音频长度[{0}]", len, DateTime.Now.Ticks));
            ret = MSC.QIVWAudioWrite(sid, audio_buffer.Skip(audio_count).Take(len).ToArray(), (uint)len, audio_stat);
            if (ret != (int)ErrorCode.MSP_SUCCESS)
            {
                Debug.Log(string.Format("{0} 语音唤醒失败:{1}", DateTime.Now.Ticks, ret)); return;
            }
            audio_count += len;
            audio_size  -= len;
            Thread.Sleep(200);
        }
    }
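Both file-based examples feed the engine in chunks of 10 * FRAME_LEN bytes. Assuming iFlytek's usual FRAME_LEN of 640 bytes (16 kHz, 16-bit mono PCM is 32,000 bytes per second), one chunk is 6,400 bytes, roughly 200 ms of audio, which is why the wake-up loop sleeps 200 ms per write to approximate a live microphone stream. The Utils.GetFileData helper used above is not shown; a minimal sketch, assuming it simply returns the raw bytes of the file:

    // Hypothetical sketch of the Utils.GetFileData helper (not the original implementation).
    using System.IO;

    public static partial class Utils
    {
        public static byte[] GetFileData(string path)
        {
            // The engine expects 16 kHz, 16-bit mono PCM; if the recorder writes a
            // standard WAV file, the 44-byte header should be stripped as well.
            return File.Exists(path) ? File.ReadAllBytes(path) : new byte[0];
        }
    }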
Example 3
    /// <summary>
    /// Speech recognition over a list of buffered audio chunks.
    /// </summary>
    /// <param name="VoiceBuffer">Buffered audio chunks captured from the microphone.</param>
    public static void SpeechRecognition(List <VoiceData> VoiceBuffer)
    {
        audio_stat = audioStatus.MSP_AUDIO_SAMPLE_CONTINUE;
        ep_status  = epStatus.MSP_EP_LOOKING_FOR_SPEECH;
        recoStatus = RecogStatus.ISR_REC_STATUS_SUCCESS;
        sid        = Utils.Ptr2Str(MSC.QISRSessionBegin(string.Empty, speech_param, ref ret));
        Debug.Log(string.Format("-->开启一次语音识别[{0}]", sid));
        if (ret != (int)ErrorCode.MSP_SUCCESS)
        {
            Debug.Log("加载失败!"); return;
        }

        for (int i = 0; i < VoiceBuffer.Count(); i++)
        {
            audio_stat = audioStatus.MSP_AUDIO_SAMPLE_CONTINUE;
            if (i == 0)
            {
                audio_stat = audioStatus.MSP_AUDIO_SAMPLE_FIRST;
            }
            ret = MSC.QISRAudioWrite(sid, VoiceBuffer[i].data, (uint)VoiceBuffer[i].data.Length, audio_stat, ref ep_status, ref recoStatus);
            if ((int)ErrorCode.MSP_SUCCESS != ret)
            {
                Debug.Log("\nQISRAudioWrite failed! error code:" + ret);
                MSC.QISRSessionEnd(sid, null);
                return; // stop feeding audio once the session has been torn down
            }
        }

        ret = MSC.QISRAudioWrite(sid, null, 0, audioStatus.MSP_AUDIO_SAMPLE_LAST, ref ep_status, ref recoStatus);
        if ((int)ErrorCode.MSP_SUCCESS != ret)
        {
            Debug.Log("\nQISRAudioWrite failed! error code:" + ret);
            MSC.QISRSessionEnd(sid, null); // release the session before bailing out
            return;
        }

        while (RecogStatus.ISR_REC_STATUS_SPEECH_COMPLETE != recoStatus)
        {
            IntPtr rslt = MSC.QISRGetResult(sid, ref recoStatus, 0, ref ret);
            if ((int)ErrorCode.MSP_SUCCESS != ret)
            {
                Debug.Log("\nQISRGetResult failed, error code: " + ret);
                break;
            }
            if (IntPtr.Zero != rslt)
            {
                string tempRes = Utils.Ptr2Str(rslt);

                ask_rec_result = ask_rec_result + tempRes;
                if (ask_rec_result.Length >= BUFFER_SIZE)
                {
                    Debug.Log("\nno enough buffer for rec_result !\n");
                    break;
                }
            }
            Thread.Sleep(150); // avoid busy-polling while waiting for the final result
        }
        int errorcode = MSC.QISRSessionEnd(sid, "正常结束");

        // Speech recognition result
        if (ask_rec_result.Length != 0)
        {
            FlowManage.P2MMode(nar);
            Debug.Log("Recognition result: " + ask_rec_result);
            ask_rec_result = "";
        }
    }
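Example 3 consumes a List<VoiceData> captured from the microphone, but the type itself is not part of the listing. From its usage (VoiceBuffer[i].data is passed to QISRAudioWrite as a byte[]), a minimal hypothetical definition could look like this:

    // Hypothetical definition inferred from usage; the real class may carry more fields.
    public class VoiceData
    {
        public byte[] data; // one chunk of 16 kHz, 16-bit mono PCM audio
    }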
Example 4
 public static extern int QIVWAudioWrite(string sessionID, byte[] audioData, uint audioLen, audioStatus audioStatus);
Example 5
 public static extern int QISRAudioWrite(string sessionID, byte[] waveData, uint waveLen, audioStatus audioStatus, ref epStatus epStatus, ref RecogStatus recogStatus);
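Examples 4 and 5 show only the managed signatures of the native audio-write calls. To be callable they have to be declared as P/Invoke entry points into the MSC native library; a minimal sketch follows, where the DLL name (msc.dll vs. msc_x64.dll), the calling convention, and the character set are assumptions that must match the SDK build actually shipped:

    // Hypothetical P/Invoke declarations; DLL name and calling convention are assumptions.
    using System;
    using System.Runtime.InteropServices;

    public static partial class MSC
    {
        private const string MscDll = "msc.dll"; // e.g. "msc_x64.dll" for 64-bit builds

        [DllImport(MscDll, CallingConvention = CallingConvention.StdCall, CharSet = CharSet.Ansi)]
        public static extern int QIVWAudioWrite(string sessionID, byte[] audioData, uint audioLen, audioStatus audioStatus);

        [DllImport(MscDll, CallingConvention = CallingConvention.StdCall, CharSet = CharSet.Ansi)]
        public static extern int QISRAudioWrite(string sessionID, byte[] waveData, uint waveLen, audioStatus audioStatus, ref epStatus epStatus, ref RecogStatus recogStatus);
    }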