/// <summary>
/// Asynchronously invokes the ASRT speech-recognition language model over HTTP.
/// </summary>
/// <param name="sequencePinyin">Sequence of pinyin symbols to decode into text.</param>
/// <returns>The server reply deserialized into an <see cref="AsrtApiResponse"/>.</returns>
public override async Task<object> RecogniteLanguageAsync(string[] sequencePinyin)
{
    // Build the request payload and the endpoint URL for the language model.
    var request = new AsrtApiLanguageRequest(sequencePinyin);
    string endpoint = _url + _subPath + "/language";

    // POST the JSON payload and parse the JSON reply.
    string replyJson = await Common.HttpPostAsync(endpoint, "application/json", request.ToJson());
    var response = new AsrtApiResponse();
    response.FromJson(replyJson);
    return response;
}
/// <summary>
/// Asynchronously invokes the ASRT speech-recognition acoustic model over HTTP.
/// </summary>
/// <param name="wavData">Raw WAVE sample bytes to recognize.</param>
/// <param name="sampleRate">Sample rate of the audio in Hz.</param>
/// <param name="channels">Number of audio channels.</param>
/// <param name="byteWidth">Bytes per sample.</param>
/// <returns>The server reply deserialized into an <see cref="AsrtApiResponse"/>.</returns>
public override async Task<object> RecogniteSpeechAsync(byte[] wavData, int sampleRate, int channels, int byteWidth)
{
    // Build the request payload and the endpoint URL for the acoustic model.
    var request = new AsrtApiSpeechRequest(wavData, sampleRate, channels, byteWidth);
    string endpoint = _url + _subPath + "/speech";

    // POST the JSON payload and parse the JSON reply.
    string replyJson = await Common.HttpPostAsync(endpoint, "application/json", request.ToJson());
    var response = new AsrtApiResponse();
    response.FromJson(replyJson);
    return response;
}
/// <summary>
/// Timer callback: drains the in-memory recording buffer and streams the
/// captured audio to the speech recognizer, then raises <c>OnReceiveText</c>
/// with the recognized text.
/// </summary>
/// <param name="sender">The timer that raised the tick.</param>
/// <param name="e">Tick event data (unused).</param>
/// <remarks>
/// <c>async void</c> is acceptable only because this is a top-level event
/// handler; exceptions must be caught locally, since an unhandled exception
/// escaping an async void method would terminate the application.
/// </remarks>
private async void Timer_Tick(object sender, object e)
{
    Console.WriteLine("Ailemon.Asrt.AsrtClientProxy: 录音周期流式传输");
    try
    {
        // Periodically pull buffered wave data from the recorder and send it for recognition.
        Stream waveMemStream = _audioRecorder.PopMemoryStream();
        WaveData wav = SDK.ReadWaveDatas(waveMemStream);
        AsrtApiResponse rsp = (AsrtApiResponse)await _speechRecognizer.RecogniteAsync(
            wav.byteWavs, wav.sampleRate, wav.channels, wav.byteWidth);
        AsrtResult result = new AsrtResult((string)rsp.Result, true, rsp.StatusCode, rsp.StatusMessage);
        OnReceiveText(this, result);
    }
    catch (Exception ex)
    {
        // Fix: previously any recorder/network failure escaped this async void
        // handler unobserved; log it instead of crashing the process.
        Console.WriteLine("Ailemon.Asrt.AsrtClientProxy: Timer_Tick failed: " + ex.Message);
    }
}
/// <summary>
/// Button handler: prompts the user for a WAVE file, runs it through the
/// ASRT speech recognizer, and forwards the recognized text to the UI via
/// <c>SpeechRecognizer_OnReceiveText</c>.
/// </summary>
/// <param name="sender">The button that was clicked.</param>
/// <param name="e">Routed event data (unused).</param>
/// <remarks>
/// <c>async void</c> is acceptable only because this is a top-level UI event
/// handler; exceptions are caught locally so a failed recognition cannot
/// crash the application.
/// </remarks>
private async void btn_recognite_file_Click(object sender, RoutedEventArgs e)
{
    Microsoft.Win32.OpenFileDialog openFileDialog = new Microsoft.Win32.OpenFileDialog();
    openFileDialog.Filter = "WAVE音频文件|*.wav";
    // Fix: DefaultExt takes a file extension, not a filter string.
    openFileDialog.DefaultExt = ".wav";
    if (openFileDialog.ShowDialog() == true)
    {
        string filename = openFileDialog.FileName;
        try
        {
            Ailemon.Asrt.BaseSpeechRecognizer sr = Ailemon.Asrt.SDK.GetSpeechRecognizer(host, port, protocol);
            Ailemon.Asrt.AsrtApiResponse rsp = (Ailemon.Asrt.AsrtApiResponse)await sr.RecogniteFile(filename);
            System.Console.WriteLine((string)rsp.Result);
            AsrtResult result = new AsrtResult("\n" + (string)rsp.Result + "\n", true, rsp.StatusCode, rsp.StatusMessage);
            SpeechRecognizer_OnReceiveText(sender, result);
        }
        catch (Exception ex)
        {
            // Fix: previously any I/O or network failure escaped this async void
            // handler unobserved; log it instead of crashing the process.
            System.Console.WriteLine("btn_recognite_file_Click failed: " + ex.Message);
        }
    }
}
/// <summary>
/// Asynchronously stops the client-side ASRT recognition session: stops the
/// streaming timer, drains the remaining recorded audio through one final
/// recognition pass, and raises <c>OnReceiveText</c> with the result.
/// </summary>
/// <remarks>
/// NOTE(review): the <c>async void</c> signature is kept for interface
/// compatibility, so callers cannot await completion or observe exceptions —
/// all exceptions are therefore handled (and now logged) inside this method.
/// </remarks>
public async void StopAsync()
{
    // Guard clause: nothing to do if no recognition session is active.
    if (!_isRecognizing)
    {
        return;
    }

    timer.Stop();
    Console.WriteLine("Ailemon.Asrt.AsrtClientProxy: 停止录音识别");
    try
    {
        // Drain the remaining buffered wave data and run a final recognition pass.
        Stream waveMemStream = _audioRecorder.Stop();
        WaveData wav = SDK.ReadWaveDatas(waveMemStream);
        AsrtApiResponse rsp = (AsrtApiResponse)await _speechRecognizer.RecogniteAsync(
            wav.byteWavs, wav.sampleRate, wav.channels, wav.byteWidth);
        AsrtResult result = new AsrtResult((string)rsp.Result, true, rsp.StatusCode, rsp.StatusMessage);
        OnReceiveText(this, result); // raise the text-received event
    }
    catch (Exception ex)
    {
        // Fix: the original captured ex.Message into an unused local and silently
        // discarded it; log the failure so shutdown-time recognition errors are visible.
        Console.WriteLine("Ailemon.Asrt.AsrtClientProxy: StopAsync failed: " + ex.Message);
    }
    _isRecognizing = false;
}