// Polls the TTS playback state; once the clip has finished, clears the
// "speaking" flag and advances the survey flow according to the flags on
// the speech that just completed, then drops the reference to it.
private void CheckAudioPlayState()
{
    if (!IsFinished)
    {
        return;
    }

    // Playback finished — no longer speaking.
    isTTSStarted = false;

    if (currentSpeech == null)
    {
        return;
    }

    // Auto-advance to the next survey step when this speech requests it.
    if (currentSpeech.shouldGoNext)
    {
        surveyController.NextStep();
    }

    // Block for a user answer, but only while the quiz is still running.
    if (currentSpeech.shouldWaitForAnswer && !surveyController.QuizFinished)
    {
        surveyController.WaitForAnswer();
    }

    currentSpeech = null;
}
// Loads the dialogue rows for the current speechId from the ES3 spreadsheet
// and queues them as (speaker, text) pairs, then kicks off playback.
public void QueuingSpeech()
{
    var sheet = new ES3Spreadsheet();
    sheet.Load(SpeechDataManager.instance.ScriptName);
    speechQueue = new Queue <SpeechInfo>();

    int speechStart = 0;
    int speechLength = 0;

    // Look up where this speechId's rows begin and how many rows it spans.
    foreach (SpeechDataManager.SpeechSheetInfo info in SpeechDataManager.instance.speechSheetInfos)
    {
        if (speechId != info.speechId)
        {
            continue;
        }
        else
        {
            speechStart = info.speechRow;
            speechLength = info.speechLength;
            break;
        }
    }

    // Column 0 = speaker object name, column 1 = line of dialogue.
    // NOTE(review): the loop covers speechLength - 1 rows (the last row is
    // excluded) — possible off-by-one unless speechLength intentionally counts
    // a header/terminator row; verify against the spreadsheet layout.
    for (int row = speechStart; row < speechStart + speechLength - 1; row++)
    {
        SpeechInfo speechInfo = new SpeechInfo();
        speechInfo.speecher = sheet.GetCell <string>(0, row);
        speechInfo.speechText = sheet.GetCell <string>(1, row);
        speechQueue.Enqueue(speechInfo);
    }

    SpeechStart();
}
// Begins speaking the given text: records it as the active speech, pushes its
// content state to the quiz status display, then hands the text to TTS.
public void Play(string speech)
{
    //webSurvey.robotFacialRenderer.Play(speakFaceName);
    var info = new SpeechInfo(speech, surveyController);
    currentSpeech = info;
    quizStatusManager.SetContentState(info.GetContentState);
    TextToSpeech(speech);
}
/// <summary>
/// Updates the LSC speech filter information for the given user.
/// Thin pass-through to the settings data access layer.
/// </summary>
/// <param name="connectionString">Connection string of the target LSC database.</param>
/// <param name="uId">Id of the user whose filter settings are updated.</param>
/// <param name="sp">Speech filter settings to persist.</param>
/// <returns>The persisted <see cref="SpeechInfo"/> as returned by the DAL.</returns>
public SpeechInfo UpdateLSCSpeechFilter(string connectionString, string uId, SpeechInfo sp)
{
    // The original `try { ... } catch { throw; }` added nothing — a bare
    // rethrow without logging is identical to letting the exception propagate.
    return settingDal.UpdateLSCSpeechFilter(connectionString, uId, sp);
}
/// <summary>
/// Updates the CSC speech filter information for the given user.
/// Thin pass-through to the settings data access layer.
/// </summary>
/// <param name="lscId">Id of the LSC whose CSC-side record is updated.</param>
/// <param name="uId">Id of the user whose filter settings are updated.</param>
/// <param name="sp">Speech filter settings to persist.</param>
public void UpdateCSCSpeechFilter(int lscId, string uId, SpeechInfo sp)
{
    // The original `try { ... } catch { throw; }` added nothing — a bare
    // rethrow without logging is identical to letting the exception propagate.
    settingDal.UpdateCSCSpeechFilter(lscId, uId, sp);
}
// Coroutine: downloads the synthesized audio for `si`, converts it to a clip,
// plays it, waits until playback ends, then invokes the completion callback.
private IEnumerator speakCoroutine(SpeechInfo si)
{
    // Fetch the raw sound data from the TTS server first.
    yield return getRawSoundData(si);

    audioSource.clip = convertAudioClip();
    audioSource.Play();

    // Wait at least one physics tick, then keep waiting while the clip plays
    // (same at-least-once semantics as the original do/while loop).
    while (true)
    {
        yield return new WaitForFixedUpdate();
        if (!audioSource.isPlaying)
        {
            break;
        }
    }

    callback();
}
/// <summary>
/// Converts a DB column value (a tab-separated byte blob) into a
/// <see cref="SpeechInfo"/>. Returns an all-disabled default instance when the
/// value is DBNull, malformed, or any field fails to parse.
/// </summary>
/// <param name="val">Raw column value; expected to be a byte[] holding 14
/// tab-separated fields plus a trailing tab (15 split entries).</param>
public static SpeechInfo DBNullAlarmSoundFiterItemHandler(object val)
{
    try
    {
        if (val != DBNull.Value)
        {
            // FIX: access the inherited static via Encoding, not the derived
            // ASCIIEncoding type — same member, less misleading.
            var byteCols = Encoding.Default.GetString((byte[])val);
            var cols = byteCols.Split('\t');
            // The writer appends a trailing '\t', so a valid blob splits into
            // 15 entries: 14 values plus one empty trailing entry.
            if (cols.Length == 15)
            {
                var sp = new SpeechInfo();
                sp.AL1Enabled = Boolean.Parse(cols[0]);
                sp.AL2Enabled = Boolean.Parse(cols[1]);
                sp.AL3Enabled = Boolean.Parse(cols[2]);
                sp.AL4Enabled = Boolean.Parse(cols[3]);
                sp.SpDevFilter = cols[4];
                sp.SpNodeFilter = cols[5];
                sp.SpDisconnect = Boolean.Parse(cols[6]);
                sp.SpLoop = Boolean.Parse(cols[7]);
                sp.SpArea2 = Boolean.Parse(cols[8]);
                sp.SpArea3 = Boolean.Parse(cols[9]);
                sp.SpStation = Boolean.Parse(cols[10]);
                sp.SpDevice = Boolean.Parse(cols[11]);
                sp.SpNode = Boolean.Parse(cols[12]);
                sp.SpALDesc = Boolean.Parse(cols[13]);
                sp.UpdateTime = DateTime.Now;
                return sp;
            }
        }
    }
    catch
    {
        // Deliberate best-effort: any cast/parse failure falls through to the
        // default below rather than propagating out of a data-mapping helper.
    }

    // Fallback: everything disabled, empty filters, timestamped now.
    return new SpeechInfo()
    {
        SpDisconnect = false,
        AL1Enabled = false,
        AL2Enabled = false,
        AL3Enabled = false,
        AL4Enabled = false,
        SpDevFilter = String.Empty,
        SpNodeFilter = String.Empty,
        SpLoop = false,
        SpArea2 = false,
        SpArea3 = false,
        SpStation = false,
        SpDevice = false,
        SpNode = false,
        SpALDesc = false,
        UpdateTime = DateTime.Now
    };
}
/// <summary>
/// Tree-submit handler: for every submitted child node, parses the speech
/// filter attributes into a <see cref="SpeechInfo"/> and persists it to both
/// the LSC database and the CSC, then caches it on the logged-in LSC user.
/// Nodes without a matching LSC or LSC user are skipped silently.
/// </summary>
protected void SubmitNodes(object sender, SubmitEventArgs e)
{
    try
    {
        var lscEntity = new BLsc();
        var lscs = lscEntity.GetLscs();
        var reportSettingEntity = new BSetting();
        var userData = UserData;
        foreach (var sNode in e.RootNode.Children)
        {
            // Node id carries the LSC id; skip nodes we can't resolve.
            var lsc = lscs.Find(l => { return(l.LscID.ToString().Equals(sNode.NodeID)); });
            if (lsc == null)
            {
                continue;
            }
            // Only update LSCs the current user actually has access to.
            var lscUser = userData.LscUsers.Find(lu => { return(lu.LscID == lsc.LscID); });
            if (lscUser == null)
            {
                continue;
            }
            // Rebuild the filter settings from the submitted node attributes.
            var sp = new SpeechInfo();
            sp.SpDisconnect = Boolean.Parse(sNode.Attributes["SpDisconnect"].ToString());
            sp.AL1Enabled = Boolean.Parse(sNode.Attributes["AL1Enabled"].ToString());
            sp.AL2Enabled = Boolean.Parse(sNode.Attributes["AL2Enabled"].ToString());
            sp.AL3Enabled = Boolean.Parse(sNode.Attributes["AL3Enabled"].ToString());
            sp.AL4Enabled = Boolean.Parse(sNode.Attributes["AL4Enabled"].ToString());
            sp.SpDevFilter = sNode.Attributes["SpDevFilter"].ToString();
            sp.SpNodeFilter = sNode.Attributes["SpNodeFilter"].ToString();
            sp.SpLoop = Boolean.Parse(sNode.Attributes["SpLoop"].ToString());
            sp.SpArea2 = Boolean.Parse(sNode.Attributes["SpArea2"].ToString());
            sp.SpArea3 = Boolean.Parse(sNode.Attributes["SpArea3"].ToString());
            sp.SpStation = Boolean.Parse(sNode.Attributes["SpStation"].ToString());
            sp.SpDevice = Boolean.Parse(sNode.Attributes["SpDevice"].ToString());
            sp.SpNode = Boolean.Parse(sNode.Attributes["SpNode"].ToString());
            sp.SpALDesc = Boolean.Parse(sNode.Attributes["SpALDesc"].ToString());
            sp.UpdateTime = DateTime.Now;
            // Write to the node's own LSC database, then mirror to the CSC.
            var connectionString = WebUtility.CreateLscConnectionString(lsc);
            // NOTE(review): localSpeech is never read after this call — the
            // LSC return value is effectively discarded.
            var localSpeech = reportSettingEntity.UpdateLSCSpeechFilter(connectionString, sNode.Attributes["SpUID"].ToString(), sp);
            reportSettingEntity.UpdateCSCSpeechFilter(lsc.LscID, sNode.Attributes["SpUID"].ToString(), sp);
            // Keep the in-memory session copy in sync with what was saved.
            lscUser.AlarmSoundFiterItem = sp;
        }
        WebUtility.ShowNotify(EnmErrType.Info, "数据已保存成功!");
    }
    catch (Exception err)
    {
        // Any failure is logged and surfaced to the user; partial updates made
        // before the failure are NOT rolled back here.
        WebUtility.WriteLog(EnmSysLogLevel.Error,
                            EnmSysLogType.Exception, err.ToString(), Page.User.Identity.Name);
        WebUtility.ShowMessage(EnmErrType.Error, err.Message);
    }
}
// Advances to the next bubble of the current speech; when the current speech's
// bubbles are exhausted, fires the finished event (unless it repeats) and
// rotates to the next active speech in m_Speeches.
void NextBubble()
{
    m_CurrentSpeechTextIndex++;

    // Check if we are done with the current speech.
    if (m_CurrentSpeechTextIndex == m_CurrentSpeech.SpeechBubbleInfos.Count)
    {
        // Repeating speeches never report completion.
        if (!m_CurrentSpeech.Repeat)
        {
            OnSpeechFinished?.Invoke(m_CurrentSpeech.Id);
        }
        m_CurrentSpeechTextIndex = 0;

        // Find the next active speech.
        if (GetActiveSpeechCount() > 0)
        {
            // Find current speech index. The speech can have been removed,
            // in which case we fall back to index 0.
            var currentIndex = 0;
            for (var i = 0; i < m_Speeches.Count; i++)
            {
                if (m_CurrentSpeech == m_Speeches.ElementAt(i))
                {
                    currentIndex = i;
                    break;
                }
            }
            // Get next speech index and stop when an active speech has been found.
            // We know this loop will terminate as we have at least one active speech.
            do
            {
                currentIndex = (currentIndex + 1) % m_Speeches.Count;
            }while (!m_Speeches.ElementAt(currentIndex).Active);
            m_CurrentSpeech = m_Speeches[currentIndex];
        }
    }

    // Activate the new bubble.
    if (GetActiveSpeechCount() > 0)
    {
        StartActivate();
    }
}
// Builds the SSML (speech synthesis markup) payload sent to the API server.
// Prosody elements nest outermost-to-innermost: pitch, range, rate, volume.
private string generateSSML(SpeechInfo si)
{
    var pitch = si.pitch.ToString("0.00");
    var range = si.range.ToString("0.00");
    var rate = si.rate.ToString("0.00");
    var volume = si.volume.ToString("0.00");

    return "<?xml version=\"1.0\" encoding=\"utf-8\" ?>"
           + "<speak version=\"1.1\">"
           + $"<voice name=\"{voiceName}\">"
           + $"<prosody pitch=\"{pitch}\">"
           + $"<prosody range=\"{range}\">"
           + $"<prosody rate=\"{rate}\">"
           + $"<prosody volume=\"{volume}\">"
           + si.text
           + "</prosody>"
           + "</prosody>"
           + "</prosody>"
           + "</prosody>"
           + "</voice>"
           + "</speak>";
}
// Marks the speech with the given id as active. If it becomes the only active
// speech, it is promoted to the current one and its bubble/prompt is shown.
// Unknown ids are ignored.
public void Activate(int id)
{
    SpeechInfo match = null;
    foreach (var entry in m_Speeches)
    {
        if (entry.Id == id)
        {
            match = entry;
            break;
        }
    }

    if (match == null)
    {
        return;
    }

    match.Active = true;

    // Only the transition from zero to one active speech starts display.
    if (GetActiveSpeechCount() != 1)
    {
        return;
    }

    m_CurrentSpeech = match;
    m_CurrentSpeechTextIndex = 0;
    StartActivate();
    m_PromptHandler.Activate(PromptPlacementHandler.PromptType.SpeechBubble);
}
// Dequeues the next line of dialogue, locates the speaker GameObject by name,
// and spawns its speech bubble, chaining itself as the bubble callback so the
// queue drains one line at a time.
public void SpeechStart()
{
    if (speechQueue.Count == 0)
    {
        return;
    }

    var nextLine = speechQueue.Dequeue();

    speecher = GameObject.Find(nextLine.speecher);
    if (speecher == null)
    {
        // Speaker object missing from the scene — log and drop this line.
        Debug.Log(nextLine.speecher);
        Debug.Log("is not exist!");
        return;
    }

    speechData.scriptData = nextLine.speechText;
    SpeechBubbleManager.instance.CreateBubble(speechData, Vector2.up, SpeechStart, speecher.transform);
}
/// <summary>
/// Callback that re-runs recognition on a stored audio buffer so the engine is
/// reset to a state where the next real invocation runs quickly, then releases
/// the engine back to its pool.
/// </summary>
/// <param name="obj">A <see cref="SpeechInfo"/> carrying the engine and the audio bytes.</param>
static void ResetSpeechEngine(object obj)
{
    // Log function entrance
    TraceLog.TraceFunction();

    SpeechInfo si = (SpeechInfo)obj;
    SpeechRecognitionEngine sre = si.Engine;

    // FIX: the MemoryStream was never disposed. Keep it alive for the whole
    // recognition pass (the engine reads from it), then dispose deterministically.
    using (MemoryStream ms = new MemoryStream(si.SpeechByteArray))
    {
        sre.SetInputToAudioStream(ms, formatInfo);

        // run the recognition again (which will take longer, but then reset the
        // recognizer to a state where it runs quickly on the next invocation)
        sre.Recognize();
    }

    ReleaseSpeechEngine(sre);
}
/// <summary>
/// Persists LSC speech filter information for a user: the 14 filter fields are
/// serialized as a tab-separated string (with trailing tab) into an image
/// column, written inside a ReadCommitted transaction.
/// </summary>
/// <param name="connectionString">Connection string of the target LSC database.</param>
/// <param name="uId">Id of the user whose filter settings are updated.</param>
/// <param name="sp">Speech filter settings to persist.</param>
/// <returns>The same <paramref name="sp"/> instance that was written.</returns>
public SpeechInfo UpdateLSCSpeechFilter(string connectionString, string uId, SpeechInfo sp)
{
    SqlParameter[] parms =
    {
        new SqlParameter("@UID", SqlDbType.VarChar, 20),
        new SqlParameter("@AlarmSoundFiterItem", SqlDbType.Image)
    };
    parms[0].Value = uId;
    // Serialize in the exact 14-field, trailing-tab layout the reader side
    // (DBNullAlarmSoundFiterItemHandler) expects. FIX: access the inherited
    // static via Encoding, not the derived ASCIIEncoding type.
    parms[1].Value = Encoding.Default.GetBytes(String.Format("{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}\t{8}\t{9}\t{10}\t{11}\t{12}\t{13}\t", sp.AL1Enabled, sp.AL2Enabled, sp.AL3Enabled, sp.AL4Enabled, sp.SpDevFilter, sp.SpNodeFilter, sp.SpDisconnect, sp.SpLoop, sp.SpArea2, sp.SpArea3, sp.SpStation, sp.SpDevice, sp.SpNode, sp.SpALDesc));

    SqlHelper.TestConnection(connectionString);
    using (var conn = new SqlConnection(connectionString))
    {
        conn.Open();
        var trans = conn.BeginTransaction(IsolationLevel.ReadCommitted);
        try
        {
            SqlHelper.ExecuteNonQuery(trans, CommandType.Text, SqlText.SQL_UPDATE_SETTING_UPDATELSCSPEECHFILTER, parms);
            trans.Commit();
        }
        catch
        {
            // Undo the partial write, then let the caller see the failure.
            trans.Rollback();
            throw;
        }
    }

    // The original wrapped all of the above in `try { ... } catch { throw; }`,
    // which is a no-op and was removed.
    return sp;
}
// Coroutine: posts the SSML for `si` to the API server, requesting raw L16
// audio, and stores the downloaded bytes in rawSoundData. On HTTP failure the
// error is logged and rawSoundData is left untouched.
private IEnumerator getRawSoundData(SpeechInfo si)
{
    var body = System.Text.Encoding.UTF8.GetBytes(generateSSML(si));
    var headers = new Dictionary <string, string>()
    {
        { "Content-Type", "application/ssml+xml" },
        { "Accept", "audio/L16" }
    };

    // NOTE(review): WWW is obsolete in current Unity (UnityWebRequest is the
    // replacement); kept as-is to preserve behavior.
    var request = new WWW(API_URL, body, headers);
    yield return request;

    if (!string.IsNullOrEmpty(request.error))
    {
        Debug.LogError("www Error:" + request.error);
        yield break;
    }

    rawSoundData = request.bytes;
}
// Starts the TTS coroutine for the given speech settings on the host
// MonoBehaviour `mb`; completion is signaled via `callback` inside the
// coroutine. NOTE(review): assumes `mb` is a live, enabled MonoBehaviour —
// StartCoroutine on a destroyed/inactive host would fail; confirm lifetime.
public void speak(SpeechInfo si) { mb.StartCoroutine(speakCoroutine(si)); }