// Streams microphone audio to the speech backend while a session is active
// ("sending"). Each frame reads the samples captured since the previous frame,
// wraps them in a temporary WAV clip, and appends the bytes to `request`.
// NOTE(review): Microphone.End is called with devices[0] but GetPosition uses
// null (default device) — confirm these are the same device.
void Update() {
    if (sending) {
        if (stopRecording) {
            Microphone.End(Microphone.devices[0]);
            // Restore the button label once the recording stops.
            GameObject.Find("Button Speak").GetComponentInChildren <UnityEngine.UI.Text>().text = "Ask";
        } else {
            pos = Microphone.GetPosition(null);
            diff = pos - lastPos; // samples captured since the last frame
            if (diff > 0) {
                float[] samples = new float[diff * m_clip.channels];
                m_clip.GetData(samples, lastPos);
                // 16000 Hz is assumed to match the recording clip's frequency —
                // TODO confirm against the Microphone.Start call elsewhere.
                wav_clip = AudioClip.Create("", samples.Length, m_clip.channels, 16000, true, false);
                wav_clip.SetData(samples, 0);
                if (lastPos == 0) {
                    // First chunk — presumably serialized with the WAV header; verify.
                    audio_stream = WavUtility.FromAudioClip(wav_clip, out audio_teste, true);
                } else {
                    // Subsequent chunks — raw sample bytes only.
                    audio_stream = WavUtility.ConvertToByteArray(wav_clip);
                }
                request.add_audio(audio_stream.Length, audio_stream);
                lastPos = pos; // advance the read cursor only after a successful send
            }
        }
    }
}
/// <summary>
/// Initializes part A of the exercise: decodes the base64 user audio into the
/// audio source, sets the source text, and starts preparing the video.
/// </summary>
/// <param name="vpath">URL/path of the video to prepare.</param>
/// <param name="audioStr">Base64-encoded WAV audio of the user's recording.</param>
/// <param name="sourceText">Text displayed alongside the exercise.</param>
/// <returns>true when initialization was started; false on bad input or error.</returns>
public bool InitA(string vpath, string audioStr, string sourceText) {
    jumpFlag = false;
    if (string.IsNullOrWhiteSpace(vpath) || string.IsNullOrWhiteSpace(sourceText)) {
        jumpFlag = true;
        return(false);
    }
    try {
        loadOK = false;
        byte[] bytes = CheakerTools.Base64_Decode(audioStr);
        AudioClip ac = WavUtility.ToAudioClip(bytes, 0, "UserPartA");
        userAS.clip = ac;
        timeLine.minValue = 0f;
        timeLine.value = 0f;
        //timeLine.maxValue = ac.length;
        textSource.text = sourceText;
        video.url = vpath;
        // NOTE(review): repeated InitA calls re-subscribe these handlers —
        // confirm OnCompete/OnError are unsubscribed elsewhere.
        video.prepareCompleted += OnCompete;
        video.errorReceived += OnError;
        video.Prepare();
        return(true);
    } catch (System.Exception e) {
        // FIX: the error was previously swallowed silently, making decode or
        // video failures impossible to diagnose. Log it before reporting failure.
        Debug.LogException(e);
        return(false);
    }
}
/// <summary>
/// Parses a .gpw audio container (a "GAS0" header followed by an embedded
/// RIFF/WAV stream) into a Gpw, caching the result per file name.
/// </summary>
/// <param name="fileName">Virtual-filesystem name of the .gpw file.</param>
/// <returns>The parsed (possibly cached) Gpw with its AudioClip loaded.</returns>
public static Gpw ParseGpw(string fileName) {
    if (GpwCache.TryGetValue(fileName, out Gpw gpw)) {
        return(gpw);
    }
    using (FastBinaryReader br = VirtualFilesystem.Instance.GetFileStream(fileName)) {
        gpw = new Gpw();
        string header = br.ReadCString(4); // Always GAS0
        gpw.AudioRange = (short)br.ReadUInt16();
        // Unknown header fields — read (and discarded) to keep the stream
        // position aligned with the start of the RIFF chunk.
        ushort unk2 = br.ReadUInt16();
        int unk3 = br.ReadInt32();
        int unk4 = br.ReadInt32();
        int unk5 = br.ReadInt32();
        int unk6 = br.ReadInt32();
        int unk7 = br.ReadInt32();
        br.Position += 4; // Skip RIFF header
        int waveFileSize = br.ReadInt32() + 8; // Read total size of audio data from RIFF header
        br.Position -= 8; // Go back to RIFF header
        byte[] audioData = br.ReadBytes(waveFileSize);
        gpw.Clip = WavUtility.ToAudioClip(audioData, fileName);
        GpwCache.Add(fileName, gpw);
        return(gpw);
    }
}
/// <summary>
/// Plays streamed audio once: converts the stream to WAV bytes, starts the
/// audio source, and waits out the clip's duration before clearing the flags.
/// Re-entry is rejected while a playback is in progress (isPlaying guard).
/// </summary>
/// <param name="audioData">Stream holding the audio payload; disposed here.</param>
private IEnumerator Play(Stream audioData) {
    if (isPlaying) {
        yield break;
    }
    isPlaying = true;
    audioSource.Play();
    var audio = StreamToBytes(audioData);
    if (audio == null) {
        // FIX: this early exit previously leaked audioData — the stream was
        // only disposed on the success path below. Dispose it here as well.
        audioData.Dispose();
        isNewAudio = false;
        isPlaying = false;
        yield break;
    }
    var clip = WavUtility.ToAudioClip(audio);
    // The clip is only used to know how long to wait; it is never assigned to
    // audioSource here — presumably the source is fed elsewhere. TODO confirm.
    float time = clip.length;
    while (time > 0) {
        yield return(null);
        time -= Time.deltaTime;
    }
    audioData.Dispose();
    audioData = null;
    isNewAudio = false;
    isPlaying = false;
}
// Stops an active recording, serializes the clip to WAV bytes, and sends them
// to the Dialogflow v2 detectIntent endpoint.
// SECURITY(review): the bearer token and session id are hard-coded below —
// OAuth access tokens expire and must never be committed; load from secure config.
// NOTE(review): recordingActive is double-checked outside and inside the lock —
// confirm the field is safe to read unsynchronized.
public void StopListening() {
    if (recordingActive) {
        //float[] samples = null;
        lock (thisLock) {
            if (recordingActive) {
                StopRecording();
                //samples = new float[audioSource.clip.samples];
                //audioSource.clip.GetData(samples, 0);
                bytes = WavUtility.FromAudioClip(audioSource.clip);
                audioSource.Play();
                Debug.Log("This is the audiosource clip length: " + bytes.Length);
                // Source is consumed; presumably re-created for the next session.
                audioSource = null;
            }
        }
        //new Thread(StartVoiceRequest).Start(samples);
        StartCoroutine(StartVoiceRequest("https://dialogflow.googleapis.com/v2/projects/test-67717/agent/sessions/34563:detectIntent", "ya29.c.ElpvBmZHK1F7pwhaNzKBmYTvNFeluNS2cZbnOqpYLk3SrpoRqJX8L40nEvVb0hVMZigcHSVSKUBA2rVBwgsb3KrGuIQ2tpFDORok_JUy2aHwj83P7fyy91oqJOI", bytes));
    }
}
// Stops an active recording, serializes the clip to WAV bytes, and sends them
// to the Dialogflow v2 detectIntent endpoint.
// SECURITY(review): BEARER_TOKEN is a hard-coded OAuth access token — such
// tokens expire and must never live in source control; load from secure config.
// NOTE(review): recordingActive is double-checked outside and inside the lock —
// confirm the field is safe to read unsynchronized.
public void StopListening() {
    if (recordingActive) {
        //float[] samples = null;
        lock (thisLock) {
            if (recordingActive) {
                StopRecording();
                //samples = new float[audioSource.clip.samples];
                //audioSource.clip.GetData(samples, 0);
                bytes = WavUtility.FromAudioClip(audioSource.clip);
                audioSource.Play();
                Debug.Log("This is the audiosource clip length: " + bytes.Length);
                // Source is consumed; presumably re-created for the next session.
                audioSource = null;
            }
        }
        const string BEARER_TOKEN = "ya29.c.Kp0B8gdnXH6hFiGGAr59tUxMWOfEN4atmEtuKQprL4fhpuu6XL9HmmwVYQsTCo3ceyn-N2Z_jYapeWBUOx-BQQxymx7hKoYaLs23u4fOpZeXBnMCR-RSMEZVTaiLkLWLbh6FqQIoATTCm-rqHyua8icaQwxJhwFfXMBkEUZZvNTYJCFVZtHSRpjm7uaNKXR87PEXW8pxdJR21PpVeNBZpA";
        //new Thread(StartVoiceRequest).Start(samples);
        StartCoroutine(StartVoiceRequest("https://dialogflow.googleapis.com/v2/projects/vinay-ovis/agent/sessions/34563:detectIntent", BEARER_TOKEN, bytes));
    }
}
// Stops an active recording, serializes the clip to WAV bytes, and sends them
// to the Dialogflow endpoint. A fresh GUID is used as the session id so every
// request gets its own session.
// NOTE(review): recordingActive is double-checked outside and inside the lock —
// confirm the field is safe to read unsynchronized.
public void StopListening() {
    if (recordingActive) {
        //float[] samples = null;
        lock (thisLock) {
            if (recordingActive) {
                StopRecording();
                //samples = new float[audioSource.clip.samples];
                //audioSource.clip.GetData(samples, 0);
                bytes = WavUtility.FromAudioClip(audioSource.clip);
                // audioSource.Play();
                Debug.Log("This is the audiosource clip length: " + bytes.Length);
                // audioSource = null;
            }
        }
        // UUID generation — one session per request.
        var uuid = System.Guid.NewGuid();
        Debug.Log(uuid);
        var requestUrl = dialogflowUrl + uuid + ":" + dialogflowMethod;
        Debug.Log(requestUrl);
        //new Thread(StartVoiceRequest).Start(samples);
        StartCoroutine(StartVoiceRequest(requestUrl, bytes));
    }
}
/// <summary>
/// Serializes the given AudioClip to a .wav file on disk and returns the path
/// WavUtility wrote it to.
/// </summary>
public string SaveWavFile(AudioClip aud)
{
    WavUtility.FromAudioClip(aud, out string savedPath, true);
    return savedPath;
}
/// <summary>
/// Unity Update checks for FadeOut and starts playing the next song if fadeout is finished.
/// It also checks if the loading routine (for Mp3) is finished and plays that song next
/// </summary>
void Update() {
    if (IsFadeOut) {
        if (Time.time >= mFadeEndTime) {
            // Fade window elapsed: silence the source and end the fade.
            AudioSource.Stop();
            IsFadeOut = false;
        } else {
            // Remaining fraction of the fade window — volume ramps 1 → 0
            // as Time.time approaches mFadeEndTime.
            AudioSource.volume = Mathf.Lerp(0, 1, (mFadeEndTime - Time.time) / FadeOutSeconds);
        }
    } else if (PlayOnceFaded) {
        // A clip was queued during the fade; start it at full volume now.
        PlayOnceFaded = false;
        AudioSource.clip = waitingClip;
        AudioSource.volume = 1f;
        AudioSource.Play();
    }
    if (LoadSuccess) {
        // Background Mp3 load finished — hand the decoded WAV to the player.
        LoadSuccess = false;
        StartClip(WavUtility.ToAudioClip(RawWaveFile));
    }
}
/// <summary>
/// Loads a WAV clip from a named shared-memory segment written by an external
/// process. Layout: an int32 payload size followed by the WAV bytes.
/// </summary>
/// <param name="memory_name">Name of the existing memory-mapped file.</param>
/// <returns>The decoded AudioClip (also stored in the clip field).</returns>
public AudioClip Load(string memory_name) {
    Debug.Log("CutEffectLoader.Load");
    byte[] data;
    // FIX: wrap the shared-memory handles in using blocks so they are released
    // even if a read throws (Dispose was previously skipped on exceptions).
    // Also: the old comment said "Write data to shared memory" — this READS.
    using (MemoryMappedFile share_mem = MemoryMappedFile.OpenExisting(memory_name))
    using (MemoryMappedViewAccessor accessor = share_mem.CreateViewAccessor()) {
        int size = accessor.ReadInt32(0);
        data = new byte[size];
        accessor.ReadArray <byte>(sizeof(int), data, 0, data.Length);
        Debug.Log("DataSize = " + size);
    }
    clip = WavUtility.ToAudioClip(data, 0, "wav");
    Debug.Log("clip = " + clip.loadState);
    return(clip);
}
/// <summary>
/// Converts raw WAV sample bytes into float sample data, dispatching on the
/// container's bit depth (the header is skipped via _offset).
/// </summary>
/// <param name="sourceData">Raw WAV byte payload including the header.</param>
/// <returns>Float samples suitable for AudioClip.SetData.</returns>
/// <exception cref="NotSupportedException">For unsupported bit depths.</exception>
protected float[] ByteArrayToWavFloatArray(byte[] sourceData) {
    float[] data;
    switch (_wavContainer.BitDepth) {
    case 8:
        data = WavUtility.Convert8BitByteArrayToAudioClipData(sourceData, _offset, sourceData.Length - _offset);
        break;
    case 16:
        data = WavUtility.Convert16BitByteArrayToAudioClipData(sourceData, _offset, sourceData.Length - _offset, _wavContainer.Format);
        break;
    case 24:
        data = WavUtility.Convert24BitByteArrayToAudioClipData(sourceData, _offset, sourceData.Length - _offset, _wavContainer.Format);
        break;
    case 32:
        data = WavUtility.Convert32BitByteArrayToAudioClipData(sourceData, _offset, sourceData.Length - _offset, _wavContainer.Format);
        break;
    default:
        // FIX: throw the specific NotSupportedException (derives from Exception,
        // so existing catch blocks keep working) instead of the base Exception.
        throw new NotSupportedException(_wavContainer.BitDepth + " bit depth is not supported.");
    }
    return(data);
}
// Stops an active recording, serializes the clip to WAV bytes, and sends them
// to the Dialogflow v2 detectIntent endpoint.
// SECURITY(review): BEARER_TOKEN is a hard-coded OAuth access token — such
// tokens expire and must never live in source control; load from secure config.
// NOTE(review): recordingActive is double-checked outside and inside the lock —
// confirm the field is safe to read unsynchronized.
public void StopListening() {
    if (recordingActive) {
        //float[] samples = null;
        lock (thisLock) {
            if (recordingActive) {
                StopRecording();
                //samples = new float[audioSource.clip.samples];
                //audioSource.clip.GetData(samples, 0);
                bytes = WavUtility.FromAudioClip(audioSource.clip);
                audioSource.Play();
                Debug.Log("This is the audiosource clip length: " + bytes.Length);
                // Source is consumed; presumably re-created for the next session.
                audioSource = null;
            }
        }
        const string BEARER_TOKEN = "ya29.c.Kp0B8weYzsyvIRfZqtXz-w9lsNAWTlUyBQwFhAxgd_9V_KC-CgO-rjJ5P1LnExg3xVXL1SHwS-qsAUI0yNtBA-kI556fC72GpAr3jO6rGohjUB4BWURZkSqz6Ma4mOXD1chnCeH7h_Exw6ToA8uiirJl5aP6mKfQ_YNM9P1VedMzIUx4P95zOj2wC9g3IjJVxhgd2hyd6AY3aGSNE_Y-Mw";
        //new Thread(StartVoiceRequest).Start(samples);
        StartCoroutine(StartVoiceRequest("https://dialogflow.googleapis.com/v2/projects/vinay-ovis/agent/sessions/34563:detectIntent", BEARER_TOKEN, bytes));
    }
}
/// <summary>
/// Loads a voice AudioClip by name. With the default model the clip comes from
/// Resources; otherwise an .mp3 beside the given path is converted to .wav
/// (once, then cached on disk) and loaded through WavUtility.
/// </summary>
/// <param name="filename">Asset path / base file name without extension.</param>
/// <returns>The loaded clip, or null on I/O failure.</returns>
public static AudioClip LoadAssetsSound(string filename) {
    if (LAppDefine.DEBUG_LOG) {
        Debug.Log("Load voice : " + filename);
    }
    try {
        if (Configs["DefaultModel"] == "true") {
            return((AudioClip)(Resources.Load(filename)) as AudioClip);
        } else {
            if (!File.Exists(filename + ".wav")) {
                // FIX: dispose the mp3 stream and reader — previously both were
                // left open, holding a file handle on the .mp3 after conversion.
                using (var stream = File.Open(filename + ".mp3", FileMode.Open))
                using (var reader = new Mp3FileReader(stream)) {
                    WaveFileWriter.CreateWaveFile(filename + ".wav", reader);
                }
            }
            return(WavUtility.ToAudioClip(filename + ".wav"));
        }
    } catch (IOException e) {
        Debug.Log(e.StackTrace);
        return(null);
    }
}
// Stops an active recording, serializes the clip to WAV bytes, and sends them
// to the Dialogflow v2 detectIntent endpoint.
// SECURITY(review): BEARER_TOKEN is a hard-coded OAuth access token — such
// tokens expire and must never live in source control; load from secure config.
// NOTE(review): recordingActive is double-checked outside and inside the lock —
// confirm the field is safe to read unsynchronized.
public void StopListening() {
    if (recordingActive) {
        //float[] samples = null;
        lock (thisLock) {
            if (recordingActive) {
                StopRecording();
                //samples = new float[audioSource.clip.samples];
                //audioSource.clip.GetData(samples, 0);
                bytes = WavUtility.FromAudioClip(audioSource.clip);
                audioSource.Play();
                Debug.Log("This is the audiosource clip length: " + bytes.Length);
                // Source is consumed; presumably re-created for the next session.
                audioSource = null;
            }
        }
        const string BEARER_TOKEN = "ya29.c.Kp0B8wdhqo9NtRRi2b-c0nps1bJMI4xRQuzk20Ecb9NhfZpuP7baDmzpTQ7ZSHhleAHjAFUjA070_Eygecz7cTxDS_AOVhkjgAlM2KK57wuu0iuWfboIRo6jHi32DvOpy9Zk26vBZojIBOglwGkOnstSDKnzIcSCAPw3EbLqAuWTJTOErUaXQzteRe_Zwg-wXpPYkYpruw2TizRj-kTCuQ";
        //new Thread(StartVoiceRequest).Start(samples);
        StartCoroutine(StartVoiceRequest("https://dialogflow.googleapis.com/v2/projects/vinay-ovis/agent/sessions/34563:detectIntent", BEARER_TOKEN, bytes));
    }
}
// Stops an active recording, serializes the clip to WAV bytes, and sends them
// to the Dialogflow v2 detectIntent endpoint.
// SECURITY(review): BEARER_TOKEN is a hard-coded OAuth access token — such
// tokens expire and must never live in source control; load from secure config.
// NOTE(review): recordingActive is double-checked outside and inside the lock —
// confirm the field is safe to read unsynchronized.
public void StopListening() {
    if (recordingActive) {
        //float[] samples = null;
        lock (thisLock) {
            if (recordingActive) {
                StopRecording();
                //samples = new float[audioSource.clip.samples];
                //audioSource.clip.GetData(samples, 0);
                bytes = WavUtility.FromAudioClip(audioSource.clip);
                audioSource.Play();
                Debug.Log("This is the audiosource clip length: " + bytes.Length);
                // Source is consumed; presumably re-created for the next session.
                audioSource = null;
            }
        }
        const string BEARER_TOKEN = "ya29.c.Kp0B8wcdehGnkdTG2jJX28zzPrrodAqxa6U4dEgZRQMcskXEwqixFEqOo6UOFbYm7UUt4_JjAGkp1rPwiE_CNEl6GIC9QNngmFVZaIp09FpyAbQAGhHARV5OpK481SkjpaIUvjNta-krDxy4W7q-T3xT_XouujExG8o6fyv8CdTSkdYcuO-d4_h9-AZeOxm6-igY-ufSpshxsQiv_P1hxw";
        //new Thread(StartVoiceRequest).Start(samples);
        StartCoroutine(StartVoiceRequest("https://dialogflow.googleapis.com/v2/projects/vinay-ovis/agent/sessions/34563:detectIntent", BEARER_TOKEN, bytes));
    }
}
/// <summary>
/// Pointer-up handler that finalizes a microphone recording: trims the captured
/// buffer to the actual recorded length, stores the clip + WAV bytes, and
/// broadcasts the audio to the opposite player role via a Photon event.
/// </summary>
public void OnPointerUp(PointerEventData eventData) {
    transform.GetChild(0).gameObject.SetActive(true);
    // Start clip creation: trim the fixed-size mic buffer to what was captured.
    int micDuration = Microphone.GetPosition("");
    Microphone.End("");
    float[] samples = new float[m_RecordedAudio.samples];
    m_RecordedAudio.GetData(samples, 0);
    float[] clipSamples = new float[micDuration];
    if (clipSamples.Length == 0) {
        // Nothing was recorded — reset the UI and bail out.
        ResetSprite();
        transform.GetChild(0).gameObject.SetActive(false);
        return;
    }
    // FIX: copy the full recorded length. The previous `Length - 1` dropped the
    // final sample and left the last element of clipSamples as silence.
    System.Array.Copy(samples, clipSamples, clipSamples.Length);
    var clip = AudioClip.Create("playRecordClip", clipSamples.Length, 1, m_MaxMicrophoneFrequency, false);
    clip.SetData(clipSamples, 0);
    // End clip creation
    var bytes = WavUtility.FromAudioClip(clip);
    m_VoiceList.Add(new KeyValuePair <AudioClip, byte[]>(clip, bytes)); // Add voice data to collection for later use
    // Route the audio to the opposite role.
    var role = FindObjectOfType <PhotonGameplay>().player_Role;
    PhotonNetwork.RaiseEvent(((role == PhotonGameplay.PlayerRole.Support) ? (byte)EventCodes.AudioSupportToDiffuse : (byte)EventCodes.AudioDiffuseToSupport), bytes, Photon.Realtime.RaiseEventOptions.Default, SendOptions.SendReliable);
}
// Stops an active recording, serializes the clip to WAV bytes, and sends them
// to the Dialogflow v2 detectIntent endpoint.
// NOTE(review): "{accessbearertoken}" looks like a placeholder that is sent
// literally — confirm the real token is substituted before this ships.
// NOTE(review): recordingActive is double-checked outside and inside the lock —
// confirm the field is safe to read unsynchronized.
public void StopListening() {
    if (recordingActive) {
        //float[] samples = null;
        lock (thisLock) {
            if (recordingActive) {
                StopRecording();
                //samples = new float[audioSource.clip.samples];
                //audioSource.clip.GetData(samples, 0);
                bytes = WavUtility.FromAudioClip(audioSource.clip);
                // audioSource.Play();
                Debug.Log("This is the audiosource clip length: " + bytes.Length);
                // audioSource = null;
            }
        }
        //new Thread(StartVoiceRequest).Start(samples);
        StartCoroutine(StartVoiceRequest("https://dialogflow.googleapis.com/v2/projects/coffee-wnsicn/agent/sessions/1234:detectIntent", "{accessbearertoken}", bytes));
    }
}
// Stops an active recording, serializes the clip to WAV bytes, and sends them
// to the Dialogflow v2 detectIntent endpoint.
// SECURITY(review): BEARER_TOKEN is a hard-coded OAuth access token — such
// tokens expire and must never live in source control; load from secure config.
// NOTE(review): recordingActive is double-checked outside and inside the lock —
// confirm the field is safe to read unsynchronized.
public void StopListening() {
    if (recordingActive) {
        //float[] samples = null;
        lock (thisLock) {
            if (recordingActive) {
                StopRecording();
                //samples = new float[audioSource.clip.samples];
                //audioSource.clip.GetData(samples, 0);
                bytes = WavUtility.FromAudioClip(audioSource.clip);
                audioSource.Play();
                Debug.Log("This is the audiosource clip length: " + bytes.Length);
                // Source is consumed; presumably re-created for the next session.
                audioSource = null;
            }
        }
        const string BEARER_TOKEN = "ya29.c.KqQB8wdBAAPlRvTqA4kee_kVPizjf7L1P_FIP6nA-0YxMI12a898fbkepewfeexhuuEdUoABo4tCSYUNSmc0RwfMkFAMXRskGTbIBdAEzNXDOR1ZgkAUUnBiQ-rORvkaHtXQPQIkYHPEky9moAexD0CJxRfZN4xC2sX2nCDLhVLZUDJK6bEQl-W2eOc-W__l8hrlvQT6puNwSg0iHHaHCInAyDVdWmo";
        //new Thread(StartVoiceRequest).Start(samples);
        StartCoroutine(StartVoiceRequest("https://dialogflow.googleapis.com/v2/projects/vinay-ovis/agent/sessions/34563:detectIntent", BEARER_TOKEN, bytes));
    }
}
// Encodes the current AudioClip as WAV and uploads it to the PHP endpoint as
// multipart form data under the field "son".
IEnumerator UploadWAV()
{
    string name = "hub_son.wav"; // file name stored in the database
    AudioClip recordedClip = audioClip;

    // AudioClip -> WAV byte[] using the WavUtility helper.
    byte[] wavData = WavUtility.FromAudioClip(recordedClip);
    Destroy(recordedClip);

    // Build the multipart form payload.
    WWWForm uploadForm = new WWWForm();
    uploadForm.AddBinaryData("son", wavData, name, "audio/x-wav");

    // Upload through the PHP script.
    using (var request = UnityWebRequest.Post(uploadSonURL, uploadForm))
    {
        yield return request.SendWebRequest();

        if (request.isNetworkError || request.isHttpError)
        {
            print(request.error);
        }
        else
        {
            print("Musique correctement uploadée");
        }
    }
}
/// <summary>
/// Records `second` seconds of microphone audio at 44.1 kHz, saves it as a WAV
/// file, and forwards the keyword plus file path to React Native through the
/// message bridge. The result callback is stored for later invocation.
/// </summary>
public static void RecordAudio(string keyWord, int second, Action <bool> onResult)
{
    Assert.IsNotNull(onResult, RECORD_AUDIO_NULL_ACTION);
    Assert.IsTrue(!string.IsNullOrEmpty(keyWord), RECORD_AUDIO_EMPTY_KEYWORD);
    Assert.IsTrue(second > 0, RECORD_AUDIO_SECOND_IS_ZERO);

    Instance._onResultAudio = onResult;
    Instance.StartCoroutine(CaptureAndSend());

    IEnumerator CaptureAndSend()
    {
        //AudioListener.pause = true;
        var recordedClip = Microphone.Start(null, false, second, 44100);
        // Wait until the recording window has fully elapsed.
        yield return new WaitUntil(() => !Microphone.IsRecording(null));
        //AudioListener.pause = false;

        WavUtility.FromAudioClip(recordedClip, out string savedPath, true);
        var payload = new RecordAudioData(keyWord, savedPath);
        Debug.Log(payload.ToString());
        UnityMessageManager.Instance.SendMessageToRN(payload.ToString());
    }
}
/// <summary>
/// Decodes a TTS JSON response: extracts the base64 "audioContent" field and
/// converts it into an AudioClip.
/// </summary>
private AudioClip GetAudioClip(string wavString)
{
    JSONNode response = JSON.Parse(wavString);
    // ToString() on a JSON string node keeps the surrounding quotes; strip
    // them before base64-decoding.
    string base64Audio = response["audioContent"].ToString().Replace("\"", "");
    byte[] wavBytes = Convert.FromBase64String(base64Audio);
    return WavUtility.ToAudioClip(wavBytes);
}
// Writes the recorded clip to a .wav file, logs the result, and remembers the
// path in localPath for later use.
private void SaveAsWav()
{
    byte[] wavBytes = WavUtility.FromAudioClip(audioClip, out string savedPath, true);
    Log.Text(label, "Saved .wav file as:" + savedPath, "Saved audio bytes:" + wavBytes.Length + " filepath:" + savedPath);
    localPath = savedPath;
}
/// <summary>
/// Serializes an AudioClip to a .wav file (named after sourceName) and returns
/// the encoded bytes; the path WavUtility produced is discarded.
/// </summary>
public byte[] SaveWavFile(AudioClip audioClip, string sourceName)
{
    byte[] wavBytes = WavUtility.FromAudioClip(audioClip, out string _, true, sourceName);
    return wavBytes;
}
/// <summary>
/// Saves FinalClip to disk as a .wav file and returns the path it was written to.
/// </summary>
public string SaveWavFile()
{
    WavUtility.FromAudioClip(FinalClip, out string savedPath, true);
    Debug.Log(savedPath);
    return savedPath;
}
/// <summary>
/// Loads the embedded audio resources used at runtime; currently only the
/// cheat "beep" sound, decoded from beep.wav into CheatBeep.
/// </summary>
public void LoadAllResources()
{
    using (var beepStream = GetResourceStream("beep.wav"))
    {
        byte[] wavBytes = ReadAllBytes(beepStream);
        CheatBeep = WavUtility.ToAudioClip(wavBytes);
    }
}
/// <summary>
/// Writes the current recording to a .wav file and returns the path chosen by
/// WavUtility.
/// </summary>
public string SaveWavFile() {
    Debug.LogWarning("Saving in progress~~~~~~~~~~~~~~~");
    // FIX: removed the dead initialization (filepath = "sssss") and the unused
    // bytes local — filepath was always overwritten by the out parameter.
    WavUtility.FromAudioClip(audio, out string filepath, true);
    return(filepath);
}
// Audio/REST Functions
/// <summary>
/// Persists the clip as a .wav file and returns the path it was written to.
/// </summary>
public string SaveToWav(AudioClip clip)
{
    byte[] wavBytes = WavUtility.FromAudioClip(clip, out string savedPath, true);
    Debug.Log("Saved audio bytes: " + wavBytes.Length + " filepath:" + savedPath);
    return savedPath;
}
// Coroutine that streams microphone audio to the STT server over a raw socket:
// each frame it reads the newly captured samples, converts them to 16-bit PCM
// bytes, prefixes a "VD<length>" protocol header, and sends the packet once at
// least 2560 new samples are available. On a send failure it tears down the
// socket and receive thread and signals listeners to reset.
IEnumerator RecordStart() {
    Log.Instance.log("Record Start");
    sTTStatus = STTStatus.RecordIng;
    Message.Send <STTCheck>(new STTCheck(sTTStatus));
    _audioSource.clip = Microphone.Start(null, false, 100, Msm.STTFrequency);
    int _lastSample = 0;
    while (isRecord) {
        yield return(null);
        int pos = Microphone.GetPosition(null);
        int diff = pos - _lastSample; // samples captured since the last send
        if (diff > 0) {
            Array.Clear(sendBuffer, 0, sendBuffer.Length);
            float[] samples = new float[diff * _audioSource.clip.channels];
            _audioSource.clip.GetData(samples, _lastSample);
            byte[] bytes = WavUtility.ConvertAudioClipDataToInt16ByteArray(samples);
            // Frame header: "VD" marker followed by the payload byte count.
            string protocolString = "VD" + bytes.Length;
            byte[] protocolBytes = StringToByte(protocolString);
            sendBuffer = new Byte[protocolBytes.Length + bytes.Length];
            Buffer.BlockCopy(protocolBytes, 0, sendBuffer, 0, protocolBytes.Length);
            Buffer.BlockCopy(bytes, 0, sendBuffer, protocolBytes.Length, bytes.Length);
            try {
                // Only send (and advance the read cursor) once enough samples
                // have accumulated; smaller chunks are re-read next frame.
                if (diff >= 2560) {
                    socket.Send(sendBuffer);
                    _lastSample = pos;
                }
            } catch (Exception E) {
                // Socket failure: stop recording, close the connection, kill
                // the receive thread, and tell listeners to reset.
                // NOTE(review): "Socekt" typo in the log strings — left as-is
                // here since those strings may be grepped by tooling.
                SendLog("RecordStart Socekt Send Error");
                Log.Instance.log("RecordStart Socekt Send Error: " + E.ToString());
                isRecord = false;
                if (socket != null) {
                    socket.Close();
                    socket = null;
                }
                if (recvThread != null && recvThread.IsAlive) {
                    recvThread.Abort();
                    recvThread = null;
                }
                Message.Send <STTReceiveMsg>(new STTReceiveMsg("reset", "reset"));
            }
        }
    }
}
/// <summary>
/// Uploads the current audio clip: encodes it to WAV bytes, submits it through
/// DownloadProp, and returns the public secret key.
/// </summary>
public string UploadAudioClip()
{
    byte[] wavBytes = WavUtility.FromAudioClip(Audio.clip);
    TextType uploadMeta = new TextType { };
    DownloadProp.Instance.UploadAudioText(uploadMeta, wavBytes);
    return PublicAttribute.GetSecretKey();
}
// Pointer-up handler for the talk button: finalizes the microphone recording
// and broadcasts the WAV bytes together with the user name.
void OnEndTalk(PointerEventData f)
{
    MicrophoneMgr.EndRecording(out int recordedLength, out AudioClip recordedClip);
    byte[] wavBytes = WavUtility.FromAudioClip(recordedClip);
    MessageMgr.SendObject(MessageData.Init(new MessageData(), wavBytes, this.UserName));
}