/// <summary>
/// Captures one audio frame from the SDK and pushes it to the server over UDP.
/// </summary>
void SendAudio()
{
    // Ask the SDK for the most recent captured audio frame.
    AudioPacket packet = UnityChatSDK.Instance.GetAudio();
    if (packet == null)
    {
        return;
    }

    packet.Id = ChatManager.Instance.UserID;

    // Protobuf-encode the packet for transport.
    byte[] payload = GetPbAudioPacket(packet).ToByteArray();
    if (payload == null)
    {
        return;
    }

    // Wrap the encoded audio together with the call routing info.
    // (UDP transport; swap in your own server send API if needed.)
    CallInfo info = new CallInfo();
    info.UserID = ChatManager.Instance.UserID;
    info.CallID = ChatManager.Instance.CallID;
    info.PeerList.Add(ChatManager.Instance.ChatPeerID);

    UdplDataModel model = new UdplDataModel();
    model.Request = RequestByte.REQUEST_AUDIO;
    model.ChatInfoData = info.ToByteArray();
    model.ChatData = payload;

    UdpSocketManager.Instance.Send(UdpMessageCodec.Encode(model));
}
/// <summary>
/// Returns a spent packet to the graveyard pool (for reuse) under the shared data lock.
/// </summary>
void RemovePacket(AudioPacket p)
{
    lock (_dataMutex)
    {
        _graveYard.Add(p);
    }
}
//==================onReceive data========================
/// <summary>
/// Called when raw audio bytes arrive from the network; decodes them
/// and hands the resulting packet to the SDK for playback.
/// </summary>
/// <param name="data">Encoded audio packet bytes.</param>
public void OnReceiveAudio(byte[] data)
{
    AudioPacket packet = DecodeAudioPacket(data);
    UnityChatSDK.Instance.DecodeAudioData(packet);
}
/// <summary>
/// Captures one SDK audio frame, protobuf-encodes it, and sends it to the
/// server over UDP together with the call routing info.
/// </summary>
void SendAudio()
{
    // Get audio data captured by the SDK.
    AudioPacket packet = UnityChatSDK.Instance.GetAudio();
    if (packet == null)
    {
        return;
    }

    packet.Id = ChatManager.Instance.UserID;
    byte[] payload = GetPbAudioPacket(packet).ToByteArray();
    if (payload == null)
    {
        return;
    }

    // UDP: send data to server.
    IMInfo info = new IMInfo();
    info.UserID = ChatManager.Instance.UserID;
    info.CallID = ChatManager.Instance.CallID;
    info.UserList.Add(ChatManager.Instance.ChatPeers);

    UdplDataModel model = new UdplDataModel();
    model.Request = UdpRequest.REQUEST_AUDIO;
    model.ChatInfoData = info.ToByteArray();
    model.ChatData = payload;

    UdpSocketManager.Instance.Send(UdpMessageCodec.Encode(model));
}
/// <summary>
/// Plays back a voice clip while showing the "playing voice" indicator,
/// then hides the indicator once the clip's duration has elapsed.
/// </summary>
public IEnumerator PlaySpeek(AudioPacket packet)
{
    ApplicationFacade.Instance.SendNotification(
        NotificationConstant.MEDI_BATTLEVIEW_SHOWPLAYINGVOICE, packet.LocalId);

    StartCoroutine(AudioSystem.Instance.PlayEffectAudio(packet.Clip));
    yield return new WaitForSeconds(packet.Clip.length);

    ApplicationFacade.Instance.SendNotification(
        NotificationConstant.MEDI_BATTLEVIEW_HIDENPLAYINGVOICE, packet.LocalId);
}
/// <summary>
/// Per-frame driver: pumps microphone samples to the resampler while recording,
/// enforces the maximum recording length, and services the audio cache queue
/// (expiring stale/cancelled entries and emitting completed clips).
/// </summary>
public void Update()
{
    if (recorderFlag)
    {
        int recordingPos = Microphone.GetPosition(device);
        audioLength = (float)recordingPos / (float)recordFrequency;

        // Each time a full sample window has been captured, resample/emit it.
        // NOTE(review): this loop only terminates if OnResample() advances
        // sampleIndex past recordingPos — assumed; OnResample not visible here.
        while (sampleIndex + recorderSampleSize <= recordingPos)
        {
            if (recorderFlag)
            {
                OnResample();
            }
            //#if DEBUG
            //Debug.Log("recording: " + recordingPos);
            //#endif
        }

        // Stop once the clip reaches the configured maximum length.
        if (audioLength >= MaxAudioLength)
        {
            if (recorderFlag)
            {
                StopRecording();
            }
        }
    }

    // Lazily create the cache on first use.
    if (AudioCacheDic == null)
    {
        AudioCacheDic = new SortedDictionary <int, AudioCache>();
    }

    // Cache maintenance: only the oldest entry (lowest key) is inspected per frame.
    if (AudioCacheDic.Count > 0)
    {
        var item = AudioCacheDic.First();

        // Drop entries that were cancelled (flag == 2) or have outlived
        // MaxCacheLife without being finalized (flag != -1).
        if (item.Value.flag == 2 || item.Value.flag != -1 && (Time.time - item.Value.lastUpdateTime) >= MaxCacheLife)
        {
            AudioCacheDic.Remove(item.Key);
        }

        // Emit a finished recording (flag == 1) as an AudioClip-backed packet.
        // NOTE(review): this check still runs on an entry removed just above
        // (item.Value is the detached object) — confirm flag 1 cannot overlap
        // with the removal conditions.
        if (item.Value.flag == 1 && OnGetComplete != null)
        {
            AudioPacket packet = new AudioPacket();
            packet.LocalId = item.Value.id;
            packet.AudioLength = item.Value.audioLength;
            packet.Clip = AudioClip.Create("speak: " + item.Key, (int)(item.Value.audioLength * Frequency), 1, Frequency, false);
            packet.Clip.SetData(item.Value.data.ToArray(), 0);
            item.Value.Reset();
            OnGetComplete(packet);
        }
    }
}
/// <summary>
/// Server command: relays an audio packet to every connected client
/// except the sender's own connection.
/// </summary>
public void CmdSendAudio(AudioPacket audio)
{
    foreach (var connection in NetworkServer.connections)
    {
        bool isSender = connection.Value == connectionToClient;
        if (!isSender)
        {
            TargetPlayAudio(connection.Value, audio);
        }
    }
}
/// <summary>
/// Converts a protobuf wire packet into the SDK's native AudioPacket.
/// </summary>
AudioPacket GetAudioPacket(PbAudioPacket packet)
{
    var result = new AudioPacket
    {
        Position = packet.Position,
        Length = packet.Length,
        Data = packet.Data.ToByteArray(),
        Timestamp = packet.Timestamp,
    };
    return result;
}
/// <summary>
/// Pops the oldest queued packet, or returns null when the queue is empty.
/// </summary>
/// <returns>The dequeued packet, or null if none are pending.</returns>
AudioPacket GetExistingPacket()
{
    // FIX: _packets is mutated under _dataMutex elsewhere (AddAudioPacket),
    // but was read/removed here without the lock — take the same lock so
    // Count/RemoveAt cannot race with a concurrent Add.
    lock (_dataMutex)
    {
        if (_packets.Count > 0)
        {
            AudioPacket p = _packets[0];
            _packets.RemoveAt(0);
            return p;
        }
    }
    return null;
}
/// <summary>
/// Network receive hook: unpacks a single audio packet from the raw buffer
/// and raises the Received event when parsing succeeds.
/// </summary>
private void OnNetworkPreparePacket(IPEndPoint remote, byte[] bytes, ref int offset, int count)
{
    var packet = new AudioPacket();
    bool unpacked = packet.TryUnpack(bytes, ref offset, count);
    if (!unpacked)
    {
        return;
    }
    Received?.Invoke(remote, packet);
}
/// <summary>
/// Recycles a packet from the graveyard pool, or allocates a fresh one
/// when the pool is empty. Recycled packets have their start index reset.
/// </summary>
AudioPacket CreatePacket()
{
    // FIX: _graveYard is populated under _dataMutex (see RemovePacket), but
    // was popped here without the lock — take the same lock so the pool read
    // cannot race with a concurrent return.
    lock (_dataMutex)
    {
        if (_graveYard.Count > 0)
        {
            AudioPacket p = _graveYard[0];
            _graveYard.RemoveAt(0);
            p.startIndex = 0;
            return p;
        }
    }
    return new AudioPacket();
}
/// <summary>
/// Maps the SDK's AudioPacket onto its protobuf wire representation.
/// </summary>
PbAudioPacket GetPbAudioPacket(AudioPacket audio)
{
    return new PbAudioPacket
    {
        Id = audio.Id,
        Position = audio.Position,
        Length = audio.Length,
        Data = ByteString.CopyFrom(audio.Data),
        Timestamp = audio.Timestamp,
    };
}
/// <summary>
/// Wraps a raw sample buffer in a (possibly recycled) packet and queues it
/// for consumption, updating the published packet count under the data lock.
/// </summary>
public void AddAudioPacket(float[] data, int startIndex, int channels)
{
    AudioPacket entry = CreatePacket();
    entry.data = data;
    entry.channelsCount = channels;
    entry.startIndex = startIndex;

    lock (_dataMutex)
    {
        _packets.Add(entry);
        PacketsCount = _packets.Count;
    }
}
/// <summary>
/// Transport receive hook: drains every packet framed in the buffer and
/// dispatches each one to its registered route, skipping unknown route ids.
/// </summary>
private void OnTransportReceived(byte[] bytes, ref int offset, int count)
{
    var packet = new AudioPacket();
    while (packet.TryUnpack(bytes, ref offset, count))
    {
        if (_routes.TryGetValue(packet.RouteId, out AudioRoute route))
        {
            route.Handle(packet);
        }
    }
}
/// <summary>
/// Captures one SDK audio frame, protobuf-encodes it, and routes it over the
/// currently selected transport (UDP or TCP stream).
/// </summary>
void SendAudio()
{
    // Get audio data captured by the SDK.
    AudioPacket packet = UnityChatSDK.Instance.GetAudio();
    if (packet == null)
    {
        return;
    }

    packet.Id = ChatManager.Instance.UserID;
    byte[] audio = GetPbAudioPacket(packet).ToByteArray();
    if (audio == null)
    {
        // Nothing encodable this frame; neither transport would send anyway.
        return;
    }

    switch (NetType)
    {
        case NetType.UdpStream:
        {
            // UDP: send data to server with call routing info attached.
            UdplDataModel model = new UdplDataModel();
            model.Request = UdpRequest.REQUEST_AUDIO;

            IMInfo info = new IMInfo();
            info.UserID = ChatManager.Instance.UserID;
            info.CallID = ChatManager.Instance.CallID;
            info.UserList.Add(ChatManager.Instance.ChatPeers);

            model.ChatInfoData = info.ToByteArray();
            model.ChatData = audio;
            UdpSocketManager.Instance.Send(UdpMessageCodec.Encode(model));
            break;
        }
        case NetType.TcpStream:
        {
            // TCP: wrap the payload in a chat-data frame.
            DataModel model = new DataModel();
            model.Type = ChatProtocolType.TYPE_CHATDATA;
            model.Request = ChatDataProtocol.CHAT_AUDIO;
            model.Message = audio;
            ChatNetworkManager.Instance.Send(model);
            break;
        }
        default:
            break;
    }
}
/// <summary>
/// WaveIn callback: snapshots the recorded bytes into an AudioPacket stamped
/// with its absolute start time, serializes it, and sends it over UDP.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Capture buffer and the number of valid bytes in it.</param>
public void DataAvailable(object sender, WaveInEventArgs e)
{
    if (!isRecording || udpClient == null)
    {
        return;
    }

    // Start tick of this buffer = session start + seconds of audio already
    // sent (bytesSent / bytes-per-frame / sample rate).
    AudioPacket packet = new AudioPacket()
    {
        AudioData = new byte[e.BytesRecorded],
        StartTicks = (StartTime + TimeSpan.FromSeconds((double)bytesSent / (Constants.Audio.BIT_DEPTH / 8 * Constants.Audio.CHANNELS) / Constants.Audio.SAMPLE_RATE)).Ticks
    };
    Buffer.BlockCopy(e.Buffer, 0, packet.AudioData, 0, e.BytesRecorded);

    // FIX: dispose the MemoryStream instead of leaking one per callback.
    using (MemoryStream ms = new MemoryStream())
    {
        Serializer.Serialize(ms, packet);
        udpClient.Send(ms.ToArray(), (int)ms.Length, remoteEndPoint);
    }

    bytesSent += e.BytesRecorded;
}
/// <summary>
/// Appends freshly received samples into the playback clip at the current
/// write cursor, (re)starting the AudioSource if it is not playing.
/// </summary>
public void UpdateSoundSamples(AudioPacket sound)
{
    if (!audioSource.isPlaying)
    {
        InitialiseAudioSource();
    }

    // Write the new samples at the position just past the previous write.
    // NOTE(review): lastSamplePlayed acts as a write cursor here — assumed;
    // actual playback position is managed by the AudioSource itself.
    audioSource.clip.SetData(sound.samples, lastSamplePlayed);

    if (!audioSource.isPlaying)
    {
        // Small delay gives the buffer a head start before playback catches up.
        audioSource.PlayDelayed(0.1f);
    }

    // Advance the cursor, wrapping at the clip's capacity (ring buffer).
    lastSamplePlayed = (lastSamplePlayed + sound.samples.Length) % MicrophoneSettings.MaxAudioClipSamples;
}
/// <summary>
/// Encodes a raw audio buffer and ships it as the next sequenced packet on
/// this route; only the very first packet carries the Mark flag.
/// </summary>
public void Write(ArraySegment <byte> bytes)
{
    // TODO thread queued.
    var encoded = _codec.Encode(bytes);
    var outgoing = new AudioPacket
    {
        Mark = _first,
        SequenceId = ++_sequenceId,
        RouteId = Id,
        Payload = encoded,
    };
    _first = false;
    _transport.Send(outgoing);
}
/// <summary>
/// Packing a fully populated packet must produce the exact expected wire bytes.
/// </summary>
public void Pack_Correct_Success()
{
    // Arrange: expected frame = header (length/route/mark/sequence) + payload.
    var expected = new byte[] { 14, 0, 100, 0, 0, 0, 1, 200, 0, 0, 0, 1, 2, 3 };
    _packet = new AudioPacket
    {
        RouteId = 100,
        Mark = true,
        SequenceId = 200,
        Payload = new byte[] { 1, 2, 3 },
    };

    // Act
    _packetBytes = _packet.Pack().ToArray();

    // Assert
    CollectionAssert.AreEqual(expected, _packetBytes);
}
/// <summary>
/// Serializes an AudioPacket to bytes. Placeholder codec: uses XmlSerializer
/// for testing only — swap in Google.Protobuf/protobuf-net for production.
/// </summary>
/// <returns>The serialized bytes, or null when serialization fails.</returns>
byte[] GetPbAudioPacket(AudioPacket packet)
{
    // FIX: renamed misspelled local ("memorry") and dropped the unused
    // exception variable (CS0168 warning).
    using (MemoryStream memory = new MemoryStream())
    {
        try
        {
            new XmlSerializer(typeof(AudioPacket)).Serialize(memory, packet);
            return memory.ToArray();
        }
        catch (Exception)
        {
            // Callers treat null as "nothing to send"; swallow and return null.
            return null;
        }
    }
}
/// <summary>
/// Test loop: while "living", pulls the latest captured A/V frames from the
/// SDK and immediately feeds them back through the local decoders.
/// todo: send audio and video via your network — refer to chatDataHandler.cs.
/// </summary>
private void Update()
{
    if (!living)
    {
        return;
    }

    VideoPacket videoPacket = UnityChatSDK.Instance.GetVideo();
    AudioPacket audioPacket = UnityChatSDK.Instance.GetAudio();

    if (videoPacket != null && videoPacket.Data != null)
    {
        DecodeVideoData(videoPacket);
    }
    if (audioPacket != null && audioPacket.Data != null)
    {
        DecodeAudiooData(audioPacket);
    }
}
/// <summary>
/// Background pump: slices the capture stream buffer into fixed-size packets
/// and forwards them to the audio stream until recording stops.
/// </summary>
private void ForwardBuffToListeners()
{
    // NOTE(review): a single AudioPacket instance (and its data array) is
    // reused for every read — assumes AddPacket copies or fully consumes the
    // data before the next iteration overwrites it; confirm in the stream impl.
    AudioPacket packet = new AudioPacket()
    {
        sampleByteSize = 2, // 2 bytes per sample (16-bit)
        sampleRate = this.sampleRate,
        data = new byte[this.maxPacketLen]
    };

    while (this.recording == true)
    {
        // Drain every complete packet currently buffered.
        while (this.streamBuff.BufferedBytes >= this.maxPacketLen)
        {
            this.streamBuff.Read(packet.data, 0, this.maxPacketLen);
            this.audioStream.AddPacket(packet);
        }
        // Sleep a fraction of the device buffer period to balance latency vs CPU.
        Thread.Sleep(this.waveIn.BufferMilliseconds / 3);
    }
}
//==================send data========================
/// <summary>
/// Grabs one SDK-captured audio frame, encodes it, sends it over the demo
/// network path, and loops it back into the receive queue for local testing.
/// </summary>
void SendAudio()
{
    // Capture audio data via the SDK.
    AudioPacket packet = UnityChatSDK.Instance.GetAudio();
    if (packet == null)
    {
        return;
    }

    packet.Id = TestUid; //use your userID
    byte[] audio = GetAudioPacketData(packet);
    if (audio == null)
    {
        return;
    }

    //send data through your own network,such as TCP,UDP,P2P,Webrct,Unet,Photon...,the demo uses UDP for testing.
    SendDataByYourNetwork(audio);

    //On receiving audio data,just for testing
    ReceivedAudioDataQueue.Enqueue(audio);
}
/// <summary>
/// During an active call, audio arriving from one peer's endpoint must be
/// forwarded to the other peer (and vice versa), byte-for-byte.
/// </summary>
public void GetAudio_ActiveCall_SendOnRoutes()
{
    // Arrange: establish an active call between User1 and User2.
    CallInvite_CallingState_InActive();
    _coreTests.Authorization.TryGet("User1", out var user1);
    _coreTests.Authorization.TryGet("User2", out var user2);

    // One packed audio frame originating from each user's media endpoint.
    var route1 = new IPEndPoint(user1.Remote.Address, 8888);
    var sended1 = new AudioPacket { RouteId = 1, SequenceId = 100, Payload = new byte[] { 1 }, }
                  .Pack();
    var route2 = new IPEndPoint(user2.Remote.Address, 7777);
    var sended2 = new AudioPacket { RouteId = 1, SequenceId = 200, Payload = new byte[] { 2 }, }
                  .Pack();

    // Each side should receive exactly what the other side sent.
    var received2 = sended1;
    var received1 = sended2;
    _coreTests.ExpectedEvent.Add(new TestEvent(route2, received2));
    _coreTests.ExpectedEvent.Add(new TestEvent(route1, received1));

    // Act: feed both raw datagrams into the mocked network layer.
    _coreTests.NetworkMoq.Raise(s => s.PreparePacket += null, route1, sended1.Array, sended1.Offset, sended1.Count);
    _coreTests.NetworkMoq.Raise(s => s.PreparePacket += null, route2, sended2.Array, sended2.Offset, sended2.Count);

    // Assert: forwarding happened on both routes, in order.
    CollectionAssert.AreEqual(_coreTests.ExpectedEvent, _coreTests.ActualEvent);
}
/// <summary>
/// Starts a background task that joins the multicast group and schedules every
/// received audio packet for playback at a position computed from its embedded
/// timestamp, compensating for jitter, configured delay, and clock offset.
/// </summary>
public void Start()
{
    Task.Run(async() =>
    {
        using (UdpClient udpClient = new UdpClient(Constants.Network.PORT))
        {
            //IPEndPoint localEp = new IPEndPoint(GetLocalIps().First(), Constants.Network.PORT);
            //udpClient.Client.Bind(localEp);
            IPAddress multicastAddr = IPAddress.Parse(Host);
            udpClient.JoinMulticastGroup(multicastAddr);
            while (true)//TODO: can stop
            {
                var data = await udpClient.ReceiveAsync();
                // Deserialize the datagram back into a packet (protobuf-net).
                AudioPacket audioPacket = Serializer.Deserialize <AudioPacket>(new MemoryStream(data.Buffer));

                // Reconstruct where this buffer belongs on the shared timeline:
                // seconds since the zero position (jitter/delay/offset corrected),
                // scaled to samples, then to a byte offset.
                // NOTE(review): assumes sender and receiver clocks are aligned
                // via LocalClockOffset — verify the calibration path.
                DateTime sampleStartTime = new DateTime(audioPacket.StartTicks);
                long targetSample = (long)((sampleStartTime - zeroPositionTime - LocalJitter + Delay - TimeSpan.FromMilliseconds(LocalClockOffset)).TotalSeconds * Constants.Audio.SAMPLE_RATE);
                long targetPosition = targetSample * Constants.Audio.BIT_DEPTH / 8 * Constants.Audio.CHANNELS;
                WavePlayer.AddAudioData(targetPosition, audioPacket.AudioData);
            }
        }
    });
}
/// <summary>
/// Per-frame A/V pump for local test mode: hides the UI when the mouse is
/// idle, captures microphone/camera data through VideoChat, and loops the
/// resulting packets straight back into the local receive path.
/// </summary>
void Update()
{
    // You can utilize VideoChat.receivedAudioPackets and VideoChat.receivedVideoPackets to save/record AV data coming over the network
    // Otherwise, this clears those packets (not recording)
    // Comment this out or add conditional logic to control the recording process and then do something interesting with those lists of packets
    VideoChat.ClearReceivedPackets();

    // Hide the UI after the mouse has been still for mouseStillThreshold frames.
    if (Input.mousePosition.x == lastMousePosition.x && Input.mousePosition.y == lastMousePosition.y)
    {
        mouseStillCount++;
        if (mouseStillCount > mouseStillThreshold)
        {
            UI = false;
        }
    }
    else
    {
        mouseStillCount = 0;
        UI = true;
    }
    lastMousePosition = Input.mousePosition;

    if (Input.GetKey(KeyCode.Escape))
    {
        Application.Quit();
    }

    VideoChat.PreVideo();

    // Outside of test mode there is no loopback target: drop captured audio
    // and finish the frame early.
    if (!testMode)
    {
        VideoChat.ClearAudioOut();
        VideoChat.PostVideo();
        return;
    }

    #region AUDIO
    if (testMode)
    {
        VideoChat.audioThreshold = audioThreshold;
    }
    VideoChat.SetEchoCancellation(echoCancellation);

    //Collect source audio, this will create a new AudioPacket and add it to the audioPackets list in the VideoChat static class
    VideoChat.FromAudio();

    //Send the latest VideoChat audio packet for a local test or your networking library of choice, in this case Unity Networking
    int numPackets = VideoChat.audioPackets.Count;
    AudioPacket[] tempAudioPackets = new AudioPacket[numPackets];
    // Snapshot the list so removal below does not disturb iteration.
    VideoChat.audioPackets.CopyTo(tempAudioPackets);
    for (int i = 0; i < numPackets; i++)
    {
        AudioPacket currentPacket = tempAudioPackets[i];
        if (testMode)
        {
            ReceiveAudio(currentPacket.position, currentPacket.length, currentPacket.data, currentPacket.timestamp); //Test mode just plays back on one machine
        }
        VideoChat.audioPackets.Remove(tempAudioPackets[i]);
    }
    #endregion

    #region VIDEO
    //Collect source video, this will create a new VideoPacket(s) and add it(them) to the videoPackets list in the VideoChat static class
    VideoChat.FromVideo();

    // Throttle to at most packetsPerFrame video packets per frame.
    numPackets = VideoChat.videoPackets.Count > VideoChat.packetsPerFrame ?
                 VideoChat.packetsPerFrame : VideoChat.videoPackets.Count;
    VideoPacket[] tempVideoPackets = new VideoPacket[VideoChat.videoPackets.Count];
    VideoChat.videoPackets.CopyTo(tempVideoPackets);

    //Send the latest VideoChat video packets for a local test or your networking library of choice, in this case Unity Networking
    for (int i = 0; i < numPackets; i++)
    {
        VideoPacket currentPacket = tempVideoPackets[i];
        if (testMode)
        {
            ReceiveVideo(currentPacket.x, currentPacket.y, currentPacket.data, currentPacket.timestamp); //Test mode just displays on one machine
        }
        VideoChat.videoPackets.Remove(tempVideoPackets[i]);
    }
    VideoChat.PostVideo();
    #endregion
}
//todo send audio and video via your network refer to chatDataHandler.cs
/// <summary>
/// Stub: captures one audio frame from the SDK but does not transmit it yet —
/// wire in your own networking here (see chatDataHandler.cs).
/// </summary>
void SendAudio()
{
    AudioPacket packet = UnityChatSDK.Instance.GetAudio();
}
/// <summary>
/// Forwards a received audio packet to the SDK decoder under its sender id.
/// </summary>
public void DecodeAudiooData(AudioPacket audioPacket) =>
    UnityChatSDK.Instance.DecodeAudioData(audioPacket.Id, audioPacket);
/// <summary>
/// Serializes an AudioPacket for transport via the generic object serializer.
/// (You can swap in a codec such as google.protobuf/protobuf-net instead.)
/// </summary>
byte[] GetAudioPacketData(AudioPacket packet) => ObjectToBytes(packet);
/// <summary>
/// Per-frame A/V pump (legacy Unity Networking build): hides the UI when the
/// mouse is idle, captures A/V through VideoChat, and either loops packets
/// back locally (test mode) or broadcasts them via RPC to connected peers.
/// </summary>
void Update()
{
    // You can utilize VideoChat.receivedAudioPackets and VideoChat.receivedVideoPackets to save/record AV data coming over the network
    // Otherwise, this clears those packets (not recording)
    // Comment this out or add conditional logic to control the recording process and then do something interesting with those lists of packets
    VideoChat.ClearReceivedPackets();

    // Hide the UI after the mouse has been still for mouseStillThreshold frames.
    if (Input.mousePosition.x == lastMousePosition.x && Input.mousePosition.y == lastMousePosition.y)
    {
        mouseStillCount++;
        if (mouseStillCount > mouseStillThreshold)
        {
            UI = false;
        }
    }
    else
    {
        mouseStillCount = 0;
        UI = true;
    }
    lastMousePosition = Input.mousePosition;

    if (Input.GetKey(KeyCode.Escape))
    {
        Application.Quit();
    }

    //This is new in version 1.004, initializes things early for thumbnail
    VideoChat.PreVideo();

    // Bail out when there is no one to talk to: not in test mode and
    // disconnected, or connected but with zero peers.
    if ((!testMode && Network.peerType == NetworkPeerType.Disconnected) || (Network.peerType != NetworkPeerType.Disconnected && Network.connections.Length < 1))
    {
        VideoChat.PostVideo();
        return;
    }
    // In one-to-many mode only the server broadcasts; clients (or a server
    // with no connections) skip capture entirely.
    if (oneToManyBroadcast)
    {
        if ((!testMode && Network.peerType != NetworkPeerType.Server) || (Network.peerType == NetworkPeerType.Server && Network.connections.Length < 1))
        {
            VideoChat.PostVideo();
            return;
        }
    }

    #region AUDIO
    VideoChat.audioThreshold = audioThreshold;

    //Collect source audio, this will create a new AudioPacket and add it to the audioPackets list in the VideoChat static class
    VideoChat.FromAudio();

    //Send the latest VideoChat audio packet for a local test or your networking library of choice, in this case Unity Networking
    int numPackets = VideoChat.audioPackets.Count;
    AudioPacket[] tempAudioPackets = new AudioPacket[numPackets];
    // Snapshot the list so removal below does not disturb iteration.
    VideoChat.audioPackets.CopyTo(tempAudioPackets);
    for (int i = 0; i < numPackets; i++)
    {
        AudioPacket currentPacket = tempAudioPackets[i];
        if (testMode)
        {
            ReceiveAudio(currentPacket.position, currentPacket.length, currentPacket.data, System.Convert.ToString(currentPacket.timestamp)); //Test mode just plays back on one machine
        }
        else
        {
            // NOTE(review): timestamp is stringified — presumably because the
            // legacy RPC layer cannot marshal its type directly; confirm.
            audioView.RPC("ReceiveAudio", RPCMode.Others, currentPacket.position, currentPacket.length, currentPacket.data, System.Convert.ToString(currentPacket.timestamp)); //Unity Networking
        }
        VideoChat.audioPackets.Remove(tempAudioPackets[i]);
    }
    #endregion

    #region VIDEO
    // Scale the network send rate with the per-frame packet budget.
    Network.sendRate = (int)(VideoChat.packetsPerFrame + ((1 / Time.fixedDeltaTime) / 10));

    //Collect source video, this will create a new VideoPacket(s) and add it(them) to the videoPackets list in the VideoChat static class
    VideoChat.FromVideo();

    // Throttle to at most packetsPerFrame video packets per frame.
    numPackets = VideoChat.videoPackets.Count > VideoChat.packetsPerFrame ? VideoChat.packetsPerFrame : VideoChat.videoPackets.Count;
    VideoPacket[] tempVideoPackets = new VideoPacket[VideoChat.videoPackets.Count];
    VideoChat.videoPackets.CopyTo(tempVideoPackets);

    //Send the latest VideoChat video packets for a local test or your networking library of choice, in this case Unity Networking
    for (int i = 0; i < numPackets; i++)
    {
        VideoPacket currentPacket = tempVideoPackets[i];
        if (testMode)
        {
            ReceiveVideo(currentPacket.x, currentPacket.y, currentPacket.data, System.Convert.ToString(currentPacket.timestamp)); //Test mode just displays on one machine
        }
        else
        {
            videoView.RPC("ReceiveVideo", RPCMode.Others, currentPacket.x, currentPacket.y, currentPacket.data, System.Convert.ToString(currentPacket.timestamp)); //Unity Networking
        }
        VideoChat.videoPackets.Remove(tempVideoPackets[i]);
    }
    VideoChat.PostVideo();
    #endregion
}