/// <summary>
/// Called when the wave-in device has captured a new audio frame. Advances the
/// outgoing RTP timestamp, encodes the frame with the currently active codec
/// and sends the resulting packet over the RTP stream.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Event data carrying the raw audio frame bytes.</param>
private void m_pWaveIn_AudioFrameReceived(object sender, EventArgs<byte[]> e)
{
    try
    {
        // Resynchronise when we have no timestamp base yet, or when the RTP
        // clock has recycled past our last value; otherwise advance by the
        // RTP-tick duration of one audio frame.
        // NOTE(review): an additional resync on missing sample blocks /
        // silence suppression was tried here before but is disabled pending
        // further investigation.
        if (m_RtpTimeStamp == 0 || m_RtpTimeStamp > m_pRTP_Stream.Session.RtpClock.RtpTimestamp)
        {
            m_RtpTimeStamp = m_pRTP_Stream.Session.RtpClock.RtpTimestamp;
        }
        else
        {
            m_RtpTimeStamp += (uint)m_pRTP_Stream.Session.RtpClock.MillisecondsToRtpTicks(m_AudioFrameSize);
        }

        // No codec negotiated yet — nothing to encode or send.
        if (m_pActiveCodec == null)
        {
            return;
        }

        RTP_Packet rtpPacket = new RTP_Packet();
        rtpPacket.Data = m_pActiveCodec.Encode(e.Value, 0, e.Value.Length);
        rtpPacket.Timestamp = m_RtpTimeStamp;
        m_pRTP_Stream.Send(rtpPacket);
    }
    catch (Exception x)
    {
        // We are on a thread-pool thread, so we cannot let the exception
        // propagate — surface it through the error event instead.
        if (!this.IsDisposed)
        {
            OnError(x);
        }
    }
}
/// <summary>
/// Encodes a block of uncompressed audio and transmits the resulting RTP
/// packets over UDP to the configured destination endpoint.
/// </summary>
/// <param name="bUncompressedAudio">Raw little-endian 16-bit PCM audio bytes.</param>
protected void SendAudio(byte[] bUncompressedAudio)
{
    short[] sUncompressedAudio = AudioClasses.Utils.ConvertByteArrayToShortArrayLittleEndian(bUncompressedAudio);

    // Encode under SendLock so the codec's internal state is never used
    // concurrently. (Fixed: the original allocated a throwaway empty array
    // here that was immediately overwritten — a plain declaration with
    // definite assignment inside the lock avoids the useless allocation.)
    RTPPacket[] packets;
    lock (SendLock)
    {
        packets = AudioCodec.Encode(sUncompressedAudio);
    }

    // Serialize socket access so packets leave in order.
    lock (SocketLock)
    {
        foreach (RTPPacket packet in packets)
        {
            // Deliberately called even when RTPUDPClient is null —
            // presumably FormatNextPacket stamps sequence/header state that
            // must stay consistent across packets; TODO confirm.
            FormatNextPacket(packet);
            byte[] bDataPacket = packet.GetBytes();
            if (RTPUDPClient != null)
            {
                RTPUDPClient.SendUDP(bDataPacket, bDataPacket.Length, DestinationEndpoint);
            }
        }
    }
}
// FixedUpdate: per-physics-tick push-to-talk voice capture and send.
// On the owning (non-server) client: while the talk button is held, reads new
// microphone samples from the looping clip buffer, optionally skips silent
// frames, clamps/amplifies, encodes and RPCs the bytes to the server.
void FixedUpdate() {
    // The server never captures audio itself.
    if (GameManager.isServer) {
        return;
    }
    if (networkViewIsMine) {
        if (InputManager.instance.GetButton(pushToTalkButton)) {
            // Current write position in the circular microphone clip buffer.
            int pos = Microphone.GetPosition(null);
            int diff;
            if (pos >= lastPos) {
                diff = pos - lastPos;
            } else {
                // Write head wrapped around the circular buffer; count the
                // tail segment plus the wrapped-around head segment.
                diff = (microphoneClip.samples - lastPos) + pos;
            }
            // Only send once a full buffer's worth of new samples is available.
            if (diff >= sampleBufferSize) {
                //LogManager.General.Log(Time.time - lastSend + ": " + diff);
                samples = new float[sampleBufferSize]; // * microphoneClip.channels
                microphoneClip.GetData(samples, lastPos);
                // Noise removal: count samples inside the [-noiseLimit, noiseLimit]
                // dead band.
                if (idleDetectionEnabled) {
                    int idleCounter = 0;
                    for (int i = 0; i < samples.Length; i++) {
                        float val = samples[i];
                        if (val < noiseLimit && val > -noiseLimit) {
                            idleCounter++;
                        }
                    }
                    // Don't send if it's completely calm — an empty payload
                    // still goes out, presumably as a keep-alive/silence
                    // marker; TODO confirm server-side handling.
                    // NOTE(review): this early return skips the lastPos
                    // update below, so silent audio stays pending in the
                    // buffer — verify this is intentional.
                    if (idleCounter == samples.Length) {
                        networkView.RPC("VoIPData", uLink.RPCMode.Server, new byte[0]);
                        return;
                    }
                }
                // Clamp: apply gain and keep each sample within [-1, 1].
                for (int i = 0; i < samples.Length; i++) {
                    samples[i] = Mathf.Clamp(samples[i] * volumeMultiplier, -1f, 1f);
                }
                // Encode asynchronously-styled via callback: ship the
                // compressed bytes to the server and also feed them to the
                // local handler (e.g. for self-monitoring/visualization).
                codec.Encode(samples, (bytes, len) => {
                    networkView.RPC("VoIPData", uLink.RPCMode.Server, bytes, len);
                    OnVoIPData(bytes, len, null);
                });
                //lastSend = Time.time;
                // Advance lastPos by exactly sampleBufferSize samples
                // (consumed amount), wrapping back into the circular buffer
                // if the subtraction goes negative.
                lastPos = pos - (diff - sampleBufferSize);
                if (lastPos < 0) {
                    lastPos = microphoneClip.samples + lastPos;
                }
            }
        } else {
            // Talk button released: stop visualizing, pause playback and
            // resynchronize lastPos so stale audio isn't sent on next press.
            visualizationSamples = null;
            audioSources[0].Pause();
            lastPos = Microphone.GetPosition(null);
        }
    } else {
        // Remote player's object: clear visualization once its speaker stops.
        if (!speaker.isPlaying) {
            visualizationSamples = null;
            audioSources[0].Pause();
        }
    }
    // Drive the visualization animation clock.
    visualizationTime += Time.deltaTime * sendPacketFrequency;
}