コード例 #1
0
    // Attach an AudioSource, decompress the received voice payload into an AudioClip,
    // play it, and schedule cleanup for shortly after playback should have finished.
    //   data - compressed voice payload; null means "nothing to play" and triggers cleanup
    //   id   - identifier passed back through the end callback
    //   end  - invoked with this.id when playback ends and this object is cleared
    public void SetData(byte[] data, string id, Action <string> end)
    {
        this.id = id;
        onEnd   = end;
        // NOTE(review): the AudioSource is added even when data is null; presumably
        // Clear() removes/handles it — confirm against Clear()'s implementation.
        ac      = this.gameObject.AddComponent <AudioSource> ();
        if (data == null)
        {
            this.Clear();
            return;
        }
// Earlier implementation (manual decompress + byte->float conversion), kept for reference:
//		byte[] bs = Tools.Decompress (data);
//		float[] da = new float[bs.Length / 4];
//		for (var i = 0; i < da.Length; i++)
//		{
//			da [i] = BitConverter.ToSingle (bs, i * 4);
//		}
//		ac.clip = AudioClip.Create ("clip", da.Length, 1, frequency, false);
//		ac.clip.SetData (da, 0);
        ac.clip = USpeakAudioClipCompressor.DecompressAudioClip(data, 0, 1, false, 1.0f);
        ac.mute = false;
        ac.Play();
        // Countdown timer: clear the voice object one second after the clip should have finished.
        clearTimer = TimerManager.inst.Add(ac.clip.length + 1, 1, (float time) => {
            if (isClear)
            {
                return;
            }
            this.onEnd(this.id);
            this.Clear();
        });
    }
コード例 #2
0
    /// <summary>
    /// Decode and buffer audio data to be played
    /// </summary>
    /// <param name="data">The data passed to USpeakOnSerializeAudio()</param>
    public void ReceiveAudio(byte[] data)
    {
        if (settings == null)
        {
            Debug.LogWarning("Trying to receive remote audio data without calling InitializeSettings!\nIncoming packet will be ignored");
            // BUG FIX: previously execution fell through after this warning and
            // dereferenced settings.bandMode below, throwing NullReferenceException.
            return;
        }

        // Ignore packets when muted, or for the local speaker unless debug playback is on.
        if (MuteAll || Mute || (SpeakerMode == SpeakerMode.Local && !DebugPlayback))
        {
            return;
        }

        // Incoming remote speech refreshes the "is talking" indicator timer.
        if (SpeakerMode == SpeakerMode.Remote)
        {
            talkTimer = 1.0f;
        }

        int offset = 0;

        // Packet layout: repeated frames of [int32 payload length][6-byte header + payload].
        while (offset < data.Length)
        {
            int    len   = System.BitConverter.ToInt32(data, offset);
            byte[] frame = new byte[len + 6];
            System.Array.Copy(data, offset, frame, 0, frame.Length);

            USpeakFrameContainer cont = default(USpeakFrameContainer);
            cont.LoadFrom(frame);
            playBuffer.Add(USpeakAudioClipCompressor.DecompressAudioClip(cont.encodedData, (int)cont.Samples, 1, false, settings.bandMode, RemoteGain));

            offset += frame.Length;
        }
    }
コード例 #3
0
    // Stop any in-progress playback/recording on the AudioSource and compress its
    // recorded AudioClip into a byte payload suitable for network transmission.
    // Returns null when no clip has been recorded.
    private byte[] GetData()
    {
        if (ac.clip == null)
        {
            return(null);
        }
        ac.Stop();

        // Compress via the USpeak codec path. (An older manual float->byte +
        // Tools.Compress pipeline that lived here as commented-out code was removed.)
        int s;

        byte[] b = USpeakAudioClipCompressor.CompressAudioClip(ac.clip, out s, 1.0f);
        return(b);
    }
コード例 #4
0
ファイル: MyLocalUSpeakSender.cs プロジェクト: liuhaili/Chess
    // Decode a serialized USpeak packet into a sequence of AudioClips and schedule
    // them for back-to-back playback at this object's position.
    // NOTE(review): the return value appears intended to be a playback duration after
    // an 8000Hz -> 44100Hz rate conversion, but the units (the trailing "/ 1000")
    // are unusual — verify the expected unit against callers before relying on it.
    public ulong PlaySound(byte[] data)
    {
        List <AudioClip> clipList = new List <AudioClip>();
        int offset = 0;

        // Packet layout: repeated frames of [int32 payload length][6-byte header + payload].
        while (offset < data.Length)
        {
            int    len   = System.BitConverter.ToInt32(data, offset);
            byte[] frame = new byte[len + 6];
            System.Array.Copy(data, offset, frame, 0, frame.Length);

            USpeakFrameContainer cont = default(USpeakFrameContainer);
            cont.LoadFrom(frame);

            AudioClip clip = USpeakAudioClipCompressor.DecompressAudioClip(cont.encodedData, (int)cont.Samples, 1, false, BandMode.Narrow, 1);
            //GetComponent<AudioSource>().clip = clip;
            //GetComponent<AudioSource>().Play();
            clipList.Add(clip);
            offset += frame.Length;
        }

        ulong delay       = 0;
        ulong totalLength = 0;

        // Queue each clip sequentially; the delay advances by the clip's sample count
        // rescaled from the 8000Hz capture rate to the 44100Hz output rate.
        foreach (AudioClip clip in clipList)
        {
            USpeakAudioManager.PlayClipAtPoint(clip, transform.position, delay, false);
            delay       += (uint)((44100.0f / (float)8000) * ((uint)clip.samples));
            totalLength += (uint)clip.samples;
        }
        return((ulong)(totalLength * (float)8000 / 44100.0f) / 1000);
    }
コード例 #5
0
ファイル: USpeaker.cs プロジェクト: Virobeast2/RCLIENT
    // Compress a block of raw PCM samples with the currently selected codec/band
    // mode and queue the resulting frame on the outgoing send buffer.
    private void ProcessPendingEncode(float[] pcm)
    {
        int sampleCount;
        byte[] encoded = USpeakAudioClipCompressor.CompressAudioData(pcm, 1, out sampleCount, this.lastBandMode, this.codecMgr.Codecs[this.lastCodec], LocalGain);

        USpeakFrameContainer frame = default(USpeakFrameContainer);
        frame.Samples     = (ushort)sampleCount;
        frame.encodedData = encoded;

        this.sendBuffer.Add(frame);
    }
コード例 #6
0
ファイル: WSpeak.cs プロジェクト: pikaqiufk/Client
    // Encode a chunk of microphone PCM data and append the resulting frame to the
    // outgoing send buffer (flushed to the network elsewhere on a timer).
    void ProcessPendingEncode(float[] pcm)
    {
        int sampleCount;
        byte[] encoded = USpeakAudioClipCompressor.CompressAudioData(pcm, 1, out sampleCount, bandWidthMode, LocalGain);

        USpeakFrameContainer cont = new USpeakFrameContainer {
            Samples     = (ushort)sampleCount,
            encodedData = encoded
        };

        sendBuffer.Add(cont);
    }
コード例 #7
0
ファイル: USpeaker.cs プロジェクト: sknchan/LegacyRust
    // Compress raw PCM samples using the last-selected band mode and codec, then
    // enqueue the encoded frame for transmission.
    private void ProcessPendingEncode(float[] pcm)
    {
        int sampleCount;
        byte[] encoded = USpeakAudioClipCompressor.CompressAudioData(pcm, 1, out sampleCount, this.lastBandMode, this.codecMgr.Codecs[this.lastCodec], USpeaker.LocalGain);

        USpeakFrameContainer frame = default(USpeakFrameContainer);
        frame.Samples     = (ushort)sampleCount;
        frame.encodedData = encoded;

        this.sendBuffer.Add(frame);
    }
コード例 #8
0
ファイル: USpeaker.cs プロジェクト: sknchan/LegacyRust
 // Decode incoming voice frames and stream them into this speaker's looping
 // AudioClip ring buffer. (Decompiled code: variable names are synthetic.)
 public void ReceiveAudio(byte[] data)
 {
     byte[] num = null;
     if (this.settings == null)
     {
         UnityEngine.Debug.LogWarning("Trying to receive remote audio data without calling InitializeSettings!\nIncoming packet will be ignored");
         return;
     }
     // Drop packets when muted, or for the local speaker unless debug playback is enabled.
     if (USpeaker.MuteAll || this.Mute || this.SpeakerMode == SpeakerMode.Local && !this.DebugPlayback)
     {
         return;
     }
     // Remote speech refreshes the "is talking" indicator timer.
     if (this.SpeakerMode == SpeakerMode.Remote)
     {
         this.talkTimer = 1f;
     }
     // Packet layout: repeated frames of [int32 payload length][6-byte header + payload].
     for (int i = 0; i < (int)data.Length; i = i + (int)num.Length)
     {
         int num1 = BitConverter.ToInt32(data, i);
         // Frame buffer is rented from the pool and returned right after parsing.
         num = USpeakPoolUtils.GetByte(num1 + 6);
         Array.Copy(data, i, num, 0, (int)num.Length);
         USpeakFrameContainer uSpeakFrameContainer = new USpeakFrameContainer();
         uSpeakFrameContainer.LoadFrom(num);
         USpeakPoolUtils.Return(num);
         float[]  singleArray = USpeakAudioClipCompressor.DecompressAudio(uSpeakFrameContainer.encodedData, (int)uSpeakFrameContainer.Samples, 1, false, this.settings.bandMode, this.codecMgr.Codecs[this.Codec], USpeaker.RemoteGain);
         float    length      = (float)((int)singleArray.Length) / (float)this.audioFrequency;
         USpeaker uSpeaker    = this;
         uSpeaker.received = uSpeaker.received + (double)length;
         // Copy decoded samples into the ring buffer at the current write index.
         Array.Copy(singleArray, 0, this.receivedData, this.index, (int)singleArray.Length);
         // NOTE(review): singleArray.Length is read below after the array has been
         // returned to the pool; the reference keeps Length valid, but this ordering
         // is fragile if pooling semantics ever change.
         USpeakPoolUtils.Return(singleArray);
         USpeaker length1 = this;
         length1.index = length1.index + (int)singleArray.Length;
         // Wrap the write index when it reaches the end of the clip buffer.
         if (this.index >= base.audio.clip.samples)
         {
             this.index = 0;
         }
         base.audio.clip.SetData(this.receivedData, 0);
         if (!base.audio.isPlaying)
         {
             this.shouldPlay = true;
             // Delay playback to let more data arrive (network latency smoothing).
             if (this.playDelay <= 0f)
             {
                 this.playDelay = length * 2f;
             }
         }
     }
 }
コード例 #9
0
ファイル: USpeaker.cs プロジェクト: Virobeast2/RCLIENT
 // Decode incoming voice frames and stream them into this speaker's looping
 // AudioClip ring buffer. (Decompiled code: variable names are synthetic.)
 public void ReceiveAudio(byte[] data)
 {
     if (this.settings == null)
     {
         UnityEngine.Debug.LogWarning("Trying to receive remote audio data without calling InitializeSettings!\nIncoming packet will be ignored");
     }
     // Process only when not muted, and only for remote speakers unless debug playback is on.
     else if ((!MuteAll && !this.Mute) && ((this.SpeakerMode != SpeakerMode.Local) || this.DebugPlayback))
     {
         byte[] @byte;
         // Remote speech refreshes the "is talking" indicator timer.
         if (this.SpeakerMode == SpeakerMode.Remote)
         {
             this.talkTimer = 1f;
         }
         // Packet layout: repeated frames of [int32 payload length][6-byte header + payload].
         for (int i = 0; i < data.Length; i += @byte.Length)
         {
             // Frame buffer is rented from the pool and returned right after parsing.
             @byte = USpeakPoolUtils.GetByte(BitConverter.ToInt32(data, i) + 6);
             Array.Copy(data, i, @byte, 0, @byte.Length);
             USpeakFrameContainer container = new USpeakFrameContainer();
             container.LoadFrom(@byte);
             USpeakPoolUtils.Return(@byte);
             float[] sourceArray = USpeakAudioClipCompressor.DecompressAudio(container.encodedData, container.Samples, 1, false, this.settings.bandMode, this.codecMgr.Codecs[this.Codec], RemoteGain);
             float   num3        = ((float)sourceArray.Length) / ((float)this.audioFrequency);
             this.received += num3;
             // Copy decoded samples into the ring buffer at the current write index.
             Array.Copy(sourceArray, 0, this.receivedData, this.index, sourceArray.Length);
             // NOTE(review): sourceArray.Length is read below after the array has been
             // returned to the pool; the reference keeps Length valid, but this ordering
             // is fragile if pooling semantics ever change.
             USpeakPoolUtils.Return(sourceArray);
             this.index += sourceArray.Length;
             // Wrap the write index when it reaches the end of the clip buffer.
             if (this.index >= base.audio.clip.samples)
             {
                 this.index = 0;
             }
             base.audio.clip.SetData(this.receivedData, 0);
             if (!base.audio.isPlaying)
             {
                 this.shouldPlay = true;
                 // Delay playback to let more data arrive (network latency smoothing).
                 if (this.playDelay <= 0f)
                 {
                     this.playDelay = num3 * 2f;
                 }
             }
         }
     }
 }
コード例 #10
0
ファイル: USpeaker.cs プロジェクト: liuhaili/Chess
    //Called when new audio data is available from the microphone
    void OnAudioAvailable(float[] pcmData)
    {
        //encode the data, add it to the send buffer
        //audio data is flushed from the send buffer on a user-configurable timer, to avoid flooding the network

        // Wrap the raw PCM in a temporary AudioClip so it can be fed to the
        // clip-based compressor API.
        AudioClip temp = AudioClip.Create("temp", pcmData.Length, 1, audioFrequency, false, false);

        temp.SetData(pcmData, 0);

        int s;

        byte[] b = USpeakAudioClipCompressor.CompressAudioClip(temp, out s, BandwidthMode, LocalGain);

        // BUG FIX: AudioClip.Create allocates a native clip that Unity does not
        // garbage-collect; without an explicit Destroy this leaked one clip per
        // microphone callback.
        Destroy(temp);

        USpeakFrameContainer cont = default(USpeakFrameContainer);

        cont.Samples     = (ushort)s;
        cont.encodedData = b;

        sendBuffer.Add(cont);
    }
コード例 #11
0
    /// <summary>
    /// Decode and buffer audio data to be played
    /// </summary>
    /// <param name="data">The data passed to USpeakOnSerializeAudio()</param>
    public void ReceiveAudio(byte[] data)
    {
        if (settings == null)
        {
            Debug.LogWarning("Trying to receive remote audio data without calling InitializeSettings!\nIncoming packet will be ignored");
            return;
        }

        // Drop packets when muted, or for the local speaker unless debug playback is enabled.
        if (MuteAll || Mute || (SpeakerMode == SpeakerMode.Local && !DebugPlayback))
        {
            return;
        }

        // Remote speech refreshes the "is talking" indicator timer.
        if (SpeakerMode == SpeakerMode.Remote)
        {
            talkTimer = 1.0f;
        }

        int offset = 0;

        // Packet layout: repeated frames of [int32 payload length][6-byte header + payload].
        while (offset < data.Length)
        {
            int    len   = System.BitConverter.ToInt32(data, offset);
            // Frame buffer is rented from the pool and returned right after parsing.
            byte[] frame = USpeakPoolUtils.GetByte(len + 6);
            System.Array.Copy(data, offset, frame, 0, frame.Length);

            USpeakFrameContainer cont = default(USpeakFrameContainer);
            cont.LoadFrom(frame);

            // NOTE(review): frame.Length is read again at the bottom of the loop after
            // the buffer is returned to the pool; the reference keeps Length valid, but
            // this ordering is fragile if pooling semantics ever change.
            USpeakPoolUtils.Return(frame);

            float[] sample = USpeakAudioClipCompressor.DecompressAudio(cont.encodedData, (int)cont.Samples, 1, false, settings.bandMode, codecMgr.Codecs[Codec], RemoteGain);

            // Track how many seconds of audio have been received in total.
            float sampleTime = ((float)sample.Length / (float)audioFrequency);
            received += sampleTime;

            System.Array.Copy(sample, 0, receivedData, index, sample.Length);

            USpeakPoolUtils.Return(sample);

            // advance the write position into the audio clip
            index += sample.Length;

            // if the write position extends beyond the clip length, wrap around
            if (index >= audio.clip.samples)
            {
                index = 0;
            }

            // write received data to audio clip
            audio.clip.SetData(receivedData, 0);

            // not already playing audio, schedule audio to be played
            if (!audio.isPlaying)
            {
                shouldPlay = true;

                //Debug.Log( "Started receiving at time: " + Time.time );

                // no play delay set, advance play delay to allow more data to arrive (deal with network latency)
                if (playDelay <= 0)
                {
                    playDelay = sampleTime * 5f;
                }
            }

            offset += frame.Length;
        }
    }