Example 1
    // Converts a 32-bit float PCM buffer to PCM16 and pushes it to the engine.
    // Assumes: using System; using System.IO; plus the Agora RTC SDK namespace.
    public void PushAudioFrame(float[] pcm, int channels = 1, int samplingRate = 48000)
    {
        long currentTimeMillis = (long)(DateTime.UtcNow - Jan1st1970).TotalMilliseconds;

        // Convert 32-bit float PCM to 16-bit PCM data bytes.
        using (var dataStream = new MemoryStream(pcm.Length * sizeof(Int16)))
        {
            for (int i = 0; i < pcm.Length; i++)
            {
                // Clamp to [-1, 1]; Convert.ToInt16 throws on out-of-range values.
                var sample = Math.Max(-1f, Math.Min(1f, pcm[i]));
                dataStream.Write(BitConverter.GetBytes(Convert.ToInt16(sample * Int16.MaxValue)), 0, sizeof(Int16));
            }
            var buffer = dataStream.ToArray();

            var audioFrame = new AudioFrame()
            {
                type           = AUDIO_FRAME_TYPE.FRAME_TYPE_PCM16,
                bytesPerSample = 2, // PCM16
                buffer         = buffer,
                channels       = channels,
                samples        = pcm.Length / channels,
                samplesPerSec  = samplingRate,
                renderTimeMs   = currentTimeMillis,
            };

            _RtcEngine.PushAudioFrame(audioFrame);
        }
    }
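
The method above depends on a Jan1st1970 field that the excerpt does not show. A minimal definition, assuming it is meant as the Unix epoch anchor for renderTimeMs:

    // Assumed helper: Unix epoch, used to derive renderTimeMs above.
    private static readonly DateTime Jan1st1970 = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);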
Example 2
    // Pushes one PCM16 frame to the engine every 1000 / PUSH_FREQ_PER_SEC ms,
    // reading raw bytes from a shared ring buffer filled elsewhere.
    void PushAudioFrameThread()
    {
        var bytesPerSample = 2;
        var type           = AUDIO_FRAME_TYPE.FRAME_TYPE_PCM16;
        var channels       = CHANNEL;
        var samples        = SAMPLE_RATE / PUSH_FREQ_PER_SEC;
        var samplesPerSec  = SAMPLE_RATE;
        var buffer         = new byte[samples * bytesPerSample * CHANNEL];
        var freq           = 1000 / PUSH_FREQ_PER_SEC;

        var tic = new TimeSpan(DateTime.Now.Ticks);

        while (_pushAudioFrameThreadSignal)
        {
            // Until the start signal fires, keep resetting the reference time
            // so the first frame is pushed on schedule once capture starts.
            if (!_startSignal)
            {
                tic = new TimeSpan(DateTime.Now.Ticks);
            }

            var toc = new TimeSpan(DateTime.Now.Ticks);

            // TotalMilliseconds, not Milliseconds: the latter is only the 0-999
            // component of the TimeSpan and wraps on longer intervals.
            if (toc.Subtract(tic).Duration().TotalMilliseconds >= freq)
            {
                tic = new TimeSpan(DateTime.Now.Ticks);

                lock (audioBuffer)
                {
                    if (audioBuffer.Size > samples * bytesPerSample * CHANNEL)
                    {
                        for (var j = 0; j < samples * bytesPerSample * CHANNEL; j++)
                        {
                            buffer[j] = audioBuffer.Get();
                        }

                        var audioFrame = new AudioFrame
                        {
                            bytesPerSample = bytesPerSample,
                            type           = type,
                            samples        = samples,
                            samplesPerSec  = samplesPerSec,
                            channels       = channels,
                            buffer         = buffer,
                            // Ticks are 100 ns units; convert to milliseconds.
                            renderTimeMs   = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond
                        };

                        mRtcEngine.PushAudioFrame(audioFrame);
                    }
                    else
                    {
                        // Not enough queued PCM for a full frame yet; top up the
                        // ring buffer (AppendRingBuffer is defined elsewhere).
                        AppendRingBuffer();
                    }
                }
            }
        }
    }
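
Example 2 reads from an audioBuffer object with Size and Get() members that is defined elsewhere in the sample. A minimal byte ring buffer with that shape could look like the sketch below; the class name and the Put method are assumptions, not part of the original:

    // Hypothetical minimal ring buffer matching the audioBuffer usage above.
    class RingBuffer
    {
        private readonly byte[] _data;
        private int _head, _tail, _size;

        public RingBuffer(int capacity) { _data = new byte[capacity]; }

        public int Size { get { return _size; } }

        // Append one byte, overwriting the oldest byte when full.
        public void Put(byte b)
        {
            _data[_tail] = b;
            _tail = (_tail + 1) % _data.Length;
            if (_size == _data.Length)
                _head = (_head + 1) % _data.Length;
            else
                _size++;
        }

        // Remove and return the oldest byte; callers check Size first.
        public byte Get()
        {
            var b = _data[_head];
            _head = (_head + 1) % _data.Length;
            _size--;
            return b;
        }
    }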
Example 3
    // Agora callback: fired after the local user joins the channel.
    private void onJoinChannelSuccess(string channelName, uint uid, int elapsed)
    {
        mRect    = new Rect(0, 0, Screen.width, Screen.height);
        mTexture = new Texture2D((int)mRect.width, (int)mRect.height, TextureFormat.RGBA32, false);
        cutScreen();
        // ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
        // externalVideoFrame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
        // externalVideoFrame.format = ExternalVideoFrame.VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_I420;
        // externalVideoFrame.buffer = new byte[1024];
        // mRtcEngine.PushVideoFrame(externalVideoFrame);
        logCallback("JoinChannelSuccessHandler: uid = " + uid);
        // Placeholder push: a real AudioFrame needs type, buffer, samples,
        // channels, samplesPerSec and bytesPerSample populated (see Example 1)
        // before PushAudioFrame will deliver any audio.
        AudioFrame audioFrame = new AudioFrame();

        mRtcEngine.PushAudioFrame(audioFrame);
    }
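
None of these pushes will produce audio unless the engine has been configured to accept an external audio source before joining the channel. With the Agora Unity SDK this is typically a one-time setup call; a minimal sketch, assuming the SAMPLE_RATE and CHANNEL constants from Example 2 (verify the exact signature against your SDK version):

    // Tell the engine to expect externally pushed PCM instead of the microphone.
    mRtcEngine.SetExternalAudioSource(true, SAMPLE_RATE, CHANNEL);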