Example #1
    public void start()
    {
        if (running)
        {
            Logging.warn("Audio player is already running.");
            return;
        }

        running = true;

        AudioAttributes aa = new AudioAttributes.Builder()
                             .SetContentType(AudioContentType.Speech)
                             .SetLegacyStreamType(Stream.VoiceCall)
                             .SetFlags(AudioFlags.LowLatency)
                             .SetUsage(AudioUsageKind.VoiceCommunication)
                             .Build();
        AudioFormat af = new AudioFormat.Builder()
                         .SetSampleRate(44100)
                         .SetChannelMask(ChannelOut.Mono)
                         .SetEncoding(Encoding.Pcm16bit)
                         .Build();

        audioPlayer = new AudioTrack(aa, af, AudioTrack.GetMinBufferSize(44100, ChannelOut.Mono, Encoding.Pcm16bit) * 100, AudioTrackMode.Stream, 0);

        audioPlayer.SetVolume(0.8f);

        audioPlayer.Play();
    }
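
Note: start() only creates and starts the track; the write side is not shown. A minimal companion loop, assuming decoded 16-bit mono PCM arrives in byte[] chunks (getNextPcmChunk() is a hypothetical source, not part of the original):

    private void playLoop()
    {
        while (running)
        {
            // Hypothetical source of decoded PCM; replace with the real decoder output.
            byte[] chunk = getNextPcmChunk();
            if (chunk == null)
            {
                Thread.Sleep(10); // nothing buffered yet
                continue;
            }
            // In Stream mode, Write blocks until the chunk is queued to the track.
            audioPlayer.Write(chunk, 0, chunk.Length);
        }
    }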
Example #2
        public async Task PlayOnce(System.IO.Stream stream)
        {
            await Task.Run(() =>
            {
                try
                {
                    int sampleRate = 16000;
                    var channel    = ChannelOut.Mono;
                    var encoding   = Android.Media.Encoding.Pcm16bit;
                    var buffSize   = AudioTrack.GetMinBufferSize(sampleRate, channel, encoding);

                    if (_playOnceAudioTrack == null)
                    {
                        _playOnceAudioTrack = new AudioTrack(Stream.Music, sampleRate, channel, encoding, buffSize, AudioTrackMode.Stream);
                    }

                    _playOnceAudioTrack.Stop();
                    _playOnceAudioTrack.Flush();

                    var buffer = new byte[stream.Length];
                    int total  = 0;

                    // Stream.Read may return fewer bytes than requested, so loop until done.
                    while (total < buffer.Length)
                    {
                        int read = stream.Read(buffer, total, buffer.Length - total);
                        if (read <= 0)
                        {
                            break;
                        }
                        total += read;
                    }

                    _playOnceAudioTrack.Play();
                    _playOnceAudioTrack.Write(buffer, 0, total);
                }
                catch (Exception ex)
                {
                    // Don't swallow playback failures silently.
                    System.Diagnostics.Debug.WriteLine("PlayOnce failed: " + ex);
                }
            });
        }
Example #3
    private void initPlayer()
    {
        Encoding encoding = Encoding.Pcm16bit;

        // Prepare player
        AudioAttributes aa = new AudioAttributes.Builder()
                             .SetContentType(AudioContentType.Speech)
                             .SetLegacyStreamType(Stream.VoiceCall)
                             .SetFlags(AudioFlags.LowLatency)
                             .SetUsage(AudioUsageKind.VoiceCommunication)
                             .Build();

        AudioFormat af = new AudioFormat.Builder()
                         .SetSampleRate(44100)
                         .SetChannelMask(ChannelOut.Mono)
                         .SetEncoding(encoding)
                         .Build();

        bufferSize = AudioTrack.GetMinBufferSize(44100, ChannelOut.Mono, encoding) * 10;

        audioPlayer = new AudioTrack(aa, af, bufferSize, AudioTrackMode.Stream, 0);

        // TODO implement dynamic volume control
        AudioManager am = (AudioManager)MainActivity.Instance.GetSystemService(Context.AudioService);

        // GetStreamVolume returns an integer volume index, while SetVolume expects a gain in [0.0, 1.0];
        // scale by the stream's maximum index.
        audioPlayer.SetVolume((float)am.GetStreamVolume(Stream.VoiceCall) / am.GetStreamMaxVolume(Stream.VoiceCall));

        audioPlayer.Play();
    }
Example #4
        // Nothing is awaited here, so async adds nothing but unobserved-exception risk.
        public void play(byte[] data)
        {
            if (audioTrack == null)
            {
                InitAudioTrack();
            }

            if (audioTrack.State != AudioTrackState.Initialized)
            {
                return;
            }

            int minBufSize = AudioTrack.GetMinBufferSize(sampleRate, channelOut, encoding);

            int bytes = audioTrack.Write(data, 0, data.Length);

            overallBytes += bytes;

            // Start playback only once ~4x the minimum buffer has been queued, to avoid an immediate underrun.
            if (overallBytes >= minBufSize * 4)
            {
                overallBytes = 0;
                audioTrack.Play();
            }
        }
Example #5
    private void initPlayer()
    {
        Encoding encoding = Encoding.Pcm16bit;

        bufferSize = AudioTrack.GetMinBufferSize(sampleRate, ChannelOut.Mono, encoding);
        Logging.info("Min. buffer size " + bufferSize);
        int new_buffer_size = CodecTools.getPcmFrameByteSize(sampleRate, bitRate, channels) * 100;

        if (bufferSize < new_buffer_size)
        {
            bufferSize = (int)(Math.Ceiling((decimal)new_buffer_size / bufferSize) * bufferSize);
        }
        Logging.info("Final buffer size " + bufferSize);

        // Prepare player
        AudioAttributes aa = new AudioAttributes.Builder()
                             .SetContentType(AudioContentType.Speech)
                             .SetFlags(AudioFlags.LowLatency)
                             .SetUsage(AudioUsageKind.VoiceCommunication)
                             .Build();

        AudioFormat af = new AudioFormat.Builder()
                         .SetSampleRate(sampleRate)
                         .SetChannelMask(ChannelOut.Mono)
                         .SetEncoding(encoding)
                         .Build();

        audioPlayer = new AudioTrack(aa, af, bufferSize, AudioTrackMode.Stream, 0);

        MainActivity.Instance.VolumeControlStream = Stream.VoiceCall;

        audioPlayer.Play();
    }
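
Note: the rounding above grows the buffer to at least the desired size while keeping it a whole multiple of the platform minimum. The same computation as a standalone helper (names are illustrative, not from the original):

    // Round desiredSize up to the next whole multiple of minSize.
    private static int RoundUpToMultiple(int desiredSize, int minSize)
    {
        return (int)(Math.Ceiling((decimal)desiredSize / minSize) * minSize);
    }
    // e.g. with minSize = 3528 and desiredSize = 32000: ceil(32000 / 3528) = 10, giving 35280.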
Example #6
    public void start()
    {
        if (running)
        {
            Logging.warn("Audio recorder is already running.");
            return;
        }
        stopRecording = false;
        running       = true;

        // Note: this sizes a recorder buffer with AudioTrack.GetMinBufferSize;
        // AudioRecord.GetMinBufferSize(44100, ChannelIn.Mono, Encoding.Pcm16bit) is the matching call for capture.
        bufferSize = AudioTrack.GetMinBufferSize(44100, ChannelOut.Mono, Encoding.Pcm16bit) * 10;

        audioRecorder = new AudioRecord(
            // Hardware source of recording.
            AudioSource.Mic,
            // Frequency
            44100,
            // Mono or stereo
            ChannelIn.Mono,
            // Audio encoding
            Encoding.Pcm16bit,
            // Recording buffer size in bytes.
            bufferSize
            );


        audioRecorder.StartRecording();

        Thread recordingThread = new Thread(readLoop);

        recordingThread.Start();
    }
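
Note: start() hands capture off to readLoop, which is not part of this example. A rough sketch of such a loop (onAudioCaptured is a placeholder consumer, not part of the original):

    private void readLoop()
    {
        byte[] buffer = new byte[bufferSize];
        while (!stopRecording)
        {
            int read = audioRecorder.Read(buffer, 0, buffer.Length);
            if (read > 0)
            {
                // Hand the captured PCM to whatever consumes it.
                onAudioCaptured(buffer, read);
            }
        }
        audioRecorder.Stop();
        audioRecorder.Release();
        running = false;
    }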
Example #7
        /*********************************************************************************
        *
        *
        *********************************************************************************/
        public void ButtonPlay_Click(object sender, EventArgs e)
        {
            //String musicFolder = Android.OS.Environment.GetExternalStoragePublicDirectory(Android.OS.Environment.DirectoryMusic).Path;
            String filePath = mRecFolder + "/sample_mono_8k8bit.wav";

            //String filePath = musicFolder + "/sample_stereo_44k16bit.wav";

            System.Diagnostics.Debug.WriteLine(filePath);

            File            file        = new File(filePath);
            FileInputStream inputStream = new FileInputStream(file);

            // Playing back in Stream mode, so get the ring buffer size
            Int32 bufferSize = AudioTrack.GetMinBufferSize(mSamplingRate, ChannelOut.Mono, mFormat);

            System.Diagnostics.Debug.WriteLine("AudioTrack : GetMinBufferSize={0}", bufferSize);

            // Frame size
            TrackBuffer.Instance.Frames = mFrameSize;

            // Create the AudioTrack
            mAudioTrack = new AudioTrack(
                Stream.Music,
                //Stream.VoiceCall,
                mSamplingRate,
                ChannelOut.Mono,
                mFormat,
                bufferSize,
                AudioTrackMode.Stream);

            // Register the playback-position callback
            mAudioTrack.SetPlaybackPositionUpdateListener(new OnPlaybackPositionUpdateListener());

            // Set the number of frames between notifications
            mAudioTrack.SetPositionNotificationPeriod(TrackBuffer.Instance.Frames);

            TrackBuffer.Instance.Clear();

            Task.Run(() => {
                // Busy-wait until enough frames have been queued (a wait handle would be kinder to the CPU).
                while (true)
                {
                    if (TrackBuffer.Instance.Count > 5)
                    {
                        break;
                    }
                }

                System.Diagnostics.Debug.WriteLine("AudioTrack play streaming data");
                mAudioTrack.Play();

                // Drain the first buffered frames into the track.
                for (int i = 0; i < 4; i++)
                {
                    Byte[] wav = TrackBuffer.Instance.Dequeue();
                    mAudioTrack.Write(wav, 0, wav.Length);
                }
            });
        }
Example #8
 protected void Init(MusicStream Wave)
 {
     _mode = (AudioTrackMode)Wave.Mode;
     if (_mode == AudioTrackMode.Static)
     {
         _player = new AudioTrack(
             // Stream type
             (Android.Media.Stream)Wave.Type,
             // Frequency
             Wave.SampleRate,
             // Mono or stereo
             (ChannelOut)Wave.Config,
             // Audio encoding
             (Encoding)Wave.Format,
             // Length of the audio clip.
             (int)Wave.SizeInBytes,
             // Mode. Stream or static.
             AudioTrackMode.Static);
     }
     else
     {
         _player = new AudioTrack(
             // Stream type
             (Android.Media.Stream)Wave.Type,
             // Frequency
             Wave.SampleRate,
             // Mono or stereo
             (ChannelOut)Wave.Config,
             // Audio encoding
             (Encoding)Wave.Format,
             // Buffer size for streaming.
             _buffersize = AudioTrack.GetMinBufferSize(Wave.SampleRate,
                                                       (ChannelOut)Wave.Config, (Encoding)Wave.Format),
             // Mode. Stream or static.
             AudioTrackMode.Stream);
     }
     _duration = Wave.Duration;
     _loop     = Wave.Loop;
     _frames   = Wave.Samples;
     _player.SetVolume(_volume = Wave.Volume);
     _player.SetNotificationMarkerPosition(_frames * 31 / 32);
     if (_mode == AudioTrackMode.Static)
     {
         _player.Write(Wave.Content.ReadFully(true), 0, (int)Wave.Content.Length);
     }
      else
      {
          // When looping, rebuild and restart the track once playback completes.
          Set((sender, e) =>
          {
              if (_loop)
              {
                  _player.Release();
                  Init(_Wave);
                  Write();
              }
          });
          _content = Wave.Content.ReadFully(true);
      }
     _prepared = true;
 }
Example #9
 public SoundPlayer()
 {
     _minBufferSize = AudioTrack.GetMinBufferSize(44100, ChannelOut.Stereo, Encoding.Pcm16bit);
     _audioTrack    = new AudioTrack(Stream.Music, 44100, ChannelOut.Stereo, Android.Media.Encoding.Pcm16bit, _minBufferSize, AudioTrackMode.Stream);
      // Busy-wait until the track reports Initialized (a timeout would be safer).
      while (_audioTrack.State != AudioTrackState.Initialized)
     {
         ;
     }
     _audioTrack.Play();
 }

Example #10
        public void Write(System.IO.Stream stream)
        {
            AudioTrack _output;

            int buffsize = AudioTrack.GetMinBufferSize(44100, ChannelOut.Stereo, Android.Media.Encoding.Pcm16bit);
            //_output = new AudioTrack(Android.Media.Stream.Music, 44100, ChannelOut.Stereo, Android.Media.Encoding.Pcm16bit,
            //buffsize, AudioTrackMode.Stream);
            var AABuilder = new AudioAttributes.Builder();

            AABuilder.SetContentType(AudioContentType.Music);
            AABuilder.SetUsage(AudioUsageKind.Media);

            var AfBuilder = new AudioFormat.Builder();

            AfBuilder.SetSampleRate(44100);
            AfBuilder.SetEncoding(Android.Media.Encoding.Pcm16bit);
            AfBuilder.SetChannelMask(ChannelOut.Stereo);


            _output = new AudioTrack(AABuilder.Build(), AfBuilder.Build(), buffsize, AudioTrackMode.Stream, AudioManager.AudioSessionIdGenerate);
            _output.Play();
            try
            {
                byte[] buffer        = new byte[1000];
                int    bytesReturned = 1;

                while (bytesReturned > 0)
                {
                    bytesReturned = stream.Read(buffer, 0, buffer.Length);
                    if (bytesReturned <= 0)
                    {
                        break;
                    }
                    // Only forward the bytes actually read; the final chunk is usually short.
                    mmOutStream.Write(buffer, 0, bytesReturned);
                    _output.Write(buffer, 0, bytesReturned);
                    //DependencyService.Get<BluetoothManager>().Write(buffer);
                }
                stream.Close();
            }
            catch (System.IO.IOException ex)
            {
                System.Diagnostics.Debug.WriteLine("Error occurred when sending data", ex);
            }
        }
Example #11
        internal static void CreateAudioTracks()
        {
            const int audioMemoryOS          = 1024 * 1024; // the audio client have only this amount of memory available for streaming (see: AudioFlinger::Client constructor -> https://android.googlesource.com/platform/frameworks/av/+/126a630/services/audioflinger/AudioFlinger.cpp : line 1153)
            const int memoryDealerHeaderSize = 64;          // size taken by the header of each memory section of the MemoryDealer.

            GetSetArrayRegionFunctionPointer();

            // the minimum size that can have an audio track in streaming mode (with that audio format)
            var minimumBufferSize = AudioTrack.GetMinBufferSize(SoundEffectInstanceFrameRate, ChannelOut.Stereo, Encoding.Pcm16bit);

            // the size that should be kept in order to be able to play sound music correctly (note: we need to be able to play 2 music simultaneously because destruction is asynchronous)
            var memoryNeededForSoundMusic = 2 * (GetUpperPowerOfTwo(minimumBufferSize) + memoryDealerHeaderSize);

            // the size taken by one of our sub-buffers => 2 bytes (16-bit samples) * 2 channels * 8000 frames (~180 ms at 44100Hz)
            var subBufferSize = Math.Max((int)Math.Ceiling(minimumBufferSize / (float)NumberOfSubBuffersInAudioTrack), 2 * 2 * 8000);

            // the memory taken by one audio track creation for sound effects
            var memoryNeededAudioTrack = GetUpperPowerOfTwo(subBufferSize * NumberOfSubBuffersInAudioTrack);

            // the java buffer used to copy blank sound data
            blankJavaDataBuffer = JNIEnv.NewGlobalRef(JNIEnv.NewArray(new byte[subBufferSize]));

            // create the pool of audio tracks
            var trackNumber = 0;

            while (trackNumber < MaximumNumberOfTracks && audioMemoryOS - (trackNumber + 1) * memoryNeededAudioTrack >= memoryNeededForSoundMusic)
            {
                // create the audio track
                var audioTrack = new AudioTrack(Stream.Music, SoundEffectInstanceFrameRate, ChannelOut.Stereo, Encoding.Pcm16bit,
                                                NumberOfSubBuffersInAudioTrack * subBufferSize, AudioTrackMode.Stream);

                if (audioTrack.State == AudioTrackState.Uninitialized) // the maximum number of tracks is reached
                {
                    break;
                }

                // create the c# buffer for internal copy
                var dataBuffer = new byte[subBufferSize];

                // create the java buffer
                var javaDataBuffer = JNIEnv.NewGlobalRef(JNIEnv.NewArray(dataBuffer));

                // add the new track to the audio track pool
                var newTrackInfo = new TrackInfo(audioTrack, javaDataBuffer, dataBuffer, subBufferSize)
                {
                    BuffersToWrite = NumberOfSubBuffersInAudioTrack
                };
                audioTrackPool.Enqueue(newTrackInfo);

                ++trackNumber;
            }
        }
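
Note: CreateAudioTracks depends on a GetUpperPowerOfTwo helper that is not shown. A plausible implementation (an assumption, not the engine's actual code):

        // Smallest power of two that is >= value; assumes value > 0.
        private static int GetUpperPowerOfTwo(int value)
        {
            int result = 1;
            while (result < value)
            {
                result <<= 1;
            }
            return result;
        }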
Example #12
    public void start(string codec)
    {
        if (running)
        {
            Logging.warn("Audio recorder is already running.");
            return;
        }
        running = true;

        AudioManager am = (AudioManager)MainActivity.Instance.GetSystemService(Context.AudioService);

        if (Build.VERSION.SdkInt < BuildVersionCodes.O)
        {
            focusListener = new AudioFocusListener();
#pragma warning disable CS0618 // Type or member is obsolete
            am.RequestAudioFocus(focusListener, Stream.VoiceCall, AudioFocus.GainTransient);
#pragma warning restore CS0618 // Type or member is obsolete
        }
        else
        {
            AudioAttributes aa = new AudioAttributes.Builder()
                                 .SetContentType(AudioContentType.Speech)
                                 .SetFlags(AudioFlags.LowLatency)
                                 .SetUsage(AudioUsageKind.VoiceCommunication)
                                 .Build();

            focusListener = new AudioFocusListener();

            focusRequest = new AudioFocusRequestClass.Builder(AudioFocus.GainTransient)
                           .SetAudioAttributes(aa)
                           .SetFocusGain(AudioFocus.GainTransient)
                           .SetOnAudioFocusChangeListener(focusListener)
                           .Build();
            am.RequestAudioFocus(focusRequest);
        }

        lock (outputBuffers)
        {
            outputBuffers.Clear();
        }

        bufferSize = AudioTrack.GetMinBufferSize(sampleRate, ChannelOut.Mono, Encoding.Pcm16bit);

        initEncoder(codec);
        initRecorder();

        recordThread = new Thread(recordLoop);
        recordThread.Start();

        senderThread = new Thread(senderLoop);
        senderThread.Start();
    }
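
Note: start(codec) references an AudioFocusListener that is not shown. A minimal sketch of such a listener (the real one presumably pauses or ducks on focus loss; this body is an assumption):

    // Minimal focus listener sketch.
    class AudioFocusListener : Java.Lang.Object, AudioManager.IOnAudioFocusChangeListener
    {
        public void OnAudioFocusChange(AudioFocus focusChange)
        {
            Logging.warn("Audio focus changed: " + focusChange);
        }
    }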
Example #13
        void IDualityBackend.Init()
        {
            activeInstance = this;

            int bufferSize = AudioTrack.GetMinBufferSize(DefaultSampleRate, DefaultChannels, DefaultEncoding);

            masterTrack = new AudioTrack(Stream.Music, DefaultSampleRate, DefaultChannels, DefaultEncoding, bufferSize, AudioTrackMode.Stream);
            masterTrack.Play();

            // GetMinBufferSize returns a size in bytes; with 16-bit samples, divide to get the sample count.
            bufferSizeSamples = bufferSize / sizeof(ushort);

            // Set up the streaming thread
            streamWorkerEnd           = false;
            streamWorkerQueue         = new RawList <NativeAudioSource>();
            streamWorkerQueueEvent    = new AutoResetEvent(false);
            streamWorker              = new Thread(ThreadStreamFunc);
            streamWorker.IsBackground = true;
            streamWorker.Start();
        }
Example #14
        private AudioTrack GetAudioTrack()
        {
            ChannelOut channelOut = _channels == 2 ? ChannelOut.Stereo : ChannelOut.Mono;
            Encoding   encoding   = Encoding.Pcm16bit;
            int        bufferSize = AudioTrack.GetMinBufferSize(_sampleRate, channelOut, encoding) * 2;

            AudioTrack audioTrack;

            AudioAttributes.Builder attributesBuilder = new AudioAttributes.Builder()
                                                        .SetUsage(AudioUsageKind.Game);
            AudioFormat format = new AudioFormat.Builder()
                                 .SetEncoding(encoding)
                                 .SetSampleRate(_sampleRate)
                                 .SetChannelMask(channelOut)
                                 .Build();

            if (Build.VERSION.SdkInt < BuildVersionCodes.O)
            {
                attributesBuilder.SetFlags(AudioFlags.LowLatency);
            }

            if (Build.VERSION.SdkInt >= BuildVersionCodes.O)
            {
                AudioTrack.Builder trackBuilder = new AudioTrack.Builder()
                                                  .SetAudioFormat(format)
                                                  .SetAudioAttributes(attributesBuilder.Build())
                                                  .SetTransferMode(AudioTrackMode.Stream)
                                                  .SetBufferSizeInBytes(bufferSize);

                trackBuilder.SetPerformanceMode(AudioTrackPerformanceMode.LowLatency);
                audioTrack = trackBuilder.Build();
            }
            else
            {
                audioTrack = new AudioTrack(attributesBuilder.Build(),
                                            format,
                                            bufferSize,
                                            AudioTrackMode.Stream,
                                            AudioManager.AudioSessionIdGenerate);
            }

            return(audioTrack);
        }
Example #15
        public StreamingSound(StreamingSource streamingSource, float volume = 1f, float pitch = 1f, float pan = 0f, bool isLooped = false, bool disposeOnStop = false, float bufferDuration = 0.3f)
        {
            VerifyStreamingSource(streamingSource);
            m_bufferDuration = MathUtils.Clamp(bufferDuration, 0f, 10f);
            ChannelOut channelConfig     = (streamingSource.ChannelsCount == 1) ? ChannelOut.FrontLeft : ChannelOut.Stereo;
            int        minBufferSize     = AudioTrack.GetMinBufferSize(streamingSource.SamplingFrequency, channelConfig, Encoding.Pcm16bit);
            int        bufferSizeInBytes = MathUtils.Max(CalculateBufferSize(m_bufferDuration), minBufferSize);

            m_audioTrack = new AudioTrack(Stream.Music, streamingSource.SamplingFrequency, channelConfig, Encoding.Pcm16bit, bufferSizeInBytes, AudioTrackMode.Stream);
            //m_audioTrack = new AudioTrack(new AudioAttributes.Builder().SetUsage(AudioUsageKind.Media).SetContentType(AudioContentType.Music).Build(), new AudioFormat(), bufferSizeInBytes, AudioTrackMode.Static, 0);

            Mixer.m_audioTracksCreated++;
            if (m_audioTrack.State == AudioTrackState.Uninitialized)
            {
                m_audioTrack.Release();
                m_audioTrack = null;
                Mixer.m_audioTracksDestroyed++;
                Log.Warning("Failed to create StreamingSound AudioTrack. Created={0}, Destroyed={1}", Mixer.m_audioTracksCreated, Mixer.m_audioTracksDestroyed);
            }
            StreamingSource        = streamingSource;
            base.ChannelsCount     = streamingSource.ChannelsCount;
            base.SamplingFrequency = streamingSource.SamplingFrequency;
            base.Volume            = volume;
            base.Pitch             = pitch;
            base.Pan           = pan;
            base.IsLooped      = isLooped;
            base.DisposeOnStop = disposeOnStop;
            if (m_audioTrack != null)
            {
                m_task = Task.Run(delegate
                {
                    try
                    {
                        StreamingThreadFunction();
                    }
                    catch (Exception ex)
                    {
                        Log.Error(ex);
                    }
                });
            }
        }
Example #16
        public AsyncPlayer(SineWaveGenerator generator)
        {
            this.generator = generator;

            var streamType = Stream.Music;
            var sampleRate = AudioTrack.GetNativeOutputSampleRate(streamType);
            var encoding   = Android.Media.Encoding.Pcm16bit;
            var bufferSize = AudioTrack.GetMinBufferSize(sampleRate, ChannelOut.Mono, encoding) + 2;

            Log.Info("AudioInit", "{0} / {1}", sampleRate, bufferSize);

            soundTrack = new AudioTrack(streamType,
                                        sampleRate,
                                        ChannelOut.Mono,
                                        encoding,
                                        bufferSize,
                                        AudioTrackMode.Stream);
            soundBuffer = new short [bufferSize];

            processThread = new Thread(ThreadStart);
        }
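
Note: the constructor prepares the track, buffer, and thread, but the ThreadStart body is not shown. A minimal pump, assuming the generator exposes a fill method (generator.Fill and the playing flag are assumptions):

        void ThreadStart()
        {
            soundTrack.Play();
            while (playing)
            {
                // Assumed generator API: fills soundBuffer with the next 16-bit samples.
                generator.Fill(soundBuffer);
                // AudioTrack has a short[] overload of Write for Pcm16bit data.
                soundTrack.Write(soundBuffer, 0, soundBuffer.Length);
            }
        }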
Example #17
        /*********************************************************************************
        *
        *
        *********************************************************************************/
        public void TestPlay()
        {
            String recPath = mRecFolder + "/sample_rec.wav";

            System.Diagnostics.Debug.WriteLine(recPath);

            File            file        = new File(recPath);
            FileInputStream inputStream = new FileInputStream(file);

            // Playing back in Stream mode, so get the ring buffer size
            //TrackBuffer.Instance.Frames = AudioTrack.GetMinBufferSize(8000, ChannelOut.Mono, mFormat);
            // Xperia Z4 = 820
            //TrackBuffer.Instance.Frames = 1024;

            // Create the AudioTrack
            mAudioTrack = new AudioTrack(
                Stream.Music,
                //Stream.VoiceCall,
                mSamplingRate,
                ChannelOut.Mono,
                mFormat,
                AudioTrack.GetMinBufferSize(mSamplingRate, ChannelOut.Mono, mFormat),
                AudioTrackMode.Stream);

            // Register the playback-position callback
            //mAudioTrack.SetPlaybackPositionUpdateListener(new OnPlaybackPositionUpdateListener());

            // Set the number of frames between notifications
            //mAudioTrack.SetPositionNotificationPeriod(TrackBuffer.Instance.Frames);

            Task.Run(() =>
            {
                System.Diagnostics.Debug.WriteLine("AudioTrack play a recording file");
                mAudioTrack.Play();

                Byte[] dat = new Byte[file.Length()];
                inputStream.Read(dat);
                mAudioTrack.Write(dat, 0, dat.Length);
            });
        }
Example #18
        public void InitAudioTrack()
        {
            audioTrack?.Stop();
            audioTrack?.Release();
            audioTrack?.Dispose();
            audioTrack = null;

            int minBufSize = AudioTrack.GetMinBufferSize(sampleRate, channelOut, encoding);

            audioTrack = new AudioTrack(
                Android.Media.Stream.Music,
                // Frequency
                sampleRate,
                // Mono or stereo
                channelOut,
                // Audio encoding
                encoding,
                // Buffer size in bytes: four times the minimum to give the stream some slack.
                minBufSize * 4,
                // Mode. Stream or static.
                AudioTrackMode.Stream);
        }
Example #19
    private void initRecorder()
    {
        Encoding encoding = Encoding.Pcm16bit;

        bufferSize = AudioTrack.GetMinBufferSize(44100, ChannelOut.Mono, encoding);
        buffer     = new byte[bufferSize];

        audioRecorder = new AudioRecord(
            // Hardware source of recording.
            AudioSource.Mic,
            // Frequency
            44100,
            // Mono or stereo
            ChannelIn.Mono,
            // Audio encoding
            encoding,
            // Recording buffer size in bytes (five times the minimum).
            bufferSize * 5
            );

        audioRecorder.StartRecording();
    }
Example #20
        void StartRecording()
        {
            _cts = new CancellationTokenSource();

            _task = Task.Run(async() =>
            {
                var minBufferSize = AudioTrack.GetMinBufferSize(16000, ChannelOut.Mono, Encoding.Pcm16bit);

                var recorder = new AudioRecord(AudioSource.Mic, 16000, ChannelIn.Mono, Encoding.Pcm16bit, minBufferSize);

                recorder.StartRecording();

                var audioBuffer = new byte[minBufferSize];

                await using var fileStream =
                                new FileStream(GetFileNameForRecording(this, Ext),
                                               FileMode.Create,
                                               FileAccess.Write);

                // Reserve room for the 44-byte WAV header so it doesn't overwrite audio data later.
                await fileStream.WriteAsync(new byte[44], 0, 44);

                while (!_cts.IsCancellationRequested)
                {
                    var bytesRead = await recorder.ReadAsync(audioBuffer, 0, minBufferSize);

                    await fileStream.WriteAsync(audioBuffer, 0, bytesRead, CancellationToken.None);
                }

                await using var writer = new BinaryWriter(fileStream, System.Text.Encoding.UTF8);

                writer.Seek(0, SeekOrigin.Begin);

                // ChunkID
                writer.Write('R');
                writer.Write('I');
                writer.Write('F');
                writer.Write('F');

                // ChunkSize
                writer.Write(BitConverter.GetBytes((int)(fileStream.Length - 8)), 0, 4);

                // Format
                writer.Write('W');
                writer.Write('A');
                writer.Write('V');
                writer.Write('E');

                //SubChunk
                writer.Write('f');
                writer.Write('m');
                writer.Write('t');
                writer.Write(' ');

                // SubChunk1Size - 16 for PCM
                writer.Write(BitConverter.GetBytes(16), 0, 4);

                // AudioFormat - PCM=1
                writer.Write(BitConverter.GetBytes((short)1), 0, 2);

                // Channels: Mono=1, Stereo=2
                writer.Write(BitConverter.GetBytes((short)1), 0, 2);

                // SampleRate
                writer.Write(16000);

                // ByteRate
                var byteRate = 16000 * 1 * 16 / 8;
                writer.Write(BitConverter.GetBytes(byteRate), 0, 4);

                // BlockAlign
                var blockAlign = 1 * 16 / 8;
                writer.Write(BitConverter.GetBytes((short)blockAlign), 0, 2);

                // BitsPerSample
                writer.Write(BitConverter.GetBytes(16), 0, 2);

                // SubChunk2ID
                writer.Write('d');
                writer.Write('a');
                writer.Write('t');
                writer.Write('a');

                // Subchunk2Size
                writer.Write(BitConverter.GetBytes((int)(fileStream.Length - 44)), 0, 4);

                fileStream.Close();

                recorder.Stop();
                recorder.Release();
            }, CancellationToken.None);
        }
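
Note: the fields written above follow the standard 44-byte PCM WAV header. The same layout as a self-contained helper (a sketch; parameter names are illustrative):

        static void WriteWavHeader(BinaryWriter w, int sampleRate, short channels, short bitsPerSample, int dataBytes)
        {
            w.Write(System.Text.Encoding.ASCII.GetBytes("RIFF"));
            w.Write(36 + dataBytes);                             // ChunkSize
            w.Write(System.Text.Encoding.ASCII.GetBytes("WAVE"));
            w.Write(System.Text.Encoding.ASCII.GetBytes("fmt "));
            w.Write(16);                                         // SubChunk1Size (PCM)
            w.Write((short)1);                                   // AudioFormat: PCM = 1
            w.Write(channels);
            w.Write(sampleRate);
            w.Write(sampleRate * channels * bitsPerSample / 8);  // ByteRate
            w.Write((short)(channels * bitsPerSample / 8));      // BlockAlign
            w.Write(bitsPerSample);
            w.Write(System.Text.Encoding.ASCII.GetBytes("data"));
            w.Write(dataBytes);                                  // Subchunk2Size
        }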
Example #21
        public void Read()
        {
            System.Threading.Tasks.Task.Run(() =>
            {
                AudioTrack _output;

                int buffsize = AudioTrack.GetMinBufferSize(44100, ChannelOut.Stereo, Android.Media.Encoding.Pcm16bit);
                //_output = new AudioTrack(Android.Media.Stream.Music, 44100, ChannelOut.Stereo, Android.Media.Encoding.Pcm16bit,
                //buffsize, AudioTrackMode.Stream);
                var AABuilder = new AudioAttributes.Builder();

                AABuilder.SetContentType(AudioContentType.Music);
                AABuilder.SetUsage(AudioUsageKind.Media);

                var AfBuilder = new AudioFormat.Builder();
                AfBuilder.SetSampleRate(44100);
                AfBuilder.SetEncoding(Android.Media.Encoding.Pcm16bit);
                AfBuilder.SetChannelMask(ChannelOut.Stereo);


                _output = new AudioTrack(AABuilder.Build(), AfBuilder.Build(), buffsize, AudioTrackMode.Stream, AudioManager.AudioSessionIdGenerate);
                _output.Play();

                byte[] myReadBuffer = new byte[1000];
                int count           = 4;
                System.Threading.Tasks.Task.Run(() =>
                {
                    while (true)
                    {
                        try
                        {
                            int a = mmInStream.Read(myReadBuffer, 0, myReadBuffer.Length);
                            if (a > 0)
                            {
                                if (count == 0)
                                {
                                    // Only write the bytes actually read.
                                    _output.Write(myReadBuffer, 0, a);
                                }
                                else
                                {
                                    count--;
                                }
                            }
                            else
                            {
                                // A non-positive read signals end of stream; stop reading.
                                break;
                            }
                        }
                        catch (System.IO.IOException ex)
                        {
                            System.Diagnostics.Debug.WriteLine("Input stream was disconnected", ex);
                            // The stream is gone; exit the read loop instead of spinning on the error.
                            break;
                        }
                    }
                }).ContinueWith((t) =>
                {
                    var a = t.Exception;
                });
            }).ContinueWith((t) =>
            {
                var a = t.Exception;
            });
        }
Example #22
        public void record()
        {
            try
            {
                recorder?.Stop();
                recorder?.Release();
                recorder?.Dispose();
                recorder = null;


                int minBufSize = AudioTrack.GetMinBufferSize(sampleRate, channelOut, encoding);
                byte[] buffer = new byte[minBufSize];

                recorder = new AudioRecord(AudioSource.VoiceCommunication, sampleRate, channelIn, encoding, minBufSize * 4);

                if (recorder.State != Android.Media.State.Initialized)
                {
                    return;
                }
                recorder.StartRecording();

                ConcurrentQueue <System.IO.MemoryStream> Frames = new ConcurrentQueue <System.IO.MemoryStream>();

                while (recorder?.RecordingState == RecordState.Recording)
                {
                    if (recorder == null)
                    {
                        return;
                    }

                    int bytesRead = recorder.Read(buffer, 0, buffer.Length);

                    // Copy the chunk: the recorder reuses this buffer on the next Read.
                    Frames.Enqueue(new MemoryStream((byte[])buffer.Clone(), 0, bytesRead));

                    if (Frames.Count > 0)
                    {
                        if (NetProcess.TargetPlayerId.Count > 0)
                        {
                            NetProcess.SendAudioMessage(Frames);
                        }

                        Frames.Clear();
                    }
                }
            }
            catch (Exception e)
            {
                System.Diagnostics.Debug.WriteLine("record() failed: " + e);
            }
            finally
            {
                recorder?.Stop();
                recorder?.Release();
                recorder?.Dispose();
                recorder = null;
            }
        }