Example #1
0
        /// <summary>
        /// Generates a 3-second sine tone at currFrequency and plays it as
        /// 16-bit mono PCM through a static-mode AudioTrack.
        /// </summary>
        public void playSound()
        {
            var duration   = 3;       // seconds
            var sampleRate = 44100;   // Hz
            var numSamples = duration * sampleRate;
            var sample     = new double[numSamples];
            var freqOfTone = currFrequency;

            // 16-bit PCM: 2 bytes per sample.
            byte[] generatedSnd = new byte[2 * numSamples];

            for (int i = 0; i < numSamples; ++i)
            {
                // FIX: compute the phase as i * f / rate instead of dividing by
                // (sampleRate / freqOfTone) -- mathematically identical for a
                // floating-point frequency, but avoids integer truncation of the
                // period if currFrequency is an integral type.
                sample[i] = Math.Sin(2 * Math.PI * i * freqOfTone / sampleRate);
            }

            int idx = 0;

            // Pack as 16-bit signed little-endian PCM.
            foreach (double dVal in sample)
            {
                short val = (short)(dVal * 32767);
                generatedSnd[idx++] = (byte)(val & 0x00ff);
                generatedSnd[idx++] = (byte)((val & 0xff00) >> 8);
            }

            // FIX: the AudioTrack buffer size and the Write length are measured in
            // BYTES, not samples -- passing numSamples truncated the tone to half
            // its data. The generated data is also mono, so use ChannelOut.Mono
            // rather than Stereo (stereo would play it at half duration).
            var track = new AudioTrack(global::Android.Media.Stream.Music, sampleRate, ChannelOut.Mono, Android.Media.Encoding.Pcm16bit, generatedSnd.Length, AudioTrackMode.Static);

            track.Write(generatedSnd, 0, generatedSnd.Length);
            track.Play();
        }
Example #2
0
    /// <summary>
    /// Configures the voice-call audio player: picks a buffer size that is a
    /// whole multiple of the platform minimum, builds the attribute/format
    /// pair, routes volume keys to the call stream, and starts playback.
    /// </summary>
    private void initPlayer()
    {
        Encoding pcmEncoding = Encoding.Pcm16bit;

        bufferSize = AudioTrack.GetMinBufferSize(sampleRate, ChannelOut.Mono, pcmEncoding);
        Logging.info("Min. buffer size " + bufferSize);

        int requiredSize = CodecTools.getPcmFrameByteSize(sampleRate, bitRate, channels) * 100;
        if (bufferSize < requiredSize)
        {
            // Round up to the nearest whole multiple of the minimum buffer size.
            bufferSize = (int)(Math.Ceiling((decimal)requiredSize / bufferSize) * bufferSize);
        }
        Logging.info("Final buffer size " + bufferSize);

        // Low-latency speech attributes for voice communication.
        var attributes = new AudioAttributes.Builder()
                         .SetContentType(AudioContentType.Speech)
                         .SetFlags(AudioFlags.LowLatency)
                         .SetUsage(AudioUsageKind.VoiceCommunication)
                         .Build();

        // Mono 16-bit PCM at the configured sample rate.
        var format = new AudioFormat.Builder()
                     .SetSampleRate(sampleRate)
                     .SetChannelMask(ChannelOut.Mono)
                     .SetEncoding(pcmEncoding)
                     .Build();

        audioPlayer = new AudioTrack(attributes, format, bufferSize, AudioTrackMode.Stream, 0);

        // Hardware volume keys should control the in-call stream while playing.
        MainActivity.Instance.VolumeControlStream = Stream.VoiceCall;

        audioPlayer.Play();
    }
Example #3
0
    /// <summary>
    /// Builds and starts the voice-call playback track (44.1 kHz mono 16-bit
    /// PCM) at 80% volume. Logs a warning and returns if already running.
    /// </summary>
    public void start()
    {
        if (running)
        {
            Logging.warn("Audio player is already running.");
            return;
        }

        running = true;

        // Low-latency voice-communication attributes routed to the call stream.
        var attributes = new AudioAttributes.Builder()
                         .SetContentType(AudioContentType.Speech)
                         .SetLegacyStreamType(Stream.VoiceCall)
                         .SetFlags(AudioFlags.LowLatency)
                         .SetUsage(AudioUsageKind.VoiceCommunication)
                         .Build();

        // Mono 16-bit PCM at 44.1 kHz.
        var format = new AudioFormat.Builder()
                     .SetSampleRate(44100)
                     .SetChannelMask(ChannelOut.Mono)
                     .SetEncoding(Encoding.Pcm16bit)
                     .Build();

        // 100x the platform minimum buffer to guard against underruns.
        int bufferBytes = AudioTrack.GetMinBufferSize(44100, ChannelOut.Mono, Encoding.Pcm16bit) * 100;

        audioPlayer = new AudioTrack(attributes, format, bufferBytes, AudioTrackMode.Stream, 0);
        audioPlayer.SetVolume(0.8f);
        audioPlayer.Play();
    }
Example #4
0
    /// <summary>
    /// Prepares and starts the voice-call playback track (44.1 kHz mono
    /// 16-bit PCM), matching the device's current call-stream volume.
    /// </summary>
    private void initPlayer()
    {
        Encoding encoding = Encoding.Pcm16bit;

        // Prepare player: low-latency speech attributes for voice communication.
        AudioAttributes aa = new AudioAttributes.Builder()
                             .SetContentType(AudioContentType.Speech)
                             .SetLegacyStreamType(Stream.VoiceCall)
                             .SetFlags(AudioFlags.LowLatency)
                             .SetUsage(AudioUsageKind.VoiceCommunication)
                             .Build();

        AudioFormat af = new AudioFormat.Builder()
                         .SetSampleRate(44100)
                         .SetChannelMask(ChannelOut.Mono)
                         .SetEncoding(encoding)
                         .Build();

        bufferSize = AudioTrack.GetMinBufferSize(44100, ChannelOut.Mono, encoding) * 10;

        audioPlayer = new AudioTrack(aa, af, bufferSize, AudioTrackMode.Stream, 0);

        // TODO implement dynamic volume control
        AudioManager am = (AudioManager)MainActivity.Instance.GetSystemService(Context.AudioService);

        // FIX: GetStreamVolume returns an integer volume INDEX (0..max), while
        // SetVolume expects a gain in [0.0, 1.0]. Passing the raw index clamped
        // playback to full volume for any index >= 1. Normalize by the stream's
        // maximum index instead.
        float volumeIndex = am.GetStreamVolume(Stream.VoiceCall);
        float maxIndex    = am.GetStreamMaxVolume(Stream.VoiceCall);
        audioPlayer.SetVolume(maxIndex > 0 ? volumeIndex / maxIndex : 1f);

        audioPlayer.Play();
    }
Example #5
0
        /// <summary>
        /// Connects to the hard-coded audio server and streams everything it
        /// sends into the audio track, reporting byte progress in the UI.
        /// Any exception message is shown and then rethrown.
        /// </summary>
        private async void ConnectButtonOnClick(object sender, EventArgs eventArgs)
        {
            _client = new TcpSocketClient();
            await _client.ConnectAsync("192.168.1.3", 4088);

            _connectionStatus.Text = "Connected";
            _track.Play();

            try
            {
                var chunk     = new byte[BufferSize];
                var totalRead = 0;

                while (true)
                {
                    var read = await _client.Socket.GetStream().ReadAsync(chunk, 0, BufferSize);
                    if (read == 0)
                    {
                        break; // end of stream
                    }

                    totalRead    += read;
                    _message.Text = totalRead.ToString();
                    _track.Write(chunk, 0, read);
                }

                //_track.Stop();
                _message.Text = "Completed";
            }
            catch (Exception exception)
            {
                _message.Text = exception.Message;
                throw;
            }
        }
Example #6
0
        /// <summary>
        /// Resolves a collision with another gem: this gem is marked dead and
        /// removed, the target levels up and fuses, and a pin-drop sound plays
        /// alongside the animations. Non-gem targets are ignored.
        /// </summary>
        public async override Task Collide(IGem targetGem)
        {
            if (targetGem is Gem target)
            {
                var deadGems = _board.CurrentSwipeResult.DeadGems;
                if (!deadGems.Contains(this))
                {
                    deadGems.Add(this);
                }

                NullifyFloatingBehaviour();
                target.NullifyFloatingBehaviour();

                AudioTrack pinDrop = new AudioTrack(AudioTrackConst.PinDropping);

                target.LevelUp();

                await PerformAction(
                    () => Move(target.IndexX, target.IndexY),
                    () => Die(),
                    () => pinDrop.Play(),
                    () => target.Fuse(),
                    () => target.TightenFloatingBehaviour(_radius / 8)
                    );
            }
            else
            {
                await Task.Delay(0);
            }
        }
Example #7
0
        /// <summary>
        /// Creates the Android streaming track (44.1 kHz mono 16-bit PCM) with
        /// room for two buffers, wires up the periodic notification, and primes
        /// it with <paramref name="initialData"/>.
        /// </summary>
        /// <exception cref="InvalidOperationException">Audio is already playing.</exception>
        private void StartStreamingAudio(short[] initialData)
        {
#if __ANDROID__
            if (playingTrack != null && playingTrack.PlayState == PlayState.Playing)
            {
                throw new InvalidOperationException("Audio is already playing.");
            }

            int bufferBytes = (initialData.Length * 2) * 2; // 2 bytes/sample, double buffered

            playingTrack = new AudioTrack(
                Android.Media.Stream.Music,      // stream type
                44100,                           // sample rate (Hz)
                ChannelOut.Mono,                 // channel layout
                Android.Media.Encoding.Pcm16bit, // sample encoding
                bufferBytes,                     // track buffer size in bytes
                AudioTrackMode.Stream);          // streaming (not static) mode

            // Fire a notification each time one buffer's worth of frames plays.
            playingTrack.PeriodicNotification += OnStreamingAudioPeriodicNotification;
            playingTrack.SetPositionNotificationPeriod(initialData.Length);

            playingTrack.Play();
            playingTrack.Write(initialData, 0, initialData.Length);
#endif
        }
Example #8
0
        /// <summary>
        /// Plays the full contents of <paramref name="stream"/> once as 16 kHz
        /// mono 16-bit PCM, reusing a single cached AudioTrack instance.
        /// Failures are logged (best-effort playback), not propagated.
        /// </summary>
        /// <param name="stream">Seekable stream of raw PCM data (Length is read).</param>
        public async Task PlayOnce(System.IO.Stream stream)
        {
            await Task.Run(() =>
            {
                try
                {
                    int sampleRate = 16000;
                    var channel    = ChannelOut.Mono;
                    var encoding   = Android.Media.Encoding.Pcm16bit;
                    var buffSize   = AudioTrack.GetMinBufferSize(sampleRate, channel, encoding);

                    if (_playOnceAudioTrack == null)
                    {
                        _playOnceAudioTrack = new AudioTrack(Stream.Music, sampleRate, channel, encoding, buffSize, AudioTrackMode.Stream);
                    }

                    // Reset any previous playback before queueing new data.
                    _playOnceAudioTrack.Stop();
                    _playOnceAudioTrack.Flush();

                    // FIX: Stream.Read may return fewer bytes than requested, so
                    // loop until the whole stream has been consumed (the original
                    // single Read call could silently drop the tail of the audio).
                    var buffer = new byte[stream.Length];
                    int total  = 0;
                    while (total < buffer.Length)
                    {
                        int read = stream.Read(buffer, total, buffer.Length - total);
                        if (read <= 0)
                        {
                            break;
                        }
                        total += read;
                    }

                    _playOnceAudioTrack.Play();
                    _playOnceAudioTrack.Write(buffer, 0, total);
                }
                catch (Exception ex)
                {
                    // Best-effort playback: still swallow, but surface the failure
                    // in debug output instead of hiding it completely.
                    System.Diagnostics.Debug.WriteLine("PlayOnce failed: " + ex);
                }
            });
        }
        /// <summary>
        /// Plays a single note. Separate from the rest of the song playing code.
        /// Tears down any currently-playing audio object first, then builds a
        /// fresh one (AudioTrack on Android, OutputAudioQueue on iOS) around the
        /// note's PCM data and starts it. Thread-safe via syncObj.
        /// </summary>
        /// <param name="note">Note whose <c>data</c> field holds 16-bit PCM samples.</param>
        public static void PlayNote(Instrument.Note note)
        {
            lock (syncObj)
            {
#if __ANDROID__
                if (playingTrack != null)
                {
                    //We use pause instead of stop because pause stops playing immediately
                    playingTrack.Pause();
                    playingTrack.Release();
                    playingTrack.Dispose();
                }
#endif
#if __IOS__
                if (audioQueue != null)
                {
                    //Pass true to stop immediately
                    audioQueue.Stop(true);
                    audioQueue.Dispose();
                }
#endif

#if __ANDROID__
                playingTrack = new AudioTrack(
                    // Stream type
                    Android.Media.Stream.Music,
                    // Frequency
                    SongPlayer.PLAYBACK_RATE,
                    // Mono or stereo
                    ChannelOut.Mono,
                    // Audio encoding
                    Android.Media.Encoding.Pcm16bit,
                    // Length of the audio clip in bytes (2 bytes per 16-bit sample)
                    (note.data.Length * 2),
                    // Mode. Stream or static. Static: whole clip is written up front.
                    AudioTrackMode.Static);

                playingTrack.Write(note.data, 0, note.data.Length);
                playingTrack.Play();
#endif
#if __IOS__
                // 1 channel, 16 bits per sample, little-endian (false = not big-endian).
                audioQueue = new OutputAudioQueue(AudioStreamBasicDescription.CreateLinearPCM(SongPlayer.PLAYBACK_RATE, 1, 16, false));
                unsafe
                {
                    AudioQueueBuffer *buffer;
                    // Buffer sizes are in bytes: 2 bytes per 16-bit sample.
                    audioQueue.AllocateBuffer(note.data.Length * 2, out buffer);

                    // Pin the managed array so its address is stable while copying.
                    fixed(short *beatData = note.data)
                    {
                        buffer->CopyToAudioData((IntPtr)beatData, note.data.Length * 2);
                    }

                    audioQueue.EnqueueBuffer((IntPtr)buffer, note.data.Length * 2, null);
                }

                audioQueue.Start();
#endif
            }
        }
Example #10
0
 /// <summary>
 /// Starts recording async to avoid blocking the main UI thread; playback is
 /// started on the same background task just before recording begins.
 /// NOTE(review): the Task.Run result is discarded, so any exception thrown by
 /// Play or StartRecording goes unobserved.
 /// </summary>
 /// <param name="callback">Receives recorded audio buffers.</param>
 public void StartRecordingAsync(Action <byte[]> callback)
 {
     Task.Run(() =>
     {
         audioTrack.Play();
         StartRecording(callback);
     }); //.ConfigureAwait(false);
 }
Example #11
0
 /// <summary>
 /// Resumes the audio: only acts when the player is active, currently paused,
 /// and the song is not already playing.
 /// </summary>
 public static void Resume()
 {
     // Guard: nothing to do unless we are in the active-but-paused state.
     if (!active || !paused || song.IsPlaying)
     {
         return;
     }

     song.Play();
     paused = false;
 }
Example #12
0
 /// <summary>
 /// Starts (or resumes) playback; does nothing if already playing. Creates
 /// the play instance lazily; a fresh start (not a resume) begins at full volume.
 /// </summary>
 public void Play()
 {
     // Already playing: nothing to do.
     if (State == SoundStates.Playing)
     {
         return;
     }

     // Lazily create the backing instance on first use.
     if (instance is null)
     {
         createPlayInstance();
     }

     // Only a resume from pause keeps the previous volume.
     if (State != SoundStates.Paused)
     {
         volume = 1;
     }

     instance.Play();
     State = SoundStates.Playing;
 }
Example #13
0
        /// <summary>
        /// Start the platform-specific audio object and give it some initial data (beat0 and beat1).
        /// Android: a streaming AudioTrack sized for two beats with end-of-beat notifications.
        /// iOS: an OutputAudioQueue with two pre-filled buffers and completion callbacks.
        /// </summary>
        /// <param name="beat0">First beat of 16-bit PCM samples.</param>
        /// <param name="beat1">Second beat of 16-bit PCM samples.</param>
        private void StartStreamingAudio(short[] beat0, short[] beat1)
        {
#if __ANDROID__
            playingTrack = new AudioTrack(
                // Stream type
                Android.Media.Stream.Music,
                // Frequency
                PLAYBACK_RATE,
                // Mono or stereo
                ChannelOut.Mono,
                // Audio encoding
                Android.Media.Encoding.Pcm16bit,
                // Length of the audio clip  in bytes (2 bytes per 16-bit sample)
                (samplesPerBeat * 2) * 2, //Multiply by 2 because we want two beats to fit in the playingTrack's memory
                // Mode. Stream or static.
                AudioTrackMode.Stream);

            //Set up notifications at the end of beats (period is in frames)
            playingTrack.PeriodicNotification += OnStreamingAudioPeriodicNotification;
            playingTrack.SetPositionNotificationPeriod(samplesPerBeat);

            //Write the initial data and begin playing
            playingTrack.Write(beat0, 0, beat0.Length);
            playingTrack.Write(beat1, 0, beat1.Length);
            playingTrack.Play();
#endif
#if __IOS__
            audioQueue = new OutputAudioQueue(streamDesc);
            unsafe
            {
                //Allocate two buffers to store audio data (sizes in bytes: 2 bytes per sample)
                AudioQueueBuffer *buffer0;
                AudioQueueBuffer *buffer1;
                audioQueue.AllocateBuffer(beat0.Length * 2, out buffer0);
                audioQueue.AllocateBuffer(beat1.Length * 2, out buffer1);

                //Copy initial audio data to the buffers; fixed pins the managed
                //arrays so their addresses are stable during the copy
                fixed(short *beatData0 = beat0)
                {
                    buffer0->CopyToAudioData((IntPtr)beatData0, beat0.Length * 2);
                }

                fixed(short *beatData1 = beat1)
                {
                    buffer1->CopyToAudioData((IntPtr)beatData1, beat1.Length * 2);
                }

                //Add the buffers to the queue
                audioQueue.EnqueueBuffer((IntPtr)buffer0, beat0.Length * 2, null);
                audioQueue.EnqueueBuffer((IntPtr)buffer1, beat1.Length * 2, null);
            }

            //Set up periodic notifications so buffers can be refilled as they complete
            audioQueue.BufferCompleted += OnStreamingAudioPeriodicNotification;
            audioQueue.Start();
#endif
        }
Example #14
0
        /// <summary>
        /// Loads the demo track and sample from embedded resources, starts the
        /// track, and creates the screen's view.
        /// </summary>
        public TestAudioScreen()
        {
            var resources = GameBase.Game.Resources;

            Song  = new AudioTrack(resources.Get("Wobble.Tests.Resources/Tracks/virt - Send My Love To Mars.mp3"));
            Train = new AudioSample(resources.Get("Wobble.Tests.Resources/SFX/train.wav"));

            Song?.Play();

            View = new TestAudioScreenView(this);
        }
Example #15
0
        /*********************************************************************************
        * Play button handler: creates a streaming AudioTrack, waits for enough
        * frames to be queued in TrackBuffer, then plays the first queued buffers.
        *********************************************************************************/
        public void ButtonPlay_Click(object sender, EventArgs e)
        {
            //String musicFolder = Android.OS.Environment.GetExternalStoragePublicDirectory(Android.OS.Environment.DirectoryMusic).Path;
            String filePath = mRecFolder + "/sample_mono_8k8bit.wav";

            //String filePath = musicFolder + "/sample_stereo_44k16bit.wav";

            System.Diagnostics.Debug.WriteLine(filePath);

            // FIX: this stream was opened but never read and never closed, leaking
            // a file handle on every click. Close it immediately.
            // NOTE(review): the open itself only serves as an existence check here;
            // confirm whether it is needed at all.
            File            file        = new File(filePath);
            FileInputStream inputStream = new FileInputStream(file);
            inputStream.Close();

            // Stream-mode playback: query the platform's minimum ring-buffer size.
            Int32 bufferSize = AudioTrack.GetMinBufferSize(mSamplingRate, ChannelOut.Mono, mFormat);

            System.Diagnostics.Debug.WriteLine("AudioTrack : GetMinBufferSize={0}", bufferSize);

            // Frame size
            TrackBuffer.Instance.Frames = mFrameSize;

            // Create the AudioTrack.
            mAudioTrack = new AudioTrack(
                Stream.Music,
                //Stream.VoiceCall,
                mSamplingRate,
                ChannelOut.Mono,
                mFormat,
                bufferSize,
                AudioTrackMode.Stream);

            // Register the playback-position callback.
            mAudioTrack.SetPlaybackPositionUpdateListener(new OnPlaybackPositionUpdateListener());

            // Number of frames between position notifications.
            mAudioTrack.SetPositionNotificationPeriod(TrackBuffer.Instance.Frames);

            TrackBuffer.Instance.Clear();

            Task.Run(() => {
                // FIX: wait for enough queued audio with a short sleep instead of
                // the previous hot spin loop, which pegged a CPU core.
                while (TrackBuffer.Instance.Count <= 5)
                {
                    System.Threading.Thread.Sleep(10);
                }

                System.Diagnostics.Debug.WriteLine("AudioTrack play streaming data");
                mAudioTrack.Play();

                // Prime playback with the first four queued buffers
                // (replaces four copy-pasted dequeue/write lines).
                for (int i = 0; i < 4; i++)
                {
                    Byte[] wav = TrackBuffer.Instance.Dequeue();
                    mAudioTrack.Write(wav, 0, wav.Length);
                }
            });
        }
        /// <summary>
        /// Mixes all tracks of <paramref name="song"/> buffer-by-buffer and
        /// streams the averaged result to a float-PCM AudioTrack until every
        /// track is exhausted, then stops. Serialized via stateLocker.
        /// </summary>
        public void Play(Song song)
        {
            lock (stateLocker)
            {
                player = new AudioTrack(Stream.Music, SampleRate, ChannelOut.Mono, Encoding.PcmFloat, 1000, AudioTrackMode.Stream);
                player.Play();

                OnMessage?.Invoke(this, "Playing");

                var position = 0;

                while (true)
                {
                    var mixBuffer   = new float[BufferSize];
                    var trackCount  = 0;
                    var sampleCount = 0;

                    // Sum the contribution of every track that still has data.
                    foreach (var track in song.Tracks)
                    {
                        var samples = track.Read(position, BufferSize);
                        if (samples == null)
                        {
                            continue;
                        }

                        sampleCount = Math.Min(samples.Length, mixBuffer.Length);
                        for (int i = 0; i < sampleCount; i++)
                        {
                            mixBuffer[i] += samples[i];
                        }
                        trackCount++;
                    }

                    // All tracks exhausted: playback is complete.
                    if (trackCount == 0)
                    {
                        break;
                    }

                    // Average the summed samples so the mix stays in range.
                    var mixed = mixBuffer
                                .Take(sampleCount)
                                .Select(x => x / (float)trackCount)
                                .ToArray();

                    var written = player.Write(mixed, 0, mixed.Length, WriteMode.Blocking);
                    if (written < 0)
                    {
                        throw new Exception($"Failed read: {written}");
                    }

                    position += written;
                }

                Stop();
            }
        }
Example #17
0
 /// <summary>
 /// Creates a 44.1 kHz stereo 16-bit streaming AudioTrack, waits for it to
 /// finish initializing, then starts playback.
 /// </summary>
 public SoundPlayer()
 {
     _minBufferSize = AudioTrack.GetMinBufferSize(44100, ChannelOut.Stereo, Encoding.Pcm16bit);
     _audioTrack    = new AudioTrack(Stream.Music, 44100, ChannelOut.Stereo, Android.Media.Encoding.Pcm16bit, _minBufferSize, AudioTrackMode.Stream);

     // FIX: sleep while waiting instead of the previous empty spin loop, which
     // pegged a CPU core. NOTE(review): like the original, this loops forever
     // if the track never initializes -- consider a timeout.
     while (_audioTrack.State != AudioTrackState.Initialized)
     {
         System.Threading.Thread.Sleep(1);
     }

     _audioTrack.Play();
 }
        /// <inheritdoc />
        /// <summary>
        /// Creates the screen view: a song-time label plus a demo track that
        /// starts playing immediately.
        /// </summary>
        /// <param name="screen"></param>
        public TestBitmapFontScreenView(Screen screen) : base(screen)
        {
            SongTimeText = new SpriteText("exo2-bold", "0", 16)
            {
                Parent = Container,
            };

            var trackData = GameBase.Game.Resources.Get("Wobble.Tests.Resources/Tracks/virt - Send My Love To Mars.mp3");

            Track = new AudioTrack(trackData);
            Track.Play();
        }
Example #19
0
        /// <summary>
        /// Plays the matching ambience cue when the world switches between day
        /// and night.
        /// </summary>
        public void OnWorldStateChanged(WorldState state)
        {
            var cue = state == WorldState.Day ? _dayStart : _nightStart;
            cue.Play(_audioSource);
        }
        //public void Write(byte[] bytes)
        //{
        //    /*
        //    foreach(System.IO.Stream temp in streamList)
        //    {
        //        try
        //        {
        //            temp.Write(bytes);
        //        }
        //        catch (System.IO.IOException ex)
        //        {
        //            System.Diagnostics.Debug.WriteLine("Error occurred when sending data", ex);
        //        }
        //    }
        //    *
        //    */

        //    try
        //    {
        //        mmOutStream.Write(bytes);
        //    }
        //    catch (System.IO.IOException ex)
        //    {
        //        System.Diagnostics.Debug.WriteLine("Error occurred when sending data", ex);
        //    }

        //}

        /// <summary>
        /// Streams <paramref name="stream"/> simultaneously to the Bluetooth
        /// output stream and to a local 44.1 kHz stereo 16-bit PCM AudioTrack,
        /// then closes the input stream. I/O errors are logged, not propagated.
        /// </summary>
        public void Write(System.IO.Stream stream)
        {
            int buffsize = AudioTrack.GetMinBufferSize(44100, ChannelOut.Stereo, Android.Media.Encoding.Pcm16bit);

            var AABuilder = new AudioAttributes.Builder();
            AABuilder.SetContentType(AudioContentType.Music);
            AABuilder.SetUsage(AudioUsageKind.Media);

            var AfBuilder = new AudioFormat.Builder();
            AfBuilder.SetSampleRate(44100);
            AfBuilder.SetEncoding(Android.Media.Encoding.Pcm16bit);
            AfBuilder.SetChannelMask(ChannelOut.Stereo);

            AudioTrack _output = new AudioTrack(AABuilder.Build(), AfBuilder.Build(), buffsize, AudioTrackMode.Stream, AudioManager.AudioSessionIdGenerate);
            _output.Play();
            try
            {
                byte[] buffer = new byte[1000];
                int    bytesReturned;

                // FIX: only forward the bytes actually read. The original wrote
                // the entire 1000-byte buffer on every pass, sending stale bytes
                // on short reads and one extra full buffer after end-of-stream.
                while ((bytesReturned = stream.Read(buffer, 0, buffer.Length)) > 0)
                {
                    mmOutStream.Write(buffer, 0, bytesReturned);
                    _output.Write(buffer, 0, bytesReturned);
                    //DependencyService.Get<BluetoothManager>().Write(buffer);
                }
                stream.Close();
            }
            catch (System.IO.IOException ex)
            {
                System.Diagnostics.Debug.WriteLine("Error occurred when sending data", ex);
            }
            finally
            {
                // FIX: the track was never stopped or released (native resource leak).
                _output.Stop();
                _output.Release();
            }
        }
Example #21
0
        /// <summary>
        /// Stops the current music and plays the track matching the new world
        /// state (day or night).
        /// </summary>
        public void OnWorldStateChanged(WorldState state)
        {
            _source.Stop();

            var music = state == WorldState.Day ? _dayMusic : _nightMusic;
            music.Play(_source);
        }
Example #22
0
 /// <summary>
 /// Plays the wave file on a background task. The lock on _audioTrack ensures
 /// only one playback runs at a time; the sound buffer is written, followed by
 /// the _silence buffer (presumably to push the real audio fully through the
 /// track's internal buffer -- TODO confirm).
 /// </summary>
 public void Play()
 {
     // Now play the audio track via a background task, but make sure only one can play at a time
     Task.Run(() => {
         lock (_audioTrack) {
             _audioTrack.Play();
             _audioTrack.Write(_soundBuffer, 0, _soundBuffer.Length);
             _audioTrack.Write(_silence, 0, _silence.Length);
         }
     });
 }
Example #23
0
 /// <summary>
 /// Stops and releases any previous tone, then plays <paramref name="pcm_data"/>
 /// (16-bit mono PCM bytes) at the given sampling rate via a static-mode track.
 /// </summary>
 public void PlaySound(int sampling_rate, byte[] pcm_data)
 {
     if (audio != null)
     {
         audio.Stop();
         audio.Release();
     }

     // FIX: pcm_data is a byte array, so its Length already IS the byte count;
     // multiplying by sizeof(short) allocated a static buffer twice as large
     // as the data being written.
     audio = new AudioTrack(Stream.Music, sampling_rate, ChannelOut.Mono, Encoding.Pcm16bit
                            , pcm_data.Length, AudioTrackMode.Static);
     audio.Write(pcm_data, 0, pcm_data.Length);
     audio.Play();
 }
 /// <summary>
 /// Starts playback on the prepared track.
 /// </summary>
 /// <returns>true when playback was started.</returns>
 /// <exception cref="InvalidOperationException">The track has not been opened.</exception>
 public bool Play()
 {
     if (audioTrack == null)
     {
         // FIX: throw the specific InvalidOperationException instead of a bare
         // Exception (existing catch(Exception) handlers still catch it), and
         // fix the "opend" typo in the message.
         throw new InvalidOperationException("audioTrack is not opened");
     }

     audioTrack.Play();
     return true;
 }
Example #25
0
 /// <summary>
 /// Restarts the call-progress tone: any current tone is stopped first, then a
 /// fresh one is created and played. Failures are logged, not propagated.
 /// </summary>
 public void PlayProgressTone()
 {
     StopProgressTone();
     try
     {
         mProgressTone = CreateProgressTone(mContext);
         mProgressTone.Play();
     }
     catch (Exception e)
     {
         // Best-effort: a failed tone should not break the call flow.
         Log.Error(LOG_TAG, "Could not play progress tone", e);
     }
 }
Example #26
0
 /// <summary>
 /// Builds an 11.025 kHz mono 16-bit streaming track sized to the shared
 /// buffer, starts it, and writes the buffer asynchronously.
 /// </summary>
 protected async Task PlayAudioTrackAsync()
 {
     audioTrack = new AudioTrack(
         Android.Media.Stream.Music,      // stream type
         11025,                           // sample rate (Hz)
         ChannelOut.Mono,                 // channel layout
         Android.Media.Encoding.Pcm16bit, // sample encoding
         buffer.Length,                   // track buffer sized to the payload
         AudioTrackMode.Stream);          // streaming mode

     audioTrack.Play();
     await audioTrack.WriteAsync(buffer, 0, buffer.Length);
 }
Example #27
0
        /* //play short tone on cennection
         * private void websocketClient_Opened(object sender, EventArgs e)
         * {
         *
         *  websocketClient.Send("2000");
         *  Thread.Sleep(333);
         *  websocketClient.Send("2000");
         *
         * }
         */

        /// <summary>
        /// Handles a tone request from the websocket: starts a tone when idle,
        /// replaces the tone when a different one is requested, or stops the
        /// current tone when the same one is requested again.
        /// </summary>
        private void websocketClient_MessageReceived(object sender, MessageReceivedEventArgs e)
        {
            // Parse once instead of the original's three separate int.Parse calls.
            int requestedTone = int.Parse(e.Message);

            if (!isPlaying)
            {
                StartTone(requestedTone);
            }
            else if (prevTone != requestedTone)
            {
                // Different tone requested: tear down the old track first.
                ReleaseTrack();
                StartTone(requestedTone);
            }
            else
            {
                // Same tone requested while playing: toggle it off.
                isPlaying = false;
                ReleaseTrack();
            }
        }

        /// <summary>Creates a static 8 kHz mono track for <paramref name="tone"/>, writes the generated PCM and plays it.</summary>
        private void StartTone(int tone)
        {
            byte[] generatedSnd = CreateSound(tone);
            track     = new AudioTrack(Stream.Music, 8000, ChannelOut.Mono, Encoding.Pcm16bit, 80000, AudioTrackMode.Static);
            prevTone  = tone;
            isPlaying = true;
            track.Write(generatedSnd, 0, 80000);

            try
            {
                track.Play();
            }
            catch (Java.Lang.IllegalStateException)
            {
                // Track was in a bad state: discard it rather than crash.
                track.Flush();
                track.Release();
            }
        }

        /// <summary>Stops, flushes and releases the currently playing track.</summary>
        private void ReleaseTrack()
        {
            track.Stop();
            track.Flush();
            track.Release();
        }
        /// <summary>
        /// Creates a streaming track for the configured format, starts it, and
        /// writes the supplied PCM buffer.
        /// </summary>
        void PlayAudioTrack(byte[] audBuffer)
        {
            audioTrack = new AudioTrack(
                Android.Media.Stream.Music, // stream type
                sampleRate,                 // sample rate (Hz)
                channelOut,                 // mono or stereo
                encoding,                   // sample encoding
                audBuffer.Length,           // buffer size in bytes
                AudioTrackMode.Stream);     // streaming mode

            audioTrack.Play();
            audioTrack.Write(audBuffer, 0, audBuffer.Length);
        }
Example #29
0
        /// <summary>
        /// Builds the PCM output track and an AAC-LC decoder for the incoming
        /// audio stream, then starts both.
        /// NOTE(review): the output AudioTrack is hard-coded to 44.1 kHz stereo
        /// while the decoder is configured from the caller's sampleRate/channels
        /// -- confirm the decoded output is always 44.1 kHz stereo.
        /// </summary>
        /// <param name="sampleRate">Sample rate of the encoded AAC stream.</param>
        /// <param name="channels">Channel count of the encoded AAC stream.</param>
        /// <param name="esdsData">Codec-specific data; overwritten below by a
        /// locally built csd-0 (see TODO), so the passed value is unused.</param>
        public void SetupAudio(int sampleRate, int channels, byte[] esdsData)
        {
            // Low-latency 44.1 kHz stereo 16-bit PCM output, streaming mode.
            _audioTrack = new AudioTrack(
                new AudioAttributes.Builder()
                .SetUsage(AudioUsageKind.Media)
                .SetContentType(AudioContentType.Music)
                .SetFlags(AudioFlags.LowLatency)
                .Build(),
                new Android.Media.AudioFormat.Builder()
                .SetEncoding(Encoding.Pcm16bit)
                .SetSampleRate(44100)
                .SetChannelMask(ChannelOut.Stereo)
                .Build(),
                4096,
                AudioTrackMode.Stream,
                AudioManager.AudioSessionIdGenerate);

            MediaFormat audioFormat = MediaFormat.CreateAudioFormat(
                mime: MediaFormat.MimetypeAudioAac,
                sampleRate: sampleRate,
                channelCount: channels);

            // Raw AAC (no ADTS headers), AAC-LC profile.
            audioFormat.SetInteger(MediaFormat.KeyIsAdts, 0);
            audioFormat.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecProfileType.Aacobjectlc);

            _audioCodec = MediaCodec.CreateDecoderByType(
                MediaFormat.MimetypeAudioAac);

            // TODO: Remove hardcoding
            byte profile     = (byte)MediaCodecProfileType.Aacobjectlc;
            byte sampleIndex = AacAdtsAssembler.GetSamplingFrequencyIndex(sampleRate);

            // Build the 2-byte AudioSpecificConfig (csd-0):
            // 5 bits profile | 4 bits sampling-frequency index | 4 bits channel config.
            byte[] csd0 = new byte[2];
            csd0[0]  = (byte)(((byte)profile << 3) | (sampleIndex >> 1));
            csd0[1]  = (byte)((byte)((sampleIndex << 7) & 0x80) | (channels << 3));
            esdsData = csd0;

            audioFormat.SetByteBuffer("csd-0", Java.Nio.ByteBuffer.Wrap(esdsData));


            // Asynchronous decode: this object receives the codec callbacks.
            _audioCodec.SetCallback(this);
            _audioCodec.Configure(
                format: audioFormat,
                surface: null,
                crypto: null,
                flags: MediaCodecConfigFlags.None);

            _audioCodec.Start();
            _audioTrack.Play();
        }
        /// <summary>
        /// Stops and releases any previously playing clip, then plays
        /// <paramref name="pcmData"/> (16-bit mono PCM bytes) at the given
        /// sampling rate using a static-mode track.
        /// </summary>
        public void PlaySound(int samplingRate, byte[] pcmData)
        {
            if (previousAudioTrack != null)
            {
                previousAudioTrack.Stop();
                previousAudioTrack.Release();
            }

            // FIX: pcmData.Length is already a byte count (byte[] element = 1 byte),
            // so multiplying by sizeof(short) allocated double the required buffer.
            AudioTrack audioTrack = new AudioTrack(Stream.Music, samplingRate, ChannelOut.Mono, Android.Media.Encoding.Pcm16bit, pcmData.Length, AudioTrackMode.Static);

            audioTrack.Write(pcmData, 0, pcmData.Length);
            audioTrack.Play();
            previousAudioTrack = audioTrack;
        }