/// <summary>
/// Creates a streaming player that pulls decoded media buffers from
/// <paramref name="generator"/> and feeds them to an Android <see cref="AudioTrack"/>.
/// </summary>
/// <param name="generator">Source of decoded media buffers; must not be null.</param>
/// <param name="parameters">Audio format of the source (sample rate, channel count).</param>
/// <param name="totalTime">Total duration of the stream, stored for later use.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="generator"/> is null.</exception>
public AndroidAudioStreamingPlayer(IMediaBufferGenerator generator, AudioParameters parameters, TimeSpan totalTime)
{
    if (generator == null)
        throw new ArgumentNullException(nameof(generator)); // nameof keeps the message refactor-safe
    gen = generator;

    // NOTE(review): the minimum size is queried assuming stereo output, but the track
    // itself is created with ToChannelConfiguration(parameters.Channels) — confirm these
    // agree when the source is mono.
    min_buf_size = AudioTrack.GetMinBufferSize(
        parameters.SamplesPerSecond / CompressionRate * 2,
        ChannelOut.Stereo,
        Encoding.Pcm16bit);
    buf_size = min_buf_size * 8; // "* n" part is adjusted for device.

    audio = new AudioTrack(
        Android.Media.Stream.Music,
        parameters.SamplesPerSecond / CompressionRate * 2,
        ToChannelConfiguration(parameters.Channels),
        Android.Media.Encoding.Pcm16bit,
        buf_size * 4,
        AudioTrackMode.Stream);
    player_thread = new Thread(() => DoRun());
    this.total_time = totalTime;
}
/// <summary>
/// Probes combinations of sample rate, encoding, channel layout and track mode until an
/// <see cref="AudioTrack"/> initializes successfully. The working configuration is
/// reported back through the ref parameters.
/// </summary>
/// <param name="sampleRate">On success, the sample rate that worked.</param>
/// <param name="audioFormat">On success, the PCM encoding that worked.</param>
/// <param name="channelConfig">On success, the channel layout that worked.</param>
/// <param name="bufferSize">On success, the minimum buffer size for that configuration.</param>
/// <returns>An initialized <see cref="AudioTrack"/>, or null when no combination works.</returns>
public static AudioTrack FindAudioTrack(ref int sampleRate, ref Android.Media.Encoding audioFormat, ref ChannelOut channelConfig, ref int bufferSize)
{
    foreach (var sr in _sampleRates)
    {
        foreach (var af in new Android.Media.Encoding[] { Android.Media.Encoding.Pcm16bit, Android.Media.Encoding.Pcm8bit })
        {
            foreach (var cc in new ChannelOut[] { ChannelOut.Stereo, ChannelOut.Mono })
            {
                foreach (var atm in new AudioTrackMode[] { AudioTrackMode.Static, AudioTrackMode.Stream })
                {
                    int bs = AudioTrack.GetMinBufferSize(sr, cc, af);
                    if (bs <= 0)
                        continue; // combination not supported by this device

                    var audioTrack = new AudioTrack(Stream.Music, sr, cc, af, bs, atm);
                    if (audioTrack.State == AudioTrackState.Initialized)
                    {
                        sampleRate = sr;
                        audioFormat = af;
                        channelConfig = cc;
                        bufferSize = bs;
                        return audioTrack;
                    }

                    // Fix: tracks that failed to initialize were previously leaked.
                    // Release their native resources before trying the next combination.
                    audioTrack.Release();
                    audioTrack.Dispose();
                }
            }
        }
    }
    return null;
}
// Requests synthesized speech for `text` from the Nuance HTTP endpoint and plays the
// returned PCM stream.
// NOTE(review): `async void` means callers cannot await or observe exceptions — consider
// `async Task` if no event wiring depends on this signature.
// NOTE(review): bufferSize is computed for 16000 Hz but the AudioTrack below is created
// at 8000 Hz — confirm which rate the service actually returns.
public async void PlayStream(string text)
{
    var bufferSize = AudioTrack.GetMinBufferSize(16000, ChannelOut.Stereo, Encoding.Pcm16bit);
    ////new System.Threading.Thread(delegate (object o)
    ////    {
    HttpWebRequest req = (HttpWebRequest)WebRequest.Create(Helpers.URL.NuanceURL);
    req.Method = "POST";
    req.KeepAlive = false;
    // Ask for raw 16-bit PCM in a WAV container at 16 kHz.
    req.Accept = "audio/x-wav;codec=pcm;bit=16;rate=16000"; // req.Accept = "audio/x-wav";
    req.ContentType = "text/plain";
    // NOTE(review): the writer and the underlying request stream are closed but not
    // wrapped in using — on an exception they leak.
    var streamOut = new StreamWriter(req.GetRequestStream(), System.Text.Encoding.ASCII);
    streamOut.Write(text);
    streamOut.Close();
    audioTrack = new AudioTrack(
        Android.Media.Stream.Music,
        8000,
        ChannelOut.Stereo,
        Android.Media.Encoding.Pcm16bit,
        bufferSize,
        AudioTrackMode.Stream);
    using (var stream = req.GetResponse().GetResponseStream())
    {
        byte[] buffer = new byte[65536];
        int read;
        // NOTE(review): Play() is called here but nothing is written to the track inside
        // this loop — playback actually starts in PlayAudioTrack below; verify intent.
        audioTrack.Play();
        while ((read = stream.Read(buffer, 0, buffer.Length)) > 0)
        {
            // Append the downloaded chunk to the end of `ms` while preserving the
            // current read position (another consumer may be reading from it).
            var pos = ms.Position;
            ms.Position = ms.Length;
            ms.Write(buffer, 0, read);
            ms.Position = pos;
        }
    }
    //    }).Start();
    //while (ms.Length < bufferSize * 5)
    //{
    //    System.Threading.Thread.Sleep(1000);
    //}
    // Hand the fully-buffered audio to the playback helper.
    var br = new BinaryReader(ms);
    await PlayAudioTrack(br.ReadBytes((int)ms.Length));
    ////ms.Position = 0;
    ////var br = new BinaryReader(ms);
    ////PlayAudioTrack(br.ReadBytes((int)ms.Length));
    audioTrack.Release();
}
/// <summary>Stops playback and releases the current audio track, if any.</summary>
public void Stop()
{
    var track = audioTrack;
    if (track == null)
        return;

    // Drop the reference first so no one re-enters a half-released track.
    audioTrack = null;
    track.Stop();
    track.Release();
}
// Emits one framed byte: a high start bit, the eight data bits most-significant-first,
// then a low stop bit.
void PutByte(AudioTrack at, byte b)
{
    PutBit(at, true); // start bit

    for (int bitIndex = 7; bitIndex >= 0; bitIndex--)
    {
        bool bitSet = (b & (1 << bitIndex)) != 0;
        PutBit(at, bitSet);
    }

    PutBit(at, false); // stop bit
}
// Encodes a single bit as one square-wave cycle: a 1-bit is a full-amplitude wave
// lasting Bitlen per half-cycle, a 0-bit is a half-amplitude wave at half the length.
void PutBit(AudioTrack at, bool bit)
{
    int amplitude = bit ? 0x7fff : 0x3fff;
    int halfPeriod = bit ? Bitlen : Bitlen / 2;

    WriteValue(at, BitConverter.GetBytes(-amplitude), halfPeriod);
    WriteValue(at, BitConverter.GetBytes(amplitude), halfPeriod);
}
/// <summary>
/// Bundles an <see cref="AudioTrack"/> with the managed and Java-side buffers used to
/// feed it, and wires up the periodic notification callback.
/// </summary>
/// <param name="track">The underlying audio track.</param>
/// <param name="javaDataBuffer">Global JNI reference to the Java byte array fed to the track.</param>
/// <param name="dataBuffer">Managed copy buffer mirroring <paramref name="javaDataBuffer"/>.</param>
/// <param name="bufferSize">Size of one sub-buffer in bytes.</param>
/// <exception cref="AudioSystemInternalException">Thrown when the notification period cannot be set.</exception>
public TrackInfo(AudioTrack track, IntPtr javaDataBuffer, byte[] dataBuffer, int bufferSize)
{
    Track = track;
    this.javaDataBuffer = javaDataBuffer;
    this.dataBuffer = dataBuffer;
    this.bufferSize = bufferSize;
    javaWriteCallValues = new JValue[3];

    // Add the callback feeding the audio track and updating play status.
    Track.PeriodicNotification += (sender, args) => OnPeriodFinished();

    // The period is expressed in frames: 2 channels * 2 bytes per sample = 4 bytes per frame.
    var status = Track.SetPositionNotificationPeriod(bufferSize / 4);
    if (status != TrackStatus.Success)
        // Fix: the message previously named the wrong API (SetNotificationMarkerPosition).
        throw new AudioSystemInternalException("AudioTrack.SetPositionNotificationPeriod failed and failure was not handled. [error=" + status + "].");
}
/// <summary>
/// Streams the current transfer's file as audio-encoded data: a polarity calibration
/// header, a lead-in of 0xFF bytes, a 0x08..0x04 sync countdown, then the file bytes.
/// Raises the transfer-ended event when done or aborted.
/// </summary>
private void TransferSend()
{
    CurrentTransferDataLeft = CurrentTransfer.FileLength;
    // ChannelOut.Mono replaces the obsolete ChannelConfiguration.Mono overload.
    var at = new AudioTrack(Android.Media.Stream.Music, 44100, ChannelOut.Mono, Android.Media.Encoding.Pcm16bit, 0x1000, AudioTrackMode.Stream);
    try
    {
        at.Play();

        // Polarity calibration header: asymmetric square wave the receiver uses
        // to detect signal polarity.
        for (int i = 0; i < 200; i++)
        {
            WriteValue(at, BitConverter.GetBytes(-0x7fff), Bitlen);
            WriteValue(at, BitConverter.GetBytes(0x7fff), Bitlen * 3);
        }

        // Lead-in
        for (int i = 0; i < 20; i++)
            PutByte(at, 0xff);

        // Sync sequence: 0x08 down to 0x04.
        for (byte i = 0x08; i >= 0x04; i--)
            PutByte(at, i);

        // Data: one byte at a time until the file is exhausted or the user aborts.
        byte[] buf = new byte[1];
        while (CurrentTransferDataLeft > 0 && !AbortCurrentTransfer)
        {
            CurrentTransfer.GetData(buf);
            PutByte(at, buf[0]);
            CurrentTransferDataLeft--;
        }

        at.Stop();
    }
    finally
    {
        // Fix: release the native track even if writing throws mid-transfer.
        at.Release();
        at.Dispose();
    }
    CurrentTransfer.Close();
    ParentConnection.RaiseFileTransferEnded(CurrentTransfer, true, AbortCurrentTransfer);
}
/// <summary>
/// Plays a PCM clip in static mode, replacing (and tearing down) any previously
/// playing clip.
/// </summary>
/// <param name="samplingRate">Sample rate of <paramref name="pcmData"/> in Hz.</param>
/// <param name="pcmData">Raw 16-bit mono PCM data, as bytes.</param>
public void PlaySound(int samplingRate, byte[] pcmData)
{
    // Tear down the previous clip completely. Fix: Dispose was missing, leaking the
    // managed wrapper of each replaced track.
    if (previousAudioTrack != null)
    {
        previousAudioTrack.Stop();
        previousAudioTrack.Release();
        previousAudioTrack.Dispose();
    }

    // NOTE(review): pcmData.Length is already a byte count, so multiplying by
    // sizeof(short) allocates twice the needed static buffer — harmless but wasteful;
    // confirm before shrinking.
    AudioTrack audioTrack = new AudioTrack(Stream.Music, samplingRate, ChannelOut.Mono, Android.Media.Encoding.Pcm16bit, pcmData.Length * sizeof(short), AudioTrackMode.Static);
    audioTrack.Write(pcmData, 0, pcmData.Length);
    audioTrack.Play();
    previousAudioTrack = audioTrack;
}
/// <summary>
/// Streams the recorded buffer to a mono 11025 Hz, 16-bit PCM audio track,
/// completing once the whole buffer has been written.
/// </summary>
protected async Task PlayAudioTrackAsync()
{
    const int sampleRateHz = 11025;
    var clipLength = buffer.Length;

    audioTrack = new AudioTrack(
        Android.Media.Stream.Music, // stream type
        sampleRateHz,               // frequency
        ChannelOut.Mono,            // mono
        Android.Media.Encoding.Pcm16bit,
        clipLength,                 // length of the audio clip
        AudioTrackMode.Stream);     // streaming mode

    audioTrack.Play();
    await audioTrack.WriteAsync(buffer, 0, clipLength);
}
/// <summary>
/// Synchronously streams the recorded buffer to a mono 11025 Hz, 16-bit PCM audio
/// track. Blocks until the whole buffer is written.
/// </summary>
protected void PlayAudioTrack()
{
    audioTrack = new AudioTrack(
        Android.Media.Stream.Music,      // stream type
        11025,                           // frequency
        ChannelOut.Mono,                 // fix: ChannelOut replaces obsolete ChannelConfiguration
        Android.Media.Encoding.Pcm16bit, // audio encoding
        buffer.Length,                   // length of the audio clip
        AudioTrackMode.Stream);          // streaming mode

    audioTrack.Play();
    audioTrack.Write(buffer, 0, buffer.Length);
}
/// <summary>
/// Plays the full recorded audio buffer on a background thread.
/// </summary>
/// <returns>A running task that completes when the whole buffer has been written.</returns>
protected Task PlayAudioTrackAsync()
{
    // Fix: the original returned `new Task(...)` — a cold task that was never
    // Start()ed, so awaiting it would hang forever. Task.Run returns a hot task.
    return Task.Run(() =>
    {
        audioTrack = new AudioTrack(
            Android.Media.Stream.Music,        // stream type
            44100,                             // frequency
            ChannelConfiguration.Mono,         // mono
            Android.Media.Encoding.Pcm16bit,   // audio encoding
            RecordAudio.fullAudioBuffer.Count, // length of the audio clip
            AudioTrackMode.Stream);            // streaming mode

        audioTrack.Play();
        audioTrack.Write(RecordAudio.fullAudioBuffer.ToArray(), 0, RecordAudio.fullAudioBuffer.Count);
    });
}
/// <summary>
/// Toggles playback of the recorded audio. Stops the current track when already
/// playing; otherwise finds a working track configuration and plays the recording
/// on a background thread, restoring the play icon when done.
/// </summary>
private void Click_Play(object sender, EventArgs e)
{
    if (_isPlaying)
    {
        _audioTrack.Stop();
        _isPlaying = false;
        _play.SetImageResource(Resource.Drawable.play);
        return;
    }

    _play.SetImageResource(Resource.Drawable.stop);
    var byteArr = _audioData.ToArray();

    int sampleRate = 0;
    Android.Media.Encoding audioFormat = Android.Media.Encoding.Pcm16bit;
    ChannelOut channelConfig = ChannelOut.Stereo;
    int bufferLength = 0;
    _audioTrack = AudioHelper.FindAudioTrack(ref sampleRate, ref audioFormat, ref channelConfig, ref bufferLength);

    // Fix: FindAudioTrack returns null when no configuration works; previously this
    // crashed with a NullReferenceException below. Restore the UI state and bail out.
    if (_audioTrack == null)
    {
        _play.SetImageResource(Resource.Drawable.play);
        return;
    }

    _isPlaying = true;
    // Task.Run replaces `new TaskFactory().StartNew` — same default scheduling, clearer intent.
    Task.Run(() =>
    {
        _audioTrack.Play();
        _audioTrack.Write(byteArr, 0, byteArr.Length);
        _activity.RunOnUiThread(() =>
        {
            _isPlaying = false;
            _play.SetImageResource(Resource.Drawable.play);
        });
    });
}
// Pre-allocates a pool of streaming AudioTracks for sound effects, sized so that enough
// of the OS audio client's memory budget remains to create two music tracks later.
internal static void CreateAudioTracks()
{
    // The audio client has only this amount of memory available for streaming
    // (see: AudioFlinger::Client constructor ->
    // https://android.googlesource.com/platform/frameworks/av/+/126a630/services/audioflinger/AudioFlinger.cpp : line 1153)
    const int audioMemoryOS = 1024 * 1024;
    // Size taken by the header of each memory section of the MemoryDealer.
    const int memoryDealerHeaderSize = 64;

    GetSetArrayRegionFunctionPointer();

    // The minimum size that an audio track can have in streaming mode (with that audio format).
    var minimumBufferSize = AudioTrack.GetMinBufferSize(SoundEffectInstanceFrameRate, ChannelOut.Stereo, Encoding.Pcm16bit);

    // The size that should be kept in order to be able to play sound music correctly
    // (note: we need to be able to play 2 musics simultaneously because destruction is asynchronous).
    var memoryNeededForSoundMusic = 2 * (GetUpperPowerOfTwo(minimumBufferSize) + memoryDealerHeaderSize);

    // The size taken by one of our sub-buffers => 2 bytes (16-bit sample) * 2 channels * 30 ms at 44100Hz.
    var subBufferSize = Math.Max((int)Math.Ceiling(minimumBufferSize / (float)NumberOfSubBuffersInAudioTrack), 2 * 2 * 8000);

    // The memory taken by one audio track creation for sound effects.
    var memoryNeededAudioTrack = GetUpperPowerOfTwo(subBufferSize*NumberOfSubBuffersInAudioTrack);

    // The java buffer used to copy blank sound data.
    blankJavaDataBuffer = JNIEnv.NewGlobalRef(JNIEnv.NewArray(new byte[subBufferSize]));

    // Create the pool of audio tracks, stopping while enough memory remains for music playback.
    var trackNumber = 0;
    while (trackNumber < MaximumNumberOfTracks && audioMemoryOS - (trackNumber+1) * memoryNeededAudioTrack >= memoryNeededForSoundMusic)
    {
        // Create the audio track.
        var audioTrack = new AudioTrack(Stream.Music, SoundEffectInstanceFrameRate, ChannelOut.Stereo, Encoding.Pcm16bit,
            NumberOfSubBuffersInAudioTrack * subBufferSize, AudioTrackMode.Stream);

        if (audioTrack.State == AudioTrackState.Uninitialized) // the maximum number of tracks is reached
            break;

        // Create the C# buffer for internal copy.
        var dataBuffer = new byte[subBufferSize];

        // Create the java buffer.
        var javaDataBuffer = JNIEnv.NewGlobalRef(JNIEnv.NewArray(dataBuffer));

        // Add the new track to the audio track pool.
        var newTrackInfo = new TrackInfo(audioTrack, javaDataBuffer, dataBuffer, subBufferSize) { BuffersToWrite = NumberOfSubBuffersInAudioTrack };
        audioTrackPool.Enqueue(newTrackInfo);

        ++trackNumber;
    }
}
/// <summary>
/// Pairs an audio track with the raw byte payload it plays; volume starts at full (1).
/// </summary>
/// <param name="audioTrack">The track that will play the data.</param>
/// <param name="bytes">The raw audio payload.</param>
public AudioTrackContainer(Media.AudioTrack audioTrack, byte[] bytes)
{
    Volume = 1; // default to full volume
    AudioTrack = audioTrack;
    Bytes = bytes;
}
/// <summary>
/// Creates the core playback engine: a streaming stereo 16-bit PCM track at the
/// compression-adjusted rate, a working buffer, and the (unstarted) player thread.
/// </summary>
/// <param name="player">Owning player, kept for callbacks.</param>
public CorePlayer(Player player)
{
    this.player = player;
    // Fix: ChannelOut.Stereo replaces the obsolete ChannelConfiguration.Stereo overload.
    // "* n" part is adjusted for device.
    audio = new AudioTrack(Android.Media.Stream.Music, 44100 / CompressionRate * 2, ChannelOut.Stereo,
        Android.Media.Encoding.Pcm16bit, buf_size * 4, AudioTrackMode.Stream);
    // FIXME: when I set to "bufsize / 2 / CompressionRate" with CompressionRate = 2,
    // AudioTrack.Write() blocks for some songs...
    buffer = new byte[buf_size / 4 / CompressionRate];
    player_thread = new Thread(() => DoRun());
}
/// <summary>
/// Releases the recorder and player native resources, drops all buffers, and resets
/// every recording/playback state flag.
/// </summary>
protected void DisposeAll()
{
    var record = _audioRecord;
    _audioRecord = null;
    if (record != null)
    {
        record.Release();
        record.Dispose();
    }

    var track = _audioTrack;
    _audioTrack = null;
    if (track != null)
    {
        track.Release();
        track.Dispose();
    }

    // Drop buffered data and reset state flags.
    _audioDataBuffer = null;
    _audioData = null;
    _isAudioRecording = false;
    _isPlaying = false;
    _bufferLength = 0;
}
// Writes the given sample bytes to the track, repeated `times` times (default once).
void WriteValue(AudioTrack at, byte[] val, int times = 1)
{
    int remaining = times;
    while (remaining > 0)
    {
        at.Write(val, 0, val.Length);
        remaining--;
    }
}
// Runs the metronome loop: pulses the BPM label via an alpha animation and streams a
// 440 Hz sine tone burst once per beat until `reading` becomes false.
// NOTE(review): this blocks the calling thread (Thread.Sleep in a loop) — presumably it
// runs on a background thread; confirm at the call site.
private void PlayMetronome()
{
    const int amp = 10000;                 // tone amplitude (16-bit PCM)
    double twopi = 8*Math.Atan(1.0);       // 2π, computed as 8·atan(1)
    const double fr = 440.0;               // tone frequency in Hz
    double ph = 0.0;                       // running phase, carried across beats
    int lastBpm = metronCurrentBpm;

    // Pulse animation on the BPM label; one fade per half-beat, reversed and repeated forever.
    Animation anim = new AlphaAnimation(0.5f, 1.0f);
    anim.Duration = (60000/metronCurrentBpm)/2;
    anim.StartOffset = 0;
    anim.RepeatMode = RepeatMode.Reverse;
    anim.RepeatCount = Animation.Infinite;
    RunOnUiThread(() => { metronBpmText.StartAnimation(anim); });

    metronAudioTrack = new AudioTrack(Android.Media.Stream.Music, 44100, ChannelOut.Mono, Encoding.Pcm16bit, metronBuffSize, AudioTrackMode.Stream);
    metronAudioTrack.Play();

    while (reading)
    {
        // Wait one beat interval.
        Thread.Sleep(60000/metronCurrentBpm);

        if (lastBpm != metronCurrentBpm)
        {
            // The BPM has changed - change the animation speed!
            lastBpm = metronCurrentBpm;
            anim.Duration = (60000/metronCurrentBpm)/2;
            RunOnUiThread(() =>
            {
                metronBpmText.ClearAnimation();
                metronBpmText.StartAnimation(anim);
            });
        }

        // Fill the buffer with the next stretch of the sine wave and play the click.
        for (int i = 0; i < metronAudioBuffer.Length; i++)
        {
            metronAudioBuffer[i] = (short) (amp*Math.Sin(ph));
            ph += twopi*fr/44100;
        }
        metronAudioTrack.Write(metronAudioBuffer, 0, metronAudioBuffer.Length);
    }

    metronAudioTrack.Stop();
    metronAudioTrack.Release();
    RunOnUiThread(() => { metronBpmText.ClearAnimation(); });
}
// Activity entry point: wires a record button that fills `audioBuffer` from the mic and
// a play button that replays the captured samples through an AudioTrack.
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate (bundle);

    // Set our view from the "main" layout resource
    SetContentView (Resource.Layout.Main);

    // Get our button from the layout resource,
    // and attach an event to it
    Button buttonRec = FindViewById<Button> (Resource.Id.myButton);
    Button buttonPlay = FindViewById<Button> (Resource.Id.btnPlay);

    ar = findAudioRecord ();
    audioBuffer = new Int16[bufferSize];
    //ar.Release ();

    // NOTE(review): this loop runs synchronously inside the click handler — on the UI
    // thread it will block the UI until `count` exceeds the buffer length; consider a
    // background thread.
    buttonRec.Click += delegate
    {
        ar.StartRecording();
        while (true)
        {
            try
            {
                // Keep reading the buffer
                // while there is audio input.
                ar.Read( audioBuffer, 0, audioBuffer.Length);
                if(count++ > audioBuffer.Length)
                {
                    ar.Stop();
                    break;
                }
                // Write out the audio file.
            }
            catch (Exception ex)
            {
                Console.Out.WriteLine(ex.Message);
                break;
            }
        }
    };

    buttonPlay.Click += (sender, e) =>
    {
        int minimumBufferSize = AudioTrack.GetMinBufferSize(ar.SampleRate, ChannelOut.Mono, Android.Media.Encoding.Pcm16bit);
        // NOTE(review): Play() is called before Write(); in Static mode the data normally
        // has to be written before playback starts — verify this plays as intended.
        audioTrack = new AudioTrack(
            // Stream type
            Android.Media.Stream.Music,
            // Frequency
            ar.SampleRate,
            // Mono or stereo
            ChannelConfiguration.Mono,
            // Audio encoding
            Android.Media.Encoding.Pcm16bit,
            // Length of the audio clip.
            (minimumBufferSize < audioBuffer.Length ? audioBuffer.Length : minimumBufferSize),
            // Mode. Stream or static.
            AudioTrackMode.Static);

        audioTrack.Play();
        audioTrack.Write(audioBuffer, 0, audioBuffer.Length);
    };
}