/// <summary>
/// Creates and starts the underlying <see cref="Android.Media.AudioRecord"/>.
/// When available (post Jelly Bean), queries the device's native output sample
/// rate and uses it in place of the configured default. No-op if already started.
/// </summary>
private void Initialize()
{
    if (IsStarted)
    {
        return;
    }

    try
    {
        // AudioManager.PropertyOutputSampleRate only exists on newer API levels.
        if (Android.OS.Build.VERSION.SdkInt > Android.OS.BuildVersionCodes.JellyBean)
        {
            try
            {
                var audioManager = (AudioManager)Android.App.Application.Context.GetSystemService(Context.AudioService);
                var nativeRate = audioManager.GetProperty(AudioManager.PropertyOutputSampleRate);
                if (!string.IsNullOrEmpty(nativeRate) && int.TryParse(nativeRate, out int parsedRate))
                {
                    this.m_SampleRate = parsedRate;
                }
            }
            catch
            {
                // Best effort: fall back to the configured sample rate.
            }
        }

        Android.OS.Process.SetThreadPriority(Android.OS.ThreadPriority.UrgentAudio);
        m_AudioRecord = new Android.Media.AudioRecord(this.MediaAudioSource, m_SampleRate, Channelin, MediaEncoding, m_MinSize);
        m_AudioRecord.StartRecording();
    }
    catch
    {
        // Deliberately swallowed: on failure the recorder simply stays unstarted.
    }
}
/// <summary>
/// Begins capturing from the microphone at 11025 Hz mono 16-bit PCM and
/// asynchronously pumps samples via <c>ReadAudioAsync</c>.
/// </summary>
protected async Task StartRecorderAsync()
{
    endRecording = false;
    isRecording = true;
    RaiseRecordingStateChangedEvent();

    audioBuffer = new Byte[100000];

    // Mic input, 11025 Hz, mono, 16-bit PCM; buffer sized to the clip length.
    audioRecord = new AudioRecord(
        AudioSource.Mic,
        11025,
        ChannelIn.Mono,
        Android.Media.Encoding.Pcm16bit,
        audioBuffer.Length);
    audioRecord.StartRecording();

    // Off-line the read loop so the UI thread is not blocked.
    await ReadAudioAsync();
}
/// <summary>
/// Synchronous variant of the recorder start-up: captures at 11025 Hz mono
/// 16-bit PCM and drains the recorder on a dedicated background thread.
/// </summary>
protected void StartRecorder()
{
    endRecording = false;
    isRecording = true;

    audioBuffer = new Byte[100000];

    // Mic input, 11025 Hz, mono, 16-bit PCM; buffer sized to the clip length.
    audioRecord = new AudioRecord(
        AudioSource.Mic,
        11025,
        ChannelIn.Mono,
        Android.Media.Encoding.Pcm16bit,
        audioBuffer.Length);
    audioRecord.StartRecording();

    // Off-line the read loop so we do not block the UI thread.
    var readerThread = new Thread(ReadAudio);
    readerThread.Start();
}
/// <summary>
/// Prepares capture buffers for batches of <paramref name="samplesPerBatch"/>
/// 16-bit mono samples at <c>SAMPLE_RATE</c> Hz.
/// </summary>
/// <param name="samplesPerBatch">Number of samples per batch; must be positive
/// and correspond to a whole number of milliseconds at <c>SAMPLE_RATE</c>.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when the batch size is not positive.</exception>
/// <exception cref="ArgumentException">Thrown when the batch duration is not an integer millisecond count.</exception>
public AudioIn(int samplesPerBatch)
{
    if (samplesPerBatch <= 0)
    {
        throw new ArgumentOutOfRangeException(nameof(samplesPerBatch), "Samples per batch must be positive.");
    }

    // A batch of N samples lasts N * 1000 / SAMPLE_RATE milliseconds.
    // Fix: the original used float division plus `% 1f != 0f`, an exact
    // floating-point comparison that can mis-classify values due to rounding;
    // this integer check is exact.
    if ((long)samplesPerBatch * 1000 % SAMPLE_RATE != 0)
    {
        throw new ArgumentException("Samples per batch must make up an integer number of milliseconds.");
    }

    this.samplesPerBatch = samplesPerBatch;
    rawData = new byte[samplesPerBatch * 2];   // 2 bytes per 16-bit sample.
    data = new float[samplesPerBatch];
    audioRecord = new AudioRecord(AudioSource.Mic, SAMPLE_RATE, ChannelIn.Mono, Encoding.Pcm16bit, samplesPerBatch * 2 * 10); // 10 batches in the buffer.
}
/// <summary>
/// Opens a recorder via <c>openAudio</c> and starts the asynchronous read loop.
/// </summary>
/// <returns>
/// The read-loop task, or a completed task when no recorder could be opened.
/// </returns>
protected Task StartRecorderAsync()
{
    isRecording = true;
    audioRecord = openAudio();
    if (audioRecord == null)
    {
        // Fix: the original returned null here; any caller awaiting the result
        // would throw a NullReferenceException. A completed task is the
        // backward-compatible safe value for "nothing to do".
        return Task.CompletedTask;
    }

    audioRecord.StartRecording();
    // Off-line the read loop so we do not block the UI thread.
    return ReadAudioAsync();
}
/// <summary>
/// Stops, releases and disposes the recorder, swallowing any teardown errors,
/// then clears the field. Safe to call when no recorder exists.
/// </summary>
private void CleanUp()
{
    if (m_AudioRecord == null)
    {
        return;
    }

    try
    {
        m_AudioRecord.Stop();
        m_AudioRecord.Release();
        m_AudioRecord.Dispose();
    }
    catch
    {
        // Teardown is best effort; ignore failures.
    }

    m_AudioRecord = null;
}
/// <summary>
/// Must be called before beginning initial audio options. Can be recalled to set a new file path safely.
/// </summary>
/// <param name="_filePath">Output folder path to save file with filename and extension. Example: Assets/RecordedAudio/newAudioFile.mp3</param>
/// <param name="enableNoiseSuppression">Whether noise suppression should be applied while recording.</param>
public void PrepareAudioRecorder(string _filePath, bool enableNoiseSuppression)
{
    // The path may be updated on every call; the recorder itself is built once.
    filePath = _filePath;

    if (prepared)
    {
        return;
    }

    bufferSizeBytes = AudioRecord.GetMinBufferSize(SAMPPERSEC, channelConfiguration, audioEncoding);
    // NOTE(review): this short[] is sized in *bytes*, i.e. twice the number of
    // 16-bit samples it will ever hold; bufferLength holds the sample count.
    // Preserved as-is — confirm before shrinking.
    buffer = new short[bufferSizeBytes];
    bufferLength = bufferSizeBytes / 2;
    audioRecorder = new AudioRecord(AudioSource.Mic, SAMPPERSEC, channelConfiguration, audioEncoding, bufferSizeBytes);
    isNoiseSupression = enableNoiseSuppression;
    prepared = true;
}
/// <summary>
/// Probes sample rates, encodings and channel layouts until an AudioRecord
/// initializes successfully. On success the ref parameters receive the working
/// configuration and the recorder is returned; otherwise returns null.
/// </summary>
/// <param name="sampleRate">Receives the working sample rate.</param>
/// <param name="audioFormat">Receives the working PCM encoding.</param>
/// <param name="channelConfig">Receives the working channel layout.</param>
/// <param name="bufferSize">Receives the minimum buffer size for that configuration.</param>
public static AudioRecord FindAudioRecord(ref int sampleRate, ref Android.Media.Encoding audioFormat, ref ChannelIn channelConfig, ref int bufferSize)
{
    foreach (int sr in _sampleRates)
    {
        foreach (var af in new Android.Media.Encoding[] { Android.Media.Encoding.Pcm16bit, Android.Media.Encoding.Pcm8bit })
        {
            foreach (var cc in new ChannelIn[] { ChannelIn.Stereo, ChannelIn.Mono })
            {
                try
                {
                    int bs = AudioRecord.GetMinBufferSize(sr, cc, af);
                    if (bs <= 0)
                    {
                        // Negative values are platform error codes; skip this combination.
                        continue;
                    }

                    // Check if we can instantiate and have a success.
                    AudioRecord recorder = new AudioRecord(AudioSource.Default, sr, cc, af, bs);
                    if (recorder.State == State.Initialized)
                    {
                        bufferSize = bs;
                        sampleRate = sr;
                        audioFormat = af;
                        channelConfig = cc;
                        return recorder;
                    }

                    // Fix: the original leaked one native AudioRecord per failed
                    // combination; release recorders that did not initialize.
                    recorder.Release();
                    recorder.Dispose();
                }
                catch (Exception)
                {
                    // Keep trying the next combination.
                }
            }
        }
    }

    return null;
}
/// <summary>
/// Initializes a new instance of the <see cref="Xamarin.Forms.Labs.Droid.Services.Media.Microphone"/> class.
/// </summary>
/// <param name="sampleRate">Sample rate.</param>
/// <param name="bufferSize">Buffer size.</param>
public Microphone(int sampleRate, int bufferSize)
{
    this.bufferSize = bufferSize;

    this.audioSource = new AudioRecord(
        AudioSource.Mic,
        sampleRate,
        ChannelIn.Mono,
        Encoding.Pcm16bit,
        this.bufferSize);

    // Rates for which the device reports a valid minimum buffer size.
    this.SupportedSampleRates = new[] { 8000, 11025, 16000, 22050, 44100 }
        .Where(rate => AudioRecord.GetMinBufferSize(rate, ChannelIn.Default, Encoding.Pcm16bit) > 0)
        .ToList();

    // The lambdas below are evaluated on command execution, after construction.
    this.Start = new RelayCommand<int>(
        this.StartRecording,
        rate => this.SupportedSampleRates.Contains(rate) && this.audioSource != null && !this.Active);

    this.Stop = new Command(
        () => this.audioSource.Stop(),
        () => this.Active);
}
/// <summary>
/// Probes common sample rates and channel layouts (16-bit PCM only) and
/// returns the first AudioRecord that initializes, or null when none do.
/// Side effect: leaves <c>bufferSize</c> set for the returned configuration.
/// </summary>
public AudioRecord findAudioRecord()
{
    int[] mSampleRates = new int[] { 8000, 11025, 22050, 44100 };
    foreach (int rate in mSampleRates)
    {
        foreach (var channelConfig in new ChannelIn[] { ChannelIn.Mono, ChannelIn.Stereo })
        {
            try
            {
                Log.Debug("", "Attempting rate " + rate + "Hz, bits: " + Android.Media.Encoding.Pcm16bit + ", channel: " + channelConfig);
                bufferSize = AudioRecord.GetMinBufferSize(rate, channelConfig, Android.Media.Encoding.Pcm16bit);
                if (bufferSize > 0)
                {
                    // Check if we can instantiate and have a success.
                    AudioRecord recorder = new AudioRecord(AudioSource.Mic, rate, channelConfig, Android.Media.Encoding.Pcm16bit, bufferSize);
                    if (recorder.State == State.Initialized)
                        return recorder;

                    // Fix: the original leaked the native recorder whenever it
                    // failed to initialize; release it before trying the next combo.
                    recorder.Release();
                    recorder.Dispose();
                }
            }
            catch (Exception e)
            {
                Log.Error("Error", rate + "Exception, keep trying.", e);
            }
        }
    }

    return null;
}
/// <summary>
/// Periodic AudioRecord callback: drains the recorder buffer into the WAV
/// writer and tracks the payload size. Moves the state machine to Error on I/O failure.
/// </summary>
/// <param name="recorder">The recorder raising the notification (the field is used instead).</param>
public void OnPeriodicNotification(AudioRecord recorder)
{
    if (_audioRecord == null)
        return;

    // Fix: Read() returns the number of bytes actually delivered (or a negative
    // error code). The original ignored it and always wrote _buffer.Length
    // bytes, appending stale data on short reads and over-counting _payloadSize.
    int bytesRead = _audioRecord.Read(_buffer, 0, _buffer.Length);
    if (bytesRead <= 0)
        return;

    try
    {
        lock (_randomAccessLock)
        {
            if (_randomAccessWriter == null)
                return;
            _randomAccessWriter.Write(_buffer, 0, bytesRead);
            _payloadSize += bytesRead;
        }
    }
    catch (IOException e)
    {
        Log.Error(Tag, "Error occured in updateListener, recording is aborted" + e.Message + e.StackTrace);
        State = WavRecorderState.Error;
    }
}
/// <summary>
/// AudioRecord marker callback; intentionally empty — this listener acts only
/// on periodic notifications, not on marker events.
/// </summary>
/// <param name="recorder">The recorder that reached its marker position.</param>
public void OnMarkerReached(AudioRecord recorder) { }
/// <summary>
/// Builds a mono 16-bit PCM AudioRecord at the given rate, sizing its buffer
/// from the timer interval and wiring this object as the position-update
/// listener. Returns null (and sets Error state) when the configuration is invalid.
/// </summary>
/// <param name="sampleRate">Requested capture rate in Hz.</param>
private AudioRecord CreateAudioRecord(int sampleRate) //todo: refactor this
{
    // NOTE(review): presumably gives a previously released recorder time to
    // free the microphone before re-opening it — confirm whether still needed.
    Thread.Sleep(200);
    try
    {
        _sampleRate = sampleRate;
        _framePeriod = _sampleRate * TimerInterval / 1000;
        _bufferSize = _framePeriod * 2 * FmtAvgBps * Channels / 8;

        var minBufferSize = AudioRecord.GetMinBufferSize(_sampleRate, ChannelIn.Mono, Encoding.Pcm16bit);
        if (BufferSizeError(minBufferSize))
        {
            State = WavRecorderState.Error;
            return null;
        }

        if (_bufferSize < minBufferSize)
        {
            // Never go below the platform's minimum buffer size.
            // Fix: reuse minBufferSize instead of redundantly calling
            // GetMinBufferSize a second time with identical arguments.
            _bufferSize = minBufferSize;
            // Recompute the notification period to match the larger buffer.
            _framePeriod = _bufferSize / (2 * FmtAvgBps * Channels / 8);
            Log.Debug(Tag, "Increasing buffer size to " + _bufferSize);
        }

        var audioRecord = new AudioRecord(AudioSource.Mic, _sampleRate, ChannelIn.Mono, Encoding.Pcm16bit, _bufferSize);
        audioRecord.SetRecordPositionUpdateListener(this);
        audioRecord.SetPositionNotificationPeriod(_framePeriod);
        return audioRecord;
    }
    catch (Exception e)
    {
        Log.Error(Tag, e.Message + e.StackTrace);
        State = WavRecorderState.Error;
        return null;
    }
}
/// <summary>
/// Creates the recorder at the given sample rate and transitions the state
/// machine to Initializing on success, or Error when creation or
/// initialization fails.
/// </summary>
/// <param name="sampleRate">Requested capture rate in Hz.</param>
private void InitializeAudioRecord(int sampleRate)
{
    _audioRecord = CreateAudioRecord(sampleRate);

    bool initialized = _audioRecord != null && _audioRecord.State == Android.Media.State.Initialized;
    if (!initialized)
    {
        Log.Error(Tag, "Failed to initialize AudioRecord with sampleRate: " + sampleRate);
        State = WavRecorderState.Error;
        return;
    }

    State = WavRecorderState.Initializing;
}
/// <summary>
/// Tries standard sampling rates from highest to lowest and returns the first
/// mono 16-bit PCM AudioRecord that initializes (buffer at least 4096 bytes),
/// or null when none work.
/// </summary>
private AudioRecord openAudio()
{
    // 44100, 22050, 16000, 11025, 8000.
    int[] samplingRates = { 44100, 22050, 16000, 11025, 8000 };
    for (int i = 0; i < samplingRates.Length; ++i)
    {
        try
        {
            int min = AudioRecord.GetMinBufferSize(samplingRates[i], ChannelIn.Mono, Encoding.Pcm16bit);
            if (min < 4096)
                min = 4096;

            AudioRecord record = new AudioRecord(AudioSource.Mic, samplingRates[i], ChannelIn.Mono, Encoding.Pcm16bit, min);
            if (record.State == State.Initialized)
            {
                Logger.LogThis("Audio recorder initialised at " + record.SampleRate, "openAudio", "RecordAudio");
                // Fix: the original built a Toast here but never called Show(),
                // so the toast was a dead no-op; removed (this method may run
                // off the UI thread, where showing a toast is unsafe anyway).
                return record;
            }

            record.Release();
            record = null;
        }
        catch (Exception)
        {
            // Meh. Try the next one.
        }
    }

    // None worked.
    return null;
}
/// <summary>
/// Releases the recorder and playback track (if any) and resets all audio
/// buffers and state flags.
/// </summary>
protected void DisposeAll()
{
    var recorder = _audioRecord;
    _audioRecord = null;
    if (recorder != null)
    {
        recorder.Release();
        recorder.Dispose();
    }

    var track = _audioTrack;
    _audioTrack = null;
    if (track != null)
    {
        track.Release();
        track.Dispose();
    }

    _audioDataBuffer = null;
    _isAudioRecording = false;
    _bufferLength = 0;
    _audioData = null;
    _isPlaying = false;
}
/// <summary>
/// Locates a working AudioRecord configuration via <c>FindAudioRecord</c>;
/// on success clears the output path and enters Initializing, otherwise logs
/// and flags the Error state.
/// </summary>
private void InitializeAudioRecord()
{
    _audioRecord = FindAudioRecord();
    if (_audioRecord != null)
    {
        _filePath = null;
        State = WavRecorderState.Initializing;
        return;
    }

    Log.Error(Tag, "No fitting audio record found.");
    State = WavRecorderState.Error;
}
/// <summary>
/// Activity entry point: wires a record button that drains the microphone into
/// <c>audioBuffer</c> and a play button that plays the captured buffer back.
/// </summary>
/// <param name="bundle">Saved activity state.</param>
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);

    // Set our view from the "main" layout resource.
    SetContentView(Resource.Layout.Main);

    // Get our buttons from the layout resource and attach events to them.
    Button buttonRec = FindViewById<Button>(Resource.Id.myButton);
    Button buttonPlay = FindViewById<Button>(Resource.Id.btnPlay);

    ar = findAudioRecord();
    audioBuffer = new Int16[bufferSize];
    //ar.Release ();

    // NOTE(review): this handler loops on the UI thread until `count` exceeds
    // the buffer length, freezing the UI meanwhile, and `ar` may be null when
    // findAudioRecord() found no working configuration. Behavior preserved as-is.
    buttonRec.Click += delegate
    {
        ar.StartRecording();
        while (true)
        {
            try
            {
                // Keep reading the buffer while there is audio input.
                ar.Read(audioBuffer, 0, audioBuffer.Length);
                if (count++ > audioBuffer.Length)
                {
                    ar.Stop();
                    break;
                }
                // Write out the audio file.
            }
            catch (Exception ex)
            {
                Console.Out.WriteLine(ex.Message);
                break;
            }
        }
    };

    buttonPlay.Click += (sender, e) =>
    {
        int minimumBufferSize = AudioTrack.GetMinBufferSize(ar.SampleRate, ChannelOut.Mono, Android.Media.Encoding.Pcm16bit);
        audioTrack = new AudioTrack(
            Android.Media.Stream.Music,                 // Stream type.
            ar.SampleRate,                              // Frequency.
            ChannelConfiguration.Mono,                  // Mono or stereo.
            Android.Media.Encoding.Pcm16bit,            // Audio encoding.
            (minimumBufferSize < audioBuffer.Length ? audioBuffer.Length : minimumBufferSize), // Length of the audio clip.
            AudioTrackMode.Static);                     // Mode: stream or static.
        audioTrack.Play();
        audioTrack.Write(audioBuffer, 0, audioBuffer.Length);
    };
}
/// <summary>
/// Initializes a new instance of the <see cref="SimplyMobile.Media.AudioStream"/> class.
/// </summary>
/// <param name="sampleRate">Sample rate.</param>
/// <param name="bufferSize">Buffer size.</param>
public AudioStream(int sampleRate, int bufferSize)
{
    this.bufferSize = bufferSize;

    // Mono 16-bit PCM capture from the default device at the requested rate.
    this.audioSource = new AudioRecord(
        AudioStream.DefaultDevice,
        sampleRate,
        ChannelIn.Mono,
        Encoding.Pcm16bit,
        this.bufferSize);
}
/// <summary>
/// Starts capturing at the requested rate on a worker thread.
/// </summary>
/// <param name="sampleRate">Desired capture rate in Hz.</param>
/// <returns>True when the rate is supported and recording began; otherwise false.</returns>
public Task<bool> Start(int sampleRate)
{
    return Task.Run(() =>
    {
        bool supported = this.SupportedSampleRates.Contains(sampleRate);
        if (!supported)
        {
            return false;
        }

        this.audioSource = new AudioRecord(
            AudioSource.Mic,
            sampleRate,
            ChannelIn.Mono,
            Encoding.Pcm16bit,
            this.bufferSize);
        this.StartRecording();
        return true;
    });
}
/// <summary>
/// Cleanup. Call this before destroying the object; safely releases the
/// recorder (stopping any active recording first) and resets preparation state.
/// </summary>
public void Dispose()
{
    if (audioRecorder == null)
    {
        return;
    }

    if (isRecording)
    {
        StopAudio();
    }

    audioRecorder.Release();
    audioRecorder.Dispose();
    audioRecorder = null;
    filePath = "";
    prepared = false;
}
/// <summary>
/// Record-button handler: swaps button visibility, opens a working recorder
/// configuration and drains it into <c>_audioData</c> on a background task.
/// </summary>
private void Click_Record(object sender, EventArgs e)
{
    _stop.Visibility = ViewStates.Visible;
    _record.Visibility = ViewStates.Gone;
    _play.Visibility = ViewStates.Gone;

    _audioRecord = AudioHelper.FindAudioRecord(ref _sampleAudioBitRate, ref _audioFormat, ref _channelConfig, ref _bufferLength);
    // Fix: FindAudioRecord returns null when no configuration initializes;
    // the original dereferenced it unconditionally and crashed. Restore the
    // button state and bail out instead.
    if (_audioRecord == null)
    {
        _stop.Visibility = ViewStates.Gone;
        _record.Visibility = ViewStates.Visible;
        _play.Visibility = ViewStates.Visible;
        return;
    }

    _audioDataBuffer = new byte[_bufferLength];
    _audioData = new List<byte>();

    _audioRecord.StartRecording();
    _isAudioRecording = true;

    // Task.Run instead of new TaskFactory().StartNew: same thread-pool
    // execution without allocating a factory per click.
    Task.Run(() =>
    {
        while (_isAudioRecording)
        {
            _audioRecord.Read(_audioDataBuffer, 0, _audioDataBuffer.Length);
            _audioData.AddRange(_audioDataBuffer);
        }
    });
}
/// <summary>
/// Releases the WAV writer and the recorder. The recorder must already be
/// stopped; releasing while recording is a programming error.
/// </summary>
/// <exception cref="IllegalStateException">Thrown when called while recording.</exception>
public void Release()
{
    Log.Debug(Tag, "before release recorder");

    if (IsRecording)
    {
        Log.Error(Tag, "Release(): Tried to Release running WavRecorder. Please stop the recoder before release.");
        throw new IllegalStateException();
    }

    try
    {
        lock (_randomAccessLock)
        {
            var writer = _randomAccessWriter;
            if (writer != null)
            {
                writer.Close();
                writer.Dispose();
                _randomAccessWriter = null;
            }
        }
    }
    catch (IOException ioEx)
    {
        Log.Error(Tag, "Release(): I/O exception occured while closing output file" + ioEx.Message + ioEx.StackTrace);
    }

    //if(File.Exists(_filePath)) File.Delete(_filePath);

    var recorder = _audioRecord;
    if (recorder != null)
    {
        recorder.Stop();
        recorder.Release();
        recorder.Dispose();
        _audioRecord = null;
    }

    Log.Debug(Tag, "after release recorder");
}
/// <summary>
/// Starts the specified sample rate.
/// </summary>
/// <param name="sampleRate">The sample rate.</param>
/// <returns>A <see cref="Task{TResult}"/> yielding true when recording started.</returns>
public Task<bool> Start(int sampleRate)
{
    return Task.Run(
        () =>
        {
            if (!SupportedSampleRates.Contains(sampleRate))
            {
                return false;
            }

            // Fix: GetMinBufferSize returns a negative error code (e.g.
            // ErrorBadValue) for unsupported configurations; the original
            // passed that straight into the AudioRecord constructor.
            var minSize = AudioRecord.GetMinBufferSize(sampleRate, ChannelIn.Mono, Encoding.Pcm16bit);
            if (minSize <= 0)
            {
                return false;
            }

            _bufferSize = minSize;
            _audioSource = new AudioRecord(AudioSource.Mic, sampleRate, ChannelIn.Mono, Encoding.Pcm16bit, _bufferSize);
            StartRecording();
            return true;
        });
}
// (Removed a fully commented-out Reset() implementation that was dead code;
// recover it from version control history if ever needed.)

/// <summary>
/// Stops any active recording; when only prepared, closes the WAV writer and
/// deletes the partial output file. Finally releases the recorder.
/// </summary>
public void Release()
{
    Log.Debug(Tag, "before release recorder");

    if (IsRecording)
    {
        Stop();
    }
    else if (IsReady)
    {
        try
        {
            lock (_randomAccessLock)
            {
                if (_randomAccessWriter != null)
                {
                    _randomAccessWriter.Close();
                    _randomAccessWriter = null;
                }
            }
        }
        catch (IOException e)
        {
            Log.Error(Tag, "I/O exception occured while closing output file" + e.Message);
        }

        // Fix: File.Delete throws ArgumentNullException on a null path;
        // File.Exists safely returns false for null/missing paths.
        if (File.Exists(_filePath))
        {
            File.Delete(_filePath);
        }
    }

    if (_audioRecord != null)
    {
        _audioRecord.Release();
        _audioRecord = null;
    }

    Log.Debug(Tag, "after release recorder");
}
/// <summary>
/// Stops capturing on a worker thread and drops the recorder reference.
/// Safe to call when no recorder was ever started.
/// </summary>
public Task Stop()
{
    return Task.Run(() =>
    {
        // Fix: the original dereferenced audioSource unconditionally and threw
        // a NullReferenceException when Stop() was called before Start() or twice.
        this.audioSource?.Stop();
        this.audioSource = null;
    });
}
/// <summary>
/// Begins capturing with the configured rate/channels/encoding, sizing the
/// buffer from the platform minimum, then asynchronously pumps samples via
/// <c>ReadAudioAsync</c>.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the configured recorder parameters are unsupported on this device.
/// </exception>
protected async Task StartRecorderAsync()
{
    endRecording = false;
    IsRecording = true;
    RaiseRecordingStateChangedEvent();

    var bufferSize = AudioRecord.GetMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
    // Fix: GetMinBufferSize returns a negative error code for unsupported
    // configurations; the original then crashed allocating a negative-length
    // array. Fail explicitly and roll back the recording flag instead.
    if (bufferSize <= 0)
    {
        IsRecording = false;
        RaiseRecordingStateChangedEvent();
        throw new InvalidOperationException("Unsupported recorder configuration (GetMinBufferSize returned " + bufferSize + ").");
    }

    audioBuffer = new byte[bufferSize];

    audioRecord = new AudioRecord(
        AudioSource.Mic,            // Hardware source of recording.
        RECORDER_SAMPLERATE,        // Frequency.
        RECORDER_CHANNELS,          // Mono or stereo.
        RECORDER_AUDIO_ENCODING,    // Audio encoding.
        audioBuffer.Length);        // Length of the audio clip.
    audioRecord.StartRecording();

    // Off-line the read loop so we do not block the UI thread.
    await ReadAudioAsync();
}