//@Override //@SuppressLint({ "InlinedApi", "NewApi" })
protected override void encodeWithMediaCodec()
{
    int bufferSize = AudioRecord.GetMinBufferSize(mQuality.samplingRate, ChannelIn.Mono, Encoding.Pcm16bit) * 2;

    ((AACLATMPacketizer)mPacketizer).setSamplingRate(mQuality.samplingRate);

    mAudioRecord = new AudioRecord(Android.Media.AudioSource.Mic, mQuality.samplingRate,
        Android.Media.ChannelIn.Mono, Android.Media.Encoding.Pcm16bit, bufferSize);

    mMediaCodec = MediaCodec.CreateEncoderByType("audio/mp4a-latm");
    MediaFormat format = new MediaFormat();
    format.SetString(MediaFormat.KeyMime, "audio/mp4a-latm");
    format.SetInteger(MediaFormat.KeyBitRate, mQuality.bitRate);
    format.SetInteger(MediaFormat.KeyChannelCount, 1);
    format.SetInteger(MediaFormat.KeySampleRate, mQuality.samplingRate);
    format.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.SetInteger(MediaFormat.KeyMaxInputSize, bufferSize);
    mMediaCodec.Configure(format, null, null, MediaCodecConfigFlags.Encode);

    mAudioRecord.StartRecording();
    mMediaCodec.Start();

    MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    Java.Nio.ByteBuffer[] inputBuffers = mMediaCodec.GetInputBuffers();

    mThread = new Thread(this);
    mThread.Start();

    // The packetizer encapsulates this stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();

    mStreaming = true;
}
public Microphone(int bufferSize = -1)
{
    SupportedSampleRates = (new[] { 8000, 11025, 16000, 22050, 44100 })
        .Where(rate => AudioRecord.GetMinBufferSize(rate, ChannelIn.Mono, Encoding.Pcm16bit) > 0)
        .ToList();

    _bufferSize = bufferSize;
}
public void Run()
{
    int len = 0, bufferIndex = 0;
    try
    {
        Java.Nio.ByteBuffer[] inputBuffers = mMediaCodec.GetInputBuffers();
        int bufferSize = AudioRecord.GetMinBufferSize(mQuality.samplingRate, ChannelIn.Mono, Encoding.Pcm16bit) * 2;

        while (!Thread.Interrupted())
        {
            bufferIndex = mMediaCodec.DequeueInputBuffer(10000);
            if (bufferIndex >= 0)
            {
                inputBuffers[bufferIndex].Clear();
                len = mAudioRecord.Read(inputBuffers[bufferIndex], bufferSize);
                if ((len == (int)RecordStatus.ErrorInvalidOperation) || (len == (int)RecordStatus.ErrorBadValue))
                {
                    Log.Error(TAG, "An error occurred with the AudioRecord API!");
                }
                else
                {
                    //Log.v(TAG, "Pushing raw audio to the decoder: len=" + len + " bs: " + inputBuffers[bufferIndex].Capacity());
                    mMediaCodec.QueueInputBuffer(bufferIndex, 0, len, Java.Lang.JavaSystem.NanoTime() / 1000, 0);
                }
            }
        }
    }
    catch (RuntimeException e)
    {
        e.PrintStackTrace();
    }
}
/*********************************************************************************
 *
 *********************************************************************************/
public void ButtonRec_Click(object sender, EventArgs e)
{
    Int32 bufferSize = AudioRecord.GetMinBufferSize(mSamplingRate, ChannelIn.Mono, mFormat);
    System.Diagnostics.Debug.WriteLine("AudioRecord : GetMinBufferSize={0}", bufferSize);

    RecordBuffer.Instance.Frames = mFrameSize;

    mAudioRecord = new AudioRecord(
        //AudioSource.Default,
        //AudioSource.Camcorder,
        AudioSource.Mic,
        //AudioSource.VoiceCommunication,
        //AudioSource.VoiceRecognition,
        //AudioSource.VoiceUplink,
        mSamplingRate,
        ChannelIn.Mono,
        mFormat,
        bufferSize);

    // How many audio samples to process at a time (= the number of samples in one frame)
    mAudioRecord.SetPositionNotificationPeriod(RecordBuffer.Instance.Frames);

    // Register the callback
    mAudioRecord.SetRecordPositionUpdateListener(new OnRecordPositionUpdateListener());

    mAudioRecord.StartRecording();

    Byte[] dummy = new Byte[1];
    mAudioRecord.Read(dummy, 0, dummy.Length);
}
public Task<AudioRecordResult> Record(AudioRecordOptions options = null)
{
    _options = options ?? AudioRecordOptions.Empty;
    _tcs = new TaskCompletionSource<AudioRecordResult>();

    _bufferSize = AudioRecord.GetMinBufferSize(_options.SampleRate, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING) * 3;
    _recorder = new AudioRecord(AudioSource.VoiceRecognition, _options.SampleRate, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING, _bufferSize);

    if (_recorder.State == State.Initialized)
    {
        _recorder.StartRecording();
    }
    else
    {
        return Task.FromResult(new AudioRecordResult($"AudioRecord initialisation returned unexpected state ({_recorder.State})"));
    }

    _isRecording = true;
    _timeoutToken = new CancellationTokenSource();

    Task.Run(() => RecordAudio());
    Task.Run(() => Timeout());

    return _tcs.Task;
}
private int GetRate()
{
    // Pick the highest sample rate for which GetMinBufferSize does not
    // return -2 (ERROR_BAD_VALUE), i.e. a configuration the device supports.
    var rate = new int[] { 4000, 8000, 11025, 16000, 22050, 44100 }
        .Where(k => AudioRecord.GetMinBufferSize(k, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit) != -2)
        .Last();
    return rate;
}
private int determineMinimumBufferSize(int sampleRate, Encoding encoding)
{
    /*int minBufferSize =
          AudioRecord.GetMinBufferSize(sampleRate,
              AudioFormat.CHANNEL_IN_MONO, encoding);
      return minBufferSize;*/
    return AudioRecord.GetMinBufferSize(sampleRate, ChannelIn.Mono, encoding);
}
public Listener()
{
    Rate = GetRate();
    _soundStream = new AudioRecord(AudioSource.Mic, Rate, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit,
        AudioRecord.GetMinBufferSize(Rate, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit) * 10);
    BlockStream = new BlockPickStream(new AudioRecordStream(_soundStream), Rate / 5, 4, 400, Rate * 3);
}
/// <summary>
/// Record from the microphone and broadcast the buffer.
/// </summary>
private async Task Record()
{
    this.bufferSize = AudioRecord.GetMinBufferSize(this.SampleRate, ChannelIn.Mono, Encoding.Pcm16bit);
    var buffer = new byte[this.bufferSize];

    var readCount = await this.audioSource.ReadAsync(buffer, 0, this.bufferSize);
    this.OnBroadcast.Invoke<byte[]>(this, buffer);
}
/// <summary>
/// Returns a usable recording buffer size for the given configuration.
/// </summary>
/// <param name="sampleRateInHz">The sample rate in Hz.</param>
/// <param name="channelConfig">The channel configuration.</param>
/// <param name="audioFormat">The audio format.</param>
/// <returns>The buffer size in bytes.</returns>
private int BufferSize(int sampleRateInHz, int channelConfig, int audioFormat)
{
    // Use the configuration that was actually passed in rather than hard-coded values.
    int buffSize = AudioRecord.GetMinBufferSize(sampleRateInHz, (ChannelIn)channelConfig, (Encoding)audioFormat);

    if (buffSize < sampleRateInHz)
    {
        buffSize = sampleRateInHz;
    }

    return buffSize;
}
AudioRecord GetAudioRecord(int sampleRate)
{
    this.sampleRate = sampleRate;
    var channelConfig = ChannelIn.Mono;
    var encoding = Encoding.Pcm16bit;

    bufferSize = AudioRecord.GetMinBufferSize(sampleRate, channelConfig, encoding) * 8;

    // Create the recorder with the same channel configuration the buffer size was calculated for.
    return new AudioRecord(AudioSource.Mic, sampleRate, channelConfig, encoding, bufferSize);
}
public Recorder()
{
    _sampleRate = Init();
    _bufferSize = AudioRecord.GetMinBufferSize(_sampleRate, Channel, AudioEncoding) * 3;
    _recorder = new AudioRecord(AudioSource.Mic, _sampleRate, Channel, AudioEncoding, _bufferSize);
}
public string Prepare()
{
    ChannelIn ch = ChannelIn.Mono;
    if (CHANNELS == 2)
    {
        ch = ChannelIn.Stereo;
    }

    bufferSize = AudioRecord.GetMinBufferSize(SAMPLING_RATE, ch, Android.Media.Encoding.Pcm16bit) * 3;

    var recordingId = Guid.NewGuid().ToString();
    var fileName = $"{recordingId}.{FILE_EXTENSION}";

    endRecording = false;
    audioBuffer = new System.Byte[bufferSize];

    try
    {
        filePath = GetFullPathNameForRecording(fileName);

        audioRecord = new AudioRecord(
            // Hardware source of recording.
            AudioSource.Mic,
            // Frequency
            SAMPLING_RATE,
            // Mono or stereo
            ch,
            // Audio encoding
            Android.Media.Encoding.Pcm16bit,
            // Length of the audio clip.
            audioBuffer.Length
        );

        output = new AudioFile()
        {
            FileName = fileName,
            BitDepth = BIT_RATE,
            SampleRate = SAMPLING_RATE,
            NumberOfChannels = CHANNELS,
            ContentType = MIME_TYPE,
        };

        currentState = State.Prepared;

        return recordingId;
    }
    catch (IllegalStateException e)
    {
        throw new RecordingException(e.ToString());
    }
}
public AndroidRecorder()
{
    foreach (int rate in new int[] { 8000, 11025, 16000, 22050, 44100 })
    {
        bufferSize = AudioRecord.GetMinBufferSize(rate, ChannelIn.Mono, ENCODING);
        if (bufferSize > 0)
        {
            sample_rate = rate;
            break;
        }
    }
}
/// <summary>
/// Initializes all recording parameters.
/// </summary>
public void RecorderInit()
{
    // Get the minimum buffer size required for the successful creation of an AudioRecord object.
    _bufferSizeInBytes = AudioRecord.GetMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);

    // Initialize the audio recorder.
    _audioRecorder = new AudioRecord(AudioSource.Mic, RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING, _bufferSizeInBytes);

    _audioRecorder.StartRecording();
}
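RecorderInit above starts capture but never reads any samples back out, so the driver buffer will simply overflow. A minimal read-loop sketch, assuming the _audioRecorder and _bufferSizeInBytes fields set up above plus a hypothetical _isRecording flag and ProcessPcm consumer:

// Sketch only: drain PCM data from the recorder started in RecorderInit.
// _isRecording and ProcessPcm are hypothetical and not part of the original class.
private void ReadLoop()
{
    var pcm = new byte[_bufferSizeInBytes];

    while (_isRecording)
    {
        // Read blocks until data is available and returns the number of bytes read.
        int read = _audioRecorder.Read(pcm, 0, pcm.Length);
        if (read > 0)
        {
            // Hand only the bytes that were actually read to the consumer.
            ProcessPcm(pcm, read);
        }
    }

    _audioRecorder.Stop();
    _audioRecorder.Release();
}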
private void InitRecorder()
{
    var minBufferSize = AudioRecord.GetMinBufferSize(SAMPLE_RATE_IN_HZ, CHANNEL_CONFIG, ENCODING);
    audioRecord = new AudioRecord(AudioSource.Mic, SAMPLE_RATE_IN_HZ, CHANNEL_CONFIG, ENCODING, minBufferSize);

    vad.Enabled = config.VoiceActivityDetectionEnabled;
    vad.SpeechBegin += Vad_SpeechBegin;
    vad.SpeechEnd += Vad_SpeechEnd;
    vad.SpeechNotDetected += Vad_SpeechNotDetected;
    vad.AudioLevelChange += Vad_AudioLevelChange;
}
public static int Init()
{
    // Add the rates you wish to check against to SAMPLE_RATES.
    foreach (var rate in SAMPLE_RATES)
    {
        var bufferSize = AudioRecord.GetMinBufferSize(rate, Channel, AudioEncoding);
        if (bufferSize > 0)
        {
            return rate;
        }
    }

    throw new NotSupportedException("Sample rate is not supported.");
}
/// <summary>
/// Initializes a new instance of the <see cref="AudioStream"/> class.
/// </summary>
/// <param name="sampleRate">Sample rate.</param>
/// <param name="channels">The <see cref="ChannelIn"/> value representing the number of channels to record.</param>
/// <param name="audioFormat">The format of the recorded audio.</param>
public AudioStream(int sampleRate = 44100, ChannelIn channels = ChannelIn.Mono, Encoding audioFormat = Encoding.Pcm16bit)
{
    bufferSize = AudioRecord.GetMinBufferSize(sampleRate, channels, audioFormat);

    if (bufferSize < 0)
    {
        throw new Exception("Invalid buffer size calculated; audio settings used may not be supported on this device");
    }

    SampleRate = sampleRate;
    this.channels = channels;
    this.audioFormat = audioFormat;
}
private void InitialiseRecorder()
{
    var context = CrossCurrentActivity.Current.Activity;
    var audioManager = context.GetSystemService(Context.AudioService) as AudioManager;
    this._sampleRate = Int32.Parse(audioManager.GetProperty(AudioManager.PropertyOutputSampleRate));

    if (this._audioRecord != null)
    {
        this._audioRecord.Release();
    }

    // Calculate the buffer size for the same channel configuration the recorder is created with.
    this._bufferSize = AudioRecord.GetMinBufferSize(this._sampleRate, this.STEREO_CHANNEL, this.ENCODING);
    this._audioRecord = new AudioRecord(AudioSource.Mic, this._sampleRate, this.STEREO_CHANNEL, this.ENCODING, this._bufferSize);
}
private void RecordAudio()
{
    if (File.Exists(wavPath))
    {
        File.Delete(wavPath);
    }

    System.IO.Stream outputStream = System.IO.File.Open(wavPath, FileMode.CreateNew);
    BinaryWriter bWriter = new BinaryWriter(outputStream);

    int bufferSize = AudioRecord.GetMinBufferSize(11025, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit);
    audioBuffer = new byte[bufferSize];

    audRecorder = new AudioRecord(
        // Hardware source of recording.
        AudioSource.Mic,
        // Frequency
        11025,
        // Mono or stereo
        ChannelIn.Mono,
        // Audio encoding
        Android.Media.Encoding.Pcm16bit,
        // Length of the audio clip.
        bufferSize
    );

    audRecorder.StartRecording();

    while (_isRecording == true)
    {
        try
        {
            // Keep reading the buffer while there is audio input.
            audioData = audRecorder.Read(audioBuffer, 0, audioBuffer.Length);
            // Write only the bytes that were actually read.
            bWriter.Write(audioBuffer, 0, audioData);
        }
        catch (System.Exception ex)
        {
            System.Console.Out.WriteLine(ex.Message);
            MessagingCenter.Send<ISoundRecorder, bool>(this, "finishReplaying", true);
            break;
        }
    }

    outputStream.Close();
    bWriter.Close();
}
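Note that RecordAudio writes raw PCM, so the file at wavPath has no RIFF header despite the .wav name. A minimal sketch of a 44-byte header writer for 16-bit mono PCM, with hypothetical sampleRate and dataLength parameters supplied by the caller, that could be written before (or patched over) the PCM payload:

// Sketch only: write a canonical 44-byte RIFF/WAVE header for 16-bit mono PCM.
// dataLength is the size of the PCM payload in bytes; BinaryWriter emits little-endian,
// which is what the WAV format expects.
static void WriteWavHeader(BinaryWriter writer, int sampleRate, int dataLength)
{
    const short channels = 1;
    const short bitsPerSample = 16;
    short blockAlign = (short)(channels * bitsPerSample / 8);
    int byteRate = sampleRate * blockAlign;

    writer.Write(System.Text.Encoding.ASCII.GetBytes("RIFF"));
    writer.Write(36 + dataLength);                              // RIFF chunk size
    writer.Write(System.Text.Encoding.ASCII.GetBytes("WAVE"));
    writer.Write(System.Text.Encoding.ASCII.GetBytes("fmt "));
    writer.Write(16);                                           // fmt chunk size
    writer.Write((short)1);                                     // audio format: PCM
    writer.Write(channels);
    writer.Write(sampleRate);
    writer.Write(byteRate);
    writer.Write(blockAlign);
    writer.Write(bitsPerSample);
    writer.Write(System.Text.Encoding.ASCII.GetBytes("data"));
    writer.Write(dataLength);                                   // followed by the PCM bytes
}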
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);

    // Set our view from the "main" layout resource
    SetContentView(Resource.Layout.activity_debugaudio_towav);

    InitCharts();
    SetButtonHandlers();
    EnableButtons(false);

    // Minimum buffer size (returns 2048 here; not used further in this activity).
    bufferSize = AudioRecord.GetMinBufferSize(8000, Android.Media.ChannelIn.Mono, Android.Media.Encoding.Pcm16bit);
}
public SoundCapture(int sampleSize, int buffermilliseconds)
{
    if (buffermilliseconds > 1000)
    {
        throw new ArgumentOutOfRangeException(nameof(buffermilliseconds));
    }

    var pushsize = sampleSize / (1000 / buffermilliseconds);
    var minbuffersize = AudioRecord.GetMinBufferSize(sampleSize, ChannelIn.Mono, Encoding.Pcm16bit);
    if (pushsize < minbuffersize)
    {
        throw new ArgumentException($"MinBufferSize is {minbuffersize} bytes");
    }

    _record = new AudioRecord(AudioSource.Default, sampleSize, ChannelIn.Mono, Encoding.Pcm16bit, pushsize);
    _buffer = new short[pushsize / 2];
}
private void Initialize()
{
    var sampleRates = new[] { 8000, 11025, 16000, 22050, 44100 };
    _cacheFolder = MainActivity.Instance.CacheDir.AbsolutePath;

    // Try the highest sample rate first and keep the first configuration
    // for which GetMinBufferSize reports a valid buffer size.
    foreach (int rate in sampleRates.Reverse())
    {
        _bufferSize = AudioRecord.GetMinBufferSize(rate, _recorderChannels, _recorderAudioEncoding);
        _deviceService.AudioSampleRate = rate;
        if (_bufferSize > 0)
        {
            return;
        }
    }

    throw new Exception("Could not find valid sample rate and buffer size configuration.");
}
public void StartRecording()
{
    var context = Plugin.CurrentActivity.CrossCurrentActivity.Current.Activity;
    var audioManager = (AudioManager)context.GetSystemService(Context.AudioService);
    _recorderSamplerate = int.Parse(audioManager.GetProperty(AudioManager.PropertyOutputSampleRate));

    _recorder?.Release();

    // Use the same channel and encoding configuration for the buffer size and the recorder.
    _bufferSize = AudioRecord.GetMinBufferSize(_recorderSamplerate, RecorderChannels, RecorderAudioEncoding) * 3;
    _recorder = new AudioRecord(AudioSource.Mic, _recorderSamplerate, RecorderChannels, RecorderAudioEncoding, _bufferSize);
    _recorder.StartRecording();

    _isRecording = true;
    _token = new CancellationTokenSource();
    Task.Run(() => WriteAudioDataToFile(), _token.Token);
}
public override void Run()
{
    Android.OS.Process.SetThreadPriority(ThreadPriority.UrgentAudio);

    AudioRecord recorder = null;
    short[][] buffers = new short[256][];
    int ix = 0;

    for (int i = 0; i < 256; i++)
    {
        buffers[i] = new short[160];
    }

    try
    {
        // ... initialise
        int N = AudioRecord.GetMinBufferSize(8000, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit);
        recorder = new AudioRecord(AudioSource.Mic, 8000, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit, N * 10);
        recorder.StartRecording();

        // ... loop
        while (!stopped)
        {
            short[] buffer = buffers[ix++ % buffers.Length];
            N = recorder.Read(buffer, 0, buffer.Length);
            // process is what you will do with the data... not defined here
            process(buffer);
        }
    }
    catch (Throwable x)
    {
        Console.WriteLine(x.GetType() + " " + x.Message);
    }
    finally
    {
        close();
    }
}
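process is deliberately left undefined in the snippet above. One minimal sketch, assuming only the 160-sample short buffer handed over from recorder.Read, is an RMS level calculation that a caller could use for metering or silence detection:

// Sketch only: compute the RMS level of one 16-bit PCM frame.
// What the level is used for (VU meter, voice activity, etc.) is up to the caller.
static double process(short[] buffer)
{
    double sumOfSquares = 0;
    foreach (short sample in buffer)
    {
        double s = sample / 32768.0;   // normalise to [-1, 1)
        sumOfSquares += s * s;
    }
    return Math.Sqrt(sumOfSquares / buffer.Length);
}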
public Task Start(Action<float[]> callback)
{
    if (!_isrecording)
    {
        _isrecording = true;

        var minbuffersize = AudioRecord.GetMinBufferSize(_sampleRate, ChannelIn.Mono, Encoding.Pcm16bit);
        if (_bufferSize * 2 < minbuffersize)
        {
            throw new ArgumentException($"MinBufferSize is {minbuffersize} bytes");
        }

        _record = new AudioRecord(AudioSource.Default, _sampleRate, ChannelIn.Mono, Encoding.Pcm16bit, _bufferSize * 2);
        _thread = new Thread(ReadThread);
        _thread.Start();
    }

    _callback = callback;
    return Task.CompletedTask;
}
protected override void OnCreate(Bundle bundle)
{
    base.OnCreate(bundle);
    SetContentView(Resource.Layout.Main);

    // TextView used for displaying the result text
    tv = FindViewById<TextView>(Resource.Id.text1);

    System.Timers.Timer timer = new System.Timers.Timer();
    timer.Interval = 500;
    timer.Elapsed += OnTimedEvent;

    try
    {
        bufferSize = AudioRecord.GetMinBufferSize(sampleRate, ChannelIn.Mono, Encoding.Pcm16bit);
    }
    catch (ArgumentException e)
    {
        //android.util.Log.e("TrackingFlow", "Exception", e);
    }

    Button b = FindViewById<Button>(Resource.Id.button1);

    audio = new AudioRecord(AudioSource.Mic, sampleRate, ChannelIn.Mono, Encoding.Pcm16bit, bufferSize);
    audio.StartRecording();

    b.Click += delegate
    {
        if (!clicked)
        {
            clicked = true;
            b.Text = "STOP";
            timer.Enabled = true;
        }
        else
        {
            b.Text = "START";
            clicked = false;
            timer.Enabled = false;
        }
    };
}
public async void StartRecording()
{
    if (_recorder != null)
    {
        StopRecording();
    }

    var context = Android.App.Application.Context;
    var audioManager = (AudioManager)context.GetSystemService(Context.AudioService);
    _samplingRate = int.Parse(audioManager.GetProperty(AudioManager.PropertyOutputSampleRate));

    //_bufferSize = 4 * AudioRecord.GetMinBufferSize(_samplingRate, ChannelIn.Mono, Encoding.PcmFloat);
    _bufferSize = 4 * AudioRecord.GetMinBufferSize(_samplingRate, ChannelIn.Mono, Encoding.Pcm16bit);

    _recorder = new AudioRecord(AudioSource.Mic, _samplingRate, _channelCount, _audioEncodingType, _bufferSize);

    //uncomment for PcmFloat mode: =====================
    //_sizeInFloats = _bufferSize / sizeof(float);
    //instead of Pcm16bit: =============================
    _sizeInFloats = _bufferSize / sizeof(short);

    _data = new float[1][];
    _data[0] = new float[_sizeInFloats];    // only one channel (mono)
    _bytes = new byte[_bufferSize];
    _temp = new byte[_sizeInFloats * sizeof(float)];

    var options = new PitchOptions
    {
        SamplingRate = _samplingRate,
        FrameDuration = (double)_sizeInFloats / _samplingRate
    };

    _pitchExtractor = new PitchExtractor(options);
    _pitch = new float[1];

    _robotizer = new RobotEffect(216, 1024);

    _recorder.StartRecording();
    _isRecording = true;

    await ProcessAudioData();
}
/// <summary>
/// Starts recording at the specified sample rate.
/// </summary>
/// <param name="sampleRate">The sample rate.</param>
/// <returns>A task that resolves to true if recording started, false otherwise.</returns>
public Task<bool> Start(int sampleRate)
{
    return Task.Run(() =>
    {
        if (!SupportedSampleRates.Contains(sampleRate))
        {
            return false;
        }

        _bufferSize = AudioRecord.GetMinBufferSize(sampleRate, ChannelIn.Mono, Encoding.Pcm16bit);
        _audioSource = new AudioRecord(AudioSource.Mic, sampleRate, ChannelIn.Mono, Encoding.Pcm16bit, _bufferSize);

        StartRecording();
        return true;
    });
}
public void StartRecording()
{
    var context = Plugin.CurrentActivity.CrossCurrentActivity.Current.Activity;
    var audioManager = (AudioManager)context.GetSystemService(Context.AudioService);
    RECORDER_SAMPLERATE = Int32.Parse(audioManager.GetProperty(AudioManager.PropertyOutputSampleRate));

    if (recorder != null)
    {
        recorder.Release();
    }

    // Calculate the buffer size with the same channel and encoding settings the recorder uses.
    bufferSize = AudioRecord.GetMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
    recorder = new AudioRecord(AudioSource.Mic, RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING, bufferSize);
    recorder.StartRecording();

    isRecording = true;
    token = new CancellationTokenSource();
    Task.Run(() => WriteAudioDataToFile(), token.Token);
}