/// <summary>
/// Records raw PCM from the microphone into a fresh WAV file, running a quick
/// per-chunk analysis (min/max/avg/RMS/FFT) as data arrives, then raises
/// OnAudioReadyForUpload with the finished file path.
/// </summary>
private async Task RecordAudioAsync()
{
    wavPath = Path.Combine(audioDir, Guid.NewGuid().ToString() + "_audio.wav");
    byte[] audioBuffer = new byte[8000];
    audioRecord = new AudioRecord(
        AudioSource.Mic,    // Hardware source of recording.
        sampleRate,         // Frequency
        channelIn,          // Mono or stereo
        encoding,           // Audio encoding
        audioBuffer.Length  // Length of the audio clip.
    );
    audioRecord.StartRecording();
    int totalAudioLen = 0;
    isRecording = true;
    using (System.IO.Stream outputStream = System.IO.File.Open(wavPath, FileMode.Create))
    using (BinaryWriter bWriter = new BinaryWriter(outputStream))
    {
        // Init a header with no length - it will be added later.
        WriteWaveFileHeader(bWriter, maxAudioFreamesLength);
        // Keep reading the buffer while there is audio input.
        while (isRecording && totalAudioLen <= maxAudioFreamesLength)
        {
            int bytesRead = await audioRecord.ReadAsync(audioBuffer, 0, audioBuffer.Length);
            if (bytesRead <= 0)
            {
                // AudioRecord.read returns negative error codes (e.g. ERROR_INVALID_OPERATION);
                // stop rather than counting errors as audio or writing garbage.
                break;
            }
            totalAudioLen += bytesRead;
            // BUG FIX: write only the bytes actually read. The original wrote the whole
            // 8000-byte buffer on every pass, padding the file with stale data whenever
            // a read returned fewer bytes.
            bWriter.Write(audioBuffer, 0, bytesRead);
            // Per-chunk analysis. NOTE(review): results are currently discarded —
            // presumably kept as hooks for future use; confirm before deleting.
            var intbuffer = ByteArrayTo16Bit(audioBuffer);
            var min = intbuffer.Min();
            var max = intbuffer.Max();
            var avg = intbuffer.Average(x => (double)x);
            var sos = intbuffer.Select(x => (long)x)
                .Aggregate((prev, next) => prev + next * next);
            var rms = Math.Sqrt((double)1 / intbuffer.Length * sos);
            var fft = FFT(intbuffer);
        }
        isRecording = false;
        // The using blocks dispose both writer and stream; explicit Close() calls removed.
        // NOTE(review): the header length is never patched in after recording — TODO confirm
        // WriteWaveFileHeader's maxAudioFreamesLength placeholder is acceptable downstream.
    }
    audioRecord.Stop();
    audioRecord.Dispose();
    // This file is now fully written and can be sent to server for analysis.
    OnAudioReadyForUpload(new AudioUploadEventArgs(DateTime.Now.ToUniversalTime(), wavPath));
}
/// <summary>
/// Starts the audio recorder (no-op with a warning if already running) and
/// spins up the background read loop thread.
/// </summary>
public void start()
{
    if (running)
    {
        Logging.warn("Audio recorder is already running.");
        return;
    }
    stopRecording = false;
    running = true;
    // BUG FIX: size the recording buffer with AudioRecord.GetMinBufferSize
    // (input path, ChannelIn) instead of AudioTrack.GetMinBufferSize (output
    // path, ChannelOut). The Android docs require the AudioRecord variant for
    // sizing an AudioRecord's internal buffer.
    bufferSize = AudioRecord.GetMinBufferSize(44100, ChannelIn.Mono, Encoding.Pcm16bit) * 10;
    audioRecorder = new AudioRecord(
        AudioSource.Mic,    // Hardware source of recording.
        44100,              // Frequency
        ChannelIn.Mono,     // Mono or stereo
        Encoding.Pcm16bit,  // Audio encoding
        bufferSize          // Length of the audio clip.
    );
    audioRecorder.StartRecording();
    // Drain the recorder off the caller's thread.
    Thread recordingThread = new Thread(readLoop);
    recordingThread.Start();
}
/// <summary>
/// Begins a recording session and returns a task that completes when the
/// recording finishes, times out, or fails to initialize.
/// </summary>
/// <param name="options">Recording options; defaults to AudioRecordOptions.Empty.</param>
public Task<AudioRecordResult> Record(AudioRecordOptions options = null)
{
    _options = options ?? AudioRecordOptions.Empty;
    // RunContinuationsAsynchronously prevents the recording/timeout worker that
    // completes the TCS from synchronously running (and blocking on) the
    // awaiter's continuation — the recommended default for TCS-based APIs.
    _tcs = new TaskCompletionSource<AudioRecordResult>(TaskCreationOptions.RunContinuationsAsynchronously);
    _bufferSize = AudioRecord.GetMinBufferSize(_options.SampleRate, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING) * 3;
    _recorder = new AudioRecord(AudioSource.VoiceRecognition, _options.SampleRate, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING, _bufferSize);
    if (_recorder.State == State.Initialized)
    {
        _recorder.StartRecording();
    }
    else
    {
        return Task.FromResult(new AudioRecordResult($"AudioRecord initialisation returned unexpected state ({_recorder.State})"));
    }
    _isRecording = true;
    _timeoutToken = new CancellationTokenSource();
    // Fire-and-forget workers: one pumps audio, the other enforces the timeout.
    Task.Run(() => RecordAudio());
    Task.Run(() => Timeout());
    return _tcs.Task;
}
/// <summary>
/// Creates the AudioRecord and, only if it initialized successfully, starts
/// capturing and launches the file-writer thread.
/// </summary>
private void StartRecording()
{
    WvlLogger.Log(LogType.TraceAll, "StartRecording()");
    recorder = new AudioRecord(AudioSource.Mic,
        RECORDER_SAMPLERATE, (ChannelIn)RECORDER_CHANNELS, (Android.Media.Encoding)RECORDER_AUDIO_ENCODING, bufferSize);
    // 1 == State.Initialized.
    // BUG FIX: the original set isRecording and started the writer thread even
    // when initialization failed, leaving a thread reading from a dead recorder.
    if ((int)recorder.State == 1)
    {
        recorder.StartRecording();
        isRecording = true;
        recordingThread = new System.Threading.Thread(new ThreadStart(WriteAudioDataToFile));
        recordingThread.Start();
    }
}
// Starts capture and performs one blocking read into the shared buffer, then
// returns that buffer. The actual number of bytes read is stored in 'probki';
// NOTE(review): the full audioData array is returned even when Read() filled
// fewer bytes — callers should consult probki before consuming the tail.
public byte[] StartRecording() { record.StartRecording(); probki = record.Read(audioData, 0, buffSizeInBytes); return(audioData); }
/// <summary>
/// Resets the recording flags, allocates the capture buffer, starts the
/// recorder, and drains it on a background thread.
/// </summary>
protected void StartRecorder()
{
    endRecording = false;
    isRecording = true;
    audioBuffer = new Byte[100000];
    audioRecord = new AudioRecord(
        AudioSource.Mic,                  // hardware mic
        11025,                            // sample rate (Hz)
        ChannelIn.Mono,                   // single channel
        Android.Media.Encoding.Pcm16bit,  // 16-bit PCM
        audioBuffer.Length);              // recorder buffer length
    audioRecord.StartRecording();
    // Read on a worker thread so the UI thread is never blocked.
    var readerThread = new Thread(ReadAudio);
    readerThread.Start();
}
/// <summary>
/// Allocates capture buffers, creates a voice-communication AudioRecord, starts
/// it, and attaches platform echo-cancellation / noise-suppression effects when
/// the device supports them.
/// </summary>
private void initRecorder()
{
    shortsBuffer = new short[bufferSize];
    buffer = new byte[bufferSize];
    audioRecorder = new AudioRecord(
        AudioSource.VoiceCommunication,  // mic source intended for voice calls
        sampleRate,
        ChannelIn.Mono,
        Encoding.Pcm16bit,
        bufferSize * 5);                 // over-allocate 5x to absorb read latency
    audioRecorder.StartRecording();
    // Effects bind to the recorder's audio session.
    var sessionId = audioRecorder.AudioSessionId;
    if (AcousticEchoCanceler.IsAvailable)
    {
        echoCanceller = AcousticEchoCanceler.Create(sessionId);
    }
    if (NoiseSuppressor.IsAvailable)
    {
        noiseSuppressor = NoiseSuppressor.Create(sessionId);
    }
}
/// <summary>
/// Lazily creates the recorder, starts it, and pumps OnNext() to observers for
/// as long as the recorder reports it is recording.
/// </summary>
/// <exception cref="InvalidOperationException">The device cannot create an AudioRecord.</exception>
public void StartRecord()
{
    if (audioRecord == null)
    {
        audioRecord = new AudioRecord(AudioSource.Mic, 44100, ChannelIn.Mono, Encoding.Pcm16bit, 2048 * sizeof(byte));
        if (audioRecord.State != State.Initialized)
        {
            throw new InvalidOperationException("This device doesn't support AudioRecord");
        }
    }
    audioRecord.StartRecording();
    while (audioRecord.RecordingState == RecordState.Recording)
    {
        // BUG FIX: the original wrapped this call in
        // `catch (Exception ex) { throw ex; }`, which destroys the stack trace
        // while changing nothing for callers. Letting the exception propagate
        // unchanged is equivalent and preserves diagnostics.
        OnNext();
    }
}
/// <summary>
/// Opens the target WAV file, creates the recorder, flags recording as active,
/// and hands the open stream to SaveBinaryAudio for the write loop.
/// </summary>
public void StartRecording()
{
    System.IO.Stream outputStream = System.IO.File.Open(wavPath, FileMode.Create);
    bWriter = new BinaryWriter(outputStream);
    // 44100 * 5 BYTES of buffer. The old comment claimed "10 sek (max time)",
    // which contradicted the code; with 16-bit PCM at 44.1 kHz this holds
    // roughly 2.5 s of audio — NOTE(review): confirm the intended max duration.
    audioBuffer = new byte[44100 * 5];
    audRecorder = new AudioRecord(AudioSource.Mic, 44100, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit, audioBuffer.Length);
    IsRecording = true;
    audRecorder.StartRecording();
    // SaveBinaryAudio takes ownership of outputStream (and its writer).
    SaveBinaryAudio(outputStream);
}
/*********************************************************************************
 *
 *********************************************************************************/
public void ButtonRec_Click(object sender, EventArgs e)
{
    // Query the platform for the smallest workable recording buffer.
    Int32 bufferSize = AudioRecord.GetMinBufferSize(mSamplingRate, ChannelIn.Mono, mFormat);
    System.Diagnostics.Debug.WriteLine("AudioRecord : GetMinBufferSize={0}", bufferSize);
    RecordBuffer.Instance.Frames = mFrameSize;
    mAudioRecord = new AudioRecord(
        AudioSource.Mic,
        mSamplingRate,
        ChannelIn.Mono,
        mFormat,
        bufferSize);
    // How many samples to process at a time (= the number of samples in one frame).
    mAudioRecord.SetPositionNotificationPeriod(RecordBuffer.Instance.Frames);
    // Register the callback invoked once per notification period.
    mAudioRecord.SetRecordPositionUpdateListener(new OnRecordPositionUpdateListener());
    mAudioRecord.StartRecording();
    // NOTE(review): this 1-byte dummy read presumably primes position
    // notifications after StartRecording — confirm it is still required.
    Byte[] dummy = new Byte[1];
    mAudioRecord.Read(dummy, 0, dummy.Length);
}
/// <summary>
/// Starts the recorder and kicks off the background buffer pump, returning a
/// completed task once both are under way.
/// </summary>
public Task StartRecordingAsync()
{
    audioRecorder.StartRecording();
    // Off-line the buffer handling so that we do not block the UI thread.
    // NOTE(review): HandleBufferAsync() is fire-and-forget — its faults are
    // unobserved here; confirm it handles its own errors.
    HandleBufferAsync();
    // BUG FIX: the original returned `new Task(() => { })` — a cold task that is
    // never Start()ed, so any caller awaiting this method would hang forever.
    return Task.CompletedTask;
}
/// <summary>
/// Initializes all recording parameters and starts capturing immediately.
/// </summary>
public void RecorderInit()
{
    // Ask the platform for the minimum buffer an AudioRecord needs at these settings.
    _bufferSizeInBytes = AudioRecord.GetMinBufferSize(
        RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
    // Create the recorder against the microphone and begin recording.
    _audioRecorder = new AudioRecord(
        AudioSource.Mic,
        RECORDER_SAMPLERATE,
        RECORDER_CHANNELS,
        RECORDER_AUDIO_ENCODING,
        _bufferSizeInBytes);
    _audioRecorder.StartRecording();
}
// Starts the audio source, then pumps Record() on a worker task. The loop shape
// guarantees at least one Record() call even if IsActive flips false immediately
// (matching the original do/while semantics).
void StartRecording()
{
    _audioSource.StartRecording();
    Task.Run(async () =>
    {
        while (true)
        {
            await Record();
            if (!IsActive)
            {
                break;
            }
        }
    });
}
// Worker loop: drains the recorder while the flag is set, forwarding each chunk
// to subscribers as a freshly-allocated byte array, then stops the recorder.
private void ReadThread()
{
    _record.StartRecording();
    while (_isrecording)
    {
        var samplesRead = _record.Read(_buffer, 0, _buffer.Length);
        // NOTE(review): size*2 assumes _buffer holds 16-bit samples (2 bytes
        // each) — confirm _buffer's element type.
        var chunk = new byte[samplesRead * 2];
        Buffer.BlockCopy(_buffer, 0, chunk, 0, chunk.Length);
        _readSubject.OnNext(chunk);
    }
    _record.Stop();
}
/// <summary>
/// Arms the capture flag, builds a recorder from the app-wide configuration,
/// starts it, and streams everything read from the mic into the named file.
/// </summary>
/// <param name="filename">Destination file for the captured audio.</param>
public static async Task Start(string filename)
{
    _endRecording = false;
    // Shared PCM scratch buffer (~100 KB).
    _audioBuffer = new Byte[100000];
    _audioRecord = new AudioRecord(
        AudioSource.Mic,
        ConfigService.AudioFrequency,   // sample rate from config
        ChannelIn.Mono,
        ConfigService.AudioBitrate,     // encoding from config
        _audioBuffer.Length);
    _audioRecord.StartRecording();
    await ReadAudioAsync(filename);
}
/// <summary>
/// Flags a live recording session, allocates a ~1 MB PCM buffer, starts the
/// recorder, and awaits the read loop until it finishes.
/// </summary>
protected async Task StartRecorderAsync()
{
    endRecording = false;
    isRecording = true;
    audioBuffer = new Byte[1000000];
    audioRecord = new AudioRecord(
        AudioSource.Mic,                  // hardware mic
        11025,                            // sample rate (Hz)
        ChannelIn.Mono,                   // single channel
        Android.Media.Encoding.Pcm16bit,  // 16-bit PCM
        audioBuffer.Length);
    audioRecord.StartRecording();
    await ReadAudioSync();
}
/// <summary>
/// Recreates the output file, records from the mic in a loop while _isRecording
/// is set, and writes each captured chunk to disk. On a read/write failure it
/// logs, notifies via MessagingCenter, and stops.
/// </summary>
private void RecordAudio()
{
    // Start from a clean file every session.
    if (File.Exists(wavPath))
    {
        File.Delete(wavPath);
    }
    System.IO.Stream outputStream = System.IO.File.Open(wavPath, FileMode.CreateNew);
    BinaryWriter bWriter = new BinaryWriter(outputStream);
    int bufferSize = AudioRecord.GetMinBufferSize(11025, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit);
    audioBuffer = new byte[bufferSize];
    audRecorder = new AudioRecord(
        AudioSource.Mic,                  // hardware source of recording
        11025,                            // frequency
        ChannelIn.Mono,                   // mono
        Android.Media.Encoding.Pcm16bit,  // audio encoding
        bufferSize);                      // recorder buffer length
    audRecorder.StartRecording();
    while (_isRecording == true)
    {
        try
        {
            // Keep reading the buffer while there is audio input.
            audioData = audRecorder.Read(audioBuffer, 0, audioBuffer.Length);
            // BUG FIX: write only the bytes actually read; the original wrote the
            // entire buffer every pass, padding the file with stale data on short
            // reads and writing garbage when Read() returned an error code.
            if (audioData > 0)
            {
                bWriter.Write(audioBuffer, 0, audioData);
            }
        }
        catch (System.Exception ex)
        {
            System.Console.Out.WriteLine(ex.Message);
            MessagingCenter.Send<ISoundRecorder, bool>(this, "finishReplaying", true);
            break;
        }
    }
    outputStream.Close();
    bWriter.Close();
}
/// <summary>
/// Releases any previous recorder, creates a new one at the device's native
/// output sample rate, and starts writing captured audio to file on a worker task.
/// </summary>
public void StartRecording()
{
    var context = Plugin.CurrentActivity.CrossCurrentActivity.Current.Activity;
    var audioManager = (AudioManager)context.GetSystemService(Context.AudioService);
    _recorderSamplerate = int.Parse(audioManager.GetProperty(AudioManager.PropertyOutputSampleRate));
    _recorder?.Release();
    // BUG FIX: size the buffer with the SAME channel/encoding configuration the
    // recorder is constructed with. The original queried ChannelIn.Stereo +
    // Encoding.Pcm16bit while constructing with RecorderChannels /
    // RecorderAudioEncoding, which can mis-size the buffer when they differ.
    _bufferSize = AudioRecord.GetMinBufferSize(_recorderSamplerate, RecorderChannels, RecorderAudioEncoding) * 3;
    _recorder = new AudioRecord(AudioSource.Mic, _recorderSamplerate, RecorderChannels, RecorderAudioEncoding, _bufferSize);
    _recorder.StartRecording();
    _isRecording = true;
    _token = new CancellationTokenSource();
    // NOTE(review): the token is passed to Task.Run but WriteAudioDataToFile
    // must observe it itself for cancellation to stop a running loop.
    Task.Run(() => WriteAudioDataToFile(), _token.Token);
}
/// <summary>
/// Starts a recording session unless recording is impossible or already in
/// progress; the returned task represents the background file-writing loop.
/// </summary>
public Task RecordAsync()
{
    // Guard clause: nothing to do if we cannot record or are already recording.
    if (CanRecordAudio == false || audioRecord?.RecordingState == RecordState.Recording)
    {
        return Task.CompletedTask;
    }
    // Match the recorder's sample rate to the device's native output rate.
    var audioManager = (AudioManager)Application.Context.GetSystemService(Context.AudioService);
    var micSampleRate = Int32.Parse(audioManager.GetProperty(AudioManager.PropertyOutputSampleRate));
    audioRecord = GetAudioRecord(micSampleRate);
    audioRecord.StartRecording();
    // Drain the recorder to disk on a worker thread.
    return Task.Run(() => WriteAudioDataToFile());
}
// Worker loop: converts each captured 16-bit chunk to floats normalized by
// 32767 and hands the shared float buffer to the callback, then stops and
// tears down the recorder.
private void ReadThread()
{
    _record.StartRecording();
    while (_isrecording)
    {
        var read = _record.Read(_tmpBuffer, 0, _tmpBuffer.Length);
        // NOTE(review): the whole buffer is normalized even when Read() returned
        // fewer samples ('read') — confirm trailing stale samples are acceptable.
        for (var i = 0; i < _tmpBuffer.Length; i++)
        {
            _buffer[i] = _tmpBuffer[i] / 32767.0f;
        }
        _callback?.Invoke(_buffer);
    }
    _record.Stop();
    _record.Release();
    _record.Dispose();
}