private void ReadAudioAsync()
{
    // Accumulate every chunk the recorder produces. The original overwrote
    // audioBuffer on each iteration and wrote it to disk only once after the
    // loop, so the saved WAV contained just the FINAL buffer of audio.
    using (var pcm = new MemoryStream())
    {
        while (isRecording)
        {
            try
            {
                // Keep reading the buffer while there is audio input.
                // Read() returns the byte count actually delivered, or a
                // negative AudioRecord error code.
                int numBytes = audRecorder.Read(audioBuffer, 0, audioBuffer.Length);
                if (numBytes > 0)
                {
                    pcm.Write(audioBuffer, 0, numBytes);
                }
            }
            catch (Exception ex)
            {
                Console.Out.WriteLine(ex.Message);
                break;
            }
        }

        byte[] realData = pcm.ToArray();
        Console.WriteLine(realData.Length);

        // Header parameters (1 channel, 44100 Hz) must match the recorder's
        // configuration — presumably set where audRecorder is constructed.
        using (FileStream fs = new FileStream(path, FileMode.Create))
        {
            WaveHeaderWriter.WriteHeader(fs, realData.Length, 1, 44100);
            fs.Write(realData, 0, realData.Length);
            // explicit fs.Close() removed: the using block disposes the stream
        }
    }
}
void readLoop()
{
    // Pulls PCM chunks from the recorder until stopRecording is set, then
    // tears the recorder down and clears the running flag.
    byte[] buffer = new byte[bufferSize];
    while (!stopRecording)
    {
        try
        {
            int num_bytes = audioRecorder.Read(buffer, 0, buffer.Length);
            // Read() returns a NEGATIVE AudioRecord error code on failure;
            // the original fed it straight into Array.Copy, which threw and
            // was only caught as a generic exception. Check explicitly.
            if (num_bytes > 0)
            {
                byte[] data_to_send = new byte[num_bytes];
                Array.Copy(buffer, data_to_send, num_bytes);
                // NOTE(review): Task.Run gives no ordering guarantee, so
                // chunks may reach OnSoundDataReceived out of order —
                // confirm the consumer tolerates that.
                Task.Run(() => { OnSoundDataReceived(data_to_send); });
            }
            else if (num_bytes < 0)
            {
                Logging.error("Exception occured while recording audio stream: read error code " + num_bytes);
                break;
            }
        }
        catch (Exception e)
        {
            Logging.error("Exception occured while recording audio stream: " + e);
            break;
        }
        Thread.Sleep(10);
    }
    audioRecorder.Stop();
    audioRecorder.Release();
    audioRecorder.Dispose();
    audioRecorder = null;
    running = false;
}
private void recordLoop()
{
    // Dedicated capture loop: reads PCM (shorts for Opus, bytes otherwise)
    // and hands each chunk to encode() until `running` goes false.
    Android.OS.Process.SetThreadPriority(Android.OS.ThreadPriority.UrgentAudio);
    while (running)
    {
        int num_bytes = 0;
        try
        {
            if (audioRecorder != null)
            {
                if (audioEncoder is OpusEncoder)
                {
                    // Synchronous Read replaces ReadAsync(...).Result: blocking
                    // on a Task risks thread-pool starvation and wraps failures
                    // in AggregateException instead of the real exception.
                    num_bytes = audioRecorder.Read(shortsBuffer, 0, shortsBuffer.Length);
                    // Negative values are AudioRecord error codes — do not
                    // feed them to the encoder as sample counts.
                    if (num_bytes > 0)
                    {
                        encode(num_bytes, true);
                    }
                }
                else
                {
                    num_bytes = audioRecorder.Read(buffer, 0, buffer.Length);
                    if (num_bytes > 0)
                    {
                        encode(num_bytes, false);
                    }
                }
            }
            else
            {
                // Recorder disappeared out from under us; shut down cleanly.
                stop();
            }
        }
        catch (Exception e)
        {
            Logging.error("Exception occured while recording audio stream: " + e);
        }
        Thread.Sleep(1);
    }
    recordThread = null;
}
void ReadAudio()
{
    // Streams raw PCM from the recorder into filePath until endRecording is
    // set, then stops and releases the recorder.
    using (var fileStream = new FileStream(filePath, System.IO.FileMode.Create, System.IO.FileAccess.Write))
    {
        while (true)
        {
            if (endRecording)
            {
                endRecording = false;
                break;
            }
            try
            {
                // Keep reading the buffer while there is audio input.
                int numBytes = audioRecord.Read(audioBuffer, 0, audioBuffer.Length);
                // Read() returns a negative error code on failure; passing
                // that to Write() would throw. Persist only real data.
                if (numBytes > 0)
                {
                    fileStream.Write(audioBuffer, 0, numBytes);
                }
            }
            catch (Exception ex)
            {
                Console.Out.WriteLine(ex.Message);
                break;
            }
        }
        // explicit Close() removed: the using block disposes the stream
    }
    audioRecord.Stop();
    audioRecord.Release();
    isRecording = false;
}
private async Task SaveBinaryAudio(Stream outputStream)
{
    // Total PCM bytes actually captured. The original reported a single
    // buffer's length (audioBuffer.Length) as the whole recording's size,
    // producing a wrong WAV header for any recording longer than one buffer.
    long totalAudioLen = 0;

    while (IsRecording == true)
    {
        try
        {
            // audioData holds the byte count returned by Read(); the original
            // wrote the entire buffer even when fewer bytes (or a negative
            // error code) came back, persisting stale data.
            audioData = audRecorder.Read(audioBuffer, 0, audioBuffer.Length);
            if (audioData > 0)
            {
                bWriter.Write(audioBuffer, 0, audioData);
                totalAudioLen += audioData;
            }
        }
        catch (Exception ex)
        {
            System.Console.Out.WriteLine(ex.Message);
        }
    }

    long longSampleRate = 44100;
    int channels = 2;
    // 16-bit samples: bytes/sec = bits * rate * channels / 8.
    long byteRate = 16 * longSampleRate * channels / 8;
    long totalDataLen = totalAudioLen + 36;
    // NOTE(review): the header is written AFTER the PCM data, so it lands at
    // the tail of the stream rather than at offset 0 where a RIFF/WAVE header
    // belongs — confirm a later step relocates or rewrites it.
    WriteWaveFileHeader(bWriter, totalAudioLen, totalDataLen, longSampleRate, channels, byteRate);
    outputStream.Close();
    bWriter.Close();
}
public Stream EndCaptureAudio()
{
    // Nothing to finish if capture never started (or was already ended).
    if (_recorder == null)
    {
        return null;
    }

    // Drain whatever is still sitting in the recorder before stopping it.
    var bytesRead = _recorder.Read(_audioBuffer, 0, _audioBuffer.Length);
    var skip = TrimAudioZeros(bytesRead);
    _recorder.Stop();

    // Detach the capture stream from the field before finalizing it.
    var stream = _audioCaptureStream;
    _audioCaptureStream = null;

    // Append the drained tail, minus the leading zero run that was trimmed.
    if (bytesRead > skip)
    {
        stream.Write(_audioBuffer, skip, bytesRead - skip);
    }

    _recorder.Release();
    _recorder.Dispose();
    _recorder = null;

    // Patch the WAV header now that the final length is known, then rewind
    // so the caller can read the stream from the start.
    MediaHelper.UpdateWavHeader(stream);
    stream.Seek(0, SeekOrigin.Begin);
    return stream;
}
void WriteAudioDataToFile()
{
    // Streams raw PCM into a temp file for as long as the recorder reports
    // RecordState.Recording.
    var data = new byte[bufferSize];
    rawFilePath = GetTempFileName();
    FileOutputStream outputStream = null;
    try
    {
        outputStream = new FileOutputStream(rawFilePath);
    }
    catch (Exception ex)
    {
        throw new FileLoadException($"unable to create a new file: {ex.Message}");
    }
    if (outputStream != null)
    {
        try
        {
            while (audioRecord.RecordingState == RecordState.Recording)
            {
                // Read() reports how many bytes it produced (negative on
                // error); the original wrote the entire buffer regardless,
                // persisting stale bytes from earlier reads.
                int read = audioRecord.Read(data, 0, bufferSize);
                if (read > 0)
                {
                    outputStream.Write(data, 0, read);
                }
            }
        }
        finally
        {
            // Close even if a write throws, so the file descriptor and
            // partial file are not leaked.
            outputStream.Close();
        }
    }
}
public byte[] StartRecording()
{
    // Begin capture, then block on a single read that fills audioData.
    record.StartRecording();

    // probki keeps whatever count Read() reported for this one pass.
    probki = record.Read(audioData, 0, buffSizeInBytes);

    return audioData;
}
public void Run()
{
    // Pulls raw PCM from mAudioRecord and pushes it into the MediaCodec
    // encoder's input buffers until the thread is interrupted.
    int len = 0, bufferIndex = 0;
    try
    {
        Java.Nio.ByteBuffer[] inputBuffers = mMediaCodec.GetInputBuffers();
        // Twice the minimum AudioRecord buffer, as the cap for one read.
        int bufferSize = AudioRecord.GetMinBufferSize(mQuality.samplingRate, ChannelIn.Mono, Encoding.Pcm16bit) * 2;
        while (!Thread.Interrupted())
        {
            // Wait up to 10,000 us for a free codec input buffer
            // (a negative index means none became available).
            bufferIndex = mMediaCodec.DequeueInputBuffer(10000);
            if (bufferIndex >= 0)
            {
                inputBuffers[bufferIndex].Clear();
                // Read PCM directly into the codec's input ByteBuffer.
                len = mAudioRecord.Read(inputBuffers[bufferIndex], bufferSize);
                if ((len == (int)RecordStatus.ErrorInvalidOperation) || (len == (int)RecordStatus.ErrorBadValue))
                {
                    Log.Error(TAG, "An error occured with the AudioRecord API !");
                }
                else
                {
                    //Log.v(TAG,"Pushing raw audio to the decoder: len="+len+" bs: "+inputBuffers[bufferIndex].capacity());
                    // Presentation timestamp is wall-clock microseconds
                    // (NanoTime / 1000); flags = 0 (regular frame).
                    mMediaCodec.QueueInputBuffer(bufferIndex, 0, len, Java.Lang.JavaSystem.NanoTime() / 1000, 0);
                }
            }
        }
    }
    catch (RuntimeException e)
    {
        e.PrintStackTrace();
    }
}
/*********************************************************************************
 *
 *********************************************************************************/
// Record button handler: configures an AudioRecord on the microphone,
// registers a periodic position-update callback, and starts recording.
public void ButtonRec_Click(object sender, EventArgs e)
{
    Int32 bufferSize = AudioRecord.GetMinBufferSize(mSamplingRate, ChannelIn.Mono, mFormat);
    System.Diagnostics.Debug.WriteLine("AudioRecord : GetMinBufferSize={0}", bufferSize);
    RecordBuffer.Instance.Frames = mFrameSize;
    mAudioRecord = new AudioRecord(
        //AudioSource.Default,
        //AudioSource.Camcorder,
        AudioSource.Mic,
        //AudioSource.VoiceCommunication,
        //AudioSource.VoiceRecognition,
        //AudioSource.VoiceUplink,
        mSamplingRate,
        ChannelIn.Mono,
        mFormat,
        bufferSize);
    // How many samples to process per notification (= one frame of data).
    mAudioRecord.SetPositionNotificationPeriod(RecordBuffer.Instance.Frames);
    // Register the listener whose callback drains the recorder each period.
    mAudioRecord.SetRecordPositionUpdateListener(new OnRecordPositionUpdateListener());
    mAudioRecord.StartRecording();
    Byte[] dummy = new Byte[1];
    // NOTE(review): this 1-byte read appears intended to kick the recorder so
    // position notifications start firing — confirm it is still required.
    mAudioRecord.Read(dummy, 0, dummy.Length);
}
/// <summary>
/// Record from the microphone and broadcast the buffer.
/// </summary>
async Task Record()
{
    byte[] data = new byte[bufferSize];
    int readFailureCount = 0;
    int readResult = 0;

    Debug.WriteLine("AudioStream.Record(): Starting background loop to read audio stream");

    while (Active)
    {
        try
        {
            // not sure if this is even a good idea, but we'll try to allow a single bad read, and past that shut it down
            if (readFailureCount > 1)
            {
                Debug.WriteLine("AudioStream.Record(): Multiple read failures detected, stopping stream");
                await Stop();
                break;
            }

            readResult = audioSource.Read(data, 0, bufferSize); // this can block if there are no bytes to read

            // readResult should == the # bytes read, except a few special cases
            if (readResult > 0)
            {
                readFailureCount = 0;

                // Broadcast a copy sized to the bytes actually read. The
                // original handed out `data` itself, which (a) is overwritten
                // by the next Read() while handlers may still hold it, and
                // (b) included stale bytes beyond readResult on short reads.
                var chunk = new byte[readResult];
                Buffer.BlockCopy(data, 0, chunk, 0, readResult);
                OnBroadcast?.Invoke(this, chunk);
            }
            else
            {
                switch (readResult)
                {
                    case (int)TrackStatus.ErrorInvalidOperation:
                    case (int)TrackStatus.ErrorBadValue:
                    case (int)TrackStatus.ErrorDeadObject:
                        // Unrecoverable AudioRecord error: stop the stream.
                        Debug.WriteLine("AudioStream.Record(): readResult returned error code: {0}", readResult);
                        await Stop();
                        break;
                    //case (int)TrackStatus.Error:
                    default:
                        readFailureCount++;
                        Debug.WriteLine("AudioStream.Record(): readResult returned error code: {0}", readResult);
                        break;
                }
            }
        }
        catch (Exception ex)
        {
            readFailureCount++;
            Debug.WriteLine("Error in Android AudioStream.Record(): {0}", ex.Message);
            OnException?.Invoke(this, ex);
        }
    }
}
void RecordAudio()
{
    // Records 60 seconds of mono 44.1 kHz PCM16 audio, feeding each sample
    // into the FFT aggregator.
    sA = new SampleAggregator(1024);
    sA.FftCalculated += SA_FftCalculated;

    byte[] audioBuffer = new byte[2048];
    var audRecorder = new AudioRecord(
        // Hardware source of recording.
        AudioSource.Mic,
        // Frequency
        44100,
        // Mono or stereo
        ChannelIn.Mono,
        // Audio encoding
        Android.Media.Encoding.Pcm16bit,
        // Length of the audio clip.
        audioBuffer.Length
    );
    var startTime = DateTime.Now;
    var endTime = startTime.AddSeconds(60);
    audRecorder.StartRecording();
    while (DateTime.Now < endTime)
    {
        try
        {
            // Process only the bytes actually captured; the original scanned
            // the whole buffer, including stale bytes from earlier reads.
            int numBytes = audRecorder.Read(audioBuffer, 0, audioBuffer.Length);
            if (numBytes <= 0)
            {
                continue;
            }

            int max = 0;
            for (int i = 0; i + 1 < numBytes; i += 2)
            {
                // Pcm16bit samples are little-endian SIGNED 16-bit (low byte
                // first). The original combined the pair big-endian and
                // unsigned (buf[i]*256 + buf[i+1]), yielding wrong values.
                short sample = (short)(audioBuffer[i] | (audioBuffer[i + 1] << 8));
                sA.Add(sample);
                int magnitude = Math.Abs((int)sample);
                if (max < magnitude)
                {
                    max = magnitude;
                }
            }
            //int level = max - 62719;
            //RunOnUiThread(() => seekBar2.Progress = (int) level);
            //Console.WriteLine(level*23);
            //SetBulbColour(0,level*23);
        }
        catch (Exception ex)
        {
            Console.Out.WriteLine(ex.Message);
            break;
        }
    }
    audRecorder.Stop();
    // Free native recorder resources (the original only stopped it).
    audRecorder.Release();
    audRecorder.Dispose();
}
public void OnPeriodicNotification(AudioRecord recorder)
{
    // Pull one frame's worth of data and fan it out to both queues.
    var frame = new Byte[RecordBuffer.Instance.Frames];
    recorder.Read(frame, 0, frame.Length);

    RecordBuffer.Instance.Enqueue(frame);
    WaveBuffer.Instance.Enqueue(frame);
}
public void OnMarkerReached(AudioRecord recorder)
{
    // Same drain-and-fan-out as the periodic notification: grab one frame
    // and enqueue it for both consumers.
    var frame = new Byte[RecordBuffer.Instance.Frames];
    recorder.Read(frame, 0, frame.Length);

    RecordBuffer.Instance.Enqueue(frame);
    WaveBuffer.Instance.Enqueue(frame);
}
public short[] GetBlock(int count)
{
    // Returns `count` samples; when the recorder is stopped the array is
    // left zero-filled (silence).
    var block = new short[count];
    if (_r.RecordingState != RecordState.Stopped)
    {
        _r.Read(block, 0, count);
    }
    return block;
}
private async void Record()
{
    // Background loop that reads float PCM and broadcasts each buffer.
    await Task.Run(() =>
    {
        // One buffer of bufferSize floats per read. The original allocated
        // bufferSize * sizeof(float) ELEMENTS (4x too many floats) and then
        // asked Read() for only sizeof(float) == 4 floats per call, so all
        // but the first 4 entries stayed zero.
        float[] buffer = new float[bufferSize];
        while (_recording)
        {
            // Final argument 0 = blocking read; fills up to bufferSize floats.
            audioSource.Read(buffer, 0, bufferSize, 0);
            OnBroadcast(this, new EventArgs<float[]>(buffer));
        }
    });
}
private void WriteAudioDataToFile()
{
    // Streams raw PCM into a temp file while isRecording stays true.
    WvlLogger.Log(LogType.TraceAll, "WriteAudioDataToFile()");
    byte[] data = new byte[bufferSize];
    string filename = GetTempFilename();
    FileOutputStream fos = null;
    try
    {
        fos = new FileOutputStream(filename);
    }
    catch (FileNotFoundException e)
    {
        WvlLogger.Log(LogType.TraceExceptions, e.ToString());
    }
    int read = 0;
    if (null != fos)
    {
        while (isRecording)
        {
            read = recorder.Read(data, 0, bufferSize);
            //if (AudioRecord.ERROR_INVALID_OPERATION != read)
            if ((int)RecordStatus.ErrorInvalidOperation != read)
            {
                // Persist only the bytes the recorder actually produced; the
                // original wrote the whole buffer, including stale bytes from
                // earlier reads, and would also write on other error codes.
                if (read > 0)
                {
                    try
                    {
                        fos.Write(data, 0, read);
                    }
                    catch (IOException e)
                    {
                        WvlLogger.Log(LogType.TraceExceptions, "WriteAudioDataToFile - Exception on os.Write() : " + e.ToString());
                    }
                }
            }
        }
        try
        {
            fos.Close();
        }
        catch (IOException e)
        {
            WvlLogger.Log(LogType.TraceExceptions, "WriteAudioDataToFile - Exception on os.Close() : " + e.ToString());
        }
    }
}
public void Listen()
{
    // Grabs one buffer from the recorder, estimates its fundamental
    // frequency, and raises FinishedSampling with the result.
    byte[] audioBuffer = new byte[_bufferSizeInBytes];
    // NOTE(review): the returned byte count is ignored; on a short read the
    // tail of audioBuffer stays zero — confirm the buffer is always filled.
    int numberOfReadBytes = _audioRecorder.Read(audioBuffer, 0, _bufferSizeInBytes);
    double[] x = new double[audioBuffer.Length];
    for (int i = 0; i < x.Length; i++)
    {
        // NOTE(review): each single BYTE is scaled as if it were a 16-bit
        // sample (/32768). For PCM16 input, byte pairs should normally be
        // combined into signed shorts first — verify against the recorder's
        // configured encoding before changing.
        x[i] = audioBuffer[i] / 32768.0;
    }
    double frequency = FrequencyUtils.FindFundamentalFrequency(x, RECORDER_SAMPLERATE, MinFreq, MaxFreq);
    //Fire event for passing back the recorded value.
    FinishedSampling(this, new FinishedSampalingEventArgs()
    {
        // NOTE(review): the *2 factor looks like a compensation for the
        // per-byte sample handling above — confirm before touching either.
        Frequency = new Hz((float)frequency * 2),
        //Volume = max_magnitude
    });
}
public override int Read(byte[] buffer, int offset, int count)
{
    // Pull audio from the recorder, run voice-activity detection over the
    // bytes delivered, and report the count. Error/empty reads map to 0.
    var got = audioRecord.Read(buffer, offset, count);
    if (got <= 0)
    {
        return 0;
    }

    vad.ProcessBufferEx(buffer, got);
    return got;
}
private void ReadThread()
{
    // Capture loop: converts each recorder read into a byte chunk pushed
    // through _readSubject.
    _record.StartRecording();
    while (_isrecording)
    {
        var size = _record.Read(_buffer, 0, _buffer.Length);
        // size is the element count read, or a NEGATIVE AudioRecord error
        // code; the original allocated new byte[size * 2] unguarded, which
        // throws on error codes. Skip non-positive reads.
        if (size > 0)
        {
            // size * 2 mirrors the original scaling — presumably 2 bytes per
            // 16-bit element in _buffer; TODO confirm _buffer's element type.
            var result = new byte[size * 2];
            Buffer.BlockCopy(_buffer, 0, result, 0, result.Length);
            _readSubject.OnNext(result);
        }
    }
    _record.Stop();
}
void OnNext()
{
    // Capture one 2048-sample chunk and republish it widened to ints.
    var samples = new short[2048];
    audioRecord.Read(samples, 0, samples.Length);

    var widened = Array.ConvertAll(samples, s => (int)s);
    samplesUpdated(this, new SamplesUpdatedEventArgs(widened));
}
/**
 * Records audio until stopped the {@link #task} is canceled,
 * {@link #continueRecording} is false, or {@link #clipListener} returns
 * true <br>
 * records audio to a short [readBufferSize] and passes it to
 * {@link #clipListener} <br>
 * uses an audio buffer of size bufferSize * bufferIncreaseFactor
 *
 * @param recordingBufferSize
 *            minimum audio buffer size
 * @param readBufferSize
 *            reads a buffer of this size
 * @param bufferIncreaseFactor
 *            to increase recording buffer size beyond the minimum needed
 */
private bool doRecording(int sampleRate, Encoding encoding, int recordingBufferSize, int readBufferSize, int bufferIncreaseFactor)
{
    // A negative recordingBufferSize is GetMinBufferSize's error signal for
    // an unsupported rate/encoding combination.
    if (recordingBufferSize < 0)
    {
        Log.Debug(TAG, "Bad encoding value, see logcat.Code=" + recordingBufferSize.ToString());
        return(false);
    }
    // give it extra space to prevent overflow
    int increasedRecordingBufferSize = recordingBufferSize * bufferIncreaseFactor;
    recorder = new AudioRecord(AudioSource.Mic, sampleRate, ChannelIn.Mono,
                               encoding, increasedRecordingBufferSize);
    byte[] readBuffer = new byte[readBufferSize];
    continueRecording = true;
    Log.Debug(TAG, "start recording, " +
              "recording bufferSize: " + increasedRecordingBufferSize +
              " read buffer size: " + readBufferSize);
    //Note: possible IllegalStateException
    //if audio recording is already recording or otherwise not available
    //AudioRecord.getState() will be AudioRecord.STATE_UNINITIALIZED
    recorder.StartRecording();
    while (continueRecording)
    {
        Thread.Sleep(5);
        // bufferResult is the byte count read, or a negative error code.
        int bufferResult = recorder.Read(readBuffer, 0, readBufferSize);
        //in case external code stopped this while read was happening
        if ((!continueRecording) || ((task != null) && task.IsCanceled))
        {
            break;
        }
        // check for error conditions
        if (bufferResult < 0)
        {
            Log.Error(TAG, "error reading: ERROR_INVALID_OPERATION.Code=" + bufferResult);
        }
        else // no errors, do processing
        {
            // NOTE(review): the full readBuffer is handed over regardless of
            // how many bytes Read() actually delivered — confirm heard()
            // tolerates stale tail bytes.
            clipListener.heard(readBuffer, sampleRate);
        }
    }
    // done() tears down recording state; `heard` is presumably set by the
    // listener path — confirm where it is assigned.
    done();
    return(heard);
}
private void RecordAudio()
{
    // Captures mono 11.025 kHz PCM16 audio into wavPath until _isRecording
    // goes false.
    if (File.Exists(wavPath))
    {
        File.Delete(wavPath);
    }
    System.IO.Stream outputStream = System.IO.File.Open(wavPath, FileMode.CreateNew);
    BinaryWriter bWriter = new BinaryWriter(outputStream);
    try
    {
        int bufferSize = AudioRecord.GetMinBufferSize(11025, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit);
        audioBuffer = new byte[bufferSize];
        audRecorder = new AudioRecord(
            // Hardware source of recording.
            AudioSource.Mic,
            // Frequency
            11025,
            // Mono or stereo
            ChannelIn.Mono,
            // Audio encoding
            Android.Media.Encoding.Pcm16bit,
            // Length of the audio clip.
            bufferSize
        );
        audRecorder.StartRecording();
        while (_isRecording == true)
        {
            try
            {
                /// Keep reading the buffer while there is audio input.
                // audioData is the byte count returned by Read() (negative on
                // error); the original wrote the whole buffer every pass,
                // persisting stale bytes whenever a read came up short.
                audioData = audRecorder.Read(audioBuffer, 0, audioBuffer.Length);
                if (audioData > 0)
                {
                    bWriter.Write(audioBuffer, 0, audioData);
                }
            }
            catch (System.Exception ex)
            {
                System.Console.Out.WriteLine(ex.Message);
                MessagingCenter.Send<ISoundRecorder, bool>(this, "finishReplaying", true);
                break;
            }
        }
    }
    finally
    {
        // Close even when setup or the loop throws, so the file handle is
        // not leaked. NOTE(review): the file receives raw PCM with no WAV
        // header despite the .wav path — confirm a header is added later.
        bWriter.Close();
        outputStream.Close();
    }
}
private void ReadAudioAsync()
{
    // Continuously service the recorder while recording is active. The
    // bytes themselves are discarded — this loop only keeps draining input.
    for (;;)
    {
        if (!isRecording)
        {
            return;
        }
        try
        {
            audRecorder.Read(audioBuffer, 0, audioBuffer.Length);
        }
        catch (Exception ex)
        {
            Console.Out.WriteLine(ex.Message);
            return;
        }
    }
}
private void ReadThread()
{
    // Capture loop: normalizes every raw sample into floats and hands the
    // shared _buffer to the callback, then tears the recorder down.
    _record.StartRecording();

    while (_isrecording)
    {
        var count = _record.Read(_tmpBuffer, 0, _tmpBuffer.Length);
        var i = 0;
        foreach (var raw in _tmpBuffer)
        {
            // Scale to roughly [-1, 1] using the 16-bit max.
            _buffer[i++] = raw / 32767.0f;//(_tmpBuffer[i] > 100 || _tmpBuffer[i]<-100) ? _tmpBuffer[i] / 32767.0f : 0f;
        }
        _callback?.Invoke(_buffer);
    }

    _record.Stop();
    _record.Release();
    _record.Dispose();
}
void OnNext()
{
    WvlLogger.Log(LogType.TraceAll, "OnNext()");

    //short[] audioBuffer = new short[2048];
    // One 1024-sample grab per tick.
    var samples = new short[1024];
    WvlLogger.Log(LogType.TraceValues, "OnNext() - audioRecord.Read - audioBuffer.Length : " + samples.Length.ToString());
    audioRecord.Read(samples, 0, samples.Length);

    // Widen each 16-bit sample to int for the event payload.
    var widened = new int[samples.Length];
    for (var j = samples.Length - 1; j >= 0; j--)
    {
        widened[j] = samples[j];
    }
    samplesUpdated(this, new SamplesUpdatedEventArgs(widened));
}
void WriteAudioDataToFile()
{
    // Streams raw PCM into a temp file while isRecording stays true.
    byte[] data = new byte[bufferSize];
    var filename = GetTempFilename();
    FileOutputStream os = null;
    System.Diagnostics.Debug.WriteLine(filename);
    try
    {
        os = new FileOutputStream(filename);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }
    int read = 0;
    if (os != null)
    {
        while (isRecording)
        {
            read = recorder.Read(data, 0, bufferSize);
            // Read() returns the byte count or a negative error code; the
            // original wrote the entire buffer regardless, persisting stale
            // bytes and writing garbage on error.
            if (read > 0)
            {
                try
                {
                    os.Write(data, 0, read);
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine(ex.Message);
                }
            }
        }
        try
        {
            os.Close();
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine(ex.Message);
        }
    }
}
public override void Run()
{
    // Capture thread: reads 160-sample chunks from an 8 kHz mono PCM16
    // recorder into a rotating ring of 256 reusable buffers and hands each
    // chunk to process().
    Android.OS.Process.SetThreadPriority(ThreadPriority.UrgentAudio);
    AudioRecord recorder = null;
    short[][] buffers = new short[256][];
    int ix = 0;
    for (int i = 0; i < 256; i++)
    {
        buffers[i] = new short[160];
    }
    try
    {
        // ... initialise
        int N = AudioRecord.GetMinBufferSize(8000, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit);
        // 10x the minimum buffer to absorb scheduling hiccups.
        recorder = new AudioRecord(AudioSource.Mic, 8000, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit, N * 10);
        recorder.StartRecording();
        // ... loop
        while (!stopped)
        {
            // Rotate through the ring so process() can hold a buffer briefly
            // without it being overwritten by the very next read.
            short[] buffer = buffers[ix++ % buffers.Length];
            // N is reused here as the count of shorts actually read
            // (negative on error — NOTE(review): unchecked before process()).
            N = recorder.Read(buffer, 0, buffer.Length);
            //process is what you will do with the data...not defined here
            process(buffer);
        }
    }
    catch (Throwable x)
    {
        Console.WriteLine(x.GetType() + " " + x.Message);
    }
    finally
    {
        // close() is responsible for stopping/releasing the recorder.
        close();
    }
}
protected override void Work()
{
    // Wait until the recorder exists and has finished initializing.
    while (recorder == null || recorder.State == State.Uninitialized)
    {
        Thread.Sleep(100);
    }

    var buffor = new byte[bufferSize];
    var audioSize = recorder.Read(buffor, 0, bufferSize);
    if (audioSize > 0)
    {
        // Send only the bytes actually read; the original transmitted the
        // whole buffer, padding every short read with trailing zeros.
        var payload = new byte[audioSize];
        Buffer.BlockCopy(buffor, 0, payload, 0, audioSize);
        pcmSender.Send(new MultiPlatform.Packets.Audio.PCMPacket()
        {
            Data = payload
        });
        Thread.Sleep(1);
    }
}
void OnNext()
{
    WvlLogger.Log(LogType.TraceAll, "OnNext()");

    // Read one 2048-sample chunk and widen it to ints. Note the publish
    // call below is deliberately commented out (stop-button debugging).
    var samples = new short[2048];
    audioRecord.Read(samples, 0, samples.Length);

    var widened = new int[samples.Length];
    for (var j = 0; j < widened.Length; j++)
    {
        widened[j] = samples[j];
    }

    // stop button debug
    /*
     * samplesUpdated(this, new SamplesUpdatedEventArgs(result));
     */
}