public void AudioPlayRelease()
{
    record.Stop();
    audioTrack.Stop();
    audioTrack.Release();
    record.Release();
}
public void Clear()
{
    recorder?.Stop();
    recorder?.Release();
    recorder?.Dispose();
    recorder = null;

    audioTrack?.Stop();
    audioTrack?.Release();
    audioTrack?.Dispose();
    audioTrack = null;
}
public void Stop()
{
    lock (stateLocker)
    {
        if (recorder != null)
        {
            if (recorder.RecordingState == RecordState.Recording)
            {
                recorder.Stop();
                recorder.Release();
                recorder = null;
                recordingThread = null;
            }
        }

        if (player != null)
        {
            if (player.PlayState == PlayState.Playing)
            {
                player.Stop();
                player.Release();
                player = null;
            }
        }

        OnMessage?.Invoke(this, "Stopped");
    }
}
void readLoop()
{
    byte[] buffer = new byte[bufferSize];
    while (!stopRecording)
    {
        try
        {
            int num_bytes = audioRecorder.Read(buffer, 0, buffer.Length);

            // Copy only the bytes actually read before handing them off.
            byte[] data_to_send = new byte[num_bytes];
            Array.Copy(buffer, data_to_send, num_bytes);

            Task.Run(() => { OnSoundDataReceived(data_to_send); });
        }
        catch (Exception e)
        {
            Logging.error("Exception occurred while recording audio stream: " + e);
            break;
        }
        Thread.Sleep(10);
    }

    audioRecorder.Stop();
    audioRecorder.Release();
    audioRecorder.Dispose();
    audioRecorder = null;
    running = false;
}
public Stream EndCaptureAudio()
{
    if (_recorder != null)
    {
        var read = _recorder.Read(_audioBuffer, 0, _audioBuffer.Length);
        var offset = TrimAudioZeros(read);
        _recorder.Stop();

        var audioStream = _audioCaptureStream;
        _audioCaptureStream = null;

        if (read > offset)
        {
            audioStream.Write(_audioBuffer, offset, read - offset);
        }

        _recorder.Release();
        _recorder.Dispose();
        _recorder = null;

        MediaHelper.UpdateWavHeader(audioStream);
        audioStream.Seek(0, SeekOrigin.Begin);
        return audioStream;
    }
    return null;
}
async Task ReadAudioAsync()
{
    using (var fileStream = new FileStream(filePath, System.IO.FileMode.Create, System.IO.FileAccess.Write))
    {
        while (true)
        {
            if (endRecording)
            {
                endRecording = false;
                break;
            }
            try
            {
                // Keep reading the buffer while there is audio input.
                int numBytes = await audioRecord.ReadAsync(audioBuffer, 0, audioBuffer.Length);

                // Write out only the bytes that were read.
                await fileStream.WriteAsync(audioBuffer, 0, numBytes);
            }
            catch (Exception ex)
            {
                Console.Out.WriteLine(ex.Message);
                break;
            }
        }
        fileStream.Close();
    }

    audioRecord.Stop();
    audioRecord.Release();
    isRecording = false;
    RaiseRecordingStateChangedEvent();
}
/// <summary>
/// Stop recorder
/// </summary>
public void StopRecording()
{
    isRecording = false;
    try
    {
        if (audioRecord.State == State.Initialized)
        {
            // Stop the audio recorder
            audioRecord.Stop();
            audioRecord.Release();
            audioRecord = null;

            // Stop the recording thread
            audioThread.Abort();
            delegateThreadStart = null;
            audioThread = null;
        }

        // Create file path for the .wav file
        wavFile = Android.OS.Environment.ExternalStorageDirectory.Path + "/AudioRecorderFile.wav";
        ConvertRawFileToWavFile(rawFile, wavFile);

        // Delete the temp file
        new Java.IO.File(rawFile).Delete();
    }
    catch (Exception e)
    {
        throw new System.ArgumentException(e.Message);
    }
}
private async Task RecordAudioAsync()
{
    wavPath = Path.Combine(audioDir, Guid.NewGuid().ToString() + "_audio.wav");
    byte[] audioBuffer = new byte[8000];

    audioRecord = new AudioRecord(
        AudioSource.Mic,     // Hardware source of recording.
        sampleRate,          // Frequency
        channelIn,           // Mono or stereo
        encoding,            // Audio encoding
        audioBuffer.Length   // Length of the audio clip.
    );

    var id = audioRecord.AudioSessionId;
    audioRecord.StartRecording();

    int totalAudioLen = 0;
    isRecording = true;

    using (System.IO.Stream outputStream = System.IO.File.Open(wavPath, FileMode.Create))
    using (BinaryWriter bWriter = new BinaryWriter(outputStream))
    {
        // Init a header with the maximum length - the real length is added later.
        WriteWaveFileHeader(bWriter, maxAudioFreamesLength);

        // Keep reading the buffer while there is audio input.
        while (isRecording && totalAudioLen <= maxAudioFreamesLength)
        {
            int read = await audioRecord.ReadAsync(audioBuffer, 0, audioBuffer.Length);
            totalAudioLen += read;
            bWriter.Write(audioBuffer, 0, read); // write only the bytes actually read

            // Analysis of the captured block.
            var intbuffer = ByteArrayTo16Bit(audioBuffer);
            var min = intbuffer.Min();
            var max = intbuffer.Max();
            var avg = intbuffer.Average(x => (double)x);
            var sos = intbuffer.Select(x => (long)x * x).Sum(); // sum of squares
            var rms = Math.Sqrt(sos / (double)intbuffer.Length);
            var fft = FFT(intbuffer);
        }

        isRecording = false;

        // Write length to header.
        outputStream.Close();
        bWriter.Close();
    }

    audioRecord.Stop();
    audioRecord.Dispose();

    // This file is now fully written and can be sent to the server for analysis.
    OnAudioReadyForUpload(new AudioUploadEventArgs(DateTime.Now.ToUniversalTime(), wavPath));
}
public Task Stop()
{
    return Task.Run(() =>
    {
        _audioSource.Stop();
        _audioSource = null;
    });
}
void RecordAudio()
{
    sA = new SampleAggregator(1024);
    sA.FftCalculated += SA_FftCalculated;

    byte[] audioBuffer = new byte[2048];
    var audRecorder = new AudioRecord(
        AudioSource.Mic,                  // Hardware source of recording.
        44100,                            // Frequency
        ChannelIn.Mono,                   // Mono or stereo
        Android.Media.Encoding.Pcm16bit,  // Audio encoding
        audioBuffer.Length                // Length of the audio clip.
    );

    var startTime = DateTime.Now;
    var endTime = startTime.AddSeconds(60);

    audRecorder.StartRecording();
    while (DateTime.Now < endTime)
    {
        try
        {
            // Keep reading the buffer while there is audio input.
            audRecorder.Read(audioBuffer, 0, audioBuffer.Length);

            int max = 0, currval = 0;
            int total = 0;
            for (int i = 0; i < audioBuffer.Length; i = i + 2)
            {
                currval = audioBuffer[i] * 256 + audioBuffer[i + 1];
                sA.Add(currval);
                if (max < currval)
                {
                    max = currval;
                }
                if (currval > 32767)
                {
                    total = total + currval;
                }
            }
            //int level = max - 62719;
            //RunOnUiThread(() => seekBar2.Progress = (int) level);
            //Console.WriteLine(level*23);
            //SetBulbColour(0,level*23);
        }
        catch (Exception ex)
        {
            Console.Out.WriteLine(ex.Message);
            break;
        }
    }
    audRecorder.Stop();
}
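Several of these examples hard-code the capture buffer length (2048 or 8000 bytes). A safer pattern, sketched below under the assumption of the same 44.1 kHz / mono / 16-bit PCM configuration used above, is to ask Android for the device's minimum buffer size via AudioRecord.GetMinBufferSize and never allocate less than that:

// Sketch only: derive the buffer size from the device minimum instead of a
// hard-coded constant (sample rate, channel, and encoding assumed to match
// the snippet above).
int minSize = AudioRecord.GetMinBufferSize(44100, ChannelIn.Mono, Android.Media.Encoding.Pcm16bit);
if (minSize <= 0)
{
    minSize = 2048; // GetMinBufferSize signals an error with a non-positive value; fall back
}
byte[] audioBuffer = new byte[minSize];
var audRecorder = new AudioRecord(
    AudioSource.Mic,
    44100,
    ChannelIn.Mono,
    Android.Media.Encoding.Pcm16bit,
    audioBuffer.Length);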
public void Stop()
{
    if (_isRecording)
    {
        _isRecording = false;
        audRecorder.Stop();
        audioDataBytes = File.ReadAllBytes(wavPath);
        audRecorder.Release();
    }
}
public bool StartListening(String searchName, int timeout = -1)
{
    if (null != _recognizerThread)
    {
        return false;
    }

    _decoder.SetSearch(searchName);
    _interruption = new CancellationTokenSource();

    _recognizerThread = Task.Run(() =>
    {
        try
        {
            RecognizeAsync(timeout);
        }
        catch (OperationCanceledException)
        {
            System.Diagnostics.Debug.WriteLine(TAG, "!! received cancel token !!");
        }
        catch (System.Exception e)
        {
            System.Diagnostics.Debug.WriteLine("EXCEPTION: unhandled exception : " + e.Message);
        }
        finally
        {
            try
            {
                _recorder.Stop();
                _decoder.EndUtt();
            }
            catch (System.Exception e)
            {
                System.Diagnostics.Debug.WriteLine("EXCEPTION: calling recorder stop method. " + e.Message);
            }
            System.Diagnostics.Debug.WriteLine(TAG, "Recognizer thread is stopped");
            OnStopped();
        }
    }, _interruption.Token);

    return true;
}
/**
 * need to call this when completely done with recording
 */
public void done()
{
    Log.Debug(TAG, "shut down recorder");
    if (recorder != null)
    {
        recorder.Stop();
        recorder.Release();
        recorder = null;
    }
}
protected override void OnPause()
{
    base.OnPause();

    player?.Release();
    player?.Dispose();
    player = null;

    audRecorder?.Stop();
    audRecorder?.Release();
    audRecorder = null;
}
private void KillAudioJack()
{
    try
    {
        audioJack.Stop();
        audioJack.Release();
    }
    catch
    {
        // Do nothing; this is just a best-effort attempt to release the audio jack cleanly.
    }
}
public void StopRecording()
{
    if (recorder != null)
    {
        recorder.Stop();
        isRecording = false;
        token.Cancel();
        recorder.Release();
        recorder = null;
    }
    CopyWaveFile(GetTempFilename(), GetFilename());
}
private void ReadThread()
{
    _record.StartRecording();
    while (_isrecording)
    {
        // Read returns a count of 16-bit samples, so the byte copy needs twice as many bytes.
        var size = _record.Read(_buffer, 0, _buffer.Length);
        var result = new byte[size * 2];
        Buffer.BlockCopy(_buffer, 0, result, 0, result.Length);
        _readSubject.OnNext(result);
    }
    _record.Stop();
}
public Task<AudioRecording> StopAsync()
{
    if (audioRecord != null && audioRecord.RecordingState == RecordState.Recording)
    {
        audioRecord.Stop();
    }

    audioFilePath = GetTempFileName();
    CopyWaveFile(rawFilePath, audioFilePath);
    return Task.FromResult(GetRecording());
}
public void StopRecording()
{
    if (_recorder == null)
    {
        return;
    }

    _isRecording = false;
    _recorder.Stop();
    _recorder.Release();
    _recorder = null;
}
public void StopRecording()
{
    if (recorder != null)
    {
        recorder.Stop();
        isRecording = false;
        token.Cancel();
        recorder.Release();
        recorder = null;
    }
    CopyWaveFile(tempFileName, FileHelper.GetFilename(Constants.AudioFilename));
}
private async Task RecordAudio(List<byte[]> audioTracks, MemoryStream memoryStream)
{
    if (memoryStream == null)
    {
        memoryStream = new MemoryStream();
    }

    while (true)
    {
        if (endRecording)
        {
            endRecording = false;
            break;
        }
        try
        {
            // Keep reading the buffer while there is audio input.
            int read = await audioRecorder.ReadAsync(audioBuffer, 0, audioBuffer.Length);

            // Write only the bytes that were actually read.
            await memoryStream.WriteAsync(audioBuffer, 0, read);
            await Console.Out.WriteLineAsync("RECORDING SOUND. Memory stream size: " + memoryStream.Length);
        }
        catch (Exception ex)
        {
            Console.Out.WriteLine(ex.Message);
            break;
        }
    }

    await Console.Out.WriteLineAsync("We successfully stopped recording.");
    audioRecorder.Stop();
    audioRecorder.Release();

    var mainTextView = activity.FindViewById<TextView>(mainTextViewId);
    if (memoryStream.Length > 0)
    {
        audioTracks.Add(memoryStream.ToArray());
        mainTextView.Text = "Number of tracks: " + audioTracks.Count;
    }

    isRecording = false;
    RaiseRecordingStateChangedEvent();
}
public void StopRecording()
{
    WvlLogger.Log(LogType.TraceAll, "StopRecording()");

    if (null != audioRecord)
    {
        isRecording = false;
        if (audioRecord.State == State.Initialized)
        {
            audioRecord.Stop();
        }
        audioRecord.Release();
        audioRecord = null;
        recordingThread = null;
    }

    /*
    if (null != audioRecordCharts)
    {
        if (audioRecordCharts.State == State.Initialized)
        {
            audioRecordCharts.Stop();

            // Write file after recording
            isWriting = true;
            WriteAudioDataToFileAfterRecording();
        }
        audioRecordCharts.Release();

        audioRecordCharts = null;
        chartsThread = null;

        samplesUpdatedThread = null;
    }
    */

    /*
    if (audioRecordCharts.State == State.Initialized)
    {
        audioRecordCharts.Stop();
        WriteAudioDataToFileAfterRecording();
    }
    audioRecordCharts.Release();
    */

    CopyWaveFile(GetTempFilename(), GetFilename());
    //DeleteTempFile();
}
public void Stop()
{
    // Nothing to stop if recording never started or the recorder is already gone.
    if (!_isRecording || _recorder == null)
    {
        _tcs.TrySetResult(new AudioRecordResult("Not recording"));
        return;
    }

    _timeoutToken?.Cancel();
    _isRecording = false;

    if (_recorder.State == State.Initialized)
    {
        _recorder.Stop();
    }
    _recorder.Release();

    // Android audio is a raw PCM stream, so add a WAV header.
    var wavstream = new MemoryStream();
    WriteWaveFileHeader(wavstream, _ms.Length, _options.SampleRate, 1);
    _ms.Seek(0, SeekOrigin.Begin);
    _ms.CopyTo(wavstream);
    _ms.Close();
    _ms.Dispose();
    _ms = null;

    if (_options.StreamFormat == AudioRecordOptions.Format.Wave)
    {
        _tcs.TrySetResult(new AudioRecordResult(wavstream.ToArray()));
    }
    else if (_options.StreamFormat == AudioRecordOptions.Format.Flac)
    {
        // Encode the WAV data into FLAC.
        using (var ms = new MemoryStream())
        {
            using (var flacStream = new WaveOverFlacStream(ms, WaveOverFlacStreamMode.Encode, true))
            {
                wavstream.Seek(0, SeekOrigin.Begin);
                wavstream.CopyTo(flacStream);
            }
            ms.Flush();
            ms.Seek(0, SeekOrigin.Begin);
            _tcs.TrySetResult(new AudioRecordResult(ms.ToArray()));
        }
    }
}
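Several snippets above call a WriteWaveFileHeader helper that is not shown. A minimal sketch matching the call in the previous example (stream, PCM data length, sample rate, channel count), assuming 16-bit little-endian PCM, could look like this:

// Hypothetical helper matching the WriteWaveFileHeader(stream, dataLength, sampleRate, channels)
// call above; writes a standard 44-byte RIFF/WAVE header for 16-bit PCM.
static void WriteWaveFileHeader(Stream stream, long dataLength, int sampleRate, short channels)
{
    const short bitsPerSample = 16;
    int byteRate = sampleRate * channels * bitsPerSample / 8;
    short blockAlign = (short)(channels * bitsPerSample / 8);

    using (var writer = new BinaryWriter(stream, System.Text.Encoding.ASCII, leaveOpen: true))
    {
        writer.Write(System.Text.Encoding.ASCII.GetBytes("RIFF"));
        writer.Write((uint)(36 + dataLength));   // total chunk size
        writer.Write(System.Text.Encoding.ASCII.GetBytes("WAVE"));
        writer.Write(System.Text.Encoding.ASCII.GetBytes("fmt "));
        writer.Write(16u);                       // PCM fmt chunk size
        writer.Write((ushort)1);                 // audio format: PCM
        writer.Write((ushort)channels);
        writer.Write((uint)sampleRate);
        writer.Write((uint)byteRate);
        writer.Write((ushort)blockAlign);
        writer.Write((ushort)bitsPerSample);
        writer.Write(System.Text.Encoding.ASCII.GetBytes("data"));
        writer.Write((uint)dataLength);
    }
}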
public string StopRecording()
{
    if (_recorder != null)
    {
        _isRecording = false;
        _recorder.Stop();
        _recorder.Release();
        _recorder = null;
    }

    var file = GetFilename(Guid.NewGuid().ToString());
    CopyWaveFile(GetTempFilename(), file);
    return file;
}
/** Stops the stream. */
public void stop()
{
    if (mStreaming)
    {
        if (mMode == MODE_MEDIACODEC_API)
        {
            Log.d(TAG, "Interrupting threads...");
            mThread.Interrupt();
            mAudioRecord.Stop();
            mAudioRecord.Release();
            mAudioRecord = null;
        }
        base.stop();
    }
}
/// <summary>
/// Stops the audio stream.
/// </summary>
public Task Stop()
{
    if (Active)
    {
        audioSource.Stop();
        audioSource.Release();
        OnActiveChanged?.Invoke(this, false);
    }
    else // just in case
    {
        audioSource?.Release();
    }
    return Task.FromResult(true);
}
private void ReadThread()
{
    _record.StartRecording();
    while (_isrecording)
    {
        var size = _record.Read(_tmpBuffer, 0, _tmpBuffer.Length);
        for (var i = 0; i < _tmpBuffer.Length; i++)
        {
            // Normalize 16-bit samples to the range [-1, 1].
            _buffer[i] = _tmpBuffer[i] / 32767.0f; //(_tmpBuffer[i] > 100 || _tmpBuffer[i]<-100) ? _tmpBuffer[i] / 32767.0f : 0f;
        }
        _callback?.Invoke(_buffer);
    }
    _record.Stop();
    _record.Release();
    _record.Dispose();
}
private void cleanUp()
{
    running = false;

    if (audioRecorder != null)
    {
        try
        {
            audioRecorder.Stop();
            audioRecorder.Release();
        }
        catch (Exception) { }
        audioRecorder.Dispose();
        audioRecorder = null;
    }

    if (audioEncoder != null)
    {
        try
        {
            audioEncoder.Stop();
            audioEncoder.Release();
        }
        catch (Exception) { }
        audioEncoder.Dispose();
        audioEncoder = null;
    }

    buffer = null;
    bufferSize = 0;

    lock (outputBuffers)
    {
        outputBuffers.Clear();
    }
    lock (availableBuffers)
    {
        availableBuffers.Clear();
    }
}
public override void OnDestroy()
{
    base.OnDestroy();

    if (tokenSource != null)
    {
        try
        {
            tokenSource.Cancel();
            task.Wait();
        }
        catch (AggregateException)
        {
            record.Stop();
            record.Release();
            ws.Close();
            task.Dispose();
            Console.WriteLine("Task Cancelled.");
        }
    }
}
/// <summary>
/// Start the recording and send the recorded bytes to a callback.
/// </summary>
/// <param name="callback">Callback to handle recorded bytes</param>
public void StartRecording(Action<byte[]> callback)
{
    if (IsRecording)
    {
        return; // already running, don't start again
    }
    IsRecording = true;

    byte[] audioBuffer = new byte[bufferLength];
    var audRecorder = new AudioRecord(
        AudioSource.Mic,                  // Hardware source of recording.
        11025,                            // Frequency
        ChannelIn.Mono,                   // Mono or stereo
        Android.Media.Encoding.Pcm16bit,  // Audio encoding
        audioBuffer.Length                // Length of the audio clip.
    );

    audRecorder.StartRecording();
    while (IsRecording)
    {
        try
        {
            // Keep reading the buffer while there is audio input.
            int read = audRecorder.Read(audioBuffer, 0, audioBuffer.Length);

            // Hand only the bytes that were actually read to the callback.
            var chunk = new byte[read];
            Array.Copy(audioBuffer, chunk, read);
            callback?.Invoke(chunk);
        }
        catch (Exception ex)
        {
            IsRecording = false;
            System.Diagnostics.Debug.WriteLine("Exception: " + ex.Message + " -- " + ex.StackTrace);
            break;
        }
    }

    audRecorder.Stop();
    audRecorder.Release();
}
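Since StartRecording above blocks in its read loop until IsRecording is cleared, a caller would normally run it on a background thread and clear the flag to stop it. A hypothetical usage sketch, assuming the containing class exposes IsRecording to the caller (the type and handler names below are placeholders, not part of the original code):

// Hypothetical usage; "AudioRecorderService", "recorder", and "HandleChunk" are placeholders.
var recorder = new AudioRecorderService();
Task.Run(() => recorder.StartRecording(chunk => HandleChunk(chunk)));

// ... later, from any thread: clearing the flag makes the read loop exit,
// after which StartRecording itself calls Stop() and Release().
recorder.IsRecording = false;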