public void Stop()
{
    if (!IsInited)
    {
        return;
    }

    lock (syncObj)
    {
        capture.Stop();
    }
}
public void Stop()
{
    if (!IsInited)
    {
        return;
    }

    lock (syncObj)
    {
        capture.Stop();

        if (systemTimer != null)
        {
            systemTimer.Dispose();
        }

        systemTimer = null;
    }
}
/// <summary>
/// Stop recording from the Microphone
/// </summary>
public void StopRecording()
{
    if (IsMicrophoneValid)
    {
        continuePolling = false;
        audio_capture.Stop();
        ClearBuffers(0);
    }
}
public override void Disable()
{
    if (!enabled)
    {
        return;
    }

    audioCapture.Stop();
    enabled = false;
}
public void StopEcho()
{
    if (Capture == null)
    {
        return;
    }

    AL.SourceStop(PlaybackSrc);

    int bufc;
    AL.GetSource(PlaybackSrc, ALGetSourcei.BuffersQueued, out bufc);
    if (bufc > 0)
    {
        int[] bufs = AL.SourceUnqueueBuffers(PlaybackSrc, bufc);
        AL.DeleteBuffers(bufs);
    }

    AL.DeleteSource(PlaybackSrc);
    PlaybackSrc = 0;

    Capture.Stop();
    Capture.Dispose();
    Capture = null;
}
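// A hypothetical start-side counterpart to StopEcho above, sketched under the
// assumption that the same Capture and PlaybackSrc fields are used: it opens
// the default capture device, creates the playback source, and a periodic poll
// queues captured samples back onto that source. Device, sample rate, and
// buffer sizes are assumptions, not taken from the original code.
public void StartEcho()
{
    if (Capture != null)
    {
        return;
    }

    Capture = new AudioCapture(AudioCapture.DefaultDevice, 44100, ALFormat.Mono16, 4096);
    PlaybackSrc = AL.GenSource();
    Capture.Start();
}

// Called periodically (e.g. from a timer) to move captured samples to the playback source.
// Processed buffers would still need to be unqueued now and then to avoid piling up.
void PollEcho()
{
    int available = Capture.AvailableSamples;
    if (available == 0)
    {
        return;
    }

    short[] samples = new short[available];
    Capture.ReadSamples(samples, available);

    int buffer = AL.GenBuffer();
    AL.BufferData(buffer, ALFormat.Mono16, samples, available * sizeof(short), 44100);
    AL.SourceQueueBuffers(PlaybackSrc, 1, new[] { buffer });

    if (AL.GetSourceState(PlaybackSrc) != ALSourceState.Playing)
    {
        AL.SourcePlay(PlaybackSrc);
    }
}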
void StopRecording()
{
    timer_GetSamples.Stop();

    if (audio_capture != null)
    {
        audio_capture.Stop();
        audio_capture.Dispose();
        audio_capture = null;
    }

    if (audio_context != null)
    {
        int r;
        AL.GetSource(src, ALGetSourcei.BuffersQueued, out r);
        ClearBuffers(r);
        AL.DeleteSource(src);
        audio_context.Dispose();
        audio_context = null;
    }
}
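// ClearBuffers is called above but not shown in these snippets. A minimal
// sketch of what such a helper commonly does with OpenTK's AL API, assuming
// it unqueues and deletes `count` buffers from the playback source `src`,
// and that passing 0 means "only the buffers already processed". This is an
// illustration, not the original implementation.
void ClearBuffers(int count)
{
    if (count == 0)
    {
        // Ask OpenAL how many queued buffers have finished playing.
        AL.GetSource(src, ALGetSourcei.BuffersProcessed, out count);
    }

    if (count > 0)
    {
        // Detach the buffers from the source and release them.
        int[] freed = AL.SourceUnqueueBuffers(src, count);
        AL.DeleteBuffers(freed);
    }
}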
private void ChEnable_CheckedChanged(object sender, EventArgs e)
{
    if (ChEnable.Checked)
    {
        try
        {
            AudioCapturer = new AudioCapture(
                AudioCapture.AvailableDevices[LsAudioDevices.SelectedIndex],
                8000, ALFormat.Mono8, 256);
            AudioCapturer.Start();
        }
        catch
        {
            // Opening the capture device can fail (e.g. device busy or removed);
            // swallow the exception and leave capture disabled.
        }
    }
    else
    {
        if (AudioCapturer != null)
        {
            AudioCapturer.Stop();
            AudioCapturer.Dispose();
            AudioCapturer = null;
            System.Threading.Thread.Sleep(100);
        }
    }
}
/// <summary>
/// Stop the voice session
/// </summary>
public void Stop()
{
    _audioPlayer.Stop();
    _audioCapture.Stop();
    _transferClient.Disconnect();
}
public RecorderDiagnostic()
{
    Trace.WriteLine("--- AudioCapture related errors ---");
    IsDeviceAvailable = false;
    try
    {
        r = new AudioCapture(AudioCapture.DefaultDevice, 16000, ALFormat.Mono16, 4096);
    }
    catch (AudioDeviceException ade)
    {
        Trace.WriteLine("AudioCapture Exception caught: " + ade.Message);
        return;
    }

    IsDeviceAvailable = true;
    DeviceName = r.CurrentDevice;
    CheckRecorderError("Alc.CaptureOpenDevice");

    r.Start();
    CheckRecorderError("Alc.CaptureStart");
    Thread.Sleep(100);
    r.Stop();
    CheckRecorderError("Alc.CaptureStop");

    byte[] Buffer = new byte[8192];
    Thread.Sleep(10); // Wait for a few samples to become available.

    int SamplesBefore = r.AvailableSamples;
    CheckRecorderError("Alc.GetInteger(...CaptureSamples...)");
    r.ReadSamples(Buffer, (SamplesBefore > 4096 ? 4096 : SamplesBefore));
    CheckRecorderError("Alc.CaptureSamples");

    int SamplesCaptured = SamplesBefore - r.AvailableSamples;

    uint ZeroCounter = 0;
    for (int i = 0; i < SamplesCaptured * 2; i++)
    {
        if (Buffer[i] == 0)
            ZeroCounter++;
    }

    for (int i = 0; i < SamplesCaptured; i++)
    {
        short sample = BitConverter.ToInt16(Buffer, i * 2);
        if (sample > MaxSample)
            MaxSample = sample;
        if (sample < MinSample)
            MinSample = sample;
    }

    if (ZeroCounter < SamplesCaptured * 2 && SamplesCaptured > 0)
        BufferContentsAllZero = false;
    else
        BufferContentsAllZero = true;

    r.Dispose();
    CheckRecorderError("Alc.CaptureCloseDevice");

    // no playback test needed due to Parrot test app.
    /*
    uint buf;
    AL.GenBuffer(out buf);
    AL.BufferData(buf, ALFormat.Mono16, BufferPtr, SamplesCaptured * 2, 16000);

    uint src;
    AL.GenSource(out src);
    AL.BindBufferToSource(src, buf);
    AL.Listener(ALListenerf.Gain, 16.0f);
    AL.SourcePlay(src);

    while (AL.GetSourceState(src) == ALSourceState.Playing)
    {
        Thread.Sleep(0);
    }
    AL.SourceStop(src);

    AL.DeleteSource(ref src);
    AL.DeleteBuffer(ref buf);
    */
}
public void stop()
{
    myDevice.Stop();
}
public Task<string> RecordToWav()
{
    Directory.CreateDirectory($"./{_configuration.WavFilesFolderName}");
    var wavFile = $"./{_configuration.WavFilesFolderName}/{Guid.NewGuid()}.wav";

    var recorders = AudioCapture.AvailableDevices;
    for (int i = 0; i < recorders.Count; i++)
    {
        Console.WriteLine(recorders[i]);
    }
    Console.WriteLine("-----");

    const int samplingRate = 44100;            // Samples per second
    const ALFormat alFormat = ALFormat.Mono16;
    const ushort bitsPerSample = 16;           // Mono16 has 16 bits per sample
    const ushort numChannels = 1;              // Mono16 has 1 channel

    using (var f = File.OpenWrite(wavFile))
    using (var sw = new BinaryWriter(f))
    {
        // Read this: http://soundfile.sapp.org/doc/WaveFormat/
        sw.Write(new char[] { 'R', 'I', 'F', 'F' });
        sw.Write(0); // overall size, will fill in later
        sw.Write(new char[] { 'W', 'A', 'V', 'E' });

        // "fmt " chunk (Google: WAVEFORMATEX structure)
        sw.Write(new char[] { 'f', 'm', 't', ' ' });
        sw.Write(16);                                               // chunkSize (in bytes)
        sw.Write((ushort)1);                                        // wFormatTag (PCM = 1)
        sw.Write(numChannels);                                      // wChannels
        sw.Write(samplingRate);                                     // dwSamplesPerSec
        sw.Write(samplingRate * numChannels * (bitsPerSample / 8)); // dwAvgBytesPerSec
        sw.Write((ushort)(numChannels * (bitsPerSample / 8)));      // wBlockAlign
        sw.Write(bitsPerSample);                                    // wBitsPerSample

        // "data" chunk
        sw.Write(new char[] { 'd', 'a', 't', 'a' });
        sw.Write(0); // data size, will fill in later

        // 10 seconds of data. Overblown, but it gets the job done.
        const int bufferLength = samplingRate * 10;
        int samplesWrote = 0;

        Console.WriteLine($"Recording from: {recorders[0]}");

        using (var audioCapture = new AudioCapture(recorders[0], samplingRate, alFormat, bufferLength))
        {
            var buffer = new short[bufferLength];

            audioCapture.Start();
            for (int i = 0; i < _configuration.SecondsToRecord; ++i)
            {
                Thread.Sleep(1000); // give it some time to collect samples

                var samplesAvailable = audioCapture.AvailableSamples;
                audioCapture.ReadSamples(buffer, samplesAvailable);
                for (var x = 0; x < samplesAvailable; ++x)
                {
                    sw.Write(buffer[x]);
                }

                samplesWrote += samplesAvailable;
                Console.WriteLine($"Wrote {samplesAvailable}/{samplesWrote} samples...");
            }
            audioCapture.Stop();
        }

        sw.Seek(4, SeekOrigin.Begin); // seek to overall size
        sw.Write(36 + samplesWrote * (bitsPerSample / 8) * numChannels);
        sw.Seek(40, SeekOrigin.Begin); // seek to data size position
        sw.Write(samplesWrote * (bitsPerSample / 8) * numChannels);
    }

    return Task.FromResult(wavFile);
}
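// A hypothetical caller for RecordToWav above. The enclosing class name
// (WavRecorder) and the configuration type (RecorderConfiguration) are
// assumptions; only the RecordToWav signature and the WavFilesFolderName /
// SecondsToRecord settings come from the snippet itself.
public static async Task Main()
{
    var recorder = new WavRecorder(new RecorderConfiguration
    {
        WavFilesFolderName = "wav", // assumed: relative folder for output files
        SecondsToRecord = 5         // assumed: length of the recording in seconds
    });

    string path = await recorder.RecordToWav();
    Console.WriteLine($"Recording saved to {path}");
}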