/// <summary>
/// Stops the active recording session, if one exists.
/// </summary>
public void StopRecording()
{
    // Nothing to do when no capture device was ever created.
    if (soundIn == null)
    {
        return;
    }

    isRecording = false;
    // The null check above guarantees soundIn is non-null here, so the
    // original redundant null-conditional ("soundIn?.Stop()") is dropped.
    soundIn.Stop();
}
/// <summary>
/// Handles completion of the background microphone listener: releases the
/// capture and playback resources, then closes the window.
/// </summary>
private void bgInputListener_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    Trace.TraceInformation("Background Microphone Listener closing. State: {0}", state.ToString());
    state = States.Stopping;

    // Capture side tolerates a null device via "?.".
    _soundCapture?.Stop();
    _soundCapture?.Dispose();

    _isSoundAlertPlaying = false;

    // Playback side: guard against a null _soundOut the same way the capture
    // side does, instead of dereferencing it unconditionally (original bug).
    if (_soundOut != null)
    {
        if (_soundOut.PlaybackState != PlaybackState.Stopped)
        {
            _soundOut.Stop();
        }
        _soundOut.Dispose();
    }

    state = States.Stopped;
    Close();
}
/// <summary>
/// Releases the capture, source, and encoder resources owned by this instance.
/// </summary>
public void Dispose()
{
    // NOTE(review): the original comment said "Dont dispose capture device",
    // yet _wasapiCapture IS disposed below — confirm which is intended.
    if (_wasapiCapture != null)
    {
        _wasapiCapture.Stop();
    }
    if (_captureSource != null)
    {
        _captureSource.Dispose();
    }
    if (_wasapiCapture != null)
    {
        _wasapiCapture.Dispose();
    }
    if (_opusEncoder != null)
    {
        _opusEncoder.Dispose();
    }
}
/// <summary>
/// Shuts down the visualizer on form close: stops the frame timer, clears
/// the matrix display, and releases the audio capture resources.
/// </summary>
private void VisualizerForm_FormClosing(object sender, FormClosingEventArgs e)
{
    FrameTimer.Stop();
    Matrix.Clear();

    // Stop before disposing so no callbacks arrive on a dead object.
    if (SoundIn != null)
    {
        SoundIn.Stop();
        SoundIn.Dispose();
    }
    if (Source != null)
    {
        Source.Dispose();
    }
}
/// <summary>
/// Stops the capture, disposes the writer (if still live), then the device.
/// </summary>
public void StopCapture()
{
    _capture.Stop();

    // Skip disposal when the writer was already torn down elsewhere.
    if (_writer.IsDisposed)
    {
        // nothing to do for the writer
    }
    else
    {
        _writer.Dispose();
    }

    _capture.Dispose();
}
/// <summary>
/// Starts the given capture, blocks until any key is pressed, then stops it.
/// </summary>
void StartCapturingAndHold(WasapiCapture capture)
{
    capture.Start();
#if DEBUG
    // Diagnostic output only in debug builds.
    Console.WriteLine("Start Capturing...");
    Console.WriteLine("Input Format: " + capture.WaveFormat.ToString());
#endif
    // Hold the capture open until the user presses a key.
    _ = Console.ReadKey();
    capture.Stop();
}
/// <summary>
/// Timer callback fired when the recording period elapses: stops the capture,
/// disables the timer, and releases the writer and device.
/// </summary>
private void OnFinishedRecordingEvent(object sender, ElapsedEventArgs e)
{
    // Stop recording before releasing anything.
    _capture.Stop();

    // Prevent the timer from firing again.
    _recordFileTimer.Enabled = false;

    _writer.Dispose();
    _capture.Dispose();

    Console.WriteLine("now really finished");
}
/// <summary>
/// Stops the capture device.
/// </summary>
/// <exception cref="WasapiNotInitializedException">
/// if this instance is not currently active
/// </exception>
public void StopCapture()
{
    // Guard clause: refuse to stop an inactive instance.
    if (!isCaptureAvailable)
    {
        throw new WasapiNotInitializedException("This instance isn't currently active.");
    }

    capture.Stop();
}
/// <summary>
/// Stops listening (if recording) and disposes the capture device.
/// </summary>
internal void StopListen()
{
    // Nothing to tear down when no capture was created.
    if (capture == null)
    {
        return;
    }

    if (capture.RecordingState == RecordingState.Recording)
    {
        capture.Stop();
    }
    capture.Dispose();
}
// Tears down the loopback-capture -> external ogg-encoder pipeline.
// Order matters: the capture side is released first so no new data is
// produced while the encoder process is being shut down.
private void StopAudioLoopback() {
    // Stop and release the loopback capture and its processed source.
    _soundIn.Stop();
    _soundIn.Dispose();
    _finalSource.Dispose();
    // Stop the component feeding the encoder's stdin.
    _stdOut.Stop();
    // Closing stdin signals EOF so the encoder can finalize the stream.
    _oggEncProcess.StandardInput.Close();
    // NOTE(review): stdout is closed BEFORE WaitForExit — if the encoder
    // still has buffered output this may block or drop data; confirm intended.
    _oggEncProcess.StandardOutput.Close();
    _oggEncProcess.WaitForExit();
    _oggEncProcess.Dispose();
}
/// <summary>
/// Stops the microphone capture/render pair and notifies listeners.
/// </summary>
public void Stop()
{
    // No-op when nothing is running.
    if (IsRunning)
    {
        capture.Stop();
        render.Stop();
        OnStateChanged?.Invoke(EMicState.Stop);
    }
}
/// <summary>
/// Records audio from the first input device and writes it to a WAV file.
/// </summary>
/// <param name="outputPath">Destination WAV file; defaults to the original hard-coded path.</param>
/// <param name="durationMs">Recording duration in milliseconds; defaults to 5000.</param>
/// <returns>A Task that completes when the recording has been written and disposed.</returns>
public static async Task RecordSample(
    string outputPath = @"C:\Users\Cedric Lampron\Desktop\Test Record\dump.wav",
    int durationMs = 5000)
{
    // Create and initialize the capture device on the first available input.
    var soundIn = new WasapiCapture();
    soundIn.Device = new DeviceService().InputDevices().First();
    soundIn.Initialize();

    // (Original had a stray ";;" here and wrapped this synchronous setup
    // in a pointless Task.Run — both removed.)
    var waveWriter = new WaveWriter(outputPath, soundIn.WaveFormat);

    // FillWithZeros = false: the source only yields data actually captured.
    var soundInSource = new SoundInSource(soundIn) { FillWithZeros = false };

    // Conversion chain: stereo, 8 kHz, 16-bit PCM.
    // NOTE(review): convertedSource is never read — the writer below receives
    // the raw soundInSource event data, so this chain has no effect; kept to
    // preserve original behavior. Confirm whether the converted data was meant
    // to be written instead.
    IWaveSource convertedSource = soundInSource
        .ToStereo()
        .ChangeSampleRate(8000)
        .ToSampleSource()
        .ToWaveSource(16);

    // Important: subscribe on the SoundInSource (not the ISoundIn itself) so
    // the captured data has already reached the source when the event fires.
    soundInSource.DataAvailable += (s, e) =>
    {
        waveWriter.Write(e.Data, e.Offset, e.ByteCount);
    };

    soundIn.Start();

    // Record for the requested duration.
    await Task.Delay(durationMs);

    soundIn.Stop();
    waveWriter.Dispose();
    soundIn.Dispose();
}
/// <summary>
/// Stops the capture, detaches the data handler, and releases the device,
/// then defers to the base class for any remaining cleanup.
/// </summary>
protected override void DisposeDevice()
{
    var capture = _capture;
    if (capture != null)
    {
        capture.Stop();
        // Unsubscribe before disposing so no callback lands on a dead object.
        capture.DataAvailable -= OnCaptureDataAvailable;
        capture.Dispose();
        _capture = null;
    }
    base.DisposeDevice();
}
/// <summary>
/// Stops and releases the capture device and its wave source, if active.
/// </summary>
public void StopCapture()
{
    // Already stopped — nothing to do.
    if (capture == null)
    {
        return;
    }

    Console.WriteLine("Stop Capturing...");
    capture.Stop();
    capture.Dispose();
    capture = null;
    waveSource.Dispose();
    Console.WriteLine("Stopped!");
}
/// <summary>
/// Click handler that stops the current recording and releases the writer
/// and capture device.
/// </summary>
private void button_Copy_Click(object sender, RoutedEventArgs e)
{
    // Only act when both the writer and the capture exist.
    if (w == null || capture == null)
    {
        return;
    }

    // Stop recording, then release in the same order.
    w.Stop();
    capture.Stop();

    w.Dispose();
    w = null;

    capture.Dispose();
    capture = null;
}
/// <summary>
/// Stops the audio capture, if currently recording, and properly disposes
/// member objects.
/// </summary>
private void StopCapture()
{
    // Guard clause: nothing to do unless we are actively recording.
    if (wasapiCapture.RecordingState != RecordingState.Recording)
    {
        return;
    }

    wasapiCapture.Stop();
    finalSource.Dispose();
    notificationSource.Dispose();
    wasapiCapture.Dispose();
    SampleHandler = null;
}
/// <summary>
/// Stops the audio capture.
/// </summary>
/// <exception cref="InvalidOperationException">if the audio capture is not active</exception>
public void Stop()
{
    // Atomically flip the running flag 1 -> 0; only the thread that wins
    // the exchange is allowed to stop the capture.
    if (Interlocked.CompareExchange(ref _isRunning, 0, 1) != 1)
    {
        var message = string.Format(
            "Cannot stop audio capture with formats [{0}] -> [{1}]; capture is not active",
            _soundInSource.WaveFormat,
            _convertedSource.WaveFormat
        );
        _log.Warn(message);
        throw new InvalidOperationException(message);
    }

    _log.InfoFormat(
        "Stopping audio capture with formats [{0}] -> [{1}]",
        _soundInSource.WaveFormat,
        _convertedSource.WaveFormat
    );
    _capture.Stop();
}
/// <summary>
/// Reacts to device state changes: starts the capture when the device
/// becomes active and stops it otherwise.
/// </summary>
protected override void OnDeviceStateChanged(DeviceDescription deviceDescription)
{
    base.OnDeviceStateChanged(deviceDescription);

    // Snapshot the field so a concurrent null-assignment cannot bite us
    // between the check and the call.
    var capture = _capture;
    if (capture == null)
    {
        return;
    }

    if (deviceDescription.IsActive)
    {
        capture.Start();
    }
    else
    {
        capture.Stop();
    }
}
/// <summary>
/// (Re)starts audio recording from the given device, rebuilding the
/// sample pipeline around it.
/// </summary>
/// <param name="device">The input device to record from.</param>
public void RecordAudio(MMDevice device)
{
    // Stop any in-progress capture before switching devices.
    if (_soundIn.RecordingState == RecordingState.Recording)
    {
        _soundIn.Stop();
    }

    _soundIn.Device = device;
    _soundIn.Initialize();

    var src = new SoundInSource(_soundIn);
    // Drain the pipeline as data arrives; the byte count read is unused
    // (the original declared-then-assigned a dead "read" local).
    src.DataAvailable += (s, e) => _ = _source.Read(_buffer, 0, _buffer.Length);

    // NOTE(review): the handler above reads from _source, assigned below;
    // this is safe only because DataAvailable cannot fire before Start().
    var singleBlockNotificationStream = new SingleBlockNotificationStream(src.ToSampleSource());
    _source = singleBlockNotificationStream.ToWaveSource();

    _soundIn.Start();
}
/// <summary>
/// Stops and disposes all sound outputs and the capture when the
/// application quits.
/// </summary>
void OnApplicationQuit()
{
    // The original guard was "enabled && 1 == 0", which made this cleanup
    // unreachable dead code; the TODO asked for the "1 == 0" to be deleted,
    // which is done here so shutdown cleanup actually runs.
    if (enabled)
    {
        foreach (var so in _soundOutList)
        {
            so.Stop();
            so.Dispose();
        }
        capture.Stop();
        capture.Dispose();
    }
}
/// <summary>
/// Stops and releases the capture device and disposes the writer when it
/// supports disposal.
/// </summary>
private void StopCapture()
{
    // Nothing to stop when no capture exists.
    if (_soundIn == null)
    {
        return;
    }

    _soundIn.Stop();
    _soundIn.Dispose();
    _soundIn = null;

    // The writer's static type may not be IDisposable; dispose only if it is.
    (waveWriter as IDisposable)?.Dispose();
}
/// <summary>
/// Disables the plugin, releasing the capture device and its source.
/// </summary>
public override void DisablePlugin()
{
    if (_soundIn != null)
    {
        _soundIn.Stop();
        _soundIn.Dispose();
        _soundIn = null;
    }

    // Null-propagation: disposing only when non-null, then clearing the field.
    _source?.Dispose();
    _source = null;
}
/// <summary>
/// Click handler that stops the recording, releases the writer and capture,
/// and flips the start/stop button enablement.
/// </summary>
private void StopRecordBtn_Clicked(object sender, RoutedEventArgs e)
{
    // Ignore the click when no capture is active.
    if (capture == null)
    {
        return;
    }

    capture.Stop();

    writer.Dispose();
    writer = null;

    capture.Dispose();
    capture = null;

    // Re-enable Start, disable Stop.
    this.StartRecordBtn.IsEnabled = true;
    this.StopRecordBtn.IsEnabled = false;
}
/// <summary>
/// Stops capturing: detaches the block-read callback, stops the device,
/// updates the recording state, notifies listeners, then disposes.
/// </summary>
public void Stop()
{
    // Unsubscribe first so no further block-read callbacks arrive.
    if (_notificationSource != null)
    {
        _notificationSource.SingleBlockRead -= _notificationSource_SingleBlockRead;
    }

    if (_capture == null)
    {
        return;
    }

    _capture.Stop();
    recordingState = RecordingState.Stopped;
    RaiseSourcePropertyChangedEvent(SourceProperty.RecordingState, _capture.RecordingState);
    Dispose();
}
/// <summary>
/// Ends the audio capture so the process can end completely.
/// </summary>
protected override void UnloadContent()
{
    if (soundIn != null)
    {
        // Stop before disposing so no callbacks hit a dead object.
        soundIn.Stop();
        soundIn.Dispose();
        soundIn = null;
    }

    if (source != null)
    {
        source.Dispose();
        source = null;
    }
}
/// <summary>
/// Stops the capture, releases the device and final source, and disposes
/// the writer when it supports disposal.
/// </summary>
private void StopCapture()
{
    // Nothing to tear down when no capture exists.
    if (_soundIn == null)
    {
        return;
    }

    _soundIn.Stop();
    _soundIn.Dispose();
    _soundIn = null;

    _finalSource.Dispose();

    // The writer's static type may not be IDisposable; dispose only if it is.
    (_writer as IDisposable)?.Dispose();
}
/// <summary>
/// Stops the CSCore-based recording and releases the capture, source, and
/// writer resources.
/// </summary>
void StopRecordByCSCore()
{
    if (_soundIn == null)
    {
        // Not recording — nothing to release.
        return;
    }

    _soundIn.Stop();
    _soundIn.Dispose();
    _soundIn = null;

    _finalSource.Dispose();

    // Dispose the writer only when its runtime type supports it.
    if (_writer is IDisposable disposableWriter)
    {
        disposableWriter.Dispose();
    }
}
/// <summary>
/// Switches microphone input capture to the given device and starts capturing.
/// </summary>
/// <param name="audioDevice">The input device to capture from.</param>
/// <param name="aProgress">Progress bar used to display input level.</param>
public void addInputSource(MMDevice audioDevice, ProgressBar aProgress)
{
    // Stop AND dispose the previous capture before replacing it; the
    // original only stopped it, leaking the old WasapiCapture instance.
    if (inputDevice != null)
    {
        inputCapture.Stop();
        inputCapture.Dispose();
    }

    inputDevice = audioDevice;
    inputProgress = aProgress;

    inputCapture = new WasapiCapture();
    inputCapture.Device = inputDevice;
    inputCapture.Initialize();
    inputCapture.Start();
}
/// <summary>
/// Stops the recording and sets the transcription of the closed stream.
/// </summary>
/// <returns>A Task to await.</returns>
public async Task StopRecordingAsync()
{
    try
    {
        mAudioCapture.Stop();

        // Wait for the streaming pipeline to drain its queued buffers.
        while (!_bufferQueue.IsEmpty && StreamingIsBusy)
        {
            await Task.Delay(90);
        }

        Transcription = mSttClient.FinishStream(mSttStream);
    }
    catch (Exception ex)
    {
        // Best-effort: report the failure rather than crash the caller.
        Console.WriteLine(ex.Message);
    }
}
/// <summary>
/// Switches loopback (output) capture to the given device and starts capturing.
/// </summary>
/// <param name="audioDevice">The output device to capture from.</param>
/// <param name="aProgress">Progress bar used to display output level.</param>
public void addOutputSource(MMDevice audioDevice, ProgressBar aProgress)
{
    // Stop AND dispose the previous capture before replacing it; the
    // original only stopped it, leaking the old WasapiLoopbackCapture.
    if (outputDevice != null)
    {
        outputCapture.Stop();
        outputCapture.Dispose();
    }

    outputDevice = audioDevice;
    outputProgress = aProgress;

    outputCapture = new WasapiLoopbackCapture();
    outputCapture.Device = outputDevice;
    outputCapture.Initialize();
    outputCapture.Start();
}