/// <summary>
/// Tears down the current capture/playback chain and rebuilds it on the
/// supplied input and output endpoints.
/// </summary>
/// <param name="input">Capture endpoint for the new pipeline.</param>
/// <param name="output">Render endpoint for the new pipeline.</param>
public void UpdateDevices(MMDevice input, MMDevice output)
{
    // Shut down the previous pipeline — output first so nothing pulls from
    // the input chain while it is being disposed.
    outp?.Stop();
    outp?.Dispose();
    inp?.StopRecording();
    inp?.Dispose();

    // Fresh capture source feeding a buffered provider into the mixer.
    inp = new WasapiCapture(input, true, 5);
    inp.DataAvailable += OnCapture;
    buffer = new BufferedWaveProvider(inp.WaveFormat);
    mixer = new MixingWaveProvider32();
    mixer.AddInputStream(buffer);

    // Reuse the existing resampler instance when one is already allocated.
    if (resampler == null)
    {
        resampler = new AudioResampler(mixer);
    }
    else
    {
        resampler.Update(mixer);
    }

    outp = new WasapiOut(output, AudioClientShareMode.Shared, true, 5);
    outp.Init(resampler);
    inp.StartRecording();
    outp.Play();
}
/// <summary>
/// Stops both capture streams, releases the wave writers and moves the UI
/// into the "ready to play" state.
/// </summary>
private void btStop_Click(object sender, EventArgs e)
{
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }
    if (sourceStream1 != null)
    {
        sourceStream1.StopRecording();
        sourceStream1.Dispose();
        sourceStream1 = null;
    }
    if (this.waveWriter == null)
    {
        return;
    }
    this.waveWriter.Dispose();
    this.waveWriter = null;
    // BUGFIX: waveWriter2 was disposed unconditionally while waveWriter was
    // null-checked — guard it too so a half-initialized session cannot throw.
    waveWriter2?.Dispose();
    waveWriter2 = null;

    this.sbtRecord.Enabled = false;
    this.sbtStop.Enabled = false;
    sbtPlay.Enabled = true;
    sbtPlay.Focus();
    //mix();
}
/* TODO
 * If the audio from the computer is silent for about 10 seconds, either NAudio or Wasapi Capture
 * stops providing audio samples. This fixes this problem by:
 *  - Detect if there is more than <varNameHere=5s> of silence in the clip.
 *  - If so, check if the length of the current snippet is more than <varNameHere=0.2s> shorter
 *    than the time since the last snippet.
 *  - If so, find the longest stretch of silence within the current audio data snippet (this assumes
 *    that there is only ever one timeout within an audio data snippet, i.e. snippets are shorter than 20s).
 *  - Then, insert the missing duration of silence into the stretch of silence (in the middle to be safe).
 * private void FillInUncapturedSilence(WaveInEventArgs e)
 * {
 *     var realtimeSampleDuration = _sampleDurationStopwatch.ElapsedMilliseconds;
 *     _sampleDurationStopwatch.Stop();
 *
 *     e.
 *
 *     _sampleDurationStopwatch.Restart();
 * }
 */
// Handler for the capture's RecordingStopped event: releases the devices,
// renames the output to its final .wav name and notifies subscribers.
private void RecordingStopped(object sender, StoppedEventArgs e)
{
    // NOTE(review): neither field is null-checked, nor is e.Exception
    // inspected — presumably this handler only fires after a successful
    // start; confirm against the capture setup path.
    _wasapiIn.Dispose();
    _wavOut.Dispose();
    // NOTE(review): File.Move throws if the .wav destination already exists,
    // or if FullOutputPath already carries the .wav extension — verify the
    // temp-extension convention used when recording starts.
    File.Move(this.FullOutputPath, Path.ChangeExtension(this.FullOutputPath, ".wav"));
    this.CurrentExtension = ".wav";
    HasStopped = true;
    // Raised last so subscribers observe the fully finalized file.
    Stopped?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// Stops any in-progress recording and releases the capture device and the
/// output writer. Safe to call when nothing was ever started.
/// </summary>
public void Dispose()
{
    _waveIn?.StopRecording();
    _waveIn?.Dispose();
    _waveIn = null;

    _writer?.Close();
    _writer = null;
}
/// <summary>
/// Disconnects the PSE, stops the fade timer and releases the mixer, capture
/// device and buffer.
/// </summary>
public void Dispose()
{
    // Disable and detach the PSE first so no callbacks fire mid-teardown.
    PSE.Enable = false;
    PSE.VolumeRequest -= PSE_VolumeRequest;
    PSE.ExtractedDegreeOfRisk -= PSE_ExtractedDegreeOfRisk;

    PSEFadeTimer.Stop();
    PSEFadeTimer.Dispose();
    PSE.Dispose();

    Mixer?.RemoveAllMixerInputs();
    WasapiCapture?.Dispose();
    BufferedWaveProvider?.ClearBuffer();

    Mixer = null;
    WasapiCapture = null;
    BufferedWaveProvider = null;
}
/// <summary>
/// Stops the capture device and releases it together with the wave writer.
/// </summary>
private void StopRecording()
{
    // Guard against a stop request arriving before recording was ever
    // started (fields still null) — the original dereferenced unconditionally.
    if (m_capture != null)
    {
        m_capture.Stop();
        m_capture.Dispose();
    }
    m_ww?.Dispose();
}
/// <summary>
/// Stops recording, flushes the writer, resets the UI and optionally
/// releases the capture device.
/// </summary>
/// <param name="dispose">When true, also disposes the capture device.</param>
private void stopRecording(bool dispose = false)
{
    if (mCapture == null)
    {
        return;
    }

    // Stop the capture device if it is still running.
    if (mCapture.CaptureState != CaptureState.Stopped)
    {
        mCapture.StopRecording();
    }

    // BUGFIX: finalize the writer even when the device had already reached
    // the Stopped state on its own — previously the writer was only disposed
    // inside the state check and leaked otherwise.
    if (mWriter != null)
    {
        mWriter.Dispose();
        mWriter = null;
    }

    // Post-recording cleanup.
    mRecordingFormat = null;

    // UI update: allow a new recording to be configured and started.
    startRecordingButton.Enabled = true;
    stopRecordingButton.Enabled = false;
    inputDeviceListSelector.Enabled = true;
    inputChannelSelector.Enabled = true;

    // Release the device itself when requested.
    if (dispose)
    {
        mCapture.Dispose();
        mCapture = null;
    }
}
/// <summary>
/// Releases the microphone capture chain: buffer, device, source wrapper and
/// format converter. Each field is released only if it was created.
/// </summary>
public void Dispose()
{
    if (_microphoneBuffer != null)
    {
        _microphoneBuffer.Dispose();
    }
    if (_soundIn != null)
    {
        _soundIn.Dispose();
    }
    if (_soundInSource != null)
    {
        _soundInSource.Dispose();
    }
    if (_convertedSource != null)
    {
        _convertedSource.Dispose();
    }
}
// Stops a recording whose capture/writer objects were stashed in the ASP.NET
// session under the keys 'wc' and 'ww'.
// NOTE(review): the return flag looks inverted — it is true only when there
// was NOTHING in the session to stop, and false after a successful stop.
// Callers may depend on this, so it is flagged rather than changed; confirm
// the intended contract before renaming or fixing.
public bool stopRecording()
{
    bool isStopped = false;
    if (System.Web.HttpContext.Current.Session[ww] != null && System.Web.HttpContext.Current.Session[wc] != null)
    {
        capture = (WasapiCapture)System.Web.HttpContext.Current.Session[wc];
        w = (WaveWriter)System.Web.HttpContext.Current.Session[ww];
        //stop recording
        capture.Stop();
        w.Dispose();
        w = null;
        capture.Dispose();
        capture = null;
        // Clear the session slots so a repeated call takes the else branch.
        System.Web.HttpContext.Current.Session[ww] = null;
        System.Web.HttpContext.Current.Session[wc] = null;
        //Label1.Text = "Stopped";
    }
    else
    {
        isStopped = true;
    }
    return(isStopped);
}
/// <summary>
/// Stops both captures, asks the ffmpeg child process to quit, re-enables
/// the settings UI and releases the writers and devices.
/// </summary>
private void Stop()
{
    if (!recording)
    {
        return;
    }

    outputCapture.Stop();
    inputCapture.Stop();

    // 'q' is ffmpeg's quit command on stdin; give it a second to exit.
    process.StandardInput.Write('q');
    process.WaitForExit(1000);
    process.Close();

    // Unlock the configuration group boxes again.
    groupBox1.Enabled = true;
    groupBox2.Enabled = true;
    groupBox3.Enabled = true;
    groupBox4.Enabled = true;

    outputWaveWriter?.Dispose();
    inputWaveWriter?.Dispose();
    outputCapture.Dispose();
    inputCapture.Dispose();

    recording = false;
}
/// <summary>
/// Stops the capture device and releases it along with its source wrapper.
/// </summary>
public void Stop()
{
    //SoundSpectrum.Stop();
    if (_soundIn != null)
    {
        _soundIn.Stop();
        _soundIn.Dispose();
        _soundIn = null;
    }

    if (_source != null)
    {
        _source.Dispose();
        _source = null;
    }
}
// Stops both microphone and speaker recordings, releases all audio objects,
// then reports the resulting file sizes (in MB) back to the window.
public void stopRecording()
{
    isRecording = false;

    // Stop captures before disposing the writers so no late buffers arrive.
    micCapture.Stop();
    speakCapture.Stop();
    micWriter.Dispose();
    speakWriter.Dispose();
    micCapture.Dispose();
    speakCapture.Dispose();
    soundout.Stop();
    soundout.Dispose();

    // "-" is the placeholder shown when a file size could not be determined.
    string micSize = "-", speakSize = "-";
    if (File.Exists(micFileName))
    {
        FileInfo f = new FileInfo(micFileName);
        int mbytes = (int)(f.Length / 1024 / 1024);
        micSize = mbytes.ToString();
    }
    else
    {
        MessageBox.Show("No file with name\n " + micFileName + "\nexists.\n\nMicrophone may not have been recorded.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    // NOTE(review): unlike the microphone branch above, a missing speaker
    // file produces no error dialog — confirm whether that asymmetry is
    // intentional.
    if (File.Exists(speakFileName))
    {
        FileInfo f = new FileInfo(speakFileName);
        int mbytes = (int)(f.Length / 1024 / 1024);
        speakSize = mbytes.ToString();
    }

    window.updateInfo(micFileName, micSize, speakFileName, speakSize);
    window.UnlockUI();
}
/// <summary>
/// Tears down the CSCore pipeline: playback first, then capture, then the
/// source, and finally drops the spectrum reference.
/// </summary>
private void StopCSCore()
{
    if (_soundOut != null)
    {
        _soundOut.Stop();
        _soundOut.Dispose();
        _soundOut = null;
    }

    if (_soundIn != null)
    {
        _soundIn.Stop();
        _soundIn.Dispose();
        _soundIn = null;
    }

    if (_source != null)
    {
        _source.Dispose();
        _source = null;
    }

    // The spectrum holds no unmanaged resources; just release the reference.
    _lineSpectrum = null;
}
/// <summary>
/// Completion handler for the background microphone listener: stops the
/// capture and the alert playback, then closes the window.
/// </summary>
private void bgInputListener_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    Trace.TraceInformation("Background Microphone Listener closing. State: {0}", state.ToString());
    state = States.Stopping;

    _soundCapture?.Stop();
    _soundCapture?.Dispose();

    _isSoundAlertPlaying = false;
    // BUGFIX: _soundCapture was null-guarded but _soundOut was dereferenced
    // unconditionally — guard it the same way.
    if (_soundOut != null)
    {
        if (_soundOut.PlaybackState != PlaybackState.Stopped)
        {
            _soundOut.Stop();
        }
        _soundOut.Dispose();
    }

    state = States.Stopped;
    Close();
}
/// <summary>
/// Repeatedly wires a WASAPI capture into a WASAPI output and asserts that
/// playback is still running after two seconds; ten iterations shake out
/// device-release races between runs.
/// </summary>
public void SoundInToSoundOutTest_Wasapi()
{
    for (int i = 0; i < 10; i++)
    {
        var waveIn = new WasapiCapture();
        waveIn.Initialize();
        waveIn.Start();

        // FillWithZeros keeps the output fed even when no data is captured.
        var waveInToSource = new SoundInSource(waveIn) { FillWithZeros = true };

        var soundOut = new WasapiOut();
        soundOut.Initialize(waveInToSource);
        soundOut.Play();

        Thread.Sleep(2000);
        Assert.AreEqual(PlaybackState.Playing, soundOut.PlaybackState);

        soundOut.Dispose();
        // BUGFIX: the SoundInSource wrapper was never disposed, leaking its
        // internal buffer/event subscription on every iteration.
        waveInToSource.Dispose();
        waveIn.Dispose();
    }
}
/// <summary>
/// Clears both channel memories and releases the writers, streams, capture
/// device and file timer. Every member is optional, so each is null-checked.
/// </summary>
public void Cleanup()
{
    if (channelMemoryA != null)
    {
        channelMemoryA.Clear();
    }
    if (channelMemoryB != null)
    {
        channelMemoryB.Clear();
    }

    if (channelWriterA != null)
    {
        channelWriterA.Dispose();
    }
    if (channelWriterB != null)
    {
        channelWriterB.Dispose();
    }

    if (channelStreamA != null)
    {
        channelStreamA.Dispose();
    }
    if (channelStreamB != null)
    {
        channelStreamB.Dispose();
    }

    if (channelCapture != null)
    {
        channelCapture.Dispose();
    }

    if (fileTimer != null)
    {
        fileTimer.Stop();
        fileTimer.Dispose();
    }
}
/// <summary>
/// Releases the recording chain from the outside in: writer, notification
/// source, wave source, sound-in source and finally the capture device.
/// </summary>
public void Dispose()
{
    _waveWriter?.Dispose();
    _waveWriter = null;

    _notificationSource?.Dispose();
    _notificationSource = null;

    _waveSource?.Dispose();
    _waveSource = null;

    _soundInSource?.Dispose();
    _soundInSource = null;

    _capture?.Dispose();
    _capture = null;
}
// Handler for the capture's RecordingStopped event: reports any capture
// error, shuts down the ffmpeg child process, detaches and releases the
// capture, flushes the transport and notifies subscribers.
private void OnRecordingStopped(object sender, StoppedEventArgs err)
{
    if (err.Exception != null)
    {
        InfoMessage?.Invoke(this, $"Ошибка: {err.Exception.Message}");
    }

    // NOTE(review): Kill() after closing stdin may throw if ffmpeg has
    // already exited on its own — confirm whether a try/catch or HasExited
    // check is needed here.
    ffmpegProcess?.StandardOutput.Close();
    ffmpegProcess?.StandardInput.Close();
    ffmpegProcess?.Kill();

    // Detach handlers before disposing so no late callbacks arrive.
    _audioCapture.RecordingStopped -= OnRecordingStopped;
    _audioCapture.DataAvailable -= OnDataAvailable;
    _audioCapture.Dispose();
    _audioCapture = null;
    // Reset the "recording in progress" interlocked flag.
    _threadSafeBoolBackValue = 0;

    // NOTE(review): Task.Run(...).Wait() blocks this thread on async work —
    // deadlock-prone if this handler runs on a sync-context thread; verify
    // which thread raises RecordingStopped.
    Task.Run(() => { _transportService.SendFinalData(); }).Wait();
    Task.Run(() => { _transportService.CloseConnection(); }).Wait();

    InfoMessage?.Invoke(this, "Запись остановлена");
    RecordLevel?.Invoke(this, 0.0F);
    RecordStopped?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// Stops capture and releases the capture source, capture wrapper and Opus
/// encoder.
/// </summary>
public void Dispose()
{
    // NOTE(review): the original comment said "Don't dispose capture device"
    // yet _wasapiCapture IS disposed below — presumably the note refers to
    // the underlying endpoint handle rather than the wrapper; confirm with
    // the owner of the endpoint.
    if (_wasapiCapture != null)
    {
        _wasapiCapture.Stop();
    }
    if (_captureSource != null)
    {
        _captureSource.Dispose();
    }
    if (_wasapiCapture != null)
    {
        _wasapiCapture.Dispose();
    }
    if (_opusEncoder != null)
    {
        _opusEncoder.Dispose();
    }
}
/// <summary>
/// Releases the audio input device and notifies listeners that listening has
/// stopped.
/// </summary>
private void DisposeAudioDevice()
{
    // BUGFIX: guard the dispose — this method can be reached before a device
    // was ever created, and the original dereferenced unconditionally.
    _audioIn?.Dispose();
    IsListening = false;
    OnAudioEventAvailable(new AudioEventArgs
    {
        State = AudioRecordState.ListeningStopped,
        Information = "Stopped Listening."
    });
}
/// <summary>
/// Handler for the recorder's stopped event: releases the microphone wrapper
/// if one is still held.
/// </summary>
private void RecorderOnDataEnds(object sender, StoppedEventArgs stoppedEventArgs)
{
    realMic?.Dispose();
    realMic = null;
}
/// <summary>
/// Stops the recorder and player (if running) and releases both.
/// </summary>
public void StopForwarding()
{
    if (_recorder != null)
    {
        _recorder.StopRecording();
    }
    if (_player != null)
    {
        _player.Stop();
    }
    if (_recorder != null)
    {
        _recorder.Dispose();
    }
    if (_player != null)
    {
        _player.Dispose();
    }
}
/// <summary>
/// Stops the capture if it is currently recording, then releases it.
/// </summary>
public void StopListen()
{
    bool isRecording = capture.RecordingState == RecordingState.Recording;
    if (isRecording)
    {
        capture.Stop();
    }
    capture.Dispose();
}
/// <summary>
/// Stops the audio capture and disposes of the resources used by the recorder.
/// </summary>
public void Dispose()
{
    // Clear the running flag first so any concurrent reader observes the
    // shutdown before the capture objects go away.
    Interlocked.Exchange(ref _isRunning, 0);

    _capture.Stop();

    // Release in dependency order: converter, source wrapper, then device.
    _convertedSource.Dispose();
    _soundInSource.Dispose();
    _capture.Dispose();
}
/// <summary>
/// Stops and releases the capture device if it was initialized. Safe to call
/// more than once.
/// </summary>
public void Free()
{
    if (initialized)
    {
        capture.Stop();
        capture.Dispose();
        // BUGFIX: clear the flag so a second Free() call does not Stop/Dispose
        // the already-disposed capture object.
        initialized = false;
    }
}
/// <summary>
/// Releases the dummy capture, if one is active.
/// </summary>
public void Dispose()
{
    _dummyCapture?.Dispose();
    _dummyCapture = null;
}
// Unity lifecycle hook: runs once when the application is shutting down.
void OnApplicationQuit()
{
    // NOTE(review): 'enabled' presumably mirrors whether capture was started
    // (it reads like the MonoBehaviour.enabled flag) — confirm against the
    // component's initialization path before relying on it.
    if (enabled)
    {
        capture.Stop();
        capture.Dispose();
    }
}
/// <summary>
/// Releases the CSCore capture (if any) and reports the teardown in the
/// status area.
/// </summary>
private static void CSCore_Cleanup()
{
    capture?.Dispose();
    capture = null;
    // Status is reported even when no capture existed, matching prior behavior.
    UpdateStatusMessage.ShowStatusMessage(2, "Capture Destroyed");
}
//BLARG 01.14.2020: Added the rest of the disposables since we'll be calling this method a lot more
/// <summary>
/// Releases every disposable held by this instance. Stop() is not called
/// explicitly — per the original note, Dispose() on the capture handles it.
/// </summary>
public void Dispose()
{
    if (_capture != null)
    {
        _capture.Dispose();
    }
    if (_soundInSource != null)
    {
        _soundInSource.Dispose();
    }
    if (_source != null)
    {
        _source.Dispose();
    }
    if (_stream != null)
    {
        _stream.Dispose();
    }
    if (_audioEndpointVolume != null)
    {
        _audioEndpointVolume.Dispose();
    }
}
/// <summary>
/// Replaces any existing dummy capture with a fresh one bound to the current
/// endpoint, when that endpoint is a capture device.
/// </summary>
private void EnableCaptureEndpoint()
{
    // Drop the previous dummy capture before (re)creating one.
    if (_dummyCapture != null)
    {
        _dummyCapture.Dispose();
        _dummyCapture = null;
    }

    // Nothing to do unless the endpoint exists and is a capture device.
    if (Endpoint == null || Endpoint.DataFlow != DataFlow.Capture)
    {
        return;
    }

    // A running shared-mode capture keeps the endpoint active.
    _dummyCapture = new WasapiCapture(true, AudioClientShareMode.Shared, 250)
    {
        Device = Endpoint
    };
    _dummyCapture.Initialize();
    _dummyCapture.Start();
}