/// <summary>
/// Stops playback and releases every playback-related resource.
/// All fields may be null (playback never started), hence the guarded calls.
/// </summary>
public void Dispose()
{
    StopPlayback();

    _soundOut?.Dispose();
    _soundSource?.Dispose();
    _simpleNotificationSource?.Dispose();
    _currentMemoryStream?.Dispose();
}
/// <summary>
/// Stops the WASAPI capture and releases the writer, the stereo source and
/// the capture device itself.
/// </summary>
public void Dispose()
{
    // Null-conditional calls keep Dispose from throwing a
    // NullReferenceException when construction failed part-way and some
    // fields were never assigned (the original called members unguarded).
    wasapiCapture_?.Stop();
    writer_?.Dispose();
    stereoSource_?.Dispose();
    wasapiCapture_?.Dispose();
}
/// <summary>
/// Stops audio output and input and releases all CSCore resources,
/// clearing each field so the method is safe to call repeatedly.
/// </summary>
private void StopCSCore()
{
    if (_soundOut != null)
    {
        _soundOut.Stop();
        _soundOut.Dispose();
        _soundOut = null;
    }

    if (_soundIn != null)
    {
        _soundIn.Stop();
        _soundIn.Dispose();
        _soundIn = null;
    }

    if (_source != null)
    {
        _source.Dispose();
        _source = null;
    }

    // _lineSpectrum is not disposed here, only dropped; the original wrapped
    // this assignment in a redundant null check (assigning null to an
    // already-null field is harmless).
    _lineSpectrum = null;
}
/// <summary>
/// Stops playback and releases the sound output, the source and the
/// equalizer.
/// </summary>
public void Dispose()
{
    // Guard each call: the original threw NullReferenceException when
    // Dispose ran before the player was fully initialized.
    _sound?.Stop();
    _sound?.Dispose();
    _source?.Dispose();
    Equalizer?.Dispose();
}
/// <summary>
/// Releases the recording chain — writer, notification source, wave source,
/// sound-in source — and finally the capture device, clearing each field so
/// a second Dispose call is a no-op.
/// </summary>
public void Dispose()
{
    // Null-conditional + clear is equivalent to the classic
    // "if not null: dispose and null out" pattern.
    _waveWriter?.Dispose();
    _waveWriter = null;

    _notificationSource?.Dispose();
    _notificationSource = null;

    _waveSource?.Dispose();
    _waveSource = null;

    _soundInSource?.Dispose();
    _soundInSource = null;

    _capture?.Dispose();
    _capture = null;
}
/// <summary>
/// Releases the microphone capture chain. Every field may be null when
/// capture was never started, hence the null-conditional calls.
/// </summary>
public void Dispose()
{
    _microphoneBuffer?.Dispose();
    _soundIn?.Dispose();
    _soundInSource?.Dispose();
    _convertedSource?.Dispose();
}
/// <summary>
/// Test-teardown hook: prints the current test class, disposes the source
/// under test and drops the cached reference.
/// </summary>
public virtual void OnCleanup()
{
    PrintCurrentTestClass();

    var source = SourceToTest;
    source.Dispose();
    _sourceToTest = null;
}
/// <summary>
/// Window-closing handler: releases the graphics, MIDI and audio resources.
/// </summary>
private void mainWindow_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    // Null-conditional calls avoid a NullReferenceException when the window
    // is closed before these members were initialized (the original called
    // them unguarded).
    dx11?.Dispose();
    midiOutput?.Dispose();
    fileReader?.Dispose();
    waveOut?.Dispose();
}
/// <summary>
/// Stops the capture and releases the capture source, the capture device
/// and the Opus encoder.
/// </summary>
public void Dispose()
{
    // NOTE(review): the original comment said "Dont dispose capture device",
    // yet _wasapiCapture IS disposed below — behavior kept as-is; confirm
    // which object that comment referred to.
    _wasapiCapture?.Stop();
    _captureSource?.Dispose();
    _wasapiCapture?.Dispose();
    _opusEncoder?.Dispose();
}
/// <summary>
/// Stops the audio capture and disposes of the resources used by the recorder.
/// </summary>
public void Dispose()
{
    // Flip the running flag first so concurrent readers observe the shutdown.
    Interlocked.Exchange(ref _isRunning, 0);

    _capture.Stop();

    // Tear down the conversion chain before the capture device that feeds it.
    _convertedSource.Dispose();
    _soundInSource.Dispose();
    _capture.Dispose();
}
/// <summary>
/// Disposes the <see cref="Source"/>.
/// </summary>
/// <param name="disposing">Not used.</param>
protected virtual void Dispose(bool disposing)
{
    // Null-conditional dispose plus unconditional clear is equivalent to the
    // classic "if not null: dispose and null out" pattern.
    Source?.Dispose();
    Source = null;
}
/// <summary>
/// Disposes the playback chain and clears the static references so the
/// method is safe to call more than once.
/// </summary>
public static void CleanupPlayback()
{
    // Guarded calls: the original threw NullReferenceException on a second
    // invocation because the fields had already been set to null.
    soundOut?.Dispose();
    soundOut = null;

    waveSource?.Dispose();
    waveSource = null;
}
/// <summary>
/// Disposes the wave source carried by <paramref name="o"/> and clears the
/// reference.
/// </summary>
/// <param name="o">Container whose <c>data</c> is expected to hold an <c>IWaveSource</c>.</param>
public void Dispose(AudioData o)
{
    // Pattern match instead of "as + unconditional call": the original threw
    // NullReferenceException when data was null or not an IWaveSource.
    if (o.data is IWaveSource iws)
    {
        iws.Dispose();
    }

    o.data = null; // added to help catch any sequencing errors
    //System.Diagnostics.Debug.WriteLine("Audio disposed");
}
/// <summary>
/// Returns the playback length of the given audio file.
/// </summary>
/// <param name="filename">Path of the audio file to inspect.</param>
/// <returns>The decoded stream's duration.</returns>
public static TimeSpan GetLength(string filename)
{
    // "using" guarantees the codec is released even if GetLength() throws;
    // the original leaked the source on that path.
    using (IWaveSource source = CodecFactory.Instance.GetCodec(filename))
    {
        return source.GetLength();
    }
}
/// <summary>
/// Stops playback and releases the current sound source, if any.
/// </summary>
public void Stop()
{
    soundOut.Stop();

    if (soundSource == null)
    {
        return;
    }

    soundSource.Dispose();
    soundSource = null;
}
/// <summary>
/// Disposes every non-null sound instance and then the shared wave source.
/// </summary>
public override void Dispose()
{
    foreach (var instance in _instances)
    {
        if (instance != null)
        {
            instance.Dispose();
        }
    }

    _waveSource.Dispose();
}
/// <summary>
/// Form-closing handler: stops the frame timer, clears the matrix and shuts
/// down the audio capture chain.
/// </summary>
private void VisualizerForm_FormClosing(object sender, FormClosingEventArgs e)
{
    FrameTimer.Stop();
    Matrix.Clear();

    // Stop the device before disposing it and its downstream source; both
    // may be null if capture never started.
    SoundIn?.Stop();
    SoundIn?.Dispose();
    Source?.Dispose();
}
// BLARG 01.14.2020: Added the rest of the disposables since we'll be calling
// this method a lot more.
/// <summary>
/// Releases the capture device, its source chain, the backing stream and
/// the endpoint-volume handle. Every field may be null, so all calls are
/// guarded.
/// </summary>
public void Dispose()
{
    // No explicit _capture?.Stop() here — Dispose() takes care of it.
    _capture?.Dispose();
    _soundInSource?.Dispose();
    _source?.Dispose();
    _stream?.Dispose();
    _audioEndpointVolume?.Dispose();
}
/// <summary>
/// Detaches the sample callback and tears down the loopback-capture chain.
/// </summary>
void StopListen()
{
    // Unhook the handler first so no further blocks are processed while the
    // chain is being torn down.
    singleBlockNotificationStream.SingleBlockRead -= SingleBlockNotificationStream_SingleBlockRead;

    soundInSource.Dispose();
    realTimeSource.Dispose();

    loopbackCapture.Stop();
    loopbackCapture.Dispose();
}
/// <summary>
/// Stops listening: detaches the block-read callback, releases the source
/// chain and shuts down the WASAPI capture.
/// </summary>
public void StopListen()
{
    // Detach first so no further samples are delivered during teardown.
    _singleBlockNotificationStream.SingleBlockRead -= SingleBlockNotificationStream_SingleBlockRead;

    _soundInSource.Dispose();
    _realtimeSource.Dispose();

    _receiveAudio = null;

    _wasapiCapture.Stop();
    _wasapiCapture.Dispose();
}
/// <summary>
/// Frees resources taken by CSCore.
/// </summary>
public void Dispose()
{
    // Guard clause keeps repeated Dispose calls idempotent.
    if (m_disposed)
    {
        return;
    }

    m_decoder.Dispose();
    m_source.Dispose();
    m_disposed = true;
}
/// <summary>
/// Detaches the block-read callback and disposes the capture chain.
/// </summary>
private void StopListening()
{
    blockNotifyStream.SingleBlockRead -= SingleBlockRead;

    soundIn.Dispose();
    realtime.Dispose();

    // loopback may be null if listening never started.
    loopback?.Stop();
    loopback?.Dispose();
}
/// <summary>
/// Closes the currently loaded wave file: stops playback, resets all view
/// and selection state, disposes the audio sources and the cached bitmap,
/// resets the time labels and deletes the temporary raw file.
/// </summary>
public void CloseWaveFile()
{
    StopPlaying();
    mFilename = "";

    // Return the UI controls to their "no file loaded" state.
    progress.Visible = true;
    hrScroll.Enabled = false;
    cursorTimer.Enabled = false;
    playTimer.Enabled = false;

    // Clear all waveform-drawing and selection bookkeeping.
    mDrawWave = false;
    mSamplesPerPixel = 0;
    mSelectionStartX = 0;
    mSelectionEndX = 0;
    mSelectionStartSample = 0;
    mSelectionEndSample = 0;
    mPrevX = 0;
    mDrawingStartOffset = 0;
    CursorPositionX = 0;
    mPrevWidth = 0;
    mPrevHeight = 0;
    mPrevOffset = 0;
    mPrevSamplesPerPixel = 0;

    // Release the audio sources and the rendered waveform bitmap.
    if (mWaveSource != null) { mWaveSource.Dispose(); }
    mWaveSource = null;
    if (mDrawSource != null) { mDrawSource.Dispose(); }
    mDrawSource = null;
    if (mBitmap != null) { mBitmap.Dispose(); }
    mBitmap = null;

    // Reset every time-position label to zero.
    String timeZero = "00:00:00.00";
    lblCursorPos.Text = timeZero;
    lblSelectStartPos.Text = timeZero;
    lblSelectEndPos.Text = timeZero;
    lblSelectLength.Text = timeZero;

    // Close the raw-file readers before deleting the backing file so the
    // delete does not fail on an open handle.
    if (mRawDrawReader != null) { mRawDrawReader.Dispose(); }
    if (mRawPlayReader != null) { mRawPlayReader.Dispose(); }
    if (System.IO.File.Exists(mRawFileName)) { System.IO.File.Delete(mRawFileName); }
}
/// <summary>
/// Plays the configured source on every enabled device and waits until all
/// of them have stopped, raising <c>PositionUpdate</c> periodically and
/// <c>Stopped</c> once at the end.
/// </summary>
// NOTE(review): "async void" means callers cannot await this method or
// observe its exceptions — acceptable only as a fire-and-forget entry
// point; confirm that is the intent.
public async void Play()
{
    MMDevice[] playDevices = GetEnabledDevices();
    Log.WriteLine("Playing \"{0}\" with volume {1:0.00} on devices: {2}", name, GetVolume(), String.Join <string>(", ", playDevices.Select(d => d.FriendlyName)));

    // One reset event per device; each is signaled from that device's
    // Stopped handler so we can wait for all of them below.
    List <ManualResetEvent> mres = new List <ManualResetEvent>();
    foreach (MMDevice device in playDevices)
    {
        WasapiOut audioOut = new WasapiOut();
        // Each device gets its own decoded source instance.
        IWaveSource source = CodecFactory.Instance.GetCodec(this.source);
        Log.WriteLine("Source format: {0}", source.WaveFormat.ToString());
        audioOut.Device = device;
        audioOut.Initialize(source);
        audioOut.Volume = GetVolume();
        ManualResetEvent mre = new ManualResetEvent(false);
        mres.Add(mre);
        // Dispose the output and its source as soon as this device stops,
        // then signal completion for the wait loop.
        audioOut.Stopped += (s, e) => { audioOut.Dispose(); source.Dispose(); mre.Set(); Log.WriteLine("\"{0}\" stopped on device \"{1}\"", name, device.FriendlyName); };
        audioOuts.Add(new Tuple <WasapiOut, IWaveSource>(audioOut, source));
        audioOut.Play();
    }

    Log.WriteLine("Waiting for all ({0}) events to trigger", mres.Count);
    // Poll on a worker thread: wake every positionUpdateFreq to raise a
    // position update until every device has signaled completion.
    await Task.Run(() => { while (true) { if (WaitHandle.WaitAll(mres.ToArray(), positionUpdateFreq)) { return; } PositionUpdate(this, new PositionUpdateEventArgs(GetPosition())); } });
    Log.WriteLine("\"{0}\" stopped", name);
    audioOuts.Clear();
    if (Stopped != null) { Stopped(this, EventArgs.Empty); }
}
/// <summary>
/// Builds the playback chain for the given track: decodes the file,
/// resamples to 32 kHz, appends a 10-band equalizer and hands the result to
/// the WASAPI output with a default volume.
/// </summary>
private void initializeSoundSource(MusicEntity music)
{
    // Release the previous source before building a new one.
    iws?.Dispose();
    iws = GetCodec(music.Extension, music.Path);
    // Resample and attach the equalizer (captured in mEqualizer);
    // lfw wraps the whole chain — its exact role (fade/filter?) is not
    // visible here, confirm against its type.
    iws = lfw.Initialize(iws.ToSampleSource()
        .ChangeSampleRate(32000)
        .AppendSource(Equalizer.Create10BandEqualizer, out mEqualizer));
    wasapiOut.Initialize(iws);
    // Default playback volume applied to both the property and the device.
    Volume = 0.7f;
    wasapiOut.Volume = Volume;
}
/// <summary>
/// Stops listening: clears the running flag, detaches the block-read
/// callback and tears down the loopback capture chain.
/// </summary>
public void StopListen()
{
    _isRunning = false;

    // Detach first so no further samples are processed during teardown.
    _singleBlockNotificationStream.SingleBlockRead -= singleBlockNotificationStream_SingleBlockRead;

    _soundInSource.Dispose();
    _realtimeSource.Dispose();

    _receiveAudio = null;

    _loopbackCapture.Stop();
    _loopbackCapture.Dispose();
}
/// <summary>
/// Stops the audio loopback capture and finalizes the external ogg encoder
/// process: closing its stdin signals end-of-input, then we wait for it to
/// exit before disposing it.
/// </summary>
private void StopAudioLoopback()
{
    // Stop capturing before tearing down the chain that consumes the data.
    _soundIn.Stop();
    _soundIn.Dispose();
    _finalSource.Dispose();

    // _stdOut presumably pumps the encoder's output; stop it before closing
    // the process streams — TODO confirm against its type.
    _stdOut.Stop();

    // Closing stdin tells the encoder there is no more input, letting it
    // flush and exit, which WaitForExit() then observes.
    _oggEncProcess.StandardInput.Close();
    _oggEncProcess.StandardOutput.Close();
    _oggEncProcess.WaitForExit();
    _oggEncProcess.Dispose();
}
/// <summary>
/// Stops and releases the capture device and its wave source.
/// Does nothing when capture was never started.
/// </summary>
public void StopCapture()
{
    if (capture == null)
    {
        return;
    }

    Console.WriteLine("Stop Capturing...");
    capture.Stop();
    capture.Dispose();
    capture = null;

    // Guarded: the original called waveSource.Dispose() unconditionally,
    // which throws if the chain was only partially built.
    waveSource?.Dispose();

    Console.WriteLine("Stopped!");
}
/// <summary>
/// Releases the playback output and the loaded wave source.
/// </summary>
private void UnloadAudioFile()
{
    // Dispose the output before the source it reads from; the original
    // disposed the wave source first, while soundOut could still be
    // consuming it.
    if (soundOut != null)
    {
        soundOut.Dispose();
    }

    if (waveSource != null)
    {
        waveSource.Dispose();
    }
}
/// <summary>
/// Disposes the playback chain — output first, then the source it was
/// reading from — and clears both references so cleanup can run again.
/// </summary>
private void CleanupPlayback()
{
    _soundOut?.Dispose();
    _soundOut = null;

    _waveSource?.Dispose();
    _waveSource = null;
}