/// <summary>
/// Initializes the FFT buffers, line renderer, and WASAPI loopback capture,
/// wires the processing chain (pitch shifter -> notification stream -> FFT),
/// and starts capturing.
/// </summary>
void Start()
{
    fftData = new float[fftSize];
    persistentSamples = new FixedQueue<float>[PersSampleUpperIndex - PersSampleLowerIndex];
    smoothedSamples = new float[persistentSamples.Length];
    for (int i = 0; i < persistentSamples.Length; i++)
    {
        persistentSamples[i] = new FixedQueue<float>(PersistenSampleLength);
    }

    line = GetComponent<LineRenderer>();
    leftChannel = new float[TotalSamples];

    // Loopback capture records whatever the default render device is playing.
    capture = new WasapiLoopbackCapture();
    capture.Initialize();

    var soundInSource = new SoundInSource(capture);
    var source = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out pitchShifter);

    fft1 = new FftTransform(source.WaveFormat.Channels, fftSize);
    fft2 = new FftProvider(source.WaveFormat.Channels, FftSize.Fft2048);

    stream = new SingleBlockNotificationStream(pitchShifter);
    stream.SingleBlockRead += SingleBlockRead;

    waveSource = stream.ToWaveSource(16);
    buffer = new byte[waveSource.WaveFormat.BytesPerSecond / 2];

    // BUG FIX: DataAvailable was previously subscribed twice — once on
    // soundInSource and again on capture via a forwarding lambda — so every
    // buffer was handled twice. Subscribe exactly once.
    soundInSource.DataAvailable += DataAvailable;

    capture.Start();
}
/// <summary>
/// Sets up the audio device, and the file to record into, adds listeners to
/// the events, starts recording, and toggles the buttons.
/// </summary>
private void record()
{
    try
    {
        var device = (MMDevice)AudioInputDeviceComboBox.SelectedItem;

        // Guard clause (was `if (!(device == null))`): no device selected —
        // warn the user and keep the UI in the idle state.
        if (device == null)
        {
            recordButton.Text = recordText;
            MPAiMessageBoxFactory.Show(noAudioDeviceText, warningText, MPAiMessageBoxButtons.OK);
            return;
        }

        recordButton.Text = stopText;
        recordingProgressBar.Value = 0;
        device.AudioEndpointVolume.Mute = false;

        // Use wasapi by default
        waveIn = new WasapiCapture(device);
        waveIn.DataAvailable += OnDataAvailable;
        onDataAvailableSubscribed = true;
        waveIn.RecordingStopped += OnRecordingStopped;
        writer = new WaveFileWriter(audioFilePath, waveIn.WaveFormat);
        waveIn.StartRecording();
        SetControlStates(true);
    }
    catch (Exception exp)
    {
#if DEBUG
        MPAiMessageBoxFactory.Show(exp.Message, warningText, MPAiMessageBoxButtons.OK);
#endif
        // NOTE(review): in release builds this catch swallows the exception
        // silently — consider at least logging it.
    }
}
/// <summary>
/// Creates a WASAPI capture for the selected device (share mode and format
/// from the UI selections), wires its events, and starts recording into a
/// timestamped file name.
/// </summary>
private void Record()
{
    try
    {
        _capture = new WasapiCapture(SelectedDevice)
        {
            ShareMode = ShareModeIndex == 0 ? AudioClientShareMode.Shared : AudioClientShareMode.Exclusive,
            WaveFormat = SampleTypeIndex == 0
                ? WaveFormat.CreateIeeeFloatWaveFormat(SampleRate, ChannelCount)
                : new WaveFormat(SampleRate, BitDepth, ChannelCount)
        };
        // NOTE(review): "yyy" in the format string is likely meant to be
        // "yyyy"; output is identical for 4-digit years, so left unchanged.
        _currentFileName = $"Burp_N0_{DateTime.Now:yyy_dd_MM_HH_mm_ss}.wav";
        RecordLevel = SelectedDevice.AudioEndpointVolume.MasterVolumeLevelScalar;

        // BUG FIX: subscribe BEFORE StartRecording(). The handlers were
        // previously attached after the capture had started, so the first
        // buffers (and a very early RecordingStopped) could be missed.
        _capture.DataAvailable += CaptureOnDataAvailable;
        _capture.RecordingStopped += OnRecordingStopped;
        _capture.StartRecording();

        RecordCommand.IsEnabled = false;
        StopCommand.IsEnabled = true;
        Message = "Recording...";
    }
    catch (Exception e)
    {
        MessageBox.Show(e.Message);
    }
}
/// <summary>
/// Smoke test: routes WASAPI capture into WASAPI output ten times and checks
/// that playback actually reaches the Playing state each time.
/// </summary>
public void SoundInToSoundOutTest_Wasapi()
{
    for (int i = 0; i < 10; i++)
    {
        // BUG FIX: use using-blocks so both devices are released even when the
        // assertion throws; previously a failing Assert leaked the capture and
        // the output for every remaining iteration.
        using (var waveIn = new WasapiCapture())
        {
            waveIn.Initialize();
            waveIn.Start();

            var waveInToSource = new SoundInSource(waveIn) { FillWithZeros = true };

            using (var soundOut = new WasapiOut())
            {
                soundOut.Initialize(waveInToSource);
                soundOut.Play();

                Thread.Sleep(2000);
                Assert.AreEqual(PlaybackState.Playing, soundOut.PlaybackState);
            }
        }
    }
}
/// <summary>
/// Captures everything the computer plays (WASAPI loopback) and feeds it into
/// an FFT pipeline through a single-block notification stream.
/// </summary>
public SoundCapture()
{
    // This uses the wasapi api to get any sound data played by the computer.
    capture = new WasapiLoopbackCapture();
    capture.Initialize();

    // Wrap the capture so it can be consumed as an ordinary wave source.
    IWaveSource source = new SoundInSource(capture);

    // From https://github.com/filoe/cscore/blob/master/Samples/WinformsVisualization/Form1.cs
    // 4096 is the typical size; raise it for higher frequency detail as needed.
    fftSize = FftSize.Fft4096;

    // Actual fft data buffer.
    fftBuffer = new float[(int)fftSize];

    // The notification stream fires once per sample block so the spectrum can
    // be fed as data flows through.
    var notifier = new SingleBlockNotificationStream(source.ToSampleSource());
    notifier.SingleBlockRead += NotificationSource_SingleBlockRead;

    // Reading from this final source is what actually pulls data through the
    // chain (without reads, SingleBlockRead never fires).
    finalSource = notifier.ToWaveSource();

    capture.DataAvailable += Capture_DataAvailable;
    capture.Start();
}
/// <summary>
/// Releases the audio pipeline objects and clears their fields so a repeated
/// Dispose call is a harmless no-op.
/// </summary>
public void Dispose()
{
    _waveWriter?.Dispose();
    _waveWriter = null;

    _notificationSource?.Dispose();
    _notificationSource = null;

    _waveSource?.Dispose();
    _waveSource = null;

    _soundInSource?.Dispose();
    _soundInSource = null;

    _capture?.Dispose();
    _capture = null;
}
/// <summary>
/// Starts loopback-recording from the given device and prepares a converted
/// (44.1 kHz, 16-bit, stereo) source plus the matching NAudio wave format.
/// </summary>
public void StartRecordingSetDevice(MMDevice recordingDevice)
{
    if (recordingDevice == null)
    {
        MessageBox.Show(Properties.Strings.MessageBox_NoRecordingDevices);
        Console.WriteLine("No devices found.");
        return;
    }

    soundIn = new CSCore.SoundIn.WasapiLoopbackCapture { Device = recordingDevice };
    soundIn.Initialize();

    // FillWithZeros = false: the source ends when the capture stops rather
    // than padding with silence.
    soundInSource = new SoundInSource(soundIn) { FillWithZeros = false };
    convertedSource = soundInSource.ChangeSampleRate(44100).ToSampleSource().ToWaveSource(16).ToStereo();

    soundInSource.DataAvailable += OnDataAvailable;
    soundIn.Start();

    // Mirror the converted CSCore format as an equivalent NAudio PCM format.
    var format = convertedSource.WaveFormat;
    waveFormat = NAudio.Wave.WaveFormat.CreateCustomFormat(
        WaveFormatEncoding.Pcm,
        format.SampleRate,
        format.Channels,
        format.BytesPerSecond,
        format.BlockAlign,
        format.BitsPerSample);
}
/// <summary>
/// Tears down the CSCore playback/capture chain and drops all references.
/// </summary>
private void StopCSCore()
{
    if (_soundOut != null)
    {
        _soundOut.Stop();
        _soundOut.Dispose();
        _soundOut = null;
    }

    if (_soundIn != null)
    {
        _soundIn.Stop();
        _soundIn.Dispose();
        _soundIn = null;
    }

    _source?.Dispose();
    _source = null;

    // The spectrum holds no unmanaged resources; releasing the reference
    // is sufficient.
    _lineSpectrum = null;
}
/// <summary>
/// (Re)creates the WASAPI capture — for an explicit device id when supplied,
/// otherwise the default capture endpoint for the current role — and starts
/// it. Does nothing when running as a controller.
/// </summary>
private void GetCapture(bool isController, string deviceId = null)
{
    if (isController)
    {
        return;
    }

    // Tear down any previous capture before opening a new one.
    if (_soundCapture != null)
    {
        _soundCapture.Stop();
        _soundCapture.Dispose();
    }

    using (var enumerator = new MMDeviceEnumerator())
    {
        MMDevice device = deviceId != null
            ? enumerator.GetDevice(deviceId)
            : enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, _currentCaptureRole);

        _meter = AudioMeterInformation.FromDevice(device);

        // Event-driven shared-mode capture with a 250 ms buffer.
        _soundCapture = new WasapiCapture(true, AudioClientShareMode.Shared, 250) { Device = device };
        _soundCapture.Initialize();
        _soundCapture.Start();
    }
}
/// <summary>
/// Releases managed resources (sound source, device enumerator, notification
/// client, and capture). No unmanaged-only cleanup is needed here.
/// </summary>
protected virtual void Dispose(bool disposing)
{
    if (!disposing)
    {
        return;
    }

    // free managed resources
    soundInSource?.Dispose();
    soundInSource = null;

    mMDeviceEnumerator?.Dispose();
    mMDeviceEnumerator = null;

    mMNotificationClient?.Dispose();
    mMNotificationClient = null;

    _soundIn?.Dispose();
    _soundIn = null;
}
/// <summary>
/// Opens the default capture and render endpoints for the configured role,
/// routes the microphone through to the render device, and subscribes to
/// stop/device-change events. Safe to call repeatedly; only the first call
/// does work.
/// </summary>
public void Initialize()
{
    if (Initialized)
    {
        return;
    }

    // Resolve the current default endpoints.
    var deviceCapture = deviceEnumerator.GetDefaultAudioEndpoint(EDataFlow.eCapture, Role);
    var deviceRender = deviceEnumerator.GetDefaultAudioEndpoint(EDataFlow.eRender, Role);
    if (deviceCapture == null || deviceRender == null)
    {
        OnStateChanged?.Invoke(EMicState.InitializeFailed);
        return;
    }

    // Prepare the capture device, and a render device fed by its provider.
    capture = new WasapiCapture(deviceCapture);
    render = new WasapiRender(deviceRender, ShareMode, true, 0);
    capture.Initialize();
    render.Initialize(capture.WaveProvider);

    capture.StoppedEvent += OnCaptureStopped;
    render.StoppedEvent += OnCaptureStopped;

    Debug.WriteLine(string.Format("capture format:{0}", capture.WaveFormat));
    Debug.WriteLine(string.Format("render format:{0}", render.WaveFormat));

    deviceEnumerator.OnDefaultDeviceChanged += DeviceChanged;

    Initialized = true;
    OnStateChanged?.Invoke(EMicState.Initialized);
}
// Stopwatch (System.Diagnostics) is safe to use in production code:
// https://stackoverflow.com/questions/2805362/can-stopwatch-be-used-in-production-code
/// <summary>
/// Builds the output path, opens a WASAPI loopback capture plus a WAV writer,
/// and immediately starts recording with a running duration stopwatch.
/// </summary>
/// <param name="outputFolder">Directory the recording is written into.</param>
/// <param name="recordingName">Base name of the recording file.</param>
public AudioRecorder(string outputFolder, string recordingName)
{
    OutputFolder = outputFolder;
    RecordingName = recordingName;
    OutputPath = Path.Combine(OutputFolder, recordingName);
    CurrentExtension = ".wav-recording";

    //TODO: Check if WasapiCapture should/can be used instead
    _wasapiIn = new WasapiLoopbackCapture();
    _wasapiIn.DataAvailable += AudioDataAvailable;
    _wasapiIn.RecordingStopped += RecordingStopped;

    WaveFormat = _wasapiIn.WaveFormat;
    _wavOut = new WaveFileWriter(FullOutputPath, WaveFormat);

    _wasapiIn.StartRecording();
    _sampleDurationStopwatch = Stopwatch.StartNew();
}
/// <summary>
/// Shuts down the PSE (events and fade timer) and releases the mixer,
/// capture, and buffered provider.
/// </summary>
public void Dispose()
{
    // Disable and detach the PSE first so no callbacks fire during teardown.
    PSE.Enable = false;
    PSE.VolumeRequest -= PSE_VolumeRequest;
    PSE.ExtractedDegreeOfRisk -= PSE_ExtractedDegreeOfRisk;

    PSEFadeTimer.Stop();
    PSEFadeTimer.Dispose();
    PSE.Dispose();

    Mixer?.RemoveAllMixerInputs();
    WasapiCapture?.Dispose();
    BufferedWaveProvider?.ClearBuffer();

    Mixer = null;
    WasapiCapture = null;
    BufferedWaveProvider = null;
}
/// <summary>
/// Begin the audio input: opens a loopback capture on the given device,
/// wires the sample pipeline, and starts the UI timer.
/// </summary>
public static void InitAudioSource(MMDevice device)
{
    Stop();

    // Open the requested device in loopback mode.
    m_SoundIn = new WasapiLoopbackCapture { Device = device };
    m_SoundIn.Initialize();

    var soundInSource = new SoundInSource(m_SoundIn);
    SetupSampleSource(soundInSource.ToSampleSource());

    // Drain the source whenever data arrives; without these reads the
    // processing chain is never pulled.
    byte[] buffer = new byte[m_Source.WaveFormat.BytesPerSecond / 2];
    soundInSource.DataAvailable += (s, aEvent) =>
    {
        while (m_Source.Read(buffer, 0, buffer.Length) > 0)
        {
        }
    };

    m_SoundIn.Start();
    MainWindow.StartTimer();
}
/// <summary>
/// Starts loopback capture and spawns an ffmpeg child process that converts
/// the captured float32 stereo 44.1 kHz stream to 8 kHz mono s16le on stdout.
/// </summary>
public void StartRecord()
{
    try
    {
        _audioCapture = new WasapiLoopbackCapture();

        // ffmpeg reads raw audio from stdin and writes the converted stream
        // to stdout; both pipes are redirected.
        ffmpegProcess = new Process
        {
            StartInfo =
            {
                FileName = "ffmpeg.exe",
                Arguments = "-f f32le -ac 2 -ar 44100 -i - -ar 8000 -ac 1 -f s16le -",
                RedirectStandardInput = true,
                RedirectStandardOutput = true,
                UseShellExecute = false,
                CreateNoWindow = true
            }
        };
        ffmpegProcess.Start();

        _audioCapture.RecordingStopped += OnRecordingStopped;
        _audioCapture.DataAvailable += OnDataAvailable;

        InfoMessage?.Invoke(this, "Запись...");
        _audioCapture.StartRecording();
    }
    catch (Exception e)
    {
        InfoMessage?.Invoke(this, $"Ошибка: {e.Message}");
    }
}
/// <summary>
/// Opens the default capture endpoint in shared mode and builds a data source
/// that raises OnDataRead callbacks with resampled mono audio.
/// </summary>
/// <param name="sampleRate">Output sample rate for the PureDataSource.</param>
/// <param name="sampleSize">Number of samples per analysis window; must be positive.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="sampleSize"/> is not positive.</exception>
public AudioCapture(int sampleRate, int sampleSize)
{
    // BUG FIX: validate before assigning; previously the fields were set
    // first, leaving the object half-initialized when the argument was bad.
    if (sampleSize <= 0)
    {
        throw new ArgumentException("Sample size must be > 0, instead it is " + sampleSize, nameof(sampleSize));
    }

    this.sampleRate = sampleRate;
    this.sampleSize = sampleSize;
    resSamples = new float[this.sampleSize];

    var deviceEnumerator = new MMDeviceEnumerator();

    // This uses the wasapi api to get any sound data played by the computer.
    capture = new WasapiCapture(false, AudioClientShareMode.Shared);
    capture.Device = deviceEnumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Multimedia);
    capture.Initialize();
    capture.DataAvailable += Capture_DataAvailable;

    IWaveSource source = new SoundInSource(capture);
    dataSource = new PureDataSource(new WaveFormat(sampleRate, 8, 1), source.ToSampleSource());
    dataSource.OnDataRead += DataSource_OnDataRead;
    finalSource = dataSource.ToWaveSource();

    capture.Start();
}
// Handles the capture's RecordingStopped event: reports any capture error,
// tears down ffmpeg and the capture, flushes the transport, and notifies
// listeners that recording has ended.
private void OnRecordingStopped(object sender, StoppedEventArgs err)
{
    // Surface the capture error (if any) to subscribers.
    if (err.Exception != null) { InfoMessage?.Invoke(this, $"Ошибка: {err.Exception.Message}"); }
    // Close ffmpeg's pipes, then kill the process.
    // NOTE(review): Kill() throws if the process has already exited — confirm
    // that cannot happen here, or guard it.
    ffmpegProcess?.StandardOutput.Close();
    ffmpegProcess?.StandardInput.Close();
    ffmpegProcess?.Kill();
    // Detach handlers before disposing so no callback fires on a dead capture.
    _audioCapture.RecordingStopped -= OnRecordingStopped;
    _audioCapture.DataAvailable -= OnDataAvailable;
    _audioCapture.Dispose();
    _audioCapture = null;
    // Reset the "recording in progress" interlock flag.
    _threadSafeBoolBackValue = 0;
    // Flush remaining data and close the connection before signalling stop.
    // NOTE(review): Task.Run(...).Wait() blocks this thread; presumably done
    // to move the transport calls off the capture callback thread — verify,
    // as it is a deadlock risk on a synchronization-context thread.
    Task.Run(() => { _transportService.SendFinalData(); }).Wait();
    Task.Run(() => { _transportService.CloseConnection(); }).Wait();
    InfoMessage?.Invoke(this, "Запись остановлена");
    RecordLevel?.Invoke(this, 0.0F);
    RecordStopped?.Invoke(this, EventArgs.Empty);
}
/// <summary>
/// Sets up a WASAPI loopback capture feeding an FFT provider; adapted from
/// the CSCore sample code.
/// </summary>
/// <param name="_size">FFT window size (defaults to 4096 bins).</param>
public void Initialize(FFTSize _size = FFTSize._4096)
{
    size = _size;

    _soundIn = new WasapiLoopbackCapture();
    _soundIn.Initialize();

    var soundInSource = new SoundInSource(_soundIn);
    var source = soundInSource.ToSampleSource();
    _fft = new FftProvider(source.WaveFormat.Channels, (FftSize)size);

    // Feed every sample block into the FFT as it flows through.
    var notifier = new SingleBlockNotificationStream(source);
    notifier.SingleBlockRead += (s, a) => _fft.Add(a.Left, a.Right);
    _source = notifier.ToWaveSource(16);

    // Keep draining the source: without these reads SingleBlockRead never
    // fires and the FFT receives no data.
    byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond];
    soundInSource.DataAvailable += (s, aEvent) =>
    {
        while (_source.Read(buffer, 0, buffer.Length) > 0)
        {
        }
    };

    _soundIn.Start();
}
/// <summary>
/// Records the default render device (loopback) into "dump.wav" until a key
/// is pressed; a timer additionally ends the recording after 8 seconds.
/// </summary>
public void CSCoreAudioRecording()
{
    using (capture = new WasapiLoopbackCapture())
    {
        // When the interval elapses, OnTimedEvent turns recording off.
        aTimer = new Timer();
        aTimer.Elapsed += new ElapsedEventHandler(OnTimedEvent);
        aTimer.Interval = 8000; // recording length, ms

        // Initialize the capture device.
        capture.Initialize();

        using (writer = new WaveWriter("dump.wav", capture.WaveFormat))
        {
            capture.DataAvailable += (s, e) =>
            {
                // save the recorded audio
                writer.Write(e.Data, e.Offset, e.ByteCount);
            };

            // start recording
            capture.Start();
            aTimer.Enabled = true;
            Console.WriteLine("Rozpoczęto nagrywanie.");
            Console.ReadKey();

            // BUG FIX: stop capturing before the writer's using-block ends;
            // otherwise DataAvailable can still fire and write into a
            // disposed WaveWriter.
            capture.Stop();
        }
    }
}
/// <summary>
/// Routes the selected input device to the selected output device, using
/// loopback capture when the chosen device is a render endpoint.
/// </summary>
private void button_Click(object sender, RoutedEventArgs e)
{
    var selectedDevice = (MMDevice)comboBox.SelectedItem;

    // Render endpoints require loopback capture; true capture endpoints
    // use the plain WASAPI capture.
    if (mmdevicesOut.Contains(selectedDevice))
    {
        capture = new WasapiLoopbackCapture();
    }
    else
    {
        capture = new WasapiCapture();
    }
    capture.Device = selectedDevice;
    capture.Initialize();

    // FillWithZeros keeps the output fed with silence when no data arrives.
    w = new WasapiOut();
    w.Device = (MMDevice)comboBox_Copy.SelectedItem;
    w.Initialize(new SoundInSource(capture) { FillWithZeros = true });

    capture.Start();
    w.Play();
}
/// <summary>
/// Applies the user-configured properties: binds the chosen capture device
/// (if it is still present among the active endpoints) and reloads the
/// scheduled events in chronological order.
/// </summary>
public override void SetProperties(UserControl[] _controls)
{
    var enumerator = new MMDeviceEnumerator();
    var endpoints = enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active);

    base.SetProperties(_controls);

    var ctrl = _controls.OfType<PropertiesControls.SchedulerControl>().First();
    var mmi = _controls.OfType<PropertiesControls.ComboBoxControl>().First();
    AudioDevice = (string)mmi.Value;

    // Assume cancelled until the configured device is confirmed to exist.
    _cancelled = true;

    // IDIOM FIX: FirstOrDefault(predicate) replaces Where(...).FirstOrDefault().
    var device = endpoints.FirstOrDefault(x => x.FriendlyName == AudioDevice);
    if (device != null)
    {
        _cancelled = false;
        _capture = new WasapiCapture(device);
        _capture.WaveFormat = new WaveFormat(44100, 8, 2);
        _capture.DataAvailable += Lc_DataAvailable;
        _capture.StartRecording();
    }

    // Rebuild the event list sorted by scheduled time.
    _events.Clear();
    foreach (var item in ctrl.Events.OrderBy(x => x.A.Ticks))
    {
        _events.Add(item);
    }
}
/// <summary>
/// Creates the recording device selected in the UI and wires the shared
/// DataAvailable/RecordingStopped handlers before returning it.
/// </summary>
private IWaveIn CreateWaveInDevice()
{
    IWaveIn newWaveIn;
    if (radioButtonWaveIn.Checked)
    {
        newWaveIn = new WaveIn { WaveFormat = new WaveFormat(8000, 1) };
    }
    else if (radioButtonWaveInEvent.Checked)
    {
        newWaveIn = new WaveInEvent { WaveFormat = new WaveFormat(8000, 1) };
    }
    else if (radioButtonWasapi.Checked)
    {
        // WaveFormat cannot be set here: WASAPI does no sample-rate conversion.
        var device = (MMDevice)comboWasapiDevices.SelectedItem;
        newWaveIn = new WasapiCapture(device);
    }
    else
    {
        // The same WASAPI limitation applies to loopback capture.
        newWaveIn = new WasapiLoopbackCapture();
    }

    newWaveIn.DataAvailable += OnDataAvailable;
    newWaveIn.RecordingStopped += OnRecordingStopped;
    return newWaveIn;
}
/// <summary>
/// Finds the configured capture device by ID and starts a 44.1 kHz 16-bit
/// stereo WASAPI capture feeding the Update handler.
/// </summary>
/// <exception cref="Exception">
/// Thrown when no device ID is configured, or when the configured device is
/// not among the active capture endpoints.
/// </exception>
private void InitializeAudio()
{
    if (this.config.audioDeviceID == null)
    {
        throw new Exception("audioDeviceID not set!");
    }

    var iterator = new MMDeviceEnumerator().EnumerateAudioEndPoints(
        DataFlow.Capture, DeviceState.Active);

    MMDevice device = null;
    foreach (var audioDevice in iterator)
    {
        if (this.config.audioDeviceID == audioDevice.ID)
        {
            device = audioDevice;
            break;
        }
    }

    if (device == null)
    {
        // BUG FIX: this path previously reused the "audioDeviceID not set!"
        // message, misreporting a missing device as a missing setting.
        throw new Exception("audio device with ID " + this.config.audioDeviceID + " not found!");
    }

    this.recordingDevice = device;

    // Windows audio format available in the Sounds control panel (mmsys.cpl)
    // We standardize around 44.1 kHz, 16-bit PCM (signed) 2 channel audio
    this.captureStream = new WasapiCapture(device, false, 16);
    this.captureStream.WaveFormat = new WaveFormat(audioFormatSampleFrequency, 16, 2);
    this.captureStream.DataAvailable += Update;
    this.captureStream.StartRecording();
}
/// <summary>
/// Swaps the capture and render devices: stops any running endpoints, then
/// rebuilds the capture -> buffer -> mixer -> resampler -> output chain and
/// starts it again.
/// </summary>
public void UpdateDevices(MMDevice input, MMDevice output)
{
    // Shut down the previous chain (if any) before rebuilding it.
    outp?.Stop();
    outp?.Dispose();
    inp?.StopRecording();
    inp?.Dispose();

    // Event-driven capture with a 5 ms buffer.
    inp = new WasapiCapture(input, true, 5);
    inp.DataAvailable += OnCapture;

    buffer = new BufferedWaveProvider(inp.WaveFormat);
    mixer = new MixingWaveProvider32();
    mixer.AddInputStream(buffer);

    // Reuse the existing resampler when possible; it only needs the new
    // upstream source.
    if (resampler == null)
    {
        resampler = new AudioResampler(mixer);
    }
    else
    {
        resampler.Update(mixer);
    }

    outp = new WasapiOut(output, AudioClientShareMode.Shared, true, 5);
    outp.Init(resampler);

    inp.StartRecording();
    outp.Play();
}
/// <summary>
/// Opens the selected microphone in exclusive mode, routes it through the
/// custom effects chain (gain + pitch shift), and plays the result on the
/// selected speaker.
/// </summary>
private void button_start_Click(object sender, EventArgs e)
{
    // Capture: exclusive mode, 5 ms latency, device chosen in the mic combo.
    wavein = null;
    wavein = new WasapiCapture(false, AudioClientShareMode.Exclusive, 5);
    wavein.Device = inputDevices[comboBox_mic.SelectedIndex];
    wavein.Initialize();
    wavein.Start();

    source = new SoundInSource(wavein) { FillWithZeros = true };

    // Insert the custom effects into the chain, feeding them mono audio.
    efxProcs = new EfxProcs(source.ToSampleSource().ToMono());
    efxProcs.gain = linearGain;        // tracks the live gain setting
    efxProcs.pitchFactor = pitchShift; // tracks the live pitch setting

    // Playback: exclusive mode, 5 ms latency, device chosen in the speaker combo.
    waveout = null;
    waveout = new WasapiOut(false, AudioClientShareMode.Exclusive, 5);
    waveout.Device = outputDevices[comboBox_speaker.SelectedIndex];
    waveout.Initialize(efxProcs.ToWaveSource());
    waveout.Play();

    timer1.Enabled = true;
}
/// <summary>
/// Starts a worker thread that opens the default loopback device, wires the
/// spectrum pipeline, and begins the periodic render events.
/// </summary>
public Visualization()
{
    new Thread(() =>
    {
        Stop();

        // Open the default render device in loopback mode.
        _soundIn = new WasapiLoopbackCapture();
        _soundIn.Initialize();

        var soundInSource = new SoundInSource(_soundIn);
        SetupSampleSource(soundInSource.ToSampleSource());

        // We must keep reading from the source — otherwise SingleBlockRead
        // never fires and the spectrum provider is never populated.
        byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
        soundInSource.DataAvailable += (s, aEvent) =>
        {
            while (_source.Read(buffer, 0, buffer.Length) > 0)
            {
            }
        };

        // Start capturing and kick off the periodic visualization events.
        _soundIn.Start();
        _Timer.Elapsed += new ElapsedEventHandler(GenerateEvent);
        _Timer.Start();
    }).Start();
}
// Safe to call even if not capturing.
// IXxxSource wrappers dispose of their wrapped IXxxSource instances, but
// SoundInSource will NOT dispose of the WasapiCapture m_AudioCapture it wraps,
// and WasapiCapture does not dispose of its .Device — hence the explicit
// teardown of both objects here.
void StopCapture()
{
    // Copy the fields into locals: the lambda below captures by reference,
    // and the fields are cleared immediately.
    var sourceToDispose = m_FinalSource;
    var captureToDispose = m_AudioCapture;

    m_FinalSource = null;
    m_AudioCapture = null;
    m_SampleRate = 0;
    m_FriendlyName = null;

    if (sourceToDispose == null && captureToDispose == null)
    {
        return;
    }

    // Don't dispose on our precious main thread; Wasapi's .Stop() performs a
    // thread join.
    new Future<object>(() =>
    {
        sourceToDispose?.Dispose();
        captureToDispose?.Dispose();
        return null;
    });
}
/// <summary>
/// Opens the default render device in loopback mode, inserts a pitch shifter
/// into the sample chain, and starts capture plus the visualization timer.
/// </summary>
private void OpenDefault()
{
    Stop();

    // WasapiLoopbackCapture targets the default render device by itself, so
    // no explicit Device assignment is required.
    _soundIn = new WasapiLoopbackCapture();
    _soundIn.Initialize();

    var soundInSource = new SoundInSource(_soundIn);
    ISampleSource source = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);
    SetupSampleSource(source);

    // Keep draining the source; otherwise SingleBlockRead never fires and the
    // spectrum provider stays empty.
    byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
    soundInSource.DataAvailable += (s, aEvent) =>
    {
        while (_source.Read(buffer, 0, buffer.Length) > 0)
        {
        }
    };

    // Begin capturing and start the visualization timer.
    _soundIn.Start();
    timer.Start();
}
/// <summary>
/// Stops both recording streams, releases the wave writers, and switches the
/// UI buttons into playback mode.
/// </summary>
private void btStop_Click(object sender, EventArgs e)
{
    if (sourceStream != null)
    {
        sourceStream.StopRecording();
        sourceStream.Dispose();
        sourceStream = null;
    }

    if (sourceStream1 != null)
    {
        sourceStream1.StopRecording();
        sourceStream1.Dispose();
        sourceStream1 = null;
    }

    if (this.waveWriter == null)
    {
        return;
    }

    this.waveWriter.Dispose();
    this.waveWriter = null;

    // BUG FIX: waveWriter2 was disposed unconditionally while only waveWriter
    // was null-checked, throwing a NullReferenceException whenever the second
    // writer had never been created.
    waveWriter2?.Dispose();
    waveWriter2 = null;

    this.sbtRecord.Enabled = false;
    this.sbtStop.Enabled = false;
    sbtPlay.Enabled = true;
    sbtPlay.Focus();
}
/// <summary>
/// Creates a WASAPI capture for the stored device ID.
/// </summary>
/// <param name="latency">Desired audio buffer length in milliseconds.</param>
/// <returns>A ready-to-start <see cref="IWaveIn"/> instance.</returns>
public IWaveIn Create(int latency)
{
    var device = new MMDeviceEnumerator().GetDevice(deviceId);

    // BUG FIX: the latency argument was previously ignored. Pass it through
    // as the buffer length, keeping NAudio's default of no event sync.
    var wasapi = new WasapiCapture(device, false, latency);
    return wasapi;
}