/// <summary>
/// Stress test: repeatedly (10 cycles) wires WASAPI capture into WASAPI
/// playback and verifies the output is still playing after two seconds.
/// </summary>
public void SoundInToSoundOutTest_Wasapi()
{
    for (int i = 0; i < 10; i++)
    {
        // using-blocks guarantee disposal even when the assert throws;
        // the original leaked the SoundInSource entirely.
        using (var waveIn = new WasapiCapture())
        {
            waveIn.Initialize();
            waveIn.Start();

            // FillWithZeros keeps the source delivering (silent) data even when
            // no capture data is available, so playback never starves.
            using (var waveInToSource = new SoundInSource(waveIn) { FillWithZeros = true })
            using (var soundOut = new WasapiOut())
            {
                soundOut.Initialize(waveInToSource);
                soundOut.Play();

                // Let the pipeline run before asserting it has not stalled.
                Thread.Sleep(2000);
                Assert.AreEqual(PlaybackState.Playing, soundOut.PlaybackState);
            }
        }
    }
}
/// <summary>
/// Records microphone audio (via NAudio) and speaker/loopback audio (via CSCore)
/// simultaneously until the <c>stopRec</c> flag is set, writing each stream to
/// its own wave file (<c>_micLoc</c> and <c>_spkLoc</c>).
/// </summary>
public static void RecThread()
{
    micAud = new NAudio.Wave.WaveInEvent();

    using (spkAud = new CSCore.SoundIn.WasapiLoopbackCapture())
    {
        spkAud.Initialize();

        // Match the microphone format to the loopback device so both
        // recordings share the same sample rate and channel count.
        micAud.WaveFormat = new NAudio.Wave.WaveFormat(spkAud.WaveFormat.SampleRate, spkAud.WaveFormat.Channels);
        micAud.DataAvailable += MicAud_DataAvailable;
        micAud.RecordingStopped += MicAud_RecordingStopped;
        wfw = new WaveFileWriter(_micLoc, micAud.WaveFormat);
        micAud.StartRecording();

        using (var w = new WaveWriter(_spkLoc, spkAud.WaveFormat))
        {
            // Persist each captured loopback chunk as it arrives.
            spkAud.DataAvailable += (s, capData) => w.Write(capData.Data, capData.Offset, capData.ByteCount);
            spkAud.Start();

            // Poll for the stop flag. The original spun in a tight empty loop,
            // pinning a CPU core; a short sleep keeps latency low without the spin.
            while (!stopRec)
            {
                System.Threading.Thread.Sleep(10);
            }

            spkAud.Stop();
            micAud.StopRecording();
        }
    }
}
/// <summary>
/// Initializes the realtime audio processing pipeline: captures the sound
/// card's loopback output, feeds each sample block into an FFT spectrum
/// provider, and continuously renders spectrum lines to the console.
/// NOTE(review): the render loop below never exits, so this method blocks its
/// caller forever and the Stop() call after the loop is unreachable — consider
/// adding a cancellation flag so recording can actually be stopped.
/// </summary>
public void BeginRecording()
{
    // Records output data from the WASAPI loopback sound card.
    using (wasapiCapture = new WasapiLoopbackCapture())
    {
        wasapiCapture.Initialize();
        wasapiCaptureSource = new SoundInSource(wasapiCapture);

        // TODO: Stereo or Mono?
        using (var stereoSource = wasapiCaptureSource.ToStereo())
        {
            // Creates the spectrum provider (our own FFT provider).
            provider = new BasicSpectrumProvider(stereoSource.WaveFormat.Channels, stereoSource.WaveFormat.SampleRate, fftSize);

            // Creates the handler that consumes the spectrum provider.
            var handler = new FFTHandler(FftSize.Fft4096)
            {
                SpectrumProvider = provider,
                UseAverage = true,
                height = 100,
                BarCount = 10,
                BarSpacing = 2,
                IsXLogScale = true,
                ScalingStrategy = ScalingStrategy.Sqrt
            };

            // Notifies the spectrum provider of each sample block read.
            // NOTE(review): this chain reads from wasapiCaptureSource, not from
            // stereoSource above — confirm the ToStereo() conversion is really
            // meant to be bypassed here.
            var notificationSource = new SingleBlockNotificationStream(wasapiCaptureSource.ToSampleSource());
            notificationSource.SingleBlockRead += (s, a) => provider.Add(a.Left, a.Right);
            var wsrc = notificationSource.ToWaveSource();

            // Reads through the wave source as it is playing.
            // This is the key to getting the realtime music.
            byte[] buffer = new byte[wsrc.WaveFormat.BytesPerSecond];
            wasapiCaptureSource.DataAvailable += (s, e) =>
            {
                int read = wsrc.Read(buffer, 0, buffer.Length);
            };

            // Starts the listening.
            wasapiCapture.Start();

            // Gathers the FFT data and sends it to the handler in a loop.
            var fftBuffer = new float[(int)fftSize];
            while (true)
            {
                if (provider.GetFftData(fftBuffer))
                {
                    Console.Clear();
                    handler.CreateSpectrumLineInternal(fftBuffer, 100);
                }
            }

            // Stops listening. (Unreachable — see summary note.)
            wasapiCapture.Stop();
        }
    }
}
/// <summary>
/// Stops the running capture (if any), releases the capture chain and the
/// writer, and flips the start/stop buttons back to their idle state.
/// </summary>
private void StopCapture()
{
    if (_soundIn != null)
    {
        _soundIn.Stop();
        _soundIn.Dispose();
        _soundIn = null;

        // Guard against a partially-built pipeline: if StartCapture failed
        // before assigning _finalSource, the original threw a
        // NullReferenceException here.
        if (_finalSource != null)
        {
            _finalSource.Dispose();
            _finalSource = null;
        }

        // The writer is only disposable in some configurations; the type test
        // is false for null, so no extra null guard is needed.
        if (_writer is IDisposable)
            ((IDisposable)_writer).Dispose();

        btnStop.Enabled = false;
        btnStart.Enabled = true;
    }
}
/// <summary>
/// Starts recording from the selected device into the given wave file.
/// Depending on CaptureMode, records either the microphone (WasapiCapture)
/// or the sound card's output (WasapiLoopbackCapture).
/// </summary>
/// <param name="fileName">Destination wave file path.</param>
private void StartCapture(string fileName)
{
    if (SelectedDevice == null)
        return;

    // Pick the capture source for the current mode.
    if (CaptureMode == CaptureMode.Capture)
        _soundIn = new WasapiCapture();
    else
        _soundIn = new WasapiLoopbackCapture();

    _soundIn.Device = SelectedDevice;
    _soundIn.Initialize();

    // Build the chain: capture -> sample stream with per-block notification -> wave source.
    var inputSource = new SoundInSource(_soundIn);
    var notifyStream = new SingleBlockNotificationStream(inputSource.ToSampleSource());
    notifyStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;

    _finalSource = notifyStream.ToWaveSource();
    _writer = new WaveWriter(fileName, _finalSource.WaveFormat);

    // Half a second of audio per read.
    var readBuffer = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];
    inputSource.DataAvailable += (s, e) =>
    {
        // Drain everything currently buffered and persist it to the file.
        int bytesRead;
        while ((bytesRead = _finalSource.Read(readBuffer, 0, readBuffer.Length)) > 0)
            _writer.Write(readBuffer, 0, bytesRead);
    };

    _soundIn.Start();
}
/// <summary>
/// Captures the audio output by the sound card to a wave file.
/// The sound you hear is recorded and saved to [wavefile].wav.
/// </summary>
/// <param name="wavefile">Name of the wave file, including extension.</param>
/// <param name="captureSilence">If true, also keep recording during blank/silent periods.</param>
public void CaptureSpeakersToWave(string wavefile, bool captureSilence)
{
    _capture = new WasapiLoopbackCapture();
    // Initialize the selected device for recording.
    _capture.Initialize();
    // Create a wave writer to write the captured data to.
    _waveWriter = new WaveWriter(wavefile, _capture.WaveFormat);
    // Set up an event handler to receive the recorded data.
    _capture.DataAvailable += (s, e) =>
    {
        // Save the recorded audio.
        _waveWriter.Write(e.Data, e.Offset, e.ByteCount);
    };
    // Start recording.
    _capture.Start();
    if (captureSilence)
    {
        // Presumably plays silence so the loopback stream keeps delivering
        // data even when nothing audible is playing — confirm in CaptureSilence().
        CaptureSilence();
    }
}
/// <summary>
/// Stops the audio capture started with CaptureSpeakersToWave and releases
/// the recorder and writer. Safe to call even if capture was never started
/// (the original threw a NullReferenceException in that case).
/// </summary>
public void UnCaptureSpeakersToWave()
{
    // Stop the silence playback, if it was started.
    if (_soundSilenceOut != null)
    {
        _soundSilenceOut.Stop();
        _soundSilenceOut.Dispose();
        _soundSilenceSource.Dispose();
        _soundSilenceOut = null;
        _soundSilenceSource = null;
    }

    // Stop recording first so no more DataAvailable callbacks hit the writer,
    // preserving the original stop -> dispose-writer -> dispose-capture order.
    if (_capture != null)
    {
        _capture.Stop();
    }

    if (_waveWriter != null)
    {
        _waveWriter.Dispose();
        _waveWriter = null;
    }

    if (_capture != null)
    {
        _capture.Dispose();
        _capture = null;
    }
}