private void btnStart_Click(object sender, EventArgs e)
{
    if (deviceslist.SelectedItems.Count <= 0)
        return;

    SaveFileDialog sfd = new SaveFileDialog();
    sfd.Filter = "WAV (*.wav)|*.wav";
    sfd.Title = "Save";
    sfd.FileName = String.Empty;
    if (sfd.ShowDialog() != System.Windows.Forms.DialogResult.OK)
        return;

    // Open the selected capture device with a fixed 44.1 kHz / 16 bit format.
    _waveIn = new WaveInEvent(new WaveFormat(44100, 16, _selectedDevice.Channels));
    _waveIn.Device = deviceslist.SelectedItems[0].Index;
    _waveIn.Initialize();

    // Wrap the capture in a SoundInSource and insert a notification stream so
    // every block that is read can be inspected (e.g. for level metering).
    var waveInToSource = new SoundInSource(_waveIn);
    _source = waveInToSource;
    var notifyStream = new SingleBlockNotificationStream(_source);
    notifyStream.SingleBlockRead += OnNotifyStream_SingleBlockRead;

    _source = notifyStream.ToWaveSource(16);
    _writerBuffer = new byte[_source.WaveFormat.BytesPerSecond];
    _writer = new WaveWriter(File.OpenWrite(sfd.FileName), _source.WaveFormat);

    // Wire the data handler before starting so the first buffers are not dropped.
    waveInToSource.DataAvailable += OnNewData;
    _waveIn.Start();

    btnStart.Enabled = false;
    btnStop.Enabled = true;
}
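// The two handlers referenced above are not part of the original snippet.
// Minimal sketches, assuming CSCore's DataAvailableEventArgs and
// SingleBlockReadEventArgs and the same _source / _writerBuffer / _writer
// fields used in btnStart_Click.
private void OnNewData(object sender, DataAvailableEventArgs e)
{
    // Drain the capture chain and append the converted bytes to the wave file.
    int read;
    while ((read = _source.Read(_writerBuffer, 0, _writerBuffer.Length)) > 0)
        _writer.Write(_writerBuffer, 0, read);
}

private void OnNotifyStream_SingleBlockRead(object sender, SingleBlockReadEventArgs e)
{
    // e.Left / e.Right carry the current sample pair; forward them to a
    // spectrum provider or level meter here if needed.
}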
/// <summary>
/// Records from every available WaveIn device and saves each capture to its own
/// wave file (the device number is appended to the file name).
/// </summary>
/// <param name="wavefile">name of the wave file with extension</param>
public void CaptureMicToWave(string wavefile)
{
    const string extension = ".wav";
    string baseName = wavefile.Remove(wavefile.LastIndexOf(extension));

    int i = 0;
    foreach (var device in WaveIn.Devices)
    {
        _waveIn = new WaveInEvent(new WaveFormat(44100, 16, device.Channels));
        _waveIn.Device = i++;
        _waveIn.Initialize();

        var waveInToSource = new SoundInSource(_waveIn);
        var notifyStream = new SingleBlockNotificationStream(waveInToSource);

        // Keep the source, buffer and writer in locals so each device's
        // DataAvailable handler keeps writing to its own file; the fields only
        // ever point to the most recently started capture.
        var source = notifyStream.ToWaveSource(16);
        var writerBuffer = new byte[source.WaveFormat.BytesPerSecond];
        var writer = new WaveWriter(baseName + i + extension, source.WaveFormat);
        _source = source;
        _writerBuffer = writerBuffer;
        _writer = writer;

        waveInToSource.DataAvailable += (s, e) =>
        {
            int read;
            while ((read = source.Read(writerBuffer, 0, writerBuffer.Length)) > 0)
                writer.Write(writerBuffer, 0, read);
        };

        _waveIn.Start();
    }
}
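// Hypothetical cleanup for the capture above (not in the original). Because
// the fields are reassigned on every loop iteration, this only stops the last
// device that was started; keep each capture in a list if all of them must be
// stopped and flushed.
public void StopCaptureMicToWave()
{
    if (_waveIn != null)
    {
        _waveIn.Stop();
        _waveIn.Dispose();
        _waveIn = null;
    }
    if (_writer != null)
    {
        _writer.Dispose();
        _writer = null;
    }
}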
/** Initializes the realtime audio processing handlers */
public void BeginRecording()
{
    // Records the output of the sound card via WASAPI loopback.
    using (wasapiCapture = new WasapiLoopbackCapture())
    {
        wasapiCapture.Initialize();
        wasapiCaptureSource = new SoundInSource(wasapiCapture);

        // TODO: Stereo or Mono?
        using (var stereoSource = wasapiCaptureSource.ToStereo())
        {
            // Creates the spectrum provider (our own FFT provider).
            provider = new BasicSpectrumProvider(stereoSource.WaveFormat.Channels,
                stereoSource.WaveFormat.SampleRate, fftSize);

            // Creates the handler that uses the spectrum provider.
            var handler = new FFTHandler(FftSize.Fft4096)
            {
                SpectrumProvider = provider,
                UseAverage = true,
                height = 100,
                BarCount = 10,
                BarSpacing = 2,
                IsXLogScale = true,
                ScalingStrategy = ScalingStrategy.Sqrt
            };

            // Feeds every block that is read into the spectrum provider.
            var notificationSource = new SingleBlockNotificationStream(wasapiCaptureSource.ToSampleSource());
            notificationSource.SingleBlockRead += (s, a) => provider.Add(a.Left, a.Right);
            var wsrc = notificationSource.ToWaveSource();

            // Reads through the wave source as data arrives; this is what keeps
            // the realtime data flowing through the chain.
            byte[] buffer = new byte[wsrc.WaveFormat.BytesPerSecond];
            wasapiCaptureSource.DataAvailable += (s, e) =>
            {
                int read = wsrc.Read(buffer, 0, buffer.Length);
            };

            // Starts the capture.
            wasapiCapture.Start();

            // Gathers the FFT data and sends it to the handler until a key is pressed.
            var fftBuffer = new float[(int)fftSize];
            while (!Console.KeyAvailable)
            {
                if (provider.GetFftData(fftBuffer))
                {
                    Console.Clear();
                    handler.CreateSpectrumLineInternal(fftBuffer, 100);
                }
            }

            // Stops the capture.
            wasapiCapture.Stop();
        }
    }
}
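// Small helpers (not in the original): map an FFT bin index to its centre
// frequency and back, assuming the provider above is fed with the capture's
// sample rate. Useful when deciding which bins to render as bars.
private static double BinToFrequency(int binIndex, int sampleRate, int fftSize)
{
    // Each bin covers sampleRate / fftSize Hz; only bins 0 .. fftSize / 2 are usable.
    return (double)binIndex * sampleRate / fftSize;
}

private static int FrequencyToBin(double frequency, int sampleRate, int fftSize)
{
    return (int)(frequency / sampleRate * fftSize);
}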
private void StartCapture(string fileName)
{
    if (SelectedDevice == null)
        return;

    if (CaptureMode == CaptureMode.Capture)
        _soundIn = new WasapiCapture();
    else
        _soundIn = new WasapiLoopbackCapture();

    _soundIn.Device = SelectedDevice;
    _soundIn.Initialize();

    var soundInSource = new SoundInSource(_soundIn);
    var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
    _finalSource = singleBlockNotificationStream.ToWaveSource();
    _writer = new WaveWriter(fileName, _finalSource.WaveFormat);

    byte[] buffer = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];
    soundInSource.DataAvailable += (s, e) =>
    {
        int read;
        while ((read = _finalSource.Read(buffer, 0, buffer.Length)) > 0)
            _writer.Write(buffer, 0, read);
    };

    singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;

    _soundIn.Start();
}
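// The block-read handler and the matching stop routine are not part of the
// original snippet; minimal sketches, assuming CSCore's SingleBlockReadEventArgs
// and the _soundIn / _finalSource / _writer fields used in StartCapture.
private void SingleBlockNotificationStreamOnSingleBlockRead(object sender, SingleBlockReadEventArgs e)
{
    // Inspect e.Left / e.Right here, e.g. feed them to a spectrum provider.
}

private void StopCapture()
{
    if (_soundIn != null)
    {
        _soundIn.Stop();
        _soundIn.Dispose();
        _soundIn = null;
    }
    if (_writer != null)
    {
        // Disposing the writer finalizes the WAV header.
        _writer.Dispose();
        _writer = null;
    }
}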
private void openToolStripMenuItem_Click(object sender, EventArgs e)
{
    var openFileDialog = new OpenFileDialog()
    {
        Filter = CodecFactory.SupportedFilesFilterEn,
        Title = "Select a file..."
    };
    if (openFileDialog.ShowDialog() == DialogResult.OK)
    {
        Stop();

        const FftSize fftSize = FftSize.Fft4096;

        IWaveSource source = CodecFactory.Instance.GetCodec(openFileDialog.FileName);

        var spectrumProvider = new BasicSpectrumProvider(source.WaveFormat.Channels,
            source.WaveFormat.SampleRate, fftSize);

        _lineSpectrum = new LineSpectrum(fftSize)
        {
            SpectrumProvider = spectrumProvider,
            UseAverage = true,
            BarCount = 50,
            BarSpacing = 2,
            IsXLogScale = true,
            ScalingStrategy = ScalingStrategy.Sqrt
        };
        _voicePrint3DSpectrum = new VoicePrint3DSpectrum(fftSize)
        {
            SpectrumProvider = spectrumProvider,
            UseAverage = true,
            PointCount = 200,
            IsXLogScale = true,
            ScalingStrategy = ScalingStrategy.Sqrt
        };

        var notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());
        notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

        _source = notificationSource.ToWaveSource(16);

        _soundOut = new WasapiOut();
        _soundOut.Initialize(_source.ToMono());
        _soundOut.Play();

        timer1.Start();

        propertyGridTop.SelectedObject = _lineSpectrum;
        propertyGridBottom.SelectedObject = _voicePrint3DSpectrum;
    }
}
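// The Stop() call above is not shown in the original snippet; a minimal sketch,
// assuming the timer1, _soundOut and _source members used in this handler.
private void Stop()
{
    timer1.Stop();
    if (_soundOut != null)
    {
        _soundOut.Stop();
        _soundOut.Dispose();
        _soundOut = null;
    }
    if (_source != null)
    {
        _source.Dispose();
        _source = null;
    }
}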