Example #1
        public void StartListen()
        {
            _loopbackCapture = new WasapiLoopbackCapture();
            _loopbackCapture.Initialize();

            _soundInSource = new SoundInSource(_loopbackCapture);

            _basicSpectrumProvider = new BasicSpectrumProvider(_soundInSource.WaveFormat.Channels, _soundInSource.WaveFormat.SampleRate, CFftSize);

            _lineSpectrum = new LineSpectrum(CFftSize)
            {
                SpectrumProvider = _basicSpectrumProvider,
                BarCount         = _spectrumSize,
                UseAverage       = true,
                IsXLogScale      = true,
                ScalingStrategy  = _scalingStrategy
            };

            _loopbackCapture.Start();

            _singleBlockNotificationStream = new SingleBlockNotificationStream(_soundInSource.ToSampleSource());
            _realtimeSource = _singleBlockNotificationStream.ToWaveSource();

            byte[] buffer = new byte[_realtimeSource.WaveFormat.BytesPerSecond / 2];

            _soundInSource.DataAvailable += (s, ea) =>
            {
                while (_realtimeSource.Read(buffer, 0, buffer.Length) > 0)
                {
                    float[] spectrumData = _lineSpectrum.GetSpectrumData(MaxAudioValue);

                    if (spectrumData != null && _receiveAudio != null)
                    {
                        _receiveAudio(spectrumData);
                    }
                }
            };

            _singleBlockNotificationStream.SingleBlockRead += singleBlockNotificationStream_SingleBlockRead;
        }
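The SingleBlockRead handler wired above is not shown in this example. To populate the spectrum provider it typically forwards each intercepted sample pair; a minimal sketch, assuming the field names used here:

        private void singleBlockNotificationStream_SingleBlockRead(object sender, SingleBlockReadEventArgs e)
        {
            //feed each intercepted sample pair to the FFT provider
            _basicSpectrumProvider.Add(e.Left, e.Right);
        }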
Example #2
        private void Initialize()
        {
            if (_capture != null)
            {
                Cleanup();
            }

            // This uses the WASAPI API to capture any sound data played by the computer
            _capture = new WasapiLoopbackCapture(20);
            _capture.Initialize();
            var source = new SoundInSource(_capture).ToSampleSource();

            _capture.DataAvailable += Capture_DataAvailable;

            var notificationSource = new SingleBlockNotificationStream(source);

            notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;
            _finalSource = notificationSource.ToWaveSource();
            _rawBuffer   = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];

            // Actual fft data
            _fftBuffer = new float[(int)FftSize];
            _spectrum  = new SpectrumBase()
            {
                SpectrumProvider = new BasicSpectrumProvider(
                    _capture.WaveFormat.Channels,
                    _capture.WaveFormat.SampleRate,
                    FftSize
                    ),
                UseAverage         = UseAverage,
                IsXLogScale        = UseLogScale,
                ScalingStrategy    = ScalingStrategy,
                MinimumFrequency   = MinimumFrequency,
                MaximumFrequency   = MaximumFrequency,
                SpectrumResolution = FftBinCount,
                FftSize            = FftSize,
            };

            _capture.Start();
        }
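The Cleanup() called above is not shown. A minimal sketch, assuming the fields used in this example, would tear down the previous capture chain before re-initializing:

        private void Cleanup()
        {
            _capture.DataAvailable -= Capture_DataAvailable;
            _capture.Stop();
            _capture.Dispose();
            _capture = null;

            _finalSource?.Dispose();
            _finalSource = null;
        }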
Example #3
        public MainWindow()
        {
            InitializeComponent();

            Stop();

            //open the default device

            // IDW - capture from the microphone instead.
            //_soundIn = new WasapiLoopbackCapture();
            _soundIn = new WasapiCapture();
            //Our loopback capture opens the default render device by default so the following is not needed
            // var deviceEnumerator = new MMDeviceEnumerator();
            // _soundIn.Device = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);

            SetupSampleSource(source);

            // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    //data is discarded; reading just pumps the chain
                }
            };


            //play the audio
            _soundIn.Start();

            timer1.Start();

            propertyGrid.SelectedObject = _lineSpectrum;
        }
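SetupSampleSource(source) is not shown here. In the CSCore visualization sample it builds the spectrum chain roughly like this (a sketch; the exact LineSpectrum settings are this form's choice):

        private void SetupSampleSource(ISampleSource aSampleSource)
        {
            const FftSize fftSize = FftSize.Fft4096;
            //create a spectrum provider which provides fft data based on some input
            var spectrumProvider = new BasicSpectrumProvider(aSampleSource.WaveFormat.Channels,
                                                             aSampleSource.WaveFormat.SampleRate, fftSize);
            _lineSpectrum = new LineSpectrum(fftSize)
            {
                SpectrumProvider = spectrumProvider,
                UseAverage       = true,
                BarCount         = 50,
                BarSpacing       = 2,
                IsXLogScale      = true,
                ScalingStrategy  = ScalingStrategy.Sqrt
            };
            //the SingleBlockNotificationStream is used to intercept the played samples
            var notificationSource = new SingleBlockNotificationStream(aSampleSource);
            notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);
            _source = notificationSource.ToWaveSource(16);
        }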
Example #4
        public void RecordAudio(MMDevice device)
        {
            if (_soundIn.RecordingState == RecordingState.Recording)
            {
                _soundIn.Stop();
            }
            _soundIn.Device = device;
            _soundIn.Initialize();
            var src = new SoundInSource(_soundIn);

            src.DataAvailable += (s, e) =>
            {
                int read;
                read = _source.Read(_buffer, 0, _buffer.Length);
            };
            //_source = new SoundInSource(_soundIn) { FillWithZeros = true };
            var singleBlockNotificationStream = new SingleBlockNotificationStream(src.ToSampleSource());

            //_echoSource = new DmoEchoEffect(_source);
            _source = singleBlockNotificationStream.ToWaveSource();
            _soundIn.Start();
        }
Example #5
        private void StartCapture(string fileName)
        {
            if (SelectedDevice == null)
            {
                return;
            }

            if (CaptureMode == CaptureMode.Capture)
            {
                _soundIn = new WasapiCapture();
            }
            else
            {
                _soundIn = new WasapiLoopbackCapture();
            }

            _soundIn.Device = SelectedDevice;
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            _finalSource = singleBlockNotificationStream.ToWaveSource();
            _writer      = new WaveWriter(fileName, _finalSource.WaveFormat);

            byte[] buffer = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = _finalSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    _writer.Write(buffer, 0, read);
                }
            };

            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;

            _soundIn.Start();
        }
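The matching teardown is not shown. Stopping the device first guarantees no further DataAvailable callbacks before the writer is disposed; a sketch using this example's fields:

        private void StopCapture()
        {
            if (_soundIn != null)
            {
                _soundIn.Stop();
                _soundIn.Dispose();
                _soundIn = null;
                _finalSource.Dispose();
                _writer.Dispose();      //disposing the WaveWriter finalizes the WAV header
            }
        }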
Example #6
        private void StartSendingAudio()
        {
            _oggEncProcess = new Process();
            _oggEncProcess.StartInfo.UseShellExecute        = false;
            _oggEncProcess.StartInfo.RedirectStandardInput  = true;
            _oggEncProcess.StartInfo.RedirectStandardOutput = true;
            _oggEncProcess.StartInfo.FileName  = "oggenc2.exe";
            _oggEncProcess.StartInfo.Arguments = "--raw --raw-format=3 --raw-rate=44000 -";
            //_oggEncProcess.StartInfo.Arguments = "--raw --raw-format=3 --raw-rate=48000 --resample 000 -";
            _oggEncProcess.StartInfo.CreateNoWindow = true;
            _oggEncProcess.Start();

            _waveIn = new CSCore.SoundIn.WasapiLoopbackCapture();
            _waveIn.Initialize();
            var soundInSource = new SoundInSource(_waveIn);
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            _finalSource = singleBlockNotificationStream.ToWaveSource();

            byte[] inBuffer = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, _) =>
            {
                int read = 0;
                while ((read = _finalSource.Read(inBuffer, 0, inBuffer.Length)) > 0)
                {
                    _oggEncProcess.StandardInput.BaseStream.Write(inBuffer, 0, read);
                }
            };

            var stdOut = new AsyncStreamChunker(_oggEncProcess.StandardOutput);

            stdOut.DataReceived += async (s, data) =>
            {
                await SendData(_receiving, _sending, data);
            };
            stdOut.Start();

            _waveIn.Start();
        }
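Note that --raw-rate should match _waveIn.WaveFormat.SampleRate (typically 44100 or 48000), so the 44000 above is worth double-checking. A hypothetical teardown, not part of the original, would close the encoder's stdin so oggenc2 can flush its final pages:

        private void StopSendingAudio()
        {
            _waveIn.Stop();                                     //no more DataAvailable callbacks
            _waveIn.Dispose();

            _oggEncProcess.StandardInput.BaseStream.Flush();    //push any buffered PCM into the encoder
            _oggEncProcess.StandardInput.Close();               //EOF lets oggenc2 write the final Ogg pages
            _oggEncProcess.WaitForExit();
            _oggEncProcess.Dispose();
        }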
Example #7
        public SoundInSourceWrapper(SoundInSource soundIn, BroadcastSettings settings)
        {
            Settings = settings;

            _soundIn = soundIn;

            _convertedSource =
                soundIn.ChangeSampleRate(Settings.SampleRate).ToSampleSource().ToWaveSource(Settings.BitDepth);

            if (settings.Channel == AudioChannel.Mono)
            {
                _convertedSource = _convertedSource.ToMono();
            }
            else
            {
                _convertedSource = _convertedSource.ToStereo();
            }

            _audioChunks = new ConcurrentQueue<byte[]>();

            _soundIn.DataAvailable += SoundInDataAvailable;
        }
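The SoundInDataAvailable handler is not shown. Given the fields above it would drain the converted stream and queue fixed-size chunks for the broadcaster; a sketch under that assumption:

        private void SoundInDataAvailable(object sender, DataAvailableEventArgs e)
        {
            byte[] buffer = new byte[_convertedSource.WaveFormat.BytesPerSecond / 2];
            int read;
            //read everything the converted source can currently deliver
            while ((read = _convertedSource.Read(buffer, 0, buffer.Length)) > 0)
            {
                byte[] chunk = new byte[read];
                Buffer.BlockCopy(buffer, 0, chunk, 0, read);
                _audioChunks.Enqueue(chunk);    //consumed by the broadcast side
            }
        }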
Example #8
        public ParticleMoveSystem(World world)
        {
            particleSet = world.GetEntities().With<Translation>().With<Velocity>().AsSet();
            capture     = new WasapiLoopbackCapture();
            capture.Initialize();
            var soundInSource = new SoundInSource(capture);
            var source        = soundInSource.ToSampleSource();

            fft = new FftProvider(source.WaveFormat.Channels, fftSize);

            var notificationSource = new SingleBlockNotificationStream(source);

            notificationSource.SingleBlockRead += SingleBlockRead;

            waveSource = notificationSource.ToWaveSource(16);
            buffer     = new byte[waveSource.WaveFormat.BytesPerSecond / 2];

            soundInSource.DataAvailable += DataAvailable;

            // capture.DataAvailable += (sender, args) => DataAvailable(sender, args);
            capture.Start();
        }
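Neither handler is shown here. With the fields above they follow the standard CSCore pattern: SingleBlockRead feeds the FftProvider, and DataAvailable drains the wave source so those notifications actually fire. A sketch:

        private void SingleBlockRead(object sender, SingleBlockReadEventArgs e)
        {
            //accumulate samples until a full FFT block is available
            fft.Add(e.Left, e.Right);
        }

        private void DataAvailable(object sender, DataAvailableEventArgs e)
        {
            //reading pumps the chain; the data itself is discarded
            while (waveSource.Read(buffer, 0, buffer.Length) > 0)
            {
            }
        }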
Example #9
        public override void EnablePlugin()
        {
            //open the default device
            _soundIn = new WasapiLoopbackCapture();
            //Our loopback capture opens the default render device by default so the following is not needed
            //_soundIn.Device = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource(); //.AppendSource(x => new PitchShifter(x), out _pitchShifter);

            //create a spectrum provider which provides fft data based on some input
            _spectrumProvider = new BasicSpectrumProvider(source.WaveFormat.Channels,
                                                          source.WaveFormat.SampleRate, fftSize);


            //the SingleBlockNotificationStream is used to intercept the played samples
            var notificationSource = new SingleBlockNotificationStream(source);

            //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
            notificationSource.SingleBlockRead += (s, a) => _spectrumProvider.Add(a.Left, a.Right);

            _source = notificationSource.ToWaveSource(16);

            // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) => {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    //data is discarded; reading just pumps the chain
                }
            };


            //play the audio
            _soundIn.Start();
        }
Example #10
        private void button1_Click(object sender, EventArgs e)
        {
            if (cbInputDev.SelectedItem != null && cbOutputDev.SelectedItem != null)
            {
                cbInputDev.Enabled = cbOutputDev.Enabled = button1.Enabled = button2.Enabled = checkBox1.Enabled = false;

                soundIn        = new WasapiCapture(true, AudioClientShareMode.Shared, 30);
                soundIn.Device = (MMDevice)cbInputDev.SelectedItem;
                soundIn.Initialize();
                soundIn.Start();

                mixer = new SourceMixer(soundIn.WaveFormat.Channels, soundIn.WaveFormat.SampleRate);

                var waveSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = true
                };
                mixer.AddSource(waveSource.ToSampleSource());

                var mixedSource = mixer.ToWaveSource();

                soundOut        = new WasapiOut();
                soundOut.Device = (MMDevice)cbOutputDev.SelectedItem;
                soundOut.Initialize(mixedSource);
                soundOut.Play();

                if (checkBox1.Checked)
                {
                    var monitorSource = new SoundInSource(soundIn)
                    {
                        FillWithZeros = true
                    };
                    monitoringOut = new WasapiOut();
                    monitoringOut.Initialize(monitorSource);
                    monitoringOut.Play();
                }
            }
        }
Example #11
        public void startRecording(MMDevice micDevice, MMDevice speakDevice)
        {
            isRecording = true;
            window.LockUI();
            playSilence();
            makeFileNames();

            micCapture        = new WasapiCapture();
            micCapture.Device = micDevice;
            micCapture.Initialize();

            speakCapture        = new WasapiLoopbackCapture();
            speakCapture.Device = speakDevice;
            speakCapture.Initialize();

            micSource = new SoundInSource(micCapture);

            micWriter = MediaFoundationEncoder.CreateMP3Encoder(micSource.WaveFormat, micFileName);
            byte[] micBuffer = new byte[micSource.WaveFormat.BytesPerSecond];
            micSource.DataAvailable += (s, e) =>
            {
                int read = micSource.Read(micBuffer, 0, micBuffer.Length);
                micWriter.Write(micBuffer, 0, read);
            };

            micCapture.Start();

            speakSource = new SoundInSource(speakCapture);
            speakWriter = MediaFoundationEncoder.CreateMP3Encoder(speakSource.WaveFormat, speakFileName);
            byte[] speakBuffer = new byte[speakSource.WaveFormat.BytesPerSecond];
            speakSource.DataAvailable += (s, e) =>
            {
                int read = speakSource.Read(speakBuffer, 0, speakBuffer.Length);
                speakWriter.Write(speakBuffer, 0, read);
            };

            speakCapture.Start();
        }
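The corresponding stop path is not shown. Disposing the MediaFoundationEncoder instances is what finalizes the MP3 files; a sketch, assuming an UnlockUI() counterpart to the LockUI() call above (hypothetical):

        public void stopRecording()
        {
            micCapture.Stop();
            speakCapture.Stop();

            micWriter.Dispose();        //finalizes the MP3 file
            speakWriter.Dispose();
            micCapture.Dispose();
            speakCapture.Dispose();

            isRecording = false;
            window.UnlockUI();          //hypothetical counterpart to LockUI()
        }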
Example #12
        internal void StartListen()
        {
            capture.Initialize();
            soundInSource         = new SoundInSource(capture);
            basicSpectrumProvider = new BasicSpectrumProvider(soundInSource.WaveFormat.Channels, soundInSource.WaveFormat.SampleRate, C_FftSize);
            lineSpectrum          = new LineSpectrum(C_FftSize, minFrequency, maxFrequency)
            {
                SpectrumProvider = basicSpectrumProvider,
                BarCount         = spectrumSize,
                UseAverage       = true,
                IsXLogScale      = true,
                ScalingStrategy  = EScalingStrategy.Sqrt
            };

            capture.Start();

            ISampleSource sampleSource = soundInSource.ToSampleSource();

            singleBlockNotificationStream = new SingleBlockNotificationStream(sampleSource);
            realtimeSource = singleBlockNotificationStream.ToWaveSource();

            byte[] buffer = new byte[realtimeSource.WaveFormat.BytesPerSecond / 128];

            soundInSource.DataAvailable += (s, ea) =>
            {
                while (realtimeSource.Read(buffer, 0, buffer.Length) > 0)
                {
                    var spectrumData = lineSpectrum.GetSpectrumData(C_MaxAudioValue);

                    if (spectrumData != null)
                    {
                        receiveAudio?.Invoke(spectrumData);
                    }
                }
            };

            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStream_SingleBlockRead;
        }
Example #13
        private void StartCapture(string fileName)
        {
            if (settingsForm.SelectedDevice == null)
            {
                return;
            }

            _soundIn        = new WasapiLoopbackCapture();
            _soundIn.Device = settingsForm.SelectedDevice;
            _soundIn.Initialize();

            // Ensure a unique file name: append "_" while a file with that name already exists
            string finalName = Path.ChangeExtension(fileName, null);

            while (File.Exists(finalName + ".wav"))
            {
                finalName += "_";
            }
            finalName += ".wav";

            var soundInSource = new SoundInSource(_soundIn);
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            _finalSource = singleBlockNotificationStream.ToWaveSource();
            _writer      = new WaveWriter(finalName, _finalSource.WaveFormat);

            byte[] buffer = new byte[_finalSource.WaveFormat.BytesPerSecond];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read = _finalSource.Read(buffer, 0, buffer.Length);
                //while ((read = _finalSource.Read(buffer, 0, buffer.Length)) > 0) Causes stops in the music recording (very bad idea)
                _writer.Write(buffer, 0, read);
            };

            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;

            _soundIn.Start();
        }
Example #14
        public MP3Recorder(string filename)
        {
            if (File.Exists(filename))
            {
                File.Delete(filename);
            }
            wasapiCapture_ = new WasapiCapture();
            wasapiCapture_.Initialize();
            var wasapiCaptureSource = new SoundInSource(wasapiCapture_);

            stereoSource_ = wasapiCaptureSource.ToStereo();
            writer_       = MediaFoundationEncoder.CreateMP3Encoder(stereoSource_.WaveFormat, filename);
            byte[] buffer = new byte[stereoSource_.WaveFormat.BytesPerSecond];
            wasapiCaptureSource.DataAvailable += (s, e) =>
            {
                int read = stereoSource_.Read(buffer, 0, buffer.Length);
                writer_.Write(buffer, 0, read);
            };
            wasapiCapture_.Start();
        }
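Nothing here ever stops the capture or finalizes the file. A Dispose along these lines (not in the original) would do both:

        public void Dispose()
        {
            wasapiCapture_.Stop();
            wasapiCapture_.Dispose();
            stereoSource_.Dispose();
            writer_.Dispose();      //disposing the encoder finalizes the MP3
        }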
Example #15
        private void Form1_Load(object sender, EventArgs e)
        {
            Stop();
            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);

            SetupSampleSource(source);

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    //data is discarded; reading just pumps the chain
                }
            };

            _soundIn.Start();
        }
Example #16
        void StartRecordByCSCore()
        {
            GetFilePath();
            _soundIn        = new WasapiCapture();
            _soundIn.Device = (MMDevice)listView1.SelectedItems[0].Tag;
            _soundIn.Initialize();
            var soundInSource = new SoundInSource(_soundIn);
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            _finalSource = singleBlockNotificationStream.ToWaveSource();
            _writer      = new WaveWriter(sDestinationFile, _finalSource.WaveFormat);
            byte[] buffer = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = _finalSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    _writer.Write(buffer, 0, read);
                }
            };

            _soundIn.Start();
        }
Example #17
        private void InitializeAudioCapture(MMDevice selectedDevice)
        {
            if (selectedDevice != null)
            {
                mAudioCapture = selectedDevice.DataFlow == DataFlow.Capture ?
                                new WasapiCapture() : new WasapiLoopbackCapture();
                mAudioCapture.Device = selectedDevice;
                mAudioCapture.Initialize();
                mAudioCapture.DataAvailable += Capture_DataAvailable;
                mSoundInSource = new SoundInSource(mAudioCapture)
                {
                    FillWithZeros = false
                };
                //create a source that converts the data provided by the
                //soundInSource to the required format
                mConvertedSource = mSoundInSource
                                   .ChangeSampleRate(SampleRate) // sample rate
                                   .ToSampleSource()
                                   .ToWaveSource(16);            //bits per sample

                mConvertedSource = mConvertedSource.ToMono();
            }
        }
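Capture_DataAvailable is not shown, and because FillWithZeros is false no data flows until something reads mConvertedSource. Note the handler is attached to the ISoundIn here; other examples in this collection warn that the SoundInSource may not have the data yet at that point. A sketch, where ProcessAudioData is a hypothetical consumer:

        private void Capture_DataAvailable(object sender, DataAvailableEventArgs e)
        {
            byte[] buffer = new byte[mConvertedSource.WaveFormat.BytesPerSecond / 2];
            int read;
            while ((read = mConvertedSource.Read(buffer, 0, buffer.Length)) > 0)
            {
                ProcessAudioData(buffer, read);     //hypothetical consumer of the converted data
            }
        }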
Example #18
        public void Initialize()
        {
            MMDevice   captureDevice = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            WaveFormat deviceFormat  = captureDevice.DeviceFormat;

            _audioEndpointVolume = AudioEndpointVolume.FromDevice(captureDevice);

            //DarthAffe 07.02.2018: This is a really stupid workaround to (hopefully) finally fix the surround driver issues
            for (int i = 1; i < 13; i++)
            {
                try
                {
                    _capture = new WasapiLoopbackCapture(100, new WaveFormat(deviceFormat.SampleRate, deviceFormat.BitsPerSample, i));
                }
                catch
                {
                    //unsupported channel count; keep the last capture that worked
                }
            }

            if (_capture == null)
            {
                throw new NullReferenceException("Failed to initialize WasapiLoopbackCapture");
            }

            _capture.Initialize();
            _soundInSource = new SoundInSource(_capture)
            {
                FillWithZeros = false
            };

            _stream = _soundInSource.WaveFormat.SampleRate == 44100
                ? new SingleBlockNotificationStream(_soundInSource.ToStereo().ToSampleSource())
                : new SingleBlockNotificationStream(_soundInSource.ChangeSampleRate(44100).ToStereo().ToSampleSource());

            _soundInSource.DataAvailable += OnSoundDataAvailable;

            _capture.Start();
        }
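OnSoundDataAvailable is not shown. Since _stream is a sample-source chain, the handler reads floats rather than bytes; a minimal sketch using this example's fields:

        private void OnSoundDataAvailable(object sender, DataAvailableEventArgs e)
        {
            //half a second of float samples; reading drives the notification stream
            float[] buffer = new float[_stream.WaveFormat.BytesPerSecond / 8];
            while (_stream.Read(buffer, 0, buffer.Length) > 0)
            {
            }
        }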
Example #19
        private void StartAudioLoopback()
        {
            _oggEncProcess = new Process();
            _oggEncProcess.StartInfo.UseShellExecute        = false;
            _oggEncProcess.StartInfo.RedirectStandardInput  = true;
            _oggEncProcess.StartInfo.RedirectStandardOutput = true;
            _oggEncProcess.StartInfo.FileName       = "oggenc2.exe";
            _oggEncProcess.StartInfo.Arguments      = "--raw --raw-format=3 --raw-rate=48000 --resample 44100 -";
            _oggEncProcess.StartInfo.CreateNoWindow = true;
            _oggEncProcess.Start();

            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();
            var soundInSource = new SoundInSource(_soundIn);
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            _finalSource = singleBlockNotificationStream.ToWaveSource();

            byte[] inBuffer = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, _) =>
            {
                int read;
                while ((read = _finalSource.Read(inBuffer, 0, inBuffer.Length)) > 0)
                {
                    _oggEncProcess.StandardInput.BaseStream.Write(inBuffer, 0, read);
                }
            };

            _tcpListener = new AsyncTcpListener();
            _tcpListener.ClientConnected += (s, _) =>
            {
                _soundIn.Start();
            };
            _stdOut = new AsyncStreamChunker(_oggEncProcess.StandardOutput);
            _stdOut.DataReceived += (s, data) => _tcpListener.Write(data, 0, 512);
            _stdOut.Start();
        }
Example #20
        private void btnStart_Click(object sender, EventArgs e)
        {
            if (deviceslist.SelectedItems.Count <= 0)
            {
                return;
            }

            SaveFileDialog sfd = new SaveFileDialog();

            sfd.Filter   = "WAV (*.wav)|*.wav";
            sfd.Title    = "Speichern";
            sfd.FileName = String.Empty;
            if (sfd.ShowDialog() == System.Windows.Forms.DialogResult.OK)
            {
                _waveIn        = new WaveInEvent(new WaveFormat(44100, 16, _selectedDevice.Channels));
                _waveIn.Device = deviceslist.SelectedItems[0].Index;

                _waveIn.Initialize();
                _waveIn.Start();

                var waveInToSource = new SoundInSource(_waveIn);

                _source = waveInToSource;
                var notifyStream = new SingleBlockNotificationStream(_source);
                notifyStream.SingleBlockRead += OnNotifyStream_SingleBlockRead;

                _source       = notifyStream.ToWaveSource(16);
                _writerBuffer = new byte[_source.WaveFormat.BytesPerSecond];

                _writer = new WaveWriter(File.OpenWrite(sfd.FileName), _source.WaveFormat);
                waveInToSource.DataAvailable += OnNewData;

                btnStart.Enabled = false;
                btnStop.Enabled  = true;
            }
        }
Example #21
        public void Start()
        {
            if (_device == null || (_soundIn != null && _soundIn.RecordingState == RecordingState.Recording))
            {
                return;
            }
            //create a new soundIn instance
            _soundIn = _captureMode == CaptureMode.Capture ? new WasapiCapture() : new WasapiLoopbackCapture();
            //optional: set some properties
            _soundIn.Device = _device;

            //initialize the soundIn instance
            _soundIn.Initialize();

            //create a SoundSource around the soundIn instance
            //this SoundSource will provide data captured by the soundIn instance
            SoundInSource soundInSource = new SoundInSource(_soundIn)
            {
                FillWithZeros = false
            };

            //create a source that converts the data provided by the
            //soundInSource to any other format
            //in this case the "Fluent"-extension methods are being used
            _convertedSource = soundInSource
                               .ChangeSampleRate(sampleRate) // sample rate
                               .ToSampleSource()
                               .ToWaveSource(bitsPerSample); //bits per sample

            //channels...
            _convertedSource = channels == 1 ? _convertedSource.ToMono() : _convertedSource.ToStereo();
            _waveWriter      = new WaveWriter("out.wav", _convertedSource.WaveFormat);

            soundInSource.DataAvailable += OnDatAvailable;
            _soundIn.Start();
        }
Example #22
        private bool Initialize()
        {
            if (_initialized)
            {
                return(true);
            }

            var currentTicks = Environment.TickCount;

            if (currentTicks - _lastInitializeTickCount < 1000)
            {
                return(false);
            }
            _lastInitializeTickCount = currentTicks;

            try
            {
                _soundIn = new WasapiLoopbackCapture();
                _soundIn.Initialize();
            }
            catch (Exception e)
            {
                Logger.Debug(e, "Failed to initialize WasapiLoopbackCapture!");
                return(false);
            }

            Logger.Debug($"Initialized WasapiLoopbackCapture on \"{_soundIn.Device.FriendlyName}\"");

            var soundInSource = new SoundInSource(_soundIn);
            var sampleSource  = soundInSource.ToSampleSource();

            const FftSize fftSize = FftSize.Fft1024;

            _fftBuffer        = new float[(int)fftSize];
            _spectrumProvider = new SpectrumProvider(sampleSource.WaveFormat.Channels, sampleSource.WaveFormat.SampleRate, fftSize);

            var notificationSource = new DataNotificationSource(sampleSource);

            notificationSource.DataRead += (s, e) => _spectrumProvider.Add(e.Data, e.Data.Length);

            var waveSource = notificationSource.ToWaveSource(16);
            var buffer     = new byte[waveSource.WaveFormat.BytesPerSecond / 2];

            soundInSource.DataAvailable += (s, e) =>
            {
                //read to pump the chain; DataRead fills the spectrum provider
                while (waveSource.Read(buffer, 0, buffer.Length) > 0)
                {
                }
            };

            _spectrum = new LedSpectrum(Config.ColorGradient)
            {
                FftSize          = fftSize,
                SpectrumProvider = _spectrumProvider,
                UseAverage       = Config.UseAverage,
                MinimumFrequency = Config.MinimumFrequency,
                MaximumFrequency = Config.MaximumFrequency,
                ScalingStrategy  = Config.ScalingStrategy,
                ScalingFactor    = Config.ScalingFactor,
                IsXLogScale      = false
            };

            _soundIn.Start();

            _initialized = true;
            return(true);
        }
Example #23
        static async Task MainAsync()
        {
            Console.Title = "Audio Streamer - PC to Android";

            IPAddress IPAddr;
            bool      UseAdb = false;

            try
            {
                var AdbDevices = Process.Start(new ProcessStartInfo()
                {
                    FileName               = "adb",
                    Arguments              = "devices",
                    UseShellExecute        = false,
                    RedirectStandardOutput = true
                });

                await AdbDevices.StandardOutput.ReadLineAsync();

                UseAdb = !string.IsNullOrWhiteSpace(await AdbDevices.StandardOutput.ReadLineAsync());
            }
            catch (System.ComponentModel.Win32Exception)
            {
            }

            if (UseAdb)
            {
                IPAddr = IPAddress.Loopback;
            }
            else
            {
                Console.Write("IP: ");
                IPAddr = IPAddress.Parse(Console.ReadLine());
            }

            Process.GetCurrentProcess().PriorityClass = ProcessPriorityClass.High;
            using (Capture = new WasapiLoopbackCapture(0, new CSCore.WaveFormat(), ThreadPriority.Highest))
            {
                while (true)
                {
                    var NoSpamDelay = Task.Delay(1000);
                    if (UseAdb)
                    {
                        Process.Start(new ProcessStartInfo()
                        {
                            FileName        = "adb",
                            Arguments       = "forward tcp:1420 tcp:1420",
                            UseShellExecute = false
                        });
                    }

                    using (var Conn = new TcpClient()
                    {
                        NoDelay = true,
                        ReceiveBufferSize = 64,
                        SendBufferSize = 1 << 12    //2^12 = ~4000 so 1000 floats
                    })
                    {
                        try
                        {
                            await Conn.ConnectAsync(IPAddr, ServerPort);

                            Stream = Conn.GetStream();
                            if (Stream.ReadByte() == 1)
                            {
                                Console.WriteLine("Connected to " + IPAddr.ToString());
                                Capture.Initialize();
                                using (Source = new SoundInSource(Capture))
                                {
                                    int SampleRateServer = Source.WaveFormat.SampleRate;
                                    int SampleRateClient = Stream.ReadByte() | Stream.ReadByte() << 8 | Stream.ReadByte() << 16;
                                    if (SampleRateClient != SampleRateServer)
                                    {
                                        Console.WriteLine($"Sample rate mismatch, PC was {SampleRateServer} Hz but client was {SampleRateClient} Hz");
                                        Console.WriteLine("Adjust your PC's sample rate then press any key to try again");
                                        Console.ReadKey();
                                        Console.Clear();
                                    }
                                    else
                                    {
                                        // Start Capturing
                                        Source.DataAvailable += DataAvailable;
                                        Capture.Start();

                                        Console.WriteLine($"Started recording audio at {SampleRateServer} Hz");
                                        Window.SetWindowShown(false);

                                        // Stop Capturing
                                        await (DisconnectWaiter = new TaskCompletionSource<bool>()).Task;
                                        await Task.Run(() => Capture.Stop());

                                        Window.SetWindowShown(true);
                                        Console.WriteLine("Disconnected, stopped recording audio");
                                    }
                                }
                            }
                        }
                        catch { }
                        await NoSpamDelay;
                    }
                }
            }
        }
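The static DataAvailable handler is not shown. It would push the captured bytes to the TCP stream and trip DisconnectWaiter when the client drops; a sketch under those assumptions:

        private static void DataAvailable(object sender, DataAvailableEventArgs e)
        {
            try
            {
                //e.Data holds the raw bytes captured for this notification
                Stream.Write(e.Data, 0, e.ByteCount);
            }
            catch (IOException)
            {
                //client went away; let the main loop clean up and retry
                DisconnectWaiter?.TrySetResult(true);
            }
        }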
Example #24
        static void writeSpeakersToWav(string[] args)
        {
            const int GOOGLE_RATE            = 16000;
            const int GOOGLE_BITS_PER_SAMPLE = 16;
            const int GOOGLE_CHANNELS        = 1;
            const int EARPHONES = 5;

            CaptureMode captureMode = CaptureMode.LoopbackCapture;

            DataFlow dataFlow = captureMode == CaptureMode.Capture ? DataFlow.Capture : DataFlow.Render;

            var devices = MMDeviceEnumerator.EnumerateDevices(dataFlow, DeviceState.Active);

            int deviceIndex = 0;
            foreach (var d in devices)
            {
                Console.WriteLine("- {0:#00}: {1}", deviceIndex++, d.FriendlyName);
            }
            var headphones = devices.First(x => x.FriendlyName.StartsWith("small"));

            //using (WasapiCapture capture = new WasapiLoopbackCapture())
            using (WasapiCapture soundIn = captureMode == CaptureMode.Capture
                ? new WasapiCapture()
                : new WasapiLoopbackCapture())
            {
                //if necessary, you can choose a device here
                //to do so, simply set the device property of the capture to any MMDevice
                //to choose a device, take a look at the sample here: http://cscore.codeplex.com/

                soundIn.Device = headphones;

                Console.WriteLine("Waiting, press any key to start");
                Console.ReadKey();
                //initialize the selected device for recording
                soundIn.Initialize();

                //create a SoundSource around the soundIn instance
                //this SoundSource will provide data captured by the soundIn instance
                SoundInSource soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };

                //create a source that converts the data provided by the
                //soundInSource to any other format
                //in this case the "Fluent"-extension methods are being used
                IWaveSource convertedSource = soundInSource
                                              .ChangeSampleRate(GOOGLE_RATE)         // sample rate
                                              .ToSampleSource()
                                              .ToWaveSource(GOOGLE_BITS_PER_SAMPLE); //bits per sample

                var channels = GOOGLE_CHANNELS;

                //channels...
                using (convertedSource = channels == 1 ? convertedSource.ToMono() : convertedSource.ToStereo())
                    //create a wavewriter to write the data to
                    using (WaveWriter w = new WaveWriter("dump.wav", convertedSource.WaveFormat))
                    {
                        //setup an eventhandler to receive the recorded data
                        //register an event handler for the DataAvailable event of
                        //the soundInSource
                        //Important: use the DataAvailable of the SoundInSource
                        //If you use the DataAvailable event of the ISoundIn itself,
                        //the data recorded by that event might not yet be available
                        //at the soundInSource
                        soundInSource.DataAvailable += (s, e) =>
                        {
                            //read data from the convertedSource
                            //important: don't use the e.Data here
                            //the e.Data contains the raw data provided by the
                            //soundInSource which won't have your target format
                            byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                            int    read;

                            //keep reading as long as we still get some data
                            //if you're using such a loop, make sure that soundInSource.FillWithZeros is set to false
                            while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                            {
                                //write the read data to a file
                                // ReSharper disable once AccessToDisposedClosure
                                w.Write(buffer, 0, read);
                            }
                        };


                        //start recording
                        soundIn.Start();

                        Console.WriteLine("Started, press any key to stop");
                        Console.ReadKey();

                        //stop recording
                        soundIn.Stop();
                    }
            }
        }
Example #25
        static int Main(string[] args)
        {
            int    time;
            string output_file;

            switch (args.Length)
            {
            case 1:
                if (args[0] == "-h")
                {
                    System.Console.WriteLine("Usage:");
                    System.Console.WriteLine("    LoopbackCapture.exe <output/wav> <time/milliseconds>");
                    return(1);
                }
                output_file = args[0];
                time        = 0;
                break;

            case 2:
                output_file = args[0];
                try
                {
                    time = Int32.Parse(args[1]);
                }
                catch
                {
                    time = 0;
                }
                break;

            default:
                time        = 0;
                output_file = "record.wav";
                break;
            }

            int sampleRate    = 48000;
            int bitsPerSample = 24;

            //create a new soundIn instance
            using (WasapiCapture soundIn = new WasapiLoopbackCapture())
            {
                //initialize the soundIn instance
                soundIn.Initialize();

                //create a SoundSource around the soundIn instance
                SoundInSource soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };

                //create a source that converts the data provided by the soundInSource to any other format

                IWaveSource convertedSource = soundInSource
                                              .ChangeSampleRate(sampleRate) // sample rate
                                              .ToSampleSource()
                                              .ToWaveSource(bitsPerSample); //bits per sample

                //channels...
                using (convertedSource = convertedSource.ToStereo())
                {
                    //create a new wavefile
                    using (WaveWriter waveWriter = new WaveWriter(output_file, convertedSource.WaveFormat))
                    {
                        //register an event handler for the DataAvailable event of the soundInSource
                        soundInSource.DataAvailable += (s, e) =>
                        {
                            //read data from the convertedSource
                            byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                            int    read;

                            //keep reading as long as we still get some data
                            while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                            {
                                //write the read data to a file
                                waveWriter.Write(buffer, 0, read);
                            }
                        };

                        //start recording
                        soundIn.Start();

                        //delay and keep recording
                        if (time != 0)
                        {
                            Thread.Sleep(time);
                        }
                        else
                        {
                            Console.ReadKey();
                        }

                        //stop recording
                        soundIn.Stop();
                    }
                }
            }
            return(0);
        }
Example #26
        public void Record(string deviceName, string audioFilePath = @"C:\Temp\output.wav")
        {
            _timer = new Stopwatch();
            _timer.Start();

            // choose the capture mode
            CaptureMode captureMode = CaptureMode.LoopbackCapture;
            DataFlow    dataFlow    = captureMode == CaptureMode.Capture ? DataFlow.Capture : DataFlow.Render;

            //select the device:
            var devices = MMDeviceEnumerator.EnumerateDevices(dataFlow, DeviceState.Active);

            if (!devices.Any())
            {
                Console.WriteLine("### No devices found.");
                return;
            }

            Console.WriteLine($"### Using device {deviceName}");
            var device = devices.First(d => d.FriendlyName.Equals(deviceName));

            //start recording
            //create a new soundIn instance
            _soundIn = captureMode == CaptureMode.Capture
                ? new WasapiCapture()
                : new WasapiLoopbackCapture();


            //optional: set some properties
            _soundIn.Device = device;


            //initialize the soundIn instance
            _soundIn.Initialize();

            //create a SoundSource around the soundIn instance
            //this SoundSource will provide data captured by the soundIn instance
            SoundInSource soundInSource = new SoundInSource(_soundIn)
            {
                FillWithZeros = false
            };

            //create a source that converts the data provided by the
            //soundInSource to any other format
            //in this case the "Fluent"-extension methods are being used
            _convertedSource = soundInSource
                               .ChangeSampleRate(SampleRate) // sample rate
                               .ToSampleSource()
                               .ToWaveSource(BitsPerSample); //bits per sample

            //channels...
            _convertedSource = _convertedSource.ToMono();

            //create a new wavefile
            _waveWriter = new WaveWriter(audioFilePath, _convertedSource.WaveFormat);

            //register an event handler for the DataAvailable event of
            //the soundInSource
            //Important: use the DataAvailable of the SoundInSource
            //If you use the DataAvailable event of the ISoundIn itself,
            //the data recorded by that event might not yet be available
            //at the soundInSource
            soundInSource.DataAvailable += (s, e) =>
            {
                //read data from the convertedSource
                //important: don't use the e.Data here
                //the e.Data contains the raw data provided by the
                //soundInSource which won't have your target format
                byte[] buffer = new byte[_convertedSource.WaveFormat.BytesPerSecond / 2];
                int    read;

                //keep reading as long as we still get some data
                //if you're using such a loop, make sure that soundInSource.FillWithZeros is set to false
                while ((read = _convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    //write the read data to a file
                    // ReSharper disable once AccessToDisposedClosure
                    _waveWriter.Write(buffer, 0, read);
                }
            };

            //we've set everything we need -> start capturing data
            _soundIn.Start();
            Console.WriteLine($"### RECORDING {audioFilePath}");

            while (_timer.ElapsedMilliseconds / 1000 < 15 && _timer.IsRunning)
            {
                Thread.Sleep(500);
            }

            Console.WriteLine("### STOP RECORDING");
            _soundIn.Stop();
            _timer.Stop();

            _waveWriter.Dispose();
            _convertedSource.Dispose();
            _soundIn.Dispose();

            AudioFileCaptured?.Invoke(this, new AudioRecorderEventArgs()
            {
                AudioFilePath = audioFilePath
            });
        }
Example #27
        //***********************************************************************************************************************************************************************************************************

        #region Control recorder (start, stop, pause, resume)

        /// <summary>
        /// Start a new record
        /// </summary>
        public void StartRecord()
        {
            try
            {
                if (RecordState == RecordStates.RECORDING)
                {
                    return;
                }

                if (TrackInfo == null)
                {
                    _logHandle.Report(new LogEventWarning("Record not started, because no track info exists."));
                    return;
                }
                CreateFilePath();
                if (RecorderRecSettings.FileExistMode == RecorderFileExistModes.SKIP && (System.IO.File.Exists(FileStrWAV) || System.IO.File.Exists(FileStrMP3)))
                {
                    _logHandle.Report(new LogEventWarning("Record (\"" + TrackInfo?.TrackName + "\") not started, because FileExistMode == SKIP and file already exists."));
                    return;
                }

                if (!Directory.Exists(RecorderRecSettings.BasePath))
                {
                    _logHandle.Report(new LogEventWarning("Record (\"" + TrackInfo?.TrackName + "\") not started, because RecordPath is invalid."));
                    return;
                }

                if (WasapiOut.IsSupportedOnCurrentPlatform)
                {
                    _silenceOut = new WasapiOut();
                }
                else
                {
                    _silenceOut = new DirectSoundOut();
                }
                _silenceOut.Initialize(new SilenceGenerator());
                _silenceOut.Play();         //Play silence because otherwise silent parts aren't recorded

                _capture = new WasapiLoopbackCapture();

                MMDeviceEnumerator devEnumerator = new MMDeviceEnumerator();
                MMDeviceCollection mMDevices     = devEnumerator.EnumAudioEndpoints(DataFlow.All, DeviceState.All);

                MMDevice dev;
                if (RecorderRecSettings.RecorderDeviceName.ToLower().Contains("default"))
                {
                    dev = devEnumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
                }
                else
                {
                    dev = mMDevices.FirstOrDefault(d => d.DeviceState == DeviceState.Active && d.FriendlyName == RecorderRecSettings.RecorderDeviceName);
                }

                if (dev == null)
                {
                    _logHandle.Report(new LogEventError("Record (\"" + TrackInfo?.TrackName + "\") not started, because device \"" + RecorderRecSettings.RecorderDeviceName + "\" wasn't found." + (RecorderRecSettings.RecorderDeviceName.Contains("CABLE Input") ? " Make sure that \"VB Cable\" is installed correctly." : "")));
                    return;
                }

                _capture.Device = dev;
                _capture.Initialize();      // Important!!! First set the capture device, then call Initialize(); otherwise audio is captured from the previous device

                SoundInSource soundInSource    = new SoundInSource(_capture);
                SampleToPcm16 soundInSourcePCM = new SampleToPcm16(soundInSource.ToSampleSource());     //Used to convert _capture to Pcm16 format

                Directory.CreateDirectory(Path.GetDirectoryName(FileStrWAV));
                _wavWriterFormat        = new WaveFormat(_capture.WaveFormat.SampleRate, soundInSourcePCM.WaveFormat.BitsPerSample, _capture.WaveFormat.Channels, AudioEncoding.Pcm, _capture.WaveFormat.ExtraSize); //WAV file must be 16-bit PCM file for normalizing with normalize.exe
                _wavWriter              = new WaveWriter(FileStrWAV, _wavWriterFormat);
                _wavWriterPositionBytes = 0;

                soundInSource.DataAvailable += (s, capData) =>
                {
                    if (RecordState == RecordStates.RECORDING)              //Only record when RecordState is RECORDING
                    {
                        byte[] buffer = new byte[soundInSourcePCM.WaveFormat.BytesPerSecond / 2];
                        int    read;

                        while ((read = soundInSourcePCM.Read(buffer, 0, buffer.Length)) > 0) //keep reading as long as we still get some data
                        {
                            _wavWriter.Write(buffer, 0, read);                               //write the read data to a file
                            _wavWriterPositionBytes += read;
                        }
                    }
                };

                _capture.Start();

                RecordState = RecordStates.RECORDING;
                _logHandle.Report(new LogEventInfo("Record (\"" + TrackInfo?.TrackName + "\") started."));
                WasRecordPaused = false;
            }
            catch (Exception ex)
            {
                _logHandle.Report(new LogEventError("Error while starting record: " + ex.Message));
            }
        }
Example #28
        private void StartCapture(string fileName)
        {
            //Capture Mode
            CaptureMode = (CaptureMode)1;
            DataFlow dataFlow = CaptureMode == CaptureMode.Capture ? DataFlow.Capture : DataFlow.Render;
            //

            //Get the audio devices for the selected data flow
            var devices = MMDeviceEnumerator.EnumerateDevices(dataFlow, DeviceState.Active);

            if (!devices.Any())
            {
                MessageBox.Show("No devices found.");
                return;
            }

            int selectedDeviceIndex = 0;

            SelectedDevice = devices[selectedDeviceIndex];

            if (SelectedDevice == null)
            {
                return;
            }

            if (CaptureMode == CaptureMode.Capture)
            {
                _soundIn = new WasapiCapture();
            }
            else
            {
                _soundIn = new WasapiLoopbackCapture();
            }

            _soundIn.Device = SelectedDevice;

            //Sample rate of the audio
            int sampleRate = 16000;
            //bits per sample
            int bitsPerSample = 16;
            //channels
            int channels = 1;


            //initialize the soundIn instance
            _soundIn.Initialize();

            //create a SoundSource around the soundIn instance
            //this SoundSource will provide data captured by the soundIn instance
            var soundInSource = new SoundInSource(_soundIn)
            {
                FillWithZeros = false
            };

            //create a source that converts the data provided by the
            //soundInSource to any other format
            //in this case the "Fluent"-extension methods are being used
            IWaveSource convertedSource = soundInSource
                                          .ChangeSampleRate(sampleRate) // sample rate
                                          .ToSampleSource()
                                          .ToWaveSource(bitsPerSample); //bits per sample

            //channels == 1, so convert the source to mono audio
            convertedSource = convertedSource.ToMono();

            AudioToText audioToText = new AudioToText();

            audioToText.SetFolderPermission(_folderPath);

            //create a new wavefile
            waveWriter = new WaveWriter(fileName, convertedSource.WaveFormat);
            //register an event handler for the DataAvailable event of
            //the soundInSource
            //Important: use the DataAvailable of the SoundInSource
            //If you use the DataAvailable event of the ISoundIn itself,
            //the data recorded by that event might not yet be available
            //at the soundInSource
            soundInSource.DataAvailable += (s, e) =>
            {
                //read data from the convertedSource
                //important: don't use the e.Data here
                //the e.Data contains the raw data provided by the
                //soundInSource which won't have your target format
                byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                int    read;

                //keep reading as long as we still get some data
                //if you're using such a loop, make sure that soundInSource.FillWithZeros is set to false
                while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    //write the read data to a file
                    // ReSharper disable once AccessToDisposedClosure
                    waveWriter.Write(buffer, 0, read);
                }
            };

            //we've set everything we need -> start capturing data
            objStopWatch.Start();
            _soundIn.Start();
        }
Example #29
        private void Start()
        {
            if (_starting)
            {
                return;
            }

            Logger.Debug("Starting audio capture for device: {0}", Device?.FriendlyName ?? "default");
            _starting = true;

            try
            {
                Stop();

                if (Type == MmDeviceType.Input)
                {
                    _soundIn = Device != null
                        ? new WasapiCapture {
                        Device = Device
                    }
                        : new WasapiCapture();
                }
                else
                {
                    _soundIn = Device != null
                        ? new WasapiLoopbackCapture {
                        Device = Device
                    }
                        : new WasapiLoopbackCapture();
                }

                _soundIn.Initialize();

                var soundInSource = new SoundInSource(_soundIn);
                _source = soundInSource.ToSampleSource().AppendSource(x => new GainSource(x), out _volume);

                // create a spectrum provider which provides fft data based on some input
                _spectrumProvider = new BasicSpectrumProvider(_source.WaveFormat.Channels, _source.WaveFormat.SampleRate,
                                                              FftSize);

                // the SingleBlockNotificationStream is used to intercept the played samples
                var notificationSource = new SingleBlockNotificationStream(_source);
                // pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
                notificationSource.SingleBlockRead += (s, a) => _spectrumProvider.Add(a.Left, a.Right);

                var waveSource = notificationSource.ToWaveSource(16);
                // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
                var buffer = new byte[waveSource.WaveFormat.BytesPerSecond / 2];
                soundInSource.DataAvailable += (s, aEvent) =>
                {
                    while (waveSource.Read(buffer, 0, buffer.Length) > 0)
                    {
                    }
                };

                _lineSpectrum   = null;
                _singleSpectrum = new SingleSpectrum(FftSize, _spectrumProvider);
                _mayStop        = false;

                _disableTimer.Start();
                _volumeTimer.Start();
                _soundIn.Start();

                Running = true;
            }
            catch (Exception e)
            {
                Logger.Warn(e, "Failed to start WASAPI audio capture");
            }
            _starting = false;
        }
Example #30
        public void Record(string filename)
        {
            if (string.IsNullOrWhiteSpace(filename))
            {
                return;
            }

            cachedPosition = TimeSpan.Zero;
            position       = TimeSpan.Zero;
            sampleLength   = 0;
            recordedData   = new List<float>();

            if (InputDevice == null)
            {
                return;
            }

            if (recordingState == RecordingState.Recording)
            {
                return;
            }

            recordingState = RecordingState.Recording;

            if (inputDevice.Type == DeviceType.Capture)
            {
                _capture = new WasapiCapture();
            }
            else
            {
                _capture = new WasapiLoopbackCapture();
            }

            _capture.Device = inputDevice.ActualDevice;
            _capture.Initialize();

            _soundInSource = new SoundInSource(_capture)
            {
                FillWithZeros = false
            };
            _soundInSource.DataAvailable += _soundInSource_DataAvailable;

            _waveSource = _soundInSource
                          .ChangeSampleRate(SampleRate)
                          .ToSampleSource()
                          .ToWaveSource(BitResolution)
                          .ToMono();

            spectrumProvider = new BasicSpectrumProvider(_waveSource.WaveFormat.Channels,
                                                         _waveSource.WaveFormat.SampleRate,
                                                         CSCore.DSP.FftSize.Fft4096);

            _waveWriter = new WaveWriter(filename, _waveSource.WaveFormat);

            //the SingleBlockNotificationStream is used to intercept the played samples
            _notificationSource = new SingleBlockNotificationStream(_waveSource.ToSampleSource());
            //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
            _notificationSource.SingleBlockRead += _notificationSource_SingleBlockRead;
            _waveSource = _notificationSource.ToWaveSource(16);

            RaiseSourceEvent(SourceEventType.Loaded);
            _capture.Start();
            RaiseSourcePropertyChangedEvent(SourceProperty.RecordingState, _capture.RecordingState);
        }