        internal void StartListen()
        {
            // Initialize the WASAPI capture device and wrap it as a CSCore sound-in source.
            capture.Initialize();
            soundInSource         = new SoundInSource(capture);

            // FFT provider matching the capture format; C_FftSize is the FFT window length.
            basicSpectrumProvider = new BasicSpectrumProvider(soundInSource.WaveFormat.Channels, soundInSource.WaveFormat.SampleRate, C_FftSize);
            // Bar spectrum: averaged bins, logarithmic X axis, square-root amplitude scaling.
            lineSpectrum          = new LineSpectrum(C_FftSize, minFrequency, maxFrequency)
            {
                SpectrumProvider = basicSpectrumProvider,
                BarCount         = spectrumSize,
                UseAverage       = true,
                IsXLogScale      = true,
                ScalingStrategy  = EScalingStrategy.Sqrt
            };

            capture.Start();

            // Chain: capture -> sample source -> per-block notification stream -> wave source.
            ISampleSource sampleSource = soundInSource.ToSampleSource();

            singleBlockNotificationStream = new SingleBlockNotificationStream(sampleSource);
            realtimeSource = singleBlockNotificationStream.ToWaveSource();

            // Read buffer covering roughly 1/128 s of audio, so spectrum updates stay frequent.
            byte[] buffer = new byte[realtimeSource.WaveFormat.BytesPerSecond / 128];

            // Drain the chain whenever the capture device delivers data.
            soundInSource.DataAvailable += (s, ea) =>
            {
                while (realtimeSource.Read(buffer, 0, buffer.Length) > 0)
                {
                    var spectrumData = lineSpectrum.GetSpectrumData(C_MaxAudioValue);

                    if (spectrumData != null)
                    {
                        receiveAudio?.Invoke(spectrumData);
                    }
                }
            };

            // Subscribe to per-sample-block notifications (handler sketched below).
            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStream_SingleBlockRead;
        }
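
Both examples subscribe a SingleBlockNotificationStream_SingleBlockRead handler whose body is not shown. In the usual CSCore pattern this handler forwards each sample pair to the BasicSpectrumProvider, which buffers them until enough samples have accumulated for the next FFT. A minimal sketch, assuming the field names from the example above (the second example would use _basicSpectrumProvider instead):

        private void SingleBlockNotificationStream_SingleBlockRead(object sender, SingleBlockReadEventArgs e)
        {
            // Push the left/right samples of this block into the FFT provider.
            basicSpectrumProvider.Add(e.Left, e.Right);
        }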
Example No. 2
        public void StartListen()
        {
            // Choose the capture source: loopback (what the system plays) or the default communications microphone.
            switch (_captureType)
            {
            case WasapiCaptureType.Loopback:
                _wasapiCapture = new WasapiLoopbackCapture();
                break;

            case WasapiCaptureType.Microphone:
                MMDevice defaultMicrophone;
                using (var deviceEnumerator = new MMDeviceEnumerator())
                {
                    defaultMicrophone = deviceEnumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Communications);
                }
                _wasapiCapture        = new WasapiCapture();
                _wasapiCapture.Device = defaultMicrophone;
                break;

            default:
                throw new InvalidOperationException("Unhandled WasapiCaptureType");
            }

            _wasapiCapture.Initialize();

            _soundInSource = new SoundInSource(_wasapiCapture);

            _basicSpectrumProvider = new BasicSpectrumProvider(_soundInSource.WaveFormat.Channels, _soundInSource.WaveFormat.SampleRate, CFftSize);

            _lineSpectrum = new LineSpectrum(CFftSize, _minFrequency, _maxFrequency)
            {
                SpectrumProvider = _basicSpectrumProvider,
                BarCount         = _spectrumSize,
                UseAverage       = true,
                IsXLogScale      = true,
                ScalingStrategy  = _scalingStrategy
            };

            _wasapiCapture.Start();

            var sampleSource = _soundInSource.ToSampleSource();

            // Optionally insert biquad filters between the capture and the spectrum analysis.
            if (_filters != null && _filters.Length > 0)
            {
                foreach (var filter in _filters)
                {
                    // AppendSource wraps the current source; keep the typed reference to set its Filter.
                    var biQuadSource = sampleSource.AppendSource(x => new BiQuadFilterSource(x));
                    sampleSource = biQuadSource;
                    switch (filter.Type)
                    {
                    case WasapiAudioFilterType.LowPass:
                        biQuadSource.Filter = new LowpassFilter(_soundInSource.WaveFormat.SampleRate, filter.Frequency);
                        break;

                    case WasapiAudioFilterType.HighPass:
                        biQuadSource.Filter = new HighpassFilter(_soundInSource.WaveFormat.SampleRate, filter.Frequency);
                        break;

                    case WasapiAudioFilterType.BandPass:
                        biQuadSource.Filter = new BandpassFilter(_soundInSource.WaveFormat.SampleRate, filter.Frequency);
                        break;
                    }
                }
            }

            _singleBlockNotificationStream = new SingleBlockNotificationStream(sampleSource);
            _realtimeSource = _singleBlockNotificationStream.ToWaveSource();

            // Read buffer covering half a second of audio per pass.
            var buffer = new byte[_realtimeSource.WaveFormat.BytesPerSecond / 2];

            // Pump the chain on each capture callback.
            _soundInSource.DataAvailable += (s, ea) =>
            {
                while (_realtimeSource.Read(buffer, 0, buffer.Length) > 0)
                {
                    float[] spectrumData = _lineSpectrum.GetSpectrumData(MaxAudioValue);

                    if (spectrumData != null)
                    {
                        _receiveAudio?.Invoke(spectrumData);
                    }
                }
            };

            // Subscribe to per-sample-block notifications, as in the first example.
            _singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStream_SingleBlockRead;
        }
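
BiQuadFilterSource, used in the filter loop of Example No. 2, is not a stock CSCore type; only the LowpassFilter, HighpassFilter, and BandpassFilter classes it is given come from CSCore.DSP, and all of them derive from BiQuad. A plausible minimal implementation is a SampleAggregatorBase wrapper that runs every sample through the current filter. The class below is a sketch under that assumption, not necessarily the author's exact code:

        public class BiQuadFilterSource : SampleAggregatorBase
        {
            private readonly object _lockObject = new object();
            private BiQuad _filter;

            public BiQuadFilterSource(ISampleSource source) : base(source)
            {
            }

            // Swappable at runtime; the setter is what the switch statement above assigns to.
            public BiQuad Filter
            {
                get { return _filter; }
                set { lock (_lockObject) { _filter = value; } }
            }

            public override int Read(float[] buffer, int offset, int count)
            {
                int read = base.Read(buffer, offset, count);
                lock (_lockObject)
                {
                    if (_filter != null)
                    {
                        // Apply the biquad to each sample in place.
                        for (int i = offset; i < offset + read; i++)
                        {
                            buffer[i] = _filter.Process(buffer[i]);
                        }
                    }
                }
                return read;
            }
        }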