Example No. 1
        public void Load(Uri uri)
        {
            Dispose();
            _uri        = uri;
            _waveSource = CSCore.Codecs.CodecFactory.Instance.GetCodec(_uri)
                          .ToSampleSource()
                          .ToMono()
                          .ToWaveSource();

            spectrumProvider = new BasicSpectrumProvider(_waveSource.WaveFormat.Channels,
                                                         _waveSource.WaveFormat.SampleRate,
                                                         CSCore.DSP.FftSize.Fft4096);

            //The SingleBlockNotificationStream is used to intercept the played samples.
            var notificationSource = new SingleBlockNotificationStream(_waveSource.ToSampleSource());

            //Pass the intercepted samples as input data to the spectrum provider (which will calculate an FFT based on them).
            notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);
            _waveSource = notificationSource.ToWaveSource(16);
            // Load the sample source
            var ws = CSCore.Codecs.CodecFactory.Instance.GetCodec(_uri);

            _sampleSource = ws.ToSampleSource();
            RaiseSourceEvent(SourceEventType.Loaded);
            hasMedia = true;
            LoadSoundOut();
        }
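
The LoadSoundOut() helper called at the end of Load() is not part of the snippet. A minimal sketch of what such a helper might look like with CSCore, assuming a WASAPI playback chain and a hypothetical _soundOut field of type CSCore.SoundOut.ISoundOut:

        //Hypothetical sketch only: LoadSoundOut() is not shown in the example above.
        //It would typically bind a WASAPI output to the wave source built in Load();
        //playback then pulls through the notification stream, which feeds the spectrum provider.
        private void LoadSoundOut()
        {
            _soundOut = new CSCore.SoundOut.WasapiOut { Latency = 100 }; //_soundOut: assumed ISoundOut field
            _soundOut.Initialize(_waveSource);
            //a separate Play() call (not shown) would start playback via _soundOut.Play()
        }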
Example No. 2
        public void Record(string filename)
        {
            if (string.IsNullOrWhiteSpace(filename))
            {
                return;
            }

            cachedPosition = TimeSpan.Zero;
            position       = TimeSpan.Zero;
            sampleLength   = 0;
            recordedData   = new List<float>();

            if (InputDevice == null)
            {
                return;
            }

            if (recordingState == RecordingState.Recording)
            {
                return;
            }

            recordingState = RecordingState.Recording;

            if (inputDevice.Type == DeviceType.Capture)
            {
                _capture = new WasapiCapture();
            }
            else
            {
                _capture = new WasapiLoopbackCapture();
            }

            _capture.Device = inputDevice.ActualDevice;
            _capture.Initialize();

            _soundInSource = new SoundInSource(_capture)
            {
                FillWithZeros = false
            };
            _soundInSource.DataAvailable += _soundInSource_DataAvailable;

            _waveSource = _soundInSource
                          .ChangeSampleRate(SampleRate)
                          .ToSampleSource()
                          .ToWaveSource(BitResolution)
                          .ToMono();

            spectrumProvider = new BasicSpectrumProvider(_waveSource.WaveFormat.Channels,
                                                         _waveSource.WaveFormat.SampleRate,
                                                         CSCore.DSP.FftSize.Fft4096);

            _waveWriter = new WaveWriter(filename, _waveSource.WaveFormat);

            //The SingleBlockNotificationStream is used to intercept the recorded samples.
            _notificationSource = new SingleBlockNotificationStream(_waveSource.ToSampleSource());
            //Pass the intercepted samples as input data to the spectrum provider (which will calculate an FFT based on them).
            _notificationSource.SingleBlockRead += _notificationSource_SingleBlockRead;
            _waveSource = _notificationSource.ToWaveSource(16);

            RaiseSourceEvent(SourceEventType.Loaded);
            _capture.Start();
            RaiseSourcePropertyChangedEvent(SourceProperty.RecordingState, _capture.RecordingState);
        }
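
The _soundInSource_DataAvailable handler wired up above is not shown. A minimal sketch following the common CSCore recording pattern, assuming the _waveSource and _waveWriter fields created in Record():

        //Hypothetical sketch only: drains the converted _waveSource on each capture callback
        //and appends the raw bytes to the WaveWriter opened in Record().
        private void _soundInSource_DataAvailable(object sender, CSCore.SoundIn.DataAvailableEventArgs e)
        {
            byte[] buffer = new byte[_waveSource.WaveFormat.BytesPerSecond / 2];
            int read;

            //read as long as the converted source still provides data for this callback
            while ((read = _waveSource.Read(buffer, 0, buffer.Length)) > 0)
            {
                _waveWriter.Write(buffer, 0, read);
            }
        }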