        public virtual void CanCaptureAudio()
        {
            const int runs = 10;
            int       i    = 0;

            using (var waitHandle = new AutoResetEvent(false))
            {
                //subscribe once, outside the loop, so each iteration does not stack another handler
                _soundIn.DataAvailable += (s, e) =>
                {
                    // ReSharper disable once AccessToDisposedClosure
                    waitHandle.Set();
                };

                for (; i < runs; i++)
                {

                    _soundIn.Initialize();
                    Assert.AreEqual(RecordingState.Stopped, _soundIn.RecordingState);

                    _soundIn.Start();
                    Assert.AreEqual(RecordingState.Recording, _soundIn.RecordingState);

                    if (!waitHandle.WaitOne(Debugger.IsAttached ? Timeout.Infinite : 2000))
                    {
                        Assert.Fail("Timeout");
                    }

                    _soundIn.Stop();
                    Assert.AreEqual(RecordingState.Stopped, _soundIn.RecordingState);
                }
            }

            Assert.AreEqual(runs, i);
        }
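The test assumes the fixture provides a `_soundIn` field; a minimal setup sketch follows (the MSTest `[TestInitialize]`/`[TestCleanup]` pairing and the `WasapiCapture` choice are assumptions, not part of the original):

        //fixture sketch (assumption): any ISoundIn implementation works here
        private ISoundIn _soundIn;

        [TestInitialize]
        public void SetUp()
        {
            _soundIn = new WasapiCapture();
        }

        [TestCleanup]
        public void TearDown()
        {
            _soundIn.Dispose();
        }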
Example #2
        static void SenderMain(string ipAddress, int tcpPort, int udpPort)
        {
            soundIn = new WasapiLoopbackCapture(30, new WaveFormat(44100, 16, 2));

            //important: always initialize the soundIn instance before creating the
            //SoundInSource. The SoundInSource needs the WaveFormat of the soundIn,
            //which gets determined by the soundIn.Initialize method.
            soundIn.Initialize();

            //wrap a sound source around the soundIn instance
            //in order to prevent playback interruptions, set FillWithZeros to true
            //otherwise, if the SoundIn does not provide enough data, the playback stops
            //note: "source" is not consumed in this example; the UdpSender below reads
            //directly from soundIn.DataAvailable
            IWaveSource source = new SoundInSource(soundIn)
            {
                FillWithZeros = true
            };

            var udpSend = new UdpSender(soundIn, ipAddress, tcpPort, udpPort);
            soundIn.DataAvailable += udpSend.AudioCaptureEvent;

            //start capturing data
            soundIn.Start();

            Console.WriteLine("Sending audio... Press any key to exit the program.");
            Console.ReadKey();
        }
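If explicit cleanup is wanted before SenderMain returns, a minimal sketch using standard CSCore calls could be placed before the closing brace (this teardown is an assumption; the original sample relies on process exit):

            //sketch: stop capturing and release the device before returning
            soundIn.Stop();
            soundIn.DataAvailable -= udpSend.AudioCaptureEvent;
            soundIn.Dispose();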
Example #3
        static void ReceiverMain(int tcpPort, int udpPort)
        {
            //UdpReceiver soundIn;
            soundIn = new UdpReceiver(tcpPort, udpPort);
            soundIn.Initialize();

            IWaveSource source = new SoundInSource(soundIn)
            {
                FillWithZeros = true
            };

            soundIn.Start();

            //create a soundOut instance to play the data
            soundOut = new WasapiOut();
            //initialize the soundOut with the "source",
            //which wraps the data received by the "soundIn" (UdpReceiver) instance
            soundOut.Initialize(source);

            //play
            soundOut.Play();

            /*
            System.Timers.Timer aTimer = new System.Timers.Timer();
            aTimer.Elapsed += new ElapsedEventHandler(OnTimedEvent);
            aTimer.Interval = 2000;
            aTimer.Enabled = true;
            */

            Console.WriteLine("Receiving audio... Press any key to exit the program.");
            Console.ReadKey();
        }
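Neither entry point is wired up on this page; a hypothetical Main that dispatches to the two methods based on command-line arguments could look like this (the argument layout is an assumption):

        //hypothetical entry point; the sample's real Main is not shown here
        static void Main(string[] args)
        {
            if (args.Length >= 4 && args[0] == "send")
            {
                SenderMain(args[1], int.Parse(args[2]), int.Parse(args[3]));
            }
            else if (args.Length >= 3 && args[0] == "receive")
            {
                ReceiverMain(int.Parse(args[1]), int.Parse(args[2]));
            }
            else
            {
                Console.WriteLine("usage: send <ip> <tcpPort> <udpPort> | receive <tcpPort> <udpPort>");
            }
        }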
Example #4
        public MP3Recorder(string filename)
        {
            if (File.Exists(filename))
            {
                File.Delete(filename);
            }
            wasapiCapture_ = new WasapiCapture();
            wasapiCapture_.Initialize();
            var wasapiCaptureSource = new SoundInSource(wasapiCapture_);

            //convert the captured audio to stereo and create an MP3 encoder for its format
            stereoSource_ = wasapiCaptureSource.ToStereo();
            writer_       = MediaFoundationEncoder.CreateMP3Encoder(stereoSource_.WaveFormat, filename);

            //whenever new data gets captured, read it from the stereo source and encode it
            byte[] buffer = new byte[stereoSource_.WaveFormat.BytesPerSecond];
            wasapiCaptureSource.DataAvailable += (s, e) =>
            {
                int read = stereoSource_.Read(buffer, 0, buffer.Length);
                writer_.Write(buffer, 0, read);
            };
            wasapiCapture_.Start();
        }
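The constructor never stops the capture or disposes the encoder; the MP3 file is only finalized once the MediaFoundationEncoder is disposed. A minimal stop sketch (the method name and ordering are assumptions, not part of the original):

        //hypothetical companion method; disposing writer_ finalizes the MP3 file
        public void Stop()
        {
            wasapiCapture_.Stop();
            wasapiCapture_.Dispose();
            writer_.Dispose();
            stereoSource_.Dispose();
        }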
Example #5
        private void Start()
        {
            if (_starting)
            {
                return;
            }

            Logger.Debug("Starting audio capture for device: {0}", Device?.FriendlyName ?? "default");
            _starting = true;

            try
            {
                Stop();

                if (Type == MmDeviceType.Input)
                {
                    _soundIn = Device != null
                        ? new WasapiCapture { Device = Device }
                        : new WasapiCapture();
                }
                else
                {
                    _soundIn = Device != null
                        ? new WasapiLoopbackCapture { Device = Device }
                        : new WasapiLoopbackCapture();
                }

                _soundIn.Initialize();

                var soundInSource = new SoundInSource(_soundIn);
                _source = soundInSource.ToSampleSource().AppendSource(x => new GainSource(x), out _volume);

                // create a spectrum provider which provides fft data based on some input
                _spectrumProvider = new BasicSpectrumProvider(_source.WaveFormat.Channels, _source.WaveFormat.SampleRate,
                                                              FftSize);

                // the SingleBlockNotificationStream is used to intercept the played samples
                var notificationSource = new SingleBlockNotificationStream(_source);
                // pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
                notificationSource.SingleBlockRead += (s, a) => _spectrumProvider.Add(a.Left, a.Right);

                var waveSource = notificationSource.ToWaveSource(16);
                // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
                var buffer = new byte[waveSource.WaveFormat.BytesPerSecond / 2];
                soundInSource.DataAvailable += (s, aEvent) =>
                {
                    while (waveSource.Read(buffer, 0, buffer.Length) > 0)
                    {
                    }
                };

                _lineSpectrum   = null;
                _singleSpectrum = new SingleSpectrum(FftSize, _spectrumProvider);
                _mayStop        = false;

                _disableTimer.Start();
                _volumeTimer.Start();
                _soundIn.Start();

                Running = true;
            }
            catch (Exception e)
            {
                Logger.Warn(e, "Failed to start WASAPI audio capture");
            }
            finally
            {
                //always clear the flag, even if starting failed with an exception
                _starting = false;
            }
        }
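Start() calls a Stop() that is not shown on this page; a plausible sketch of what such a counterpart might do with the same fields (an assumption, not the project's actual implementation):

        //sketch only: tear down what Start() created, in roughly reverse order
        private void Stop()
        {
            _disableTimer.Stop();
            _volumeTimer.Stop();

            _soundIn?.Stop();
            _soundIn?.Dispose();
            _soundIn = null;

            _source?.Dispose();
            _source = null;

            Running = false;
        }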