Example #1
        private void OpenDefault()
        {
            Stop();

            //open the default device
            _soundIn = new WasapiLoopbackCapture();
            //Our loopback capture opens the default render device by default so the following is not needed
            //_soundIn.Device = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);

            SetupSampleSource(source);

            // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };


            //start capturing
            _soundIn.Start();

            timer.Start();
        }
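Note: several of these examples call a SetupSampleSource helper that is never shown. A minimal sketch in the spirit of the CSCore WinformsVisualization sample they derive from (the _lineSpectrum and _source field names are assumptions) might look like this:

        private void SetupSampleSource(ISampleSource aSampleSource)
        {
            const FftSize fftSize = FftSize.Fft4096;
            //the spectrum provider performs the FFT on the samples we feed it below
            var spectrumProvider = new BasicSpectrumProvider(aSampleSource.WaveFormat.Channels,
                                                             aSampleSource.WaveFormat.SampleRate, fftSize);

            _lineSpectrum = new LineSpectrum(fftSize)
            {
                SpectrumProvider = spectrumProvider,
                UseAverage       = true,
                BarCount         = 50,
                BarSpacing       = 2,
                IsXLogScale      = true,
                ScalingStrategy  = ScalingStrategy.Sqrt
            };

            //intercept every sample that flows through and feed it to the provider
            var notificationSource = new SingleBlockNotificationStream(aSampleSource);
            notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

            //_source is the stream the DataAvailable handler above keeps draining
            _source = notificationSource.ToWaveSource(16);
        }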
Example #2
        public Visualization()
        {
            new Thread(() =>
            {
                Stop();

                //open the default device
                _soundIn = new WasapiLoopbackCapture();
                _soundIn.Initialize();

                var soundInSource = new SoundInSource(_soundIn);

                SetupSampleSource(soundInSource.ToSampleSource());

                // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
                byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
                soundInSource.DataAvailable += (s, aEvent) =>
                {
                    int read;
                    while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        ;
                    }
                };

                _soundIn.Start(); //start capturing

                _Timer.Elapsed += new ElapsedEventHandler(GenerateEvent);
                _Timer.Start();
            }).Start();
        }
Example #3
        /// <summary>
        /// Begin the audio input
        /// </summary>
        public static void InitAudioSource(MMDevice device)
        {
            Stop();

            //open a loopback capture; the selected render device is assigned below
            m_SoundIn = new WasapiLoopbackCapture();

            m_SoundIn.Device = device;
            m_SoundIn.Initialize();

            var           soundInSource = new SoundInSource(m_SoundIn);
            ISampleSource source        = soundInSource.ToSampleSource();

            SetupSampleSource(source);

            byte[] buffer = new byte[m_Source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = m_Source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };

            m_SoundIn.Start();

            MainWindow.StartTimer();
        }
Example #4
        public FFTDataProvider(FftSize fftSize)
        {
            this.fftSize = fftSize;
            _soundIn     = new WasapiLoopbackCapture();
            _soundIn.Initialize();
            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource();

            fftProvider  = new FftProvider(source.WaveFormat.Channels, fftSize);
            fftProvider2 = new FftProvider(source.WaveFormat.Channels, fftSize);
            var notificationSource = new SingleBlockNotificationStream(source);

            SamplesRate = source.WaveFormat.SampleRate;
            //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
            notificationSource.SingleBlockRead += addToFFTs;
            var _source = notificationSource.ToWaveSource(16);

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 8];// 1/8 seconds
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };
            _readStarted = false;
        }
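The addToFFTs handler is not shown here. Given the two FftProvider fields, a plausible sketch (an assumption, not the original code) is:

        private void addToFFTs(object sender, SingleBlockReadEventArgs e)
        {
            //feed each intercepted sample pair into both FFT providers
            fftProvider.Add(e.Left, e.Right);
            fftProvider2.Add(e.Left, e.Right);
        }

A consumer would then pull spectra out with fftProvider.GetFftData(...) once enough samples have been added.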
Example #5
    void Start()
    {
        fftData = new float[fftSize];

        persistentSamples = new FixedQueue <float> [PersSampleUpperIndex - PersSampleLowerIndex];
        smoothedSamples   = new float[persistentSamples.Length];
        for (int i = 0; i < persistentSamples.Length; i++)
        {
            persistentSamples[i] = new FixedQueue <float>(PersistenSampleLength);
        }

        line        = GetComponent <LineRenderer>();
        leftChannel = new float[TotalSamples];

        capture = new WasapiLoopbackCapture();
        capture.Initialize();
        var soundInSource = new SoundInSource(capture);
        var source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out pitchShifter);

        fft1 = new FftTransform(source.WaveFormat.Channels, fftSize);
        fft2 = new FftProvider(source.WaveFormat.Channels, FftSize.Fft2048);

        stream = new SingleBlockNotificationStream(pitchShifter);
        stream.SingleBlockRead += SingleBlockRead;

        waveSource = stream.ToWaveSource(16);
        buffer     = new byte[waveSource.WaveFormat.BytesPerSecond / 2];

        soundInSource.DataAvailable += DataAvailable;
        //a single subscription is enough; also hooking capture.DataAvailable here
        //would invoke DataAvailable twice for every captured buffer
        capture.Start();
    }
Example #6
            public SoundCapture()
            {
                // This uses the wasapi api to get any sound data played by the computer
                capture = new WasapiLoopbackCapture();

                capture.Initialize();

                // Get our capture as a source
                IWaveSource source = new SoundInSource(capture);


                // From https://github.com/filoe/cscore/blob/master/Samples/WinformsVisualization/Form1.cs

                // This is the typical size, you can change this for higher detail as needed
                fftSize = FftSize.Fft4096;

                // Actual fft data
                fftBuffer = new float[(int)fftSize];


                // Tells us when data is available to send to our spectrum
                var notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());

                notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;

                // We use this to request data so it actually flows through (figuring this out took forever...)
                finalSource = notificationSource.ToWaveSource();

                capture.DataAvailable += Capture_DataAvailable;
                capture.Start();
            }
Example #7
        private void StartCapture()
        {
            if (SelectedDevice == null)
            {
                return;
            }

            if (CaptureMode == "Capture")
            {
                _soundIn = new WasapiCapture();
            }
            else
            {
                _soundIn = new WasapiLoopbackCapture();
            }

            _soundIn.Device = SelectedDevice;
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            _finalSource = singleBlockNotificationStream.ToWaveSource();
            _writer      = new WaveWriter("tmp.wav", _finalSource.WaveFormat);

            byte[] buffer = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = _finalSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    _writer.Write(buffer, 0, read);
                }
            };

            //singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead; // visualization

            _soundIn.Start();
        }
Example #8
        private void button_start_Click(object sender, EventArgs e)
        {
            wavein        = new WasapiCapture(false, AudioClientShareMode.Exclusive, 5);
            wavein.Device = inputDevices[comboBox_mic.SelectedIndex];
            wavein.Initialize();
            wavein.Start();

            source = new SoundInSource(wavein)
            {
                FillWithZeros = true
            };
            //add my special effects in the chain
            efxProcs             = new EfxProcs(source.ToSampleSource().ToMono());
            efxProcs.gain        = linearGain; //keep track of this changing value
            efxProcs.pitchFactor = pitchShift; //keep track of pitch

            waveout        = new WasapiOut(false, AudioClientShareMode.Exclusive, 5);
            waveout.Device = outputDevices[comboBox_speaker.SelectedIndex];
            waveout.Initialize(efxProcs.ToWaveSource()); //source.ToSampleSource().ToWaveSource());//
            waveout.Play();
            //CSCore.Streams.SampleConverter.SampleToIeeeFloat32 sampleToIeee = new CSCore.Streams.SampleConverter.SampleToIeeeFloat32(source.ToSampleSource());
            timer1.Enabled = true;
        }
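EfxProcs above is a custom effects stage, not a CSCore class. As a rough illustration of the pattern only (gain only; the real class also does pitch shifting), an effect can subclass CSCore's SampleAggregatorBase and transform samples inside Read:

        //hypothetical, simplified stand-in for EfxProcs: applies a linear gain in-place
        class GainStage : SampleAggregatorBase
        {
            public float gain = 1.0f;

            public GainStage(ISampleSource source) : base(source) { }

            public override int Read(float[] buffer, int offset, int count)
            {
                int read = base.Read(buffer, offset, count);
                for (int i = offset; i < offset + read; i++)
                {
                    buffer[i] *= gain;
                }
                return read;
            }
        }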
Example #9
        // Helper for State.Looking
        void StartCapture()
        {
            Debug.Assert(m_State == State.Looking);
            Debug.Assert(m_AudioCapture != null);

            // TODO: This starts as a WaveSource (raw bytes), converts to floats
            // so we can notify once for each sample.
            // The SingleBlockNotificationStream is very garbagey; we should use our own
            // wrapper that grabs all the samples read and pushes them into m_HotValues
            // en masse instead of one-at-a-time.
            var soundInSource = new SoundInSource(m_AudioCapture);
            var sampleSource  = soundInSource.ToSampleSource();
            var singleBlockNotificationStream = new SingleBlockNotificationStream(sampleSource);

            m_FinalSource = singleBlockNotificationStream;

            // Consume and discard any samples when they come in. We do this for
            // its side effects (firing the SingleBlockNotificationStream event).
            // buffer is closed-over by the lambda.
            float[] buffer = new float[m_FinalSource.WaveFormat.BytesPerSecond / 4];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                do
                {
                    read = m_FinalSource.Read(buffer, 0, buffer.Length);
                } while (read > 0);
            };

            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;
            m_AudioCapture.Start();
        }
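The TODO above asks for a bulk wrapper to replace the per-sample SingleBlockNotificationStream. One possible shape, assuming CSCore's SampleAggregatorBase as the base class: override Read and raise a single callback per block instead of one event per sample.

        //hypothetical bulk replacement for SingleBlockNotificationStream
        class BulkNotificationSource : SampleAggregatorBase
        {
            public event Action<float[], int, int> BlockRead; //buffer, offset, count

            public BulkNotificationSource(ISampleSource source) : base(source) { }

            public override int Read(float[] buffer, int offset, int count)
            {
                int read = base.Read(buffer, offset, count);
                if (read > 0)
                {
                    BlockRead?.Invoke(buffer, offset, read); //one callback per block read
                }
                return read;
            }
        }

A handler could then append the whole block to m_HotValues at once.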
Example #10
        /// <summary>
        /// Initializes the visualizer and audio capture.
        /// </summary>
        protected override void Initialize()
        {
            IsMouseVisible = true;

            graphics.HardwareModeSwitch = false;

            graphics.PreferredBackBufferWidth = ViewportWidth;
            graphics.PreferredBackBufferHeight = ViewportHeight;
            //graphics.IsFullScreen = true;
            graphics.ApplyChanges();

            viewportAdapter = new BoxingViewportAdapter(Window, GraphicsDevice, ViewportWidth, ViewportHeight);
            Camera = new Camera2D(viewportAdapter);

            soundIn = new WasapiLoopbackCapture();
            //soundIn = new WasapiCapture();
            soundIn.Initialize();

            SoundInSource inSource = new SoundInSource(soundIn);
            ISampleSource sampleSource = inSource.ToSampleSource();

            SetupSampleSource(sampleSource);

            byte[] buffer = new byte[source.WaveFormat.BytesPerSecond / 2];

            inSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = source.Read(buffer, 0, buffer.Length)) > 0) ;
            };

            soundIn.Start();

            base.Initialize();
        }
Example #11
        public void InitialiseAudioProgram()
        {
            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource();

            //derive the channel count and sample rate from the actual capture format
            var spectrumProvider = new SpectrumProvider(source.WaveFormat.Channels, source.WaveFormat.SampleRate, FftSize.Fft4096);

            _spectrum = new LineSpectrum(spectrumProvider, _barCount);
            var notificationSource = new SingleBlockNotificationStream(source);

            notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

            _source = notificationSource.ToWaveSource(16);

            // Read from the source otherwise SingleBlockRead is never called
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (src, evt) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };

            _soundIn.Start();

            for (int i = 0; i < MatrixCount; i++)
            {
                _Programs[i] = i == 0 ? AudioSequence().GetEnumerator() : null;
            }
        }
Example #12
        private void CreateLoopback()
        {
            try {
                loopback.Initialize();
            } catch (Exception e) {
                Debug.LogException(e);

                return;
            }

            soundIn          = new SoundInSource(loopback);
            spectrumProvider = new BasicSpectrumProvider(soundIn.WaveFormat.Channels, soundIn.WaveFormat.SampleRate, FftSize.Fft4096);
            spectrum         = new LineSpectrum(FftSize.Fft4096)
            {
                SpectrumProvider = spectrumProvider,
                BarCount         = 512,
                UseAverage       = true,
                IsXLogScale      = true,
            };

            loopback.Start();

            blockNotifyStream = new SingleBlockNotificationStream(soundIn.ToSampleSource());
            realtime          = blockNotifyStream.ToWaveSource();

            buffer = new float[realtime.WaveFormat.BytesPerSecond / sizeof(float) / 2];

            soundIn.DataAvailable += AudioDataAvailable;

            blockNotifyStream.SingleBlockRead += SingleBlockRead;
        }
Example #13
        //most of this code is adapted from the CSCore GitHub example
        public void Initialize(FFTSize _size = FFTSize._4096)
        {
            size     = _size;
            _soundIn = new WasapiLoopbackCapture();

            _soundIn.Initialize();
            var soundInSource = new SoundInSource(_soundIn);

            var source = soundInSource.ToSampleSource();

            _fft = new FftProvider(source.WaveFormat.Channels, (FftSize)size);

            var n = new SingleBlockNotificationStream(source);

            n.SingleBlockRead += (s, a) => _fft.Add(a.Left, a.Right);

            _source = n.ToWaveSource(16);
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };
            _soundIn.Start();
        }
Example #14
        static void Main(string[] args)

        {
            MMDevice dev = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            capture        = new WasapiLoopbackCapture();
            capture.Device = dev;
            capture.Initialize();

            SoundInSource soundInSource = new SoundInSource(capture);

            nStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
            final   = nStream.ToWaveSource();
            nStream.SingleBlockRead     += NStream_SingleBlockRead;
            soundInSource.DataAvailable += encode;
            trashBuf = new byte[final.WaveFormat.BytesPerSecond / 2];

            Console.WriteLine($"sample rate: {capture.WaveFormat.SampleRate}");
            Console.WriteLine($"bits per sample: {capture.WaveFormat.BitsPerSample}");
            Console.WriteLine($"channels: {capture.WaveFormat.Channels}");
            Console.WriteLine($"bytes per sample: {capture.WaveFormat.BytesPerSample}");
            Console.WriteLine($"bytes per second: {capture.WaveFormat.BytesPerSecond}");
            Console.WriteLine($"AudioEncoding: {capture.WaveFormat.WaveFormatTag}");


            EncodingContext context = FrameEncoder.GetDefaultsContext();

            context.Channels        = 6;
            context.SampleRate      = capture.WaveFormat.SampleRate;
            context.AudioCodingMode = AudioCodingMode.Front3Rear2;
            context.HasLfe          = true;
            context.SampleFormat    = A52SampleFormat.Float;
            enc = new FrameEncoderFloat(ref context);

            //_writer = new WaveWriter("test.ac3", final.WaveFormat);


            capture.Start();

            wBuffSrc = new WriteableBufferingSource(new WaveFormat(capture.WaveFormat.SampleRate, capture.WaveFormat.BitsPerSample, capture.WaveFormat.Channels, AudioEncoding.WAVE_FORMAT_DOLBY_AC3_SPDIF), (int)capture.WaveFormat.MillisecondsToBytes(20));

            w = new WasapiOut2(false, AudioClientShareMode.Shared, 20);

            w.Device = MMDeviceEnumerator.EnumerateDevices(DataFlow.Render, DeviceState.Active).Where(x => x.FriendlyName.Contains("Digital")).Single();
            AudioClient a = AudioClient.FromMMDevice(w.Device);

            w.Initialize(wBuffSrc);
            w.Play();


            Task.Run(async () => await encoderThread());
            //encodeSinus();

            Console.ReadLine();

            System.Environment.Exit(0);
        }
Example #15
        private void Form1_Load(object sender, EventArgs e)
        {
            Stop();
            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);

            SetupSampleSource(source);

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };

            _soundIn.Start();
            try
            {
                MSI = new Lighting(Allowed);
            }
            catch (MSIRGB.Lighting.Exception ex)
            {
                if (ex.GetErrorCode() == Lighting.ErrorCode.DriverLoadFailed)
                {
                    MessageBox.Show("Please run the program as administrator.");
                    Close();
                }
                else if (ex.GetErrorCode() == Lighting.ErrorCode.MotherboardModelNotSupported)
                {
                    if (MessageBox.Show("Your motherboard is not supported, but it should still work. The application will run at your own risk.", "MSI Magic Light", MessageBoxButtons.OKCancel) == DialogResult.OK)
                    {
                        Allowed = true;
                    }
                    else
                    {
                        Close();
                    }
                }
                else if (ex.GetErrorCode() == Lighting.ErrorCode.MotherboardVendorNotSupported)
                {
                    MessageBox.Show("Your motherboard is not supported at all. We are sorry :(");
                    this.Close();
                }
                else
                {
                    MessageBox.Show("Unknown error. Please report it on GitHub.");
                    this.Close();
                }
            }
        }
Example #16
        private ISampleSource CreateSampleSource(WasapiCapture wasapiCapture)
        {
            var source = new SoundInSource(wasapiCapture)
            {
                FillWithZeros = false
            };

            source.DataAvailable += DataAvailableHandler;
            return(source.ToSampleSource());
        }
Example #17
        /// <summary>
        /// Initializes a new instance of the recorder class.
        /// </summary>
        /// <param name="captureDevice"></param>
        /// <param name="captureMode"></param>
        public Recorder(MMDevice captureDevice, DataFlow captureMode)
        {
            //capture an input device for DataFlow.Capture, otherwise loopback-capture the render device
            var wasapiCapture = captureMode == DataFlow.Capture ? new WasapiCapture() : new WasapiLoopbackCapture();

            wasapiCapture.Device = captureDevice;
            wasapiCapture.Initialize();

            _soundInSource     = new SoundInSource(wasapiCapture);
            NotificationStream = new SingleBlockNotificationStream(_soundInSource.ToSampleSource());
            _waveStream        = NotificationStream.ToWaveSource();
        }
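Note that this constructor only wires up the chain; nothing ever starts the capture or drains _waveStream, so NotificationStream will never fire. A hedged sketch of the missing start/stop plumbing (the _writer field and the method shapes are assumptions):

        public void Start(string fileName)
        {
            _writer = new WaveWriter(fileName, _waveStream.WaveFormat);
            byte[] buffer = new byte[_waveStream.WaveFormat.BytesPerSecond / 2];
            _soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = _waveStream.Read(buffer, 0, buffer.Length)) > 0)
                {
                    _writer.Write(buffer, 0, read);
                }
            };
            _soundInSource.SoundIn.Start();
        }

        public void Stop()
        {
            _soundInSource.SoundIn.Stop();
            _writer?.Dispose();
        }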
Example #18
    void Awake()
    {
        barData = new float[numBars];
        // This uses the wasapi api to get any sound data played by the computer
        switch (audioType)
        {
        case AudioSourceType.Microphone: capture = new WasapiCapture();
            break;

        case AudioSourceType.Speakers: capture = new WasapiLoopbackCapture();
            break;
        }

        capture.Initialize();

        // Get our capture as a source
        IWaveSource source = new SoundInSource(capture);


        // From https://github.com/filoe/cscore/blob/master/Samples/WinformsVisualization/Form1.cs

        // This is the typical size, you can change this for higher detail as needed
        fftSize = FftSize.Fft4096;

        // Actual fft data
        fftBuffer = new float[(int)fftSize];

        // These are the actual classes that give you spectrum data
        // The specific vars of lineSpectrum are changed below in the editor so most of these aren't that important here
        spectrumProvider = new BasicSpectrumProvider(capture.WaveFormat.Channels,
                                                     capture.WaveFormat.SampleRate, fftSize);

        lineSpectrum = new LineSpectrum(fftSize)
        {
            SpectrumProvider = spectrumProvider,
            UseAverage       = isAverage,
            BarCount         = numBars,
            BarSpacing       = 2,
            IsXLogScale      = false,
            ScalingStrategy  = ScalingStrategy.Linear
        };

        // Tells us when data is available to send to our spectrum
        var notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());

        notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;

        // We use this to request data so it actually flows through (figuring this out took forever...)
        finalSource = notificationSource.ToWaveSource();

        capture.DataAvailable += Capture_DataAvailable;
        capture.Start();
    }
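Capture_DataAvailable and NotificationSource_SingleBlockRead are referenced above but not shown. Based on the drain-and-feed pattern used throughout these examples, they plausibly look like this (a sketch, not the original code):

    private byte[] drainBuffer;

    private void Capture_DataAvailable(object sender, DataAvailableEventArgs e)
    {
        //drain finalSource so data actually flows and SingleBlockRead fires
        if (drainBuffer == null)
        {
            drainBuffer = new byte[finalSource.WaveFormat.BytesPerSecond / 2];
        }
        while (finalSource.Read(drainBuffer, 0, drainBuffer.Length) > 0)
        {
        }
    }

    private void NotificationSource_SingleBlockRead(object sender, SingleBlockReadEventArgs e)
    {
        //feed each intercepted sample pair to the spectrum provider
        spectrumProvider.Add(e.Left, e.Right);
    }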
Example #19
    // Start is called before the first frame update
    void Start()
    {
        loopbackCapture = new WasapiLoopbackCapture();
        loopbackCapture.Initialize();

        soundInSource = new SoundInSource(loopbackCapture);

        fftBuffer = new float[(int)CFftSize];

        basicSpectrumProvider = new BasicSpectrumProvider(soundInSource.WaveFormat.Channels,
                                                          soundInSource.WaveFormat.SampleRate, CFftSize);

        lineSpectrum = new LineSpectrum(CFftSize)
        {
            SpectrumProvider = basicSpectrumProvider,
            BarCount         = numBars,
            UseAverage       = true,
            IsXLogScale      = false,
            ScalingStrategy  = ScalingStrategy.Linear
        };

        var notificationSource = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

        notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;

        finalSource = notificationSource.ToWaveSource();

        loopbackCapture.DataAvailable += Capture_DataAvailable;
        loopbackCapture.Start();

        //singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
        //realTimeSource = singleBlockNotificationStream.ToWaveSource();

        //byte[] buffer = new byte[realTimeSource.WaveFormat.BytesPerSecond / 2];

        //soundInSource.DataAvailable += (s, ea) =>
        //{
        //    while (realTimeSource.Read(buffer, 0, buffer.Length) > 0)
        //    {
        //        float[] spectrumData = lineSpectrum.GetSpectrumData(10);
        //        receiveAudio(spectrumData);
        //        Debug.Log(receiveAudio);

        //        if (spectrumData != null && receiveAudio != null)
        //        {
        //            receiveAudio(spectrumData);
        //            Debug.Log(receiveAudio);
        //        }
        //    }
        //};

        //singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStream_SingleBlockRead;
    }
Example #20
File: AudioManager.cs Project: Slion/CIC
        /// <summary>
        ///
        /// </summary>
        private void StartAudioVisualization()
        {
            //Open the default device
            iSoundIn = new WasapiLoopbackCapture();
            //Our loopback capture opens the default render device by default so the following is not needed
            //iSoundIn.Device = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            iSoundIn.Initialize();

            SoundInSource soundInSource = new SoundInSource(iSoundIn);
            ISampleSource source        = soundInSource.ToSampleSource();

            const FftSize fftSize = FftSize.Fft2048;
            //create a spectrum provider which provides fft data based on some input
            BasicSpectrumProvider spectrumProvider = new BasicSpectrumProvider(source.WaveFormat.Channels, source.WaveFormat.SampleRate, fftSize);

            //LineSpectrum and Voiceprint3DSpectrum are used for rendering some fft data
            //in order to get some fft data, set the previously created spectrumprovider
            iLineSpectrum = new LineSpectrum(fftSize)
            {
                SpectrumProvider = spectrumProvider,
                UseAverage       = false, // Does not matter since we hacked it
                BarCount         = 16,
                BarSpacing       = 1,
                IsXLogScale      = true,                   // Does not matter since we hacked it
                ScalingStrategy  = ScalingStrategy.Decibel // Does not matter since we hacked it
            };


            //the SingleBlockNotificationStream is used to intercept the played samples
            var notificationSource = new SingleBlockNotificationStream(source);

            //pass the intercepted samples as input data to the spectrumprovider (which will calculate a fft based on them)
            notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

            iWaveSource = notificationSource.ToWaveSource(16);


            // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
            byte[] buffer = new byte[iWaveSource.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = iWaveSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };


            //Start recording
            iSoundIn.Start();
        }
Example #21
        private bool StartFullDuplex()
        {
            try
            {
                //Init sound capture device with a latency of 5 ms.
                //ATTENTION: WasapiCapture works on Vista and higher Windows versions.
                mSoundIn        = new WasapiCapture(false, AudioClientShareMode.Exclusive, 5);
                mSoundIn.Device = mInputDevices[cmbInput.SelectedIndex];
                mSoundIn.Initialize();
                mSoundIn.Start();

                var source = new SoundInSource(mSoundIn)
                {
                    FillWithZeros = true
                };

                //Init DSP for pitch shifting
                mDsp        = new SampleDSP(source.ToSampleSource().ToMono());
                mDsp.GainDB = trackGain.Value;
                SetPitchShiftValue();

                //Init mixer
                mMixer = new SimpleMixer(1, 44100) //mono, 44.1 kHz
                {
                    FillWithZeros = false,
                    DivideResult  = true //set to true to avoid tick sounds caused by samples exceeding -1 and 1
                };

                //Add our sound source to the mixer
                mMixer.AddSource(mDsp.ChangeSampleRate(mMixer.WaveFormat.SampleRate));

                //Init sound play device with a latency of 5 ms.
                mSoundOut        = new WasapiOut(false, AudioClientShareMode.Exclusive, 5);
                mSoundOut.Device = mOutputDevices[cmbOutput.SelectedIndex];
                mSoundOut.Initialize(mMixer.ToWaveSource(16));

                //Start rolling!
                mSoundOut.Play();
                return(true);
            }
            catch (Exception ex)
            {
                string msg = "Error in StartFullDuplex: \r\n" + ex.Message;
                MessageBox.Show(msg);
                Debug.WriteLine(msg);
            }
            return(false);
        }
Example #22
    void StartListen()
    {
        loopbackCapture = new WasapiLoopbackCapture();
        loopbackCapture.Initialize();

        soundInSource = new SoundInSource(loopbackCapture);

        loopbackCapture.Start();

        singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
        realTimeSource = singleBlockNotificationStream.ToWaveSource();

        soundInSource.DataAvailable += DataAvailable;

        singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStream_SingleBlockRead;
    }
Example #23
        public bool StartCapture()
        {
            if (mmdevice == null)
            {
                return(false);
            }

            Console.WriteLine("Preparing to capture!");
            if (mmdevice.DataFlow == DataFlow.Capture)
            {
                capture = new WasapiCapture();
            }
            else if (mmdevice.DataFlow == DataFlow.Render)
            {
                capture = new WasapiLoopbackCapture();
            }

            if (capture == null)
            {
                Console.WriteLine("Unable to open a capture device!");
                return(false);
            }

            Console.WriteLine("Selected Device: " + mmdevice.FriendlyName);

            capture.Device = mmdevice;
            capture.Initialize();

            var soundInSource = new SoundInSource(capture);

            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            waveSource = singleBlockNotificationStream.ToWaveSource();
            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;

            byte[] buffer = new byte[waveSource.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += delegate {
                int read;
                while ((read = waveSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                }
            };


            capture.Start();
            return(true);
        }
Example #24
        public void FromDefaultDevice()
        {
            Stop();

            //open the default device
            _soundIn = new WasapiLoopbackCapture();
            //Our loopback capture opens the default render device by default, so this explicit assignment is redundant but harmless
            _soundIn.Device = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            _soundIn.Initialize();

            soundInSource = new SoundInSource(_soundIn);
            ISampleSource source = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);

            SetupSampleSource(source);

            // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
            buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += SoundInSource_DataAvailable;
        }
Example #25
        private void Start(MMDevice device)
        {
            Stop();
            this.DeviceId = device.DeviceID;

            try
            {
                _soundIn        = new WasapiLoopbackCapture();
                _soundIn.Device = device;
                _soundIn.Initialize();
            }
            catch
            {
                _soundIn        = new WasapiLoopbackCapture(100, new WaveFormat(48000, 24, 2));
                _soundIn.Device = device;
                _soundIn.Initialize();
            }
            //(the capture device was assigned explicitly above, so no default-endpoint selection is needed here)


            var soundInSource = new SoundInSource(_soundIn);
            var source        = soundInSource.ToSampleSource().AppendSource(x => new BiQuadFilterSource(x));//.AppendSource(x => new PitchShifter(x), out _);

            //source.Filter = new LowpassFilter(source.WaveFormat.SampleRate, 4000);
            //source.Filter = new HighpassFilter(source.WaveFormat.SampleRate, 1000);
            SetupSampleSource(source);
            // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };


            //start capturing
            _soundIn.Start();
        }
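BiQuadFilterSource is a small sample wrapper rather than a CSCore library class. A sketch consistent with the commented-out LowpassFilter/HighpassFilter lines above:

        public class BiQuadFilterSource : SampleAggregatorBase
        {
            private readonly object _lockObject = new object();
            private BiQuad _filter;

            public BiQuadFilterSource(ISampleSource source) : base(source) { }

            //e.g. new LowpassFilter(sampleRate, 4000), as in the commented lines above
            public BiQuad Filter
            {
                get { return _filter; }
                set { lock (_lockObject) _filter = value; }
            }

            public override int Read(float[] buffer, int offset, int count)
            {
                int read = base.Read(buffer, offset, count);
                lock (_lockObject)
                {
                    if (Filter != null)
                    {
                        for (int i = offset; i < offset + read; i++)
                        {
                            buffer[i] = Filter.Process(buffer[i]);
                        }
                    }
                }
                return read;
            }
        }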
Example #26
        public SoundEffect(SoundEffectConfig config) : base(config)
        {
            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            var sampleSource  = soundInSource.ToSampleSource();

            const FftSize fftSize = FftSize.Fft1024;

            _fftBuffer        = new float[(int)fftSize];
            _spectrumProvider = new SpectrumProvider(sampleSource.WaveFormat.Channels, sampleSource.WaveFormat.SampleRate, fftSize);

            var notificationSource = new DataNotificationSource(sampleSource);

            notificationSource.DataRead += (s, e) => _spectrumProvider.Add(e.Data, e.Data.Length);

            var waveSource = notificationSource.ToWaveSource(16);
            var buffer     = new byte[waveSource.WaveFormat.BytesPerSecond / 2];

            soundInSource.DataAvailable += (s, e) =>
            {
                while (waveSource.Read(buffer, 0, buffer.Length) > 0)
                {
                    ;
                }
            };

            _spectrum = new LedSpectrum(GenerateColor)
            {
                FftSize            = fftSize,
                SpectrumProvider   = _spectrumProvider,
                UseAverage         = Config.UseAverage,
                MinimumFrequency   = Config.MinimumFrequency,
                MaximumFrequency   = Config.MaximumFrequency,
                ScalingStrategy    = Config.ScalingStrategy,
                ScalingFactor      = Config.ScalingFactor,
                IsXLogScale        = false,
                SpectrumResolution = (int)fftSize
            };

            _spectrum.UpdateFrequencyMapping();
            _soundIn.Start();
        }
Example #27
        void StartCapture()
        {
            capture = new WasapiLoopbackCapture();
            capture.Initialize();
            IWaveSource source = new SoundInSource(capture);

            fftSize     = FftSize.Fft2048;
            fftBuffer   = new float[(int)fftSize];
            fftProvider = new FftProvider(1, fftSize);

            notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());
            notificationSource.SingleBlockRead += SingleBlockRead;
            finalSource = notificationSource.ToWaveSource();

            capture.DataAvailable += CaptureDataAvailable;
            capture.Start();

            initialized = true;
        }
Example #28
        private void VisualizerForm_Load(object sender, EventArgs e)
        {
            SoundIn = new WasapiLoopbackCapture();
            SoundIn.Initialize();

            var soundInSource = new SoundInSource(SoundIn);

            SetupSource(soundInSource.ToSampleSource());

            var buffer = new byte[Source.WaveFormat.BytesPerSecond / 2];

            soundInSource.DataAvailable += (o, args) =>
            {
                while (Source.Read(buffer, 0, buffer.Length) > 0)
                {
                }
            };

            SoundIn.Start();
        }
Example #29
        public void StartListen()
        {
            _loopbackCapture = new WasapiLoopbackCapture();
            _loopbackCapture.Initialize();

            _soundInSource = new SoundInSource(_loopbackCapture);

            _basicSpectrumProvider = new BasicSpectrumProvider(_soundInSource.WaveFormat.Channels, _soundInSource.WaveFormat.SampleRate, CFftSize);

            _lineSpectrum = new LineSpectrum(CFftSize)
            {
                SpectrumProvider = _basicSpectrumProvider,
                BarCount         = _spectrumSize,
                UseAverage       = true,
                IsXLogScale      = true,
                ScalingStrategy  = _scalingStrategy
            };

            _loopbackCapture.Start();

            _singleBlockNotificationStream = new SingleBlockNotificationStream(_soundInSource.ToSampleSource());
            _realtimeSource = _singleBlockNotificationStream.ToWaveSource();

            byte[] buffer = new byte[_realtimeSource.WaveFormat.BytesPerSecond / 2];

            _soundInSource.DataAvailable += (s, ea) =>
            {
                while (_realtimeSource.Read(buffer, 0, buffer.Length) > 0)
                {
                    float[] spectrumData = _lineSpectrum.GetSpectrumData(MaxAudioValue);

                    if (spectrumData != null && _receiveAudio != null)
                    {
                        _receiveAudio(spectrumData);
                    }
                }
            };

            _singleBlockNotificationStream.SingleBlockRead += singleBlockNotificationStream_SingleBlockRead;
        }
Example #30
        public ParticleMoveSystem(World world)
        {
            particleSet = world.GetEntities().With <Translation>().With <Velocity>().AsSet();
            capture     = new WasapiLoopbackCapture();
            capture.Initialize();
            var soundInSource = new SoundInSource(capture);
            var source        = soundInSource.ToSampleSource();

            fft = new FftProvider(source.WaveFormat.Channels, fftSize);

            var notificationSource = new SingleBlockNotificationStream(source);

            notificationSource.SingleBlockRead += SingleBlockRead;

            waveSource = notificationSource.ToWaveSource(16);
            buffer     = new byte[waveSource.WaveFormat.BytesPerSecond / 2];

            soundInSource.DataAvailable += DataAvailable;

            // capture.DataAvailable += (sender, args) => DataAvailable(sender, args);
            capture.Start();
        }