Example #1
 public void Start()
 {
     if (IsRecording)
     {
         _soundIn.Start();
     }
 }
Example #2
    /// <summary>
    /// Start Recording
    /// </summary>
    /// <param name="recordAudio">Record Audio</param>
    /// <returns>Successful</returns>
    public bool Start(bool recordAudio = false)
    {
        if (isRecording)
        {
            return false;
        }

        // Reset
        status  = "Pending";
        _frames = 0;

        // Create Temporary Directory
        CreateTemporaryPath();

        // Setup Audio Recording
        if (recordAudio)
        {
            this.recordAudio           = recordAudio;
            audioSource                = new WasapiLoopbackCapture();
            audioSource.DataAvailable += WriteAudio;
            try
            {
                audioSource.Initialize();
            }
            catch (COMException exception)
            {
                if (exception.Message.Contains("0x88890008") && audioSource.WaveFormat.Channels > 2)
                {
                    //this specific exception is most likely caused by "Headphone Virtualization" enabled in device control panel properties
                    var waveFormatTag = (audioSource.WaveFormat.WaveFormatTag == AudioEncoding.Extensible) ? AudioEncoding.IeeeFloat : audioSource.WaveFormat.WaveFormatTag;
                    var waveFormat    = new WaveFormat(audioSource.WaveFormat.SampleRate, audioSource.WaveFormat.BitsPerSample, Math.Min(audioSource.WaveFormat.Channels, 2), waveFormatTag);
                    audioSource = new WasapiLoopbackCapture(0, waveFormat);
                    audioSource.DataAvailable += WriteAudio;
                    audioSource.Initialize();
                }
                else
                {
                    throw; // rethrow, preserving the original stack trace
                }
            }

            audioFile = new WaveWriter(Path.Combine(tempPath, "audio.wav"), audioSource.WaveFormat);

            audioSource.Start();
        }

        // Start Timers
        StartTimers();

        status      = "Recording";
        isRecording = true;
        return isRecording;
    }
Example #3
    public NaudioDemo()
    {
        using (WasapiCapture capture = new WasapiLoopbackCapture()) {
            //if necessary, you can choose a device here
            //to do so, simply set the Device property of the capture to any MMDevice
            //to choose a device, take a look at the sample here: http://cscore.codeplex.com/

            //initialize the selected device for recording
            capture.Initialize();

            //create a wavewriter to write the data to
            using (WaveWriter w = new WaveWriter("dump.wav", capture.WaveFormat)) {
                //set up an event handler to receive the recorded data
                capture.DataAvailable += (s, e) =>
                {
                    //save the recorded audio
                    w.Write(e.Data, e.Offset, e.ByteCount);
                };

                //start recording
                capture.Start();

                Thread.Sleep(1000);

                //stop recording
                capture.Stop();
            }
        }
    }
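
Several of these samples note only in comments that a specific device can be chosen. A minimal sketch of that step, assuming CSCore's MMDeviceEnumerator (the same type Example #23 uses) and the capture's Device property:

    using (var enumerator = new MMDeviceEnumerator())
    using (var device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia))
    using (var capture = new WasapiLoopbackCapture())
    {
        //any MMDevice from enumerator.EnumAudioEndpoints(DataFlow.Render, DeviceState.Active) works here
        capture.Device = device;
        capture.Initialize();
    }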
Example #4
        public FormMain()
        {
            InitializeComponent();
            Application.ApplicationExit += Application_ApplicationExit;

            // Initialize BeatDetector
            beatDetector = new BeatDetector();
            beatDetector.BeatDetected += BeatDetector_BeatDetected;
            beatDetector.BpmDetected  += BeatDetector_BpmDetected;

            // Initialize WASAPI
            try
            {
                wasapi = new WasapiLoopbackCapture();
                wasapi.DataAvailable += Wasapi_DataAvailable;
                wasapi.Initialize();
                wasapi.Start();
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message + "\n\nThe application will now exit.", "WASAPI error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                Environment.Exit(0);
                return;
            }

            Timer t = new Timer();

            t.Tick    += T_Tick;
            t.Interval = 16; // roughly 60 ticks per second
            t.Start();
        }
Example #5
        public long RunMeasurementRoutine()
        {
            Console.WriteLine($"We will take {c_sampleCount} samples to calculate the end-to-end system latency.");
            Console.WriteLine("Setting up audio devices...");

            InterceptKeys.Instance.OnKey += OnKey;
            InterceptKeys.Start();
            Thread.Sleep(1000);
            _capture.Initialize();
            _capture.DataAvailable += OnData;
            _tracker.Start();
            _capture.Start();

            _output.Initialize(new SampleToIeeeFloat32(new MetronomeGenerator()));
            _output.Play();

            _done.WaitOne();

            float delta = 0;

            for (int i = 0; i < c_sampleCount; i++)
            {
                delta += _keyTicks[i] - _audioTicks[i];
            }
            delta /= c_sampleCount;
            Console.WriteLine($"End-to-end latency: {delta / 10000}ms");
            Thread.Sleep(5000);
            return (long)delta;
        }
Example #6
    public CoreSpectrum(int size = 1024)
    {
        newDataRead = true;

        hamming = new float[size];
        for (int n = 0; n < hamming.Length; n++)
        {
            hamming[n] = FastFourierTransformation.HammingWindowF(n, size);
        }

        capture = new WasapiLoopbackCapture(CAPTURE_DELAY);
        capture.DataAvailable += (s, a) =>
        {
            if (newDataRead)
            {
                UpdateSpectrumData(a.Data);
            }
        };
        capture.Stopped += (c, e) =>
        {
            for (int i = 0; i < spectrumData.Length; i++)
            {
                spectrumData[i] = 0f;
            }
        };
        capture.Initialize();
        capture.Start();

        fftProvider = new FftProvider(STEREO, FftSize.Fft1024);

        spectrumData = new float[size];
    }
Example #7
        private void CreateLoopback()
        {
            try {
                loopback.Initialize();
            } catch (Exception e) {
                Debug.LogException(e);

                return;
            }

            soundIn          = new SoundInSource(loopback);
            spectrumProvider = new BasicSpectrumProvider(soundIn.WaveFormat.Channels, soundIn.WaveFormat.SampleRate, FftSize.Fft4096);
            spectrum         = new LineSpectrum(FftSize.Fft4096)
            {
                SpectrumProvider = spectrumProvider,
                BarCount         = 512,
                UseAverage       = true,
                IsXLogScale      = true,
            };

            loopback.Start();

            blockNotifyStream = new SingleBlockNotificationStream(soundIn.ToSampleSource());
            realtime          = blockNotifyStream.ToWaveSource();

            buffer = new float[realtime.WaveFormat.BytesPerSecond / sizeof(float) / 2]; // half a second of samples

            soundIn.DataAvailable += AudioDataAvailable;

            blockNotifyStream.SingleBlockRead += SingleBlockRead;
        }
Example #8
        protected override void OnOpen()
        {
            _validated = Context.QueryString["token"] == _token;
            if (!_validated)
            {
                Context.WebSocket.Close(4000);
                return; // do not start capturing for unauthorized clients
            }
            _capture = new WasapiLoopbackCapture(0, new WaveFormat());
            _capture.Initialize();
            _capture.Start();
            var wsStream = new WebSocketStream(this);

            Console.WriteLine($"Captured audio format: {_capture.WaveFormat}");
            IWriteable encoder = null;

            switch (_format)
            {
            case AudioFormat.AAC:
                encoder = new AacEncoder(_capture.WaveFormat, wsStream, 128000,
                                         TranscodeContainerTypes.MFTranscodeContainerType_ADTS);
                break;

            case AudioFormat.MP3:
                encoder = MediaFoundationEncoder.CreateMP3Encoder(_capture.WaveFormat, wsStream, 320000);
                break;
            }

            _capture.DataAvailable += (sender, e) => { encoder?.Write(e.Data, e.Offset, e.ByteCount); };
        }
Example #9
        private void StartRecord()
        {
            using (WasapiCapture capture = new WasapiLoopbackCapture())
            {
                //if necessary, you can choose a device here
                //to do so, simply set the Device property of the capture to any MMDevice
                //to choose a device, take a look at the sample here: http://cscore.codeplex.com/

                //initialize the selected device for recording
                capture.Initialize();

                //create a wavewriter to write the data to
                using (var stream = new FileStream("dump.wav", FileMode.CreateNew))
                {
                    using (var w = new WaveWriter(stream, capture.WaveFormat))
                    {
                        //set up an event handler to receive the recorded data
                        capture.DataAvailable += (s, args) =>
                        {
                            //save the recorded audio
                            w.Write(args.Data, args.Offset, args.ByteCount);
                        };

                        //start recording
                        capture.Start();

                        SpinWait.SpinUntil(() => !_recording);

                        //stop recording
                        capture.Stop();
                    }
                    stream.Flush(true);
                }
            }
        }
Example #10
        private static void StartCapture(MMDevice sourceDevice, MMDevice targetDevice)
        {
            var soundIn = new WasapiLoopbackCapture {
                Device = sourceDevice
            };

            soundIn.Initialize();

            var soundOut = new WasapiOut()
            {
                Latency = 100, Device = targetDevice
            };

            soundOut.Initialize(new SoundInSource(soundIn));

            soundIn.Start();
            soundOut.Play();
            // keep the loopback-to-output routing alive: restart playback if it ever stops
            while (true)
            {
                if (soundOut.PlaybackState == PlaybackState.Playing)
                {
                    Thread.Sleep(500);
                }
                soundOut.Play();
            }
        }
Example #11
        static void Main(string[] args)
        {
            using (var wasapiCapture = new WasapiLoopbackCapture())
            {
                wasapiCapture.Initialize();
                var wasapiCaptureSource = new SoundInSource(wasapiCapture);
                using (var stereoSource = wasapiCaptureSource.ToStereo())
                {
                    //using (var writer = MediaFoundationEncoder.CreateWMAEncoder(stereoSource.WaveFormat, "output.wma"))
                    using (var writer = new WaveWriter("output.wav", stereoSource.WaveFormat))
                    {
                        byte[] buffer = new byte[stereoSource.WaveFormat.BytesPerSecond];
                        wasapiCaptureSource.DataAvailable += (s, e) =>
                        {
                            int read = stereoSource.Read(buffer, 0, buffer.Length);
                            writer.Write(buffer, 0, read);
                        };

                        wasapiCapture.Start();

                        Console.ReadKey();

                        wasapiCapture.Stop();
                    }
                }
            }
        }
Example #12
        internal static void RecordToWav(string fileName)
        {
            using (WasapiCapture capture = new WasapiLoopbackCapture())
            {
                //if necessary, you can choose a device here
                //to do so, simply set the Device property of the capture to any MMDevice
                //to choose a device, take a look at the sample here: http://cscore.codeplex.com/

                //initialize the selected device for recording
                capture.Initialize();

                //create a wavewriter to write the data to
                using (WaveWriter w = new WaveWriter(fileName, capture.WaveFormat))
                {
                    //set up an event handler to receive the recorded data
                    capture.DataAvailable += (s, e) =>
                    {
                        //save the recorded audio
                        w.Write(e.Data, e.Offset, e.ByteCount);
                        Console.Write(".");
                    };

                    //start recording
                    capture.Start();

                    Console.ReadKey();

                    //stop recording
                    capture.Stop();
                }
            }
        }
Example #13
        public void StartCapture()
        {
            if (File.Exists(path))
            {
                File.Delete(path);
            }
            var writer = new WaveFileWriter(path, capture.WaveFormat);

            capture.DataAvailable += (s, a) =>
            {
                writer.Write(a.Buffer, 0, a.BytesRecorded);
            };

            capture.RecordingStopped += (s, a) =>
            {
                writer.Dispose();
                writer = null;
                capture.Dispose();
            };

            WasapiCapture waveLoop = new WasapiLoopbackCapture();

            waveLoop.Initialize();
            waveLoop.DataAvailable += waveLoop_DataAvailable;
            waveLoop.Stopped       += waveLoop_Stopped;
            waveLoop.Start();

            capture.StartRecording();
        }
Example #14
        internal static void RecordToWma(string fileName)
        {
            using (var wasapiCapture = new WasapiLoopbackCapture())
            {
                wasapiCapture.Initialize();
                var wasapiCaptureSource = new SoundInSource(wasapiCapture);
                using (var stereoSource = wasapiCaptureSource.ToStereo())
                {
                    using (var writer = MediaFoundationEncoder.CreateWMAEncoder(stereoSource.WaveFormat, fileName))
                    {
                        byte[] buffer = new byte[stereoSource.WaveFormat.BytesPerSecond];
                        wasapiCaptureSource.DataAvailable += (s, e) =>
                        {
                            int read = stereoSource.Read(buffer, 0, buffer.Length);
                            writer.Write(buffer, 0, read);
                            Console.Write(".");
                        };

                        wasapiCapture.Start();

                        Console.ReadKey();

                        wasapiCapture.Stop();
                    }
                }
            }
        }
Example #15
        public void InitialiseAudioProgram()
        {
            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource();

            var spectrumProvider = new SpectrumProvider(2, 48000, FftSize.Fft4096);

            _spectrum = new LineSpectrum(spectrumProvider, _barCount);
            var notificationSource = new SingleBlockNotificationStream(source);

            notificationSource.SingleBlockRead += (s, a) => spectrumProvider.Add(a.Left, a.Right);

            _source = notificationSource.ToWaveSource(16);

            // Read from the source otherwise SingleBlockRead is never called
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (src, evt) =>
            {
                while (_source.Read(buffer, 0, buffer.Length) > 0)
                {
                    // discard the data; reading is what drives SingleBlockRead
                }
            };

            _soundIn.Start();

            for (int i = 0; i < MatrixCount; i++)
            {
                _Programs[i] = i == 0 ? AudioSequence().GetEnumerator() : null;
            }
        }
Example #16
        public async Task<MemoryStream> GetLoopbackAudio(int ms)
        {
            var stream = new MemoryStream();

            using (WasapiCapture virtualaudiodev =
                       new WasapiLoopbackCapture())
            {
                virtualaudiodev.Initialize();
                var soundInSource = new SoundInSource(virtualaudiodev)
                {
                    FillWithZeros = false
                };
                var convertedSource = soundInSource.ChangeSampleRate(44100).ToSampleSource().ToWaveSource(16);
                using (convertedSource = convertedSource.ToMono())
                {
                    using (var waveWriter = new WaveWriter(stream, convertedSource.WaveFormat))
                    {
                        soundInSource.DataAvailable += (s, e) =>
                        {
                            var buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                            int read;
                            while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                            {
                                waveWriter.Write(buffer, 0, read);
                            }
                        };
                        virtualaudiodev.Start();
                        await Task.Delay(ms); // await instead of blocking, so the async signature is meaningful
                        virtualaudiodev.Stop();
                    }
                }
            }

            return stream;
        }
Example #17
        public static int Capture(string output_file, int time)
        {
            int sampleRate    = 48000;
            int bitsPerSample = 24;


            //create a new soundIn instance
            using (WasapiCapture soundIn = new WasapiLoopbackCapture())
            {
                //initialize the soundIn instance
                soundIn.Initialize();

                //create a SoundInSource around the soundIn instance
                SoundInSource soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };

                //create a source, that converts the data provided by the soundInSource to any other format
                IWaveSource convertedSource = soundInSource
                                              .ChangeSampleRate(sampleRate) // sample rate
                                              .ToSampleSource()
                                              .ToWaveSource(bitsPerSample); //bits per sample

                //channels...
                using (convertedSource = convertedSource.ToStereo())
                {
                    //create a new wavefile
                    using (WaveWriter waveWriter = new WaveWriter(output_file, convertedSource.WaveFormat))
                    {
                        //register an event handler for the DataAvailable event of the soundInSource
                        soundInSource.DataAvailable += (s, e) =>
                        {
                            //read data from the convertedSource
                            byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                            int    read;

                            //keep reading as long as we still get some data
                            while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                            {
                                //write the read data to a file
                                waveWriter.Write(buffer, 0, read);
                            }
                        };

                        //start recording
                        soundIn.Start();

                        //delay and keep recording
                        Thread.Sleep(time);

                        //stop recording
                        soundIn.Stop();
                    }
                }
            }
            return 0;
        }
Example #18
        public Spectrograph()
        {
            InitializeComponent();

            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource);

            _source = singleBlockNotificationStream.ToWaveSource();

            if (!Directory.Exists(_loopbackDir))
            {
                Directory.CreateDirectory(_loopbackDir);
            }

            _writer = new WaveWriter(_loopbackDir + "/loopback.wav", _source.WaveFormat);

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    _writer.Write(buffer, 0, read);
                }
            };

            _lineSpectrumProvider = new BasicSpectrumProvider(_source.WaveFormat.Channels, _source.WaveFormat.SampleRate, fftSize);
            _spectrogramProvider  = new BasicSpectrumProvider(_source.WaveFormat.Channels, _source.WaveFormat.SampleRate, fftSize);

            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStream_SingleBlockRead;
            _soundIn.Start();

            _lineSpectrum = new LineSpectrum(fftSize)
            {
                SpectrumProvider = _lineSpectrumProvider,
                UseAverage       = true,
                BarCount         = 22,
                BarSpacing       = 1,
                IsXLogScale      = true,
                ScalingStrategy  = ScalingStrategy.Sqrt
            };
            _oscilloscope = new Oscilloscope();
            _spectrogram  = new Spectrogram(fftSize)
            {
                SpectrumProvider = _spectrogramProvider,
                UseAverage       = true,
                BarCount         = (int)fftSize,
                BarSpacing       = 0,
                IsXLogScale      = true,
                ScalingStrategy  = ScalingStrategy.Sqrt
            };
            _keyboardVisualizer = new KeyboardVisualizer();

            UpdateTimer.Start();
        }
Example #19
 public void Run()
 {
     _periodicThread = new Thread(PeriodicCancelThread);
     _periodicThread.Start();
     _tracker.Start();
     _capture.Start();
     InterceptKeys.Instance.OnKey += OnKey;
     InterceptKeys.Start();
 }
Example #20
        protected override void OnOpen()
        {
            _validated = Context.QueryString["token"] == _token;
            var capture = new WasapiLoopbackCapture();

            capture.Initialize();
            capture.Start();
            var wsStream = new WebSocketStream(this);
            var encoder  = MediaFoundationEncoder.CreateMP3Encoder(capture.WaveFormat, wsStream);

            capture.DataAvailable += (sender, e) => encoder.Write(e.Data, e.Offset, e.ByteCount);
        }
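
Unlike Example #8, this variant keeps the capture in a local variable, so it is never stopped once the socket closes. A hedged teardown sketch, assuming the capture and encoder are promoted to fields (_capture and _encoder are hypothetical names here) and that the WebSocket framework exposes an OnClose override, as websocket-sharp does:

        protected override void OnClose(CloseEventArgs e)
        {
            //stop the DataAvailable callbacks, then release the device and the encoder
            _capture.Stop();
            _capture.Dispose();
            _encoder.Dispose();
        }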
Example #21
        private void FormMain_Load(object sender, EventArgs e)
        {
            mWasapi = new WasapiLoopbackCapture(10);
            mWasapi.DataAvailable += Wasapi_DataAvailable;
            mWasapi.Initialize();
            mWasapi.Start();

            // Status
            var format = mWasapi.Device.DeviceFormat;

            toolStripStatusLabel1.Text = $"WaveFormat: {format.SampleRate}Hz, {format.BitsPerSample} bit, {format.Channels} channel(s)";
        }
Example #22
    // Start is called before the first frame update
    void Start()
    {
        loopbackCapture = new WasapiLoopbackCapture();
        loopbackCapture.Initialize();

        soundInSource = new SoundInSource(loopbackCapture);

        fftBuffer = new float[(int)CFftSize];

        basicSpectrumProvider = new BasicSpectrumProvider(soundInSource.WaveFormat.Channels,
                                                          soundInSource.WaveFormat.SampleRate, CFftSize);

        lineSpectrum = new LineSpectrum(CFftSize)
        {
            SpectrumProvider = basicSpectrumProvider,
            BarCount         = numBars,
            UseAverage       = true,
            IsXLogScale      = false,
            ScalingStrategy  = ScalingStrategy.Linear
        };

        var notificationSource = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

        notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;

        finalSource = notificationSource.ToWaveSource();

        loopbackCapture.DataAvailable += Capture_DataAvailable;
        loopbackCapture.Start();

        //singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
        //realTimeSource = singleBlockNotificationStream.ToWaveSource();

        //byte[] buffer = new byte[realTimeSource.WaveFormat.BytesPerSecond / 2];

        //soundInSource.DataAvailable += (s, ea) =>
        //{
        //    while (realTimeSource.Read(buffer, 0, buffer.Length) > 0)
        //    {
        //        float[] spectrumData = lineSpectrum.GetSpectrumData(10);
        //        receiveAudio(spectrumData);
        //        Debug.Log(receiveAudio);

        //        if (spectrumData != null && receiveAudio != null)
        //        {
        //            receiveAudio(spectrumData);
        //            Debug.Log(receiveAudio);
        //        }
        //    }
        //};

        //singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStream_SingleBlockRead;
    }
Example #23
        private void Form1_Load(object sender, EventArgs e)
        {
            _renderTiming = TimeSpan.FromMilliseconds(1000 / 60.0);

            var enu = new MMDeviceEnumerator();

            var device = enu.GetDefaultAudioEndpoint(DataFlow.Render, Role.Console);

            _capture = new WasapiLoopbackCapture(0, device.DeviceFormat, ThreadPriority.Normal);

            _capture.DataAvailable += DataAvailable;
            _capture.Initialize();
            _capture.Start();

            _waveProvider = new BufferedWaveProvider(new WaveFormat(device.DeviceFormat.SampleRate, device.DeviceFormat.BitsPerSample, device.DeviceFormat.Channels));

            new Thread(() =>
            {
                while (true)
                {
                    var time = DateTime.Now;

                    if (IsHandleCreated && Visible)
                    {
                        FixedUpdate();

                        var waves = GetWaves();

                        if (waves != null && waves.Length > 0)
                        {
                            _waves = waves;

                            BeginInvoke(new MethodInvoker(() =>
                            {
                                var p = PointToClient(Cursor.Position);

                                panel2.Visible = panel2.Enabled = p.X > panel2.Location.X - 5 && p.Y > panel2.Location.Y && p.X < ClientSize.Width && p.Y < ClientSize.Height;

                                Invalidate();
                            }));
                        }
                    }

                    var t = _renderTiming - (DateTime.Now - time);

                    Thread.Sleep(t > TimeSpan.Zero ? t : TimeSpan.Zero);
                }
            })
            {
                IsBackground = true
            }.Start();
        }
Example #24
        public static void Init()
        {
            if (_isInited)
            {
                return;
            }

            WasapiCapture = new CSCore.SoundIn.WasapiLoopbackCapture(0);
            WasapiCapture.Initialize();
            WasapiCapture.Start();

            _isInited = true;
        }
Example #25
        public Visualizer()
        {
            InitializeComponent();

            _graphics = DrawPanel.CreateGraphics();
            _graphics.SmoothingMode      = SmoothingMode.AntiAlias;
            _graphics.CompositingQuality = CompositingQuality.AssumeLinear;
            _graphics.PixelOffsetMode    = PixelOffsetMode.Default;
            _graphics.TextRenderingHint  = TextRenderingHint.ClearTypeGridFit;
            _graphics.Clear(Color.Black);

            _oscilloscope = new Oscilloscope();

            for (int i = 0; i < _pens.Length; i++)
            {
                _pens[i] = new Pen(Color.FromArgb(i, i, i));
            }

            _fftProvider = new FftProvider(1, FftSize.Fft4096);

            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource);

            _source = singleBlockNotificationStream.ToWaveSource();

            if (!Directory.Exists("%AppData%/Spectrograph"))
            {
                Directory.CreateDirectory("%AppData%/Spectrograph");
            }

            _writer = new WaveWriter("%AppData%/Spectrograph/loopback.wav", _source.WaveFormat);

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    _writer.Write(buffer, 0, read);
                }
            };

            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;

            _soundIn.Start();
        }
Example #26
    void StartListen()
    {
        loopbackCapture = new WasapiLoopbackCapture();
        loopbackCapture.Initialize();

        soundInSource = new SoundInSource(loopbackCapture);

        loopbackCapture.Start();

        singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
        realTimeSource = singleBlockNotificationStream.ToWaveSource();

        soundInSource.DataAvailable += DataAvailable;

        singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStream_SingleBlockRead;
    }
Example #27
        public FormMain()
        {
            Application.ApplicationExit += Application_ApplicationExit;
            InitializeComponent();

            detector = new SoundEnergyDetector();
            detector.BeatDetected += Detector_BeatDetected;

            wasapi = new WasapiLoopbackCapture(10);
            wasapi.DataAvailable += Wasapi_DataAvailable;
            wasapi.Initialize();
            wasapi.Start();

            chart1.ChartAreas[0].AxisY.Maximum = 1;
            chart1.ChartAreas[0].AxisX.Minimum = 0;
            chart1.ChartAreas[0].AxisX.Maximum = detector.WindowSize;
        }
Example #28
        public SoundEffect(SoundEffectConfig config) : base(config)
        {
            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            var sampleSource  = soundInSource.ToSampleSource();

            const FftSize fftSize = FftSize.Fft1024;

            _fftBuffer        = new float[(int)fftSize];
            _spectrumProvider = new SpectrumProvider(sampleSource.WaveFormat.Channels, sampleSource.WaveFormat.SampleRate, fftSize);

            var notificationSource = new DataNotificationSource(sampleSource);

            notificationSource.DataRead += (s, e) => _spectrumProvider.Add(e.Data, e.Data.Length);

            var waveSource = notificationSource.ToWaveSource(16);
            var buffer     = new byte[waveSource.WaveFormat.BytesPerSecond / 2];

            soundInSource.DataAvailable += (s, e) =>
            {
                // reading the waveSource is what drives the DataRead notifications
                while (waveSource.Read(buffer, 0, buffer.Length) > 0)
                {
                }
            };

            _spectrum = new LedSpectrum(GenerateColor)
            {
                FftSize            = fftSize,
                SpectrumProvider   = _spectrumProvider,
                UseAverage         = Config.UseAverage,
                MinimumFrequency   = Config.MinimumFrequency,
                MaximumFrequency   = Config.MaximumFrequency,
                ScalingStrategy    = Config.ScalingStrategy,
                ScalingFactor      = Config.ScalingFactor,
                IsXLogScale        = false,
                SpectrumResolution = (int)fftSize
            };

            _spectrum.UpdateFrequencyMapping();
            _soundIn.Start();
        }
Example #29
        public static void listen(long ms)
        {
            using (WasapiCapture capture = new WasapiLoopbackCapture())
            {
                //if necessary, you can choose a device here
                //to do so, simply set the Device property of the capture to any MMDevice
                //to choose a device, take a look at the sample here: http://cscore.codeplex.com/

                double time = DateTime.Now.TimeOfDay.TotalMilliseconds;

                //initialize the selected device for recording
                capture.Initialize();

                //create a wavewriter to write the data to
                using (WaveWriter w = new WaveWriter("dance_r.wav", capture.WaveFormat))
                {
                    bool caught = false;
                    //set up an event handler to receive the recorded data
                    capture.DataAvailable += (s, e) =>
                    {
                        //report the size of each captured block (the actual write below is disabled)

                        Console.WriteLine(e.ByteCount);

                        caught = true;

                        //w.Write(e.Data, e.Offset, e.ByteCount);
                    };

                    //start recording
                    capture.Start();

                    //busy-wait until the first DataAvailable callback flips the flag
                    while (!caught)
                    {
                    }

                    //stop recording
                    capture.Stop();
                }
            }
        }
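
Instead of spinning on a captured flag, a wait handle lets the thread block until the first buffer arrives. A minimal alternative sketch using the same CSCore types (listenOnce is a hypothetical name):

        public static void listenOnce()
        {
            using (WasapiCapture capture = new WasapiLoopbackCapture())
            using (var gotData = new ManualResetEventSlim(false))
            {
                capture.Initialize();
                capture.DataAvailable += (s, e) => gotData.Set();

                capture.Start();
                //block until the first DataAvailable callback fires, then stop
                gotData.Wait();
                capture.Stop();
            }
        }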
Example #30
        public void StartListen()
        {
            _loopbackCapture = new WasapiLoopbackCapture();
            _loopbackCapture.Initialize();

            _soundInSource = new SoundInSource(_loopbackCapture);

            _basicSpectrumProvider = new BasicSpectrumProvider(_soundInSource.WaveFormat.Channels, _soundInSource.WaveFormat.SampleRate, CFftSize);

            _lineSpectrum = new LineSpectrum(CFftSize)
            {
                SpectrumProvider = _basicSpectrumProvider,
                BarCount         = _spectrumSize,
                UseAverage       = true,
                IsXLogScale      = true,
                ScalingStrategy  = _scalingStrategy
            };

            _loopbackCapture.Start();

            _singleBlockNotificationStream = new SingleBlockNotificationStream(_soundInSource.ToSampleSource());
            _realtimeSource = _singleBlockNotificationStream.ToWaveSource();

            byte[] buffer = new byte[_realtimeSource.WaveFormat.BytesPerSecond / 2];

            _soundInSource.DataAvailable += (s, ea) =>
            {
                while (_realtimeSource.Read(buffer, 0, buffer.Length) > 0)
                {
                    float[] spectrumData = _lineSpectrum.GetSpectrumData(MaxAudioValue);

                    if (spectrumData != null && _receiveAudio != null)
                    {
                        _receiveAudio(spectrumData);
                    }
                }
            };

            _singleBlockNotificationStream.SingleBlockRead += singleBlockNotificationStream_SingleBlockRead;
        }