/// <summary>
        /// Runs the main loop until "exit" or "quit" is heard.
        /// </summary>
        /// <param name="cancellationToken">Token that, when cancelled, stops the loop.</param>
        private async Task RunAsync(CancellationToken cancellationToken)
        {
            StartListeningOnLoopback();

            Console.WriteLine("Listening");

            // Subscribe before starting so a stop event can never be missed.
            _soundIn.Stopped += (sender, args) =>
            {
                Console.WriteLine("Mic stopped");
            };
            _soundIn.Start();

            while (!cancellationToken.IsCancellationRequested)
            {
                await MaybeStartStreamAsync();

                // ProcessResponses will return false if it hears "exit" or "quit".
                var result = await ProcessResponses();

                if (!result)
                {
                    Console.WriteLine("User Exited");

                    return;
                }
                await TransferMicrophoneChunkAsync();
            }
        }
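ProcessResponses itself is not part of this snippet. A minimal sketch of the contract described in the comment above, assuming a hypothetical GetTranscriptsAsync() helper that yields recognized phrases as an IAsyncEnumerable<string>:

        // Hypothetical sketch; GetTranscriptsAsync() stands in for whatever
        // speech-to-text client the original code queries.
        private async Task<bool> ProcessResponses()
        {
            await foreach (var transcript in GetTranscriptsAsync())
            {
                var text = transcript.Trim().ToLowerInvariant();
                if (text == "exit" || text == "quit")
                {
                    return false; // the caller treats false as "user exited"
                }
            }
            return true;
        }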
Code example #2
        public virtual void Initialize()
        {
            _wasapiCapture = new WasapiCapture
            {
                Device = _captureDevice
            };
            _wasapiCapture.Initialize();

            var soundInSource = new SoundInSource(_wasapiCapture);

            if (_triggerSingleBlockRead)
            {
                var notificationStream =
                    new SingleBlockNotificationStream(soundInSource.ChangeSampleRate(48000).ToMono().ToSampleSource());
                notificationStream.SingleBlockRead += NotificationStreamOnSingleBlockRead;
                _captureSource = notificationStream.ToWaveSource(16);
            }
            else
            {
                _captureSource = soundInSource
                                 .ChangeSampleRate(48000)
                                 .ToMono()
                                 .ToSampleSource()
                                 .ToWaveSource(16);
            }

            soundInSource.DataAvailable += SoundInSourceOnDataAvailable;
            _wasapiCapture.Start();
        }
Code example #3
 /// <summary>
 /// Creates a new stream, starts the recording, and stops it when the user presses Enter.
 /// </summary>
 public async Task GetNextRecording()
 {
     mSttStream = mSttClient.CreateStream();
     mAudioCapture.Start();
     Console.ReadLine();
     await StopRecordingAsync();
 }
Code example #4
        private void btnStartStop_Click(object sender, EventArgs e)
        {
            if (!recording)
            {
                btnStartStop.Text = "Stop";

                writer = new WaveWriter(textOutputFile.Text, capture.WaveFormat);
                capture.Start();
                recording = true;

                recordTime              = new TimeSpan();
                recordTimer.Enabled     = true;
                btnSelectDevice.Enabled = false;

                Console.WriteLine("Started recording");
            }
            else
            {
                btnStartStop.Text = "Start!";

                capture.Stop();
                writer.Dispose();
                recording = false;

                recordTimer.Enabled     = false;
                btnSelectDevice.Enabled = true;

                Console.WriteLine("Stopped recording");
            }
        }
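The handler that actually feeds the WaveWriter is not shown here; presumably it is wired up elsewhere in the form. A minimal sketch under that assumption, using the capture, recording, and writer fields above:

            // Assumed to be registered once, e.g. right after the capture device is set up.
            capture.DataAvailable += (s, args) =>
            {
                if (recording)
                {
                    writer.Write(args.Data, args.Offset, args.ByteCount); // raw bytes in the capture format
                }
            };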
Code example #5
File: MainGame.cs Project: MackinnonBuck/real-vis
        /// <summary>
        /// Initializes the visualizer and audio capture.
        /// </summary>
        protected override void Initialize()
        {
            IsMouseVisible = true;

            graphics.HardwareModeSwitch = false;

            graphics.PreferredBackBufferWidth = ViewportWidth;
            graphics.PreferredBackBufferHeight = ViewportHeight;
            //graphics.IsFullScreen = true;
            graphics.ApplyChanges();

            viewportAdapter = new BoxingViewportAdapter(Window, GraphicsDevice, ViewportWidth, ViewportHeight);
            Camera = new Camera2D(viewportAdapter);

            soundIn = new WasapiLoopbackCapture();
            //soundIn = new WasapiCapture();
            soundIn.Initialize();

            SoundInSource inSource = new SoundInSource(soundIn);
            ISampleSource sampleSource = inSource.ToSampleSource();

            SetupSampleSource(sampleSource);

            byte[] buffer = new byte[source.WaveFormat.BytesPerSecond / 2];

            inSource.DataAvailable += (s, e) =>
            {
                // Drain the source; reading is what keeps samples flowing to the visualizer.
                int read;
                while ((read = source.Read(buffer, 0, buffer.Length)) > 0) ;
            };

            soundIn.Start();

            base.Initialize();
        }
Code example #6
        public AudioCapture(int sampleRate, int sampleSize)
        {
            this.sampleRate = sampleRate;
            this.sampleSize = sampleSize;

            if (sampleSize <= 0)
            {
                throw new ArgumentException("Sample size must be > 0, instead it is " + sampleSize);
            }

            resSamples = new float[this.sampleSize];
            var enumerator = new MMDeviceEnumerator();

            // Use WASAPI to capture audio from the default capture endpoint (e.g. the microphone)
            capture        = new WasapiCapture(false, AudioClientShareMode.Shared);
            capture.Device = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Multimedia);
            capture.Initialize();
            capture.DataAvailable += Capture_DataAvailable;

            IWaveSource source = new SoundInSource(capture);


            dataSource             = new PureDataSource(new WaveFormat(sampleRate, 8, 1), source.ToSampleSource());
            dataSource.OnDataRead += DataSource_OnDataRead;

            finalSource = dataSource.ToWaveSource();

            capture.Start();
        }
Code example #7
        private void button_start_Click(object sender, EventArgs e)
        {
            wavein        = new WasapiCapture(false, AudioClientShareMode.Exclusive, 5);
            wavein.Device = inputDevices[comboBox_mic.SelectedIndex];
            wavein.Initialize();
            wavein.Start();

            source = new SoundInSource(wavein)
            {
                FillWithZeros = true
            };
            //add my special effects in the chain
            efxProcs             = new EfxProcs(source.ToSampleSource().ToMono());
            efxProcs.gain        = linearGain; //keep track of this changing value
            efxProcs.pitchFactor = pitchShift; //keep track of pitch

            waveout        = new WasapiOut(false, AudioClientShareMode.Exclusive, 5);
            waveout.Device = outputDevices[comboBox_speaker.SelectedIndex];
            waveout.Initialize(efxProcs.ToWaveSource());
            waveout.Play();
            //CSCore.Streams.SampleConverter.SampleToIeeeFloat32 sampleToIeee = new CSCore.Streams.SampleConverter.SampleToIeeeFloat32(source.ToSampleSource());
            timer1.Enabled = true;
        }
Code example #8
File: WaveForm.cs Project: xue-blood/WaveForm
        private void OpenDefault()
        {
            Stop();

            //open the default device
            _soundIn = new WasapiLoopbackCapture();
            //Our loopback capture opens the default render device by default so the following is not needed
            //_soundIn.Device = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);

            SetupSampleSource(source);

            // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };


            //start capturing the audio
            _soundIn.Start();

            timer.Start();
        }
Code example #9
        private void button_Click(object sender, RoutedEventArgs e)
        {
            MMDevice dev = (MMDevice)comboBox.SelectedItem;

            if (mmdevicesOut.Contains(dev))
            {
                capture = new WasapiLoopbackCapture();
            }
            else
            {
                capture = new WasapiCapture();
            }
            capture.Device = dev;

            capture.Initialize();

            w = new WasapiOut();

            w.Device = (MMDevice)comboBox_Copy.SelectedItem;

            w.Initialize(new SoundInSource(capture)
            {
                FillWithZeros = true
            });

            capture.Start();
            w.Play();
        }
Code example #10
        // Helper for State.Looking
        void StartCapture()
        {
            Debug.Assert(m_State == State.Looking);
            Debug.Assert(m_AudioCapture != null);

            // TODO: This starts as a WaveSource (raw bytes), converts to floats
            // so we can notify once for each sample.
            // The SingleBlockNotificationStream is very garbagey; we should use our own
            // wrapper that grabs all the samples read and pushes them into m_HotValues
            // en masse instead of one-at-a-time.
            var soundInSource = new SoundInSource(m_AudioCapture);
            var sampleSource  = soundInSource.ToSampleSource();
            var singleBlockNotificationStream = new SingleBlockNotificationStream(sampleSource);

            m_FinalSource = singleBlockNotificationStream;

            // Consume and discard samples as they come in. We do this for
            // its side effects (firing the SingleBlockNotificationStream event).
            // buffer is closed-over by the lambda.
            float[] buffer = new float[m_FinalSource.WaveFormat.BytesPerSecond / 4];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                do
                {
                    read = m_FinalSource.Read(buffer, 0, buffer.Length);
                } while (read > 0);
            };

            singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;
            m_AudioCapture.Start();
        }
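A minimal sketch of the wrapper the TODO above asks for: an ISampleSource decorator that reports each block of samples in one callback instead of one event per sample. The class and event names are illustrative; only the ISampleSource members come from CSCore:

using System;
using CSCore;

class BulkNotifyingSampleSource : ISampleSource
{
    private readonly ISampleSource _source;

    // Fired once per Read with the filled buffer and the number of valid samples.
    public event Action<float[], int> BlockRead;

    public BulkNotifyingSampleSource(ISampleSource source) { _source = source; }

    public int Read(float[] buffer, int offset, int count)
    {
        int read = _source.Read(buffer, offset, count);
        if (read > 0)
            BlockRead?.Invoke(buffer, read); // push the samples en masse, no per-sample garbage
        return read;
    }

    public WaveFormat WaveFormat => _source.WaveFormat;
    public bool CanSeek => _source.CanSeek;
    public long Position { get => _source.Position; set => _source.Position = value; }
    public long Length => _source.Length;
    public void Dispose() => _source.Dispose();
}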
Code example #11
        public void SoundInToSoundOutTest_Wasapi()
        {
            for (int i = 0; i < 10; i++)
            {
                var waveIn = new WasapiCapture();
                waveIn.Initialize();
                waveIn.Start();

                var waveInToSource = new SoundInSource(waveIn)
                {
                    FillWithZeros = true
                };

                var soundOut = new WasapiOut();
                soundOut.Initialize(waveInToSource);
                soundOut.Play();

                Thread.Sleep(2000);

                Assert.AreEqual(PlaybackState.Playing, soundOut.PlaybackState);

                soundOut.Dispose();
                waveIn.Dispose();
            }
        }
Code example #12
            public SoundCapture()
            {
                // This uses the wasapi api to get any sound data played by the computer
                capture = new WasapiLoopbackCapture();

                capture.Initialize();

                // Get our capture as a source
                IWaveSource source = new SoundInSource(capture);


                // From https://github.com/filoe/cscore/blob/master/Samples/WinformsVisualization/Form1.cs

                // This is the typical size, you can change this for higher detail as needed
                fftSize = FftSize.Fft4096;

                // Actual fft data
                fftBuffer = new float[(int)fftSize];


                // Tells us when data is available to send to our spectrum
                var notificationSource = new SingleBlockNotificationStream(source.ToSampleSource());

                notificationSource.SingleBlockRead += NotificationSource_SingleBlockRead;

                // We use this to request data so it actually flows through (figuring this out took forever...)
                finalSource = notificationSource.ToWaveSource();

                capture.DataAvailable += Capture_DataAvailable;
                capture.Start();
            }
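Capture_DataAvailable is not shown above; presumably it drains finalSource, which is what makes the data "actually flow through" to the notification stream. A sketch under that assumption:

            // Assumed implementation: reading finalSource pulls samples through
            // the notification stream, which fires SingleBlockRead for the FFT.
            private void Capture_DataAvailable(object sender, DataAvailableEventArgs e)
            {
                byte[] buffer = new byte[finalSource.WaveFormat.BytesPerSecond / 2];
                int read;
                while ((read = finalSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    // discard; the read's side effects are the point
                }
            }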
Code example #13
    void Start()
    {
        fftData = new float[fftSize];

        persistentSamples = new FixedQueue<float>[PersSampleUpperIndex - PersSampleLowerIndex];
        smoothedSamples   = new float[persistentSamples.Length];
        for (int i = 0; i < persistentSamples.Length; i++)
        {
            persistentSamples[i] = new FixedQueue<float>(PersistenSampleLength);
        }

        line        = GetComponent<LineRenderer>();
        leftChannel = new float[TotalSamples];

        capture = new WasapiLoopbackCapture();
        capture.Initialize();
        var soundInSource = new SoundInSource(capture);
        var source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out pitchShifter);

        fft1 = new FftTransform(source.WaveFormat.Channels, fftSize);
        fft2 = new FftProvider(source.WaveFormat.Channels, FftSize.Fft2048);

        stream = new SingleBlockNotificationStream(pitchShifter);
        stream.SingleBlockRead += SingleBlockRead;

        waveSource = stream.ToWaveSource(16);
        buffer     = new byte[waveSource.WaveFormat.BytesPerSecond / 2];

        // Subscribe once; also subscribing on capture.DataAvailable would invoke the handler twice per chunk.
        soundInSource.DataAvailable += DataAvailable;
        capture.Start();
    }
Code example #14
        public void StartRecordingSetDevice(MMDevice recordingDevice)
        {
            if (recordingDevice == null)
            {
                MessageBox.Show(Properties.Strings.MessageBox_NoRecordingDevices);
                Console.WriteLine("No devices found.");
                return;
            }

            soundIn = new CSCore.SoundIn.WasapiLoopbackCapture
            {
                Device = recordingDevice
            };

            soundIn.Initialize();
            soundInSource = new SoundInSource(soundIn)
            {
                FillWithZeros = false
            };
            convertedSource              = soundInSource.ChangeSampleRate(44100).ToSampleSource().ToWaveSource(16);
            convertedSource              = convertedSource.ToStereo();
            soundInSource.DataAvailable += OnDataAvailable;
            soundIn.Start();

            var format = convertedSource.WaveFormat;

            waveFormat = NAudio.Wave.WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, format.SampleRate, format.Channels, format.BytesPerSecond, format.BlockAlign, format.BitsPerSample);
        }
Code example #15
        public Visualization()
        {
            new Thread(() =>
            {
                Stop();

                //open the default device
                _soundIn = new WasapiLoopbackCapture();
                _soundIn.Initialize();

                var soundInSource = new SoundInSource(_soundIn);

                SetupSampleSource(soundInSource.ToSampleSource());

                // We need to read from our source otherwise SingleBlockRead is never called and our spectrum provider is not populated
                byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
                soundInSource.DataAvailable += (s, aEvent) =>
                {
                    int read;
                    while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        ;
                    }
                };

                _soundIn.Start(); //start capturing the audio

                _Timer.Elapsed += new ElapsedEventHandler(GenerateEvent);
                _Timer.Start();
            }).Start();
        }
Code example #16
        private void StartCapture()
        {
            if (SelectedDevice == null)
            {
                return;
            }

            if (CaptureMode == "Capture")
            {
                _soundIn = new WasapiCapture();
            }
            else
            {
                // Without this branch _soundIn would be null in the loopback mode.
                _soundIn = new WasapiLoopbackCapture();
            }

            _soundIn.Device = SelectedDevice;
            _soundIn.Initialize();

            var soundInSource = new SoundInSource(_soundIn);
            var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());

            _finalSource = singleBlockNotificationStream.ToWaveSource();
            _writer      = new WaveWriter("tmp.wav", _finalSource.WaveFormat);

            byte[] buffer = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, e) =>
            {
                int read;
                while ((read = _finalSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    _writer.Write(buffer, 0, read);
                }
            };

            //singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead; // visualization

            _soundIn.Start();
        }
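Nothing in this snippet ever stops the capture. A companion method is needed because CSCore's WaveWriter only finalizes the WAV header when it is disposed; a minimal sketch (not in the original):

        // Assumed companion: stop capturing and finalize tmp.wav.
        private void StopCapture()
        {
            _soundIn.Stop();
            _writer.Dispose(); // flushes and writes the final WAV header
            _soundIn.Dispose();
        }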
Code example #17
File: CSCoreHandler.cs Project: lopea/VJTools
        //Most of this code is adapted from the example in the CSCore GitHub repository.
        public void Initialize(FFTSize _size = FFTSize._4096)
        {
            size     = _size;
            _soundIn = new WasapiLoopbackCapture();

            _soundIn.Initialize();
            var soundInSource = new SoundInSource(_soundIn);

            var source = soundInSource.ToSampleSource();

            _fft = new FftProvider(source.WaveFormat.Channels, (FftSize)size);

            var n = new SingleBlockNotificationStream(source);

            n.SingleBlockRead += (s, a) => _fft.Add(a.Left, a.Right);

            _source = n.ToWaveSource(16);
            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };
            _soundIn.Start();
        }
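How the filled FftProvider is read back out is not shown. CSCore's FftProvider exposes GetFftData for that; a short illustrative helper (the GetSpectrum name is an assumption, not part of the original class):

        // Illustrative helper, not part of the original class.
        public float[] GetSpectrum()
        {
            var fftData = new float[(int)size];
            _fft.GetFftData(fftData); // copies the most recent FFT frame into fftData
            return fftData;
        }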
Code example #18
File: Program.cs Project: namse/MuGeonGi
        static void Test()
        {
            var soundIn = new WasapiCapture();

            soundIn.Initialize();
            var soundInSource = new RealTimeSoundInSource(soundIn);

            var soundOut = new WasapiOut();

            soundOut.Initialize(soundInSource);
            soundIn.Start();
            soundOut.Play();

            soundOut.Stopped += (s, e) =>
            {
                Console.WriteLine("I'm dead but not dead, P.P.A.P");
                Task.Run(() =>
                {
                    soundOut.Play();
                });
            };

            while (true)
            {
                Console.ReadLine();
            }
        }
Code example #19
        private void GetCapture(bool isController, string deviceId = null)
        {
            if (!isController)
            {
                if (_soundCapture != null)
                {
                    _soundCapture?.Stop();
                    _soundCapture?.Dispose();
                }
                using (MMDeviceEnumerator enumerator = new MMDeviceEnumerator())
                {
                    //using (MMDevice device = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Communications))
                    //{
                    MMDevice device;
                    if (deviceId != null)
                    {
                        device = enumerator.GetDevice(deviceId);
                    }
                    else
                    {
                        device = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, _currentCaptureRole);
                    }

                    _meter        = AudioMeterInformation.FromDevice(device);
                    _soundCapture = new WasapiCapture(true, AudioClientShareMode.Shared, 250)
                    {
                        Device = device
                    };
                    _soundCapture.Initialize();
                    _soundCapture.Start();
                    //}
                }
            }
        }
Code example #20
        /// <summary>
        /// Begin the audio input
        /// </summary>
        public static void InitAudioSource(MMDevice device)
        {
            Stop();

            //open the supplied audio device
            m_SoundIn = new WasapiLoopbackCapture();

            m_SoundIn.Device = device;
            m_SoundIn.Initialize();

            var           soundInSource = new SoundInSource(m_SoundIn);
            ISampleSource source        = soundInSource.ToSampleSource();

            SetupSampleSource(source);

            byte[] buffer = new byte[m_Source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = m_Source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };

            m_SoundIn.Start();

            MainWindow.StartTimer();
        }
Code example #21
File: WasapiRecorder.cs Project: sndnv/noisecluster
 /// <summary>
 /// Starts the audio capture.
 /// </summary>
 /// <exception cref="InvalidOperationException">if the audio capture was already started or if no data handlers were supplied</exception>
 public void Start()
 {
     if (_hasHandler)
     {
         if (Interlocked.CompareExchange(ref _isRunning, 1, 0) == 0)
         {
             _log.InfoFormat(
                 "Starting audio capture with formats [{0}] -> [{1}]",
                 _soundInSource.WaveFormat,
                 _convertedSource.WaveFormat
                 );
             _capture.Start();
         }
         else
         {
             var message = string.Format(
                 "Cannot start audio capture with formats [{0}] -> [{1}]; capture is already active",
                 _soundInSource.WaveFormat,
                 _convertedSource.WaveFormat
                 );
             _log.Warn(message);
             throw new InvalidOperationException(message);
         }
     }
     else
     {
         throw new InvalidOperationException("Cannot start capture without at least one data handler");
     }
 }
Code example #22
        public void CSCoreAudioRecording()
        {
            using (capture = new WasapiLoopbackCapture())
            {
                aTimer          = new Timer();
                aTimer.Elapsed += new ElapsedEventHandler(OnTimedEvent); //stop recording once the time elapses
                aTimer.Interval = 8000;                                  //recording duration in milliseconds

                //initialize the recording device
                capture.Initialize();

                using (writer = new WaveWriter("dump.wav", capture.WaveFormat))
                {
                    capture.DataAvailable += (s, e) =>
                    {
                        //save the recorded audio
                        writer.Write(e.Data, e.Offset, e.ByteCount);
                    };
                    //start recording
                    capture.Start();
                    aTimer.Enabled = true;
                    Console.WriteLine("Recording started.");
                    Console.ReadKey();
                }
            }
        }
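OnTimedEvent is not shown; presumably it just shuts the recording down once the 8-second interval elapses. A sketch under that assumption:

        // Assumed handler: stop the timer and the capture; the using blocks
        // above then dispose the writer and the capture device.
        private void OnTimedEvent(object source, ElapsedEventArgs e)
        {
            aTimer.Enabled = false;
            capture.Stop();
            Console.WriteLine("Recording finished.");
        }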
Code example #23
        static void Main(string[] args)
        {
            MMDevice dev = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

            capture        = new WasapiLoopbackCapture();
            capture.Device = dev;
            capture.Initialize();

            SoundInSource soundInSource = new SoundInSource(capture);

            nStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
            final   = nStream.ToWaveSource();
            nStream.SingleBlockRead     += NStream_SingleBlockRead;
            soundInSource.DataAvailable += encode;
            trashBuf = new byte[final.WaveFormat.BytesPerSecond / 2];

            Console.WriteLine($"sample rate: {capture.WaveFormat.SampleRate}");
            Console.WriteLine($"bits per sample: {capture.WaveFormat.BitsPerSample}");
            Console.WriteLine($"channels: {capture.WaveFormat.Channels}");
            Console.WriteLine($"bytes per sample: {capture.WaveFormat.BytesPerSample}");
            Console.WriteLine($"bytes per second: {capture.WaveFormat.BytesPerSecond}");
            Console.WriteLine($"AudioEncoding: {capture.WaveFormat.WaveFormatTag}");


            EncodingContext context = FrameEncoder.GetDefaultsContext();

            context.Channels        = 6;
            context.SampleRate      = capture.WaveFormat.SampleRate;
            context.AudioCodingMode = AudioCodingMode.Front3Rear2;
            context.HasLfe          = true;
            context.SampleFormat    = A52SampleFormat.Float;
            enc = new FrameEncoderFloat(ref context);

            //_writer = new WaveWriter("test.ac3", final.WaveFormat);


            capture.Start();

            wBuffSrc = new WriteableBufferingSource(new WaveFormat(capture.WaveFormat.SampleRate, capture.WaveFormat.BitsPerSample, capture.WaveFormat.Channels, AudioEncoding.WAVE_FORMAT_DOLBY_AC3_SPDIF), (int)capture.WaveFormat.MillisecondsToBytes(20));

            w = new WasapiOut2(false, AudioClientShareMode.Shared, 20);

            w.Device = MMDeviceEnumerator.EnumerateDevices(DataFlow.Render, DeviceState.Active).Where(x => x.FriendlyName.Contains("Digital")).Single();
            AudioClient a = AudioClient.FromMMDevice(w.Device);

            w.Initialize(wBuffSrc);
            w.Play();


            Task.Run(async () => await encoderThread());
            //encodeSinus();

            Console.ReadLine();

            System.Environment.Exit(0);
        }
Code example #24
 private void comboBox_mic_SelectedIndexChanged(object sender, EventArgs e)
 {
     if (comboBox_mic.SelectedIndex != 0)
     {
         wavein.Device = inputDevices[comboBox_mic.SelectedIndex];
         wavein.Initialize();
         wavein.Start();
     }
 }
Code example #25
        public Class1()
        {
            //create a new soundIn instance
            var soundIn = new WasapiCapture();

            //optional: set some properties
            //soundIn.Device = ...
            //...

            soundIn.Device = new DeviceService().InputDevices().First();

            //initialize the soundIn instance
            soundIn.Initialize();

            //create a SoundSource around the soundIn instance
            //this SoundSource will provide the data captured by the soundIn instance
            SoundInSource soundInSource = new SoundInSource(soundIn)
            {
                FillWithZeros = false
            };

            //create a source that converts the data provided by the
            //soundInSource to any other format
            //in this case the "Fluent"-extension methods are being used
            IWaveSource convertedSource = soundInSource
                                          .ToStereo()             //2 channels (for example)
                                          .ChangeSampleRate(8000) // 8kHz sample rate
                                          .ToSampleSource()
                                          .ToWaveSource(16);      //16 bit pcm

            //register an event handler for the DataAvailable event of
            //the soundInSource
            //Important: use the DataAvailable event of the SoundInSource
            //If you use the DataAvailable event of the ISoundIn itself,
            //the data recorded by that event might not be available at the
            //soundInSource yet
            soundInSource.DataAvailable += (s, e) =>
            {
                //read data from the convertedSource
                //important: don't use the e.Data here
                //the e.Data contains the raw data provided by the
                //soundInSource which won't have your target format
                byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                int    read;

                //keep reading as long as we still get some data
                //if you're using such a loop, make sure that soundInSource.FillWithZeros is set to false
                while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    //your logic follows here
                    //for example: stream.Write(buffer, 0, read);
                }
            };

            //we've set everything we need -> start capturing data
            soundIn.Start();
        }
Code example #26
        protected override void OnInitializeDevice()
        {
            base.OnInitializeDevice();

            _capture = CreateCapture();
            _capture.DataAvailable += OnCaptureDataAvailable;
            _capture.Initialize();
            _capture.Start();
        }
Code example #27
        private void Form1_Load(object sender, EventArgs e)
        {
            Stop();
            _soundIn = new WasapiLoopbackCapture();
            _soundIn.Initialize();

            var           soundInSource = new SoundInSource(_soundIn);
            ISampleSource source        = soundInSource.ToSampleSource().AppendSource(x => new PitchShifter(x), out _pitchShifter);

            SetupSampleSource(source);

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            soundInSource.DataAvailable += (s, aEvent) =>
            {
                int read;
                while ((read = _source.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ;
                }
            };

            _soundIn.Start();
            try
            {
                MSI = new Lighting(Allowed);
            }
            catch (MSIRGB.Lighting.Exception ex)
            {
                if (ex.GetErrorCode() == Lighting.ErrorCode.DriverLoadFailed)
                {
                    MessageBox.Show("Please run the program as an administrator.");
                    Close();
                }
                else if (ex.GetErrorCode() == Lighting.ErrorCode.MotherboardModelNotSupported)
                {
                    if (MessageBox.Show("Your motherboard is not supported, but it may still work. The application will run at your own risk.", "MSI Magic Light", MessageBoxButtons.OKCancel) == DialogResult.OK)
                    {
                        Allowed = true;
                    }
                    else
                    {
                        Close();
                    }
                }
                else if (ex.GetErrorCode() == Lighting.ErrorCode.MotherboardVendorNotSupported)
                {
                    MessageBox.Show("Your motherboard is not supported at all. We are sorry :(");
                    this.Close();
                }
                else
                {
                    MessageBox.Show("Unknown error. Please report it on GitHub.");
                    this.Close();
                }
            }
        }
Code example #28
        void StartCapturingAndHold(WasapiCapture capture)
        {
            capture.Start();
#if DEBUG
            Console.WriteLine("Start Capturing...");
            Console.WriteLine("Input Format: " + capture.WaveFormat.ToString());
#endif
            _ = Console.ReadKey();
            capture.Stop();
        }
Code example #29
        /// <summary>
        /// Start recording on the device in the parameter.
        /// </summary>
        /// <param name="recordingDevice">the device to start recording</param>
        /// <returns>true if the recording is started, or false</returns>
        public bool StartRecordingSetDevice(MMDevice recordingDevice)
        {
            if (recordingDevice == null)
            {
                logger.Log(Properties.Strings.MessageBox_NoRecordingDevices);
                return false;
            }

            try
            {
                soundIn = new CSCore.SoundIn.WasapiLoopbackCapture
                {
                    Device = recordingDevice
                };

                soundIn.Initialize();
                soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };
                convertedSource              = soundInSource.ChangeSampleRate(44100).ToSampleSource().ToWaveSource(16);
                convertedSource              = convertedSource.ToStereo();
                soundInSource.DataAvailable += OnDataAvailable;
                soundIn.Stopped             += OnRecordingStopped;
                soundIn.Start();

                var format = convertedSource.WaveFormat;
                waveFormat     = NAudio.Wave.WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, format.SampleRate, format.Channels, format.BytesPerSecond, format.BlockAlign, format.BitsPerSample);
                isRecording    = true;
                bufferCaptured = new BufferBlock()
                {
                    Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2]
                };
                bufferSend = new BufferBlock()
                {
                    Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2]
                };

                eventThread = new Thread(EventThread)
                {
                    Name         = "Loopback Event Thread",
                    IsBackground = true
                };
                eventThread.Start(new WeakReference <LoopbackRecorder>(this));

                return true;
            }
            catch (Exception ex)
            {
                logger.Log(ex, "Error initializing the recording device:");
            }

            return false;
        }
Code example #30
        public static async Task RecordSample()
        {
            //create a new soundIn instance
            var soundIn = new WasapiCapture();

            //optional: set some properties
            //soundIn.Device = ...
            //...
            soundIn.Device = new DeviceService().InputDevices().First();
            soundIn.Initialize();

            var waveWriter = new WaveWriter(@"C:\Users\Cedric Lampron\Desktop\Test Record\dump.wav", soundIn.WaveFormat);

            await Task.Run(() =>
            {
                //create a SoundSource around the soundIn instance
                //this SoundSource will provide the data captured by the soundIn instance
                var soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };

                //create a source that converts the data provided by the
                //soundInSource to any other format
                //in this case the "Fluent"-extension methods are being used
                IWaveSource convertedSource = soundInSource
                                              .ToStereo()             //2 channels (for example)
                                              .ChangeSampleRate(8000) // 8kHz sample rate
                                              .ToSampleSource()
                                              .ToWaveSource(16);      //16 bit pcm

                //register an event handler for the DataAvailable event of
                //the soundInSource
                //Important: use the DataAvailable event of the SoundInSource
                //If you use the DataAvailable event of the ISoundIn itself,
                //the data recorded by that event might not be available at the
                //soundInSource yet
                soundInSource.DataAvailable += (s, e) =>
                {
                    waveWriter.Write(e.Data, e.Offset, e.ByteCount);
                };

                //we've set everything we need -> start capturing data
                soundIn.Start();
            });

            await Task.Delay(5000);

            soundIn.Stop();
            waveWriter.Dispose();
            waveWriter = null;
            soundIn.Dispose();
            soundIn = null;
        }