Example #1
        static void Main(string[] args)
        {
            using (var wasapiCapture = new WasapiLoopbackCapture())
            {
                wasapiCapture.Initialize();
                var wasapiCaptureSource = new SoundInSource(wasapiCapture);
                using (var stereoSource = wasapiCaptureSource.ToStereo())
                {
                    //using (var writer = MediaFoundationEncoder.CreateWMAEncoder(stereoSource.WaveFormat, "output.wma"))
                    using (var writer = new WaveWriter("output.wav", stereoSource.WaveFormat))
                    {
                        byte[] buffer = new byte[stereoSource.WaveFormat.BytesPerSecond];
                        wasapiCaptureSource.DataAvailable += (s, e) =>
                        {
                            int read = stereoSource.Read(buffer, 0, buffer.Length);
                            writer.Write(buffer, 0, read);
                        };

                        wasapiCapture.Start();

                        Console.ReadKey();

                        wasapiCapture.Stop();
                    }
                }
            }
        }
Example #2
        internal static void RecordToWma(string fileName)
        {
            using (var wasapiCapture = new WasapiLoopbackCapture())
            {
                wasapiCapture.Initialize();
                var wasapiCaptureSource = new SoundInSource(wasapiCapture);
                using (var stereoSource = wasapiCaptureSource.ToStereo())
                {
                    using (var writer = MediaFoundationEncoder.CreateWMAEncoder(stereoSource.WaveFormat, fileName))
                    {
                        byte[] buffer = new byte[stereoSource.WaveFormat.BytesPerSecond];
                        wasapiCaptureSource.DataAvailable += (s, e) =>
                        {
                            int read = stereoSource.Read(buffer, 0, buffer.Length);
                            writer.Write(buffer, 0, read);
                            Console.Write(".");
                        };

                        wasapiCapture.Start();

                        Console.ReadKey();

                        wasapiCapture.Stop();
                    }
                }
            }
        }
Example #3
        /// <summary>
        /// Create a new file based on the given filename and start recording to it.
        /// Filename must include its full path.
        /// </summary>
        /// <param name="fileName">The name of a file to be created. Include its full path</param>
        /// <param name="codec">The codec to record in</param>
        /// <param name="bitRate">The bitrate of the file</param>
        /// <param name="channels">The channels to record</param>
        public void StartCapture(string fileName, AvailableCodecs codec, int bitRate, Channels channels)
        {
            if (!ReadyToRecord())
            {
                throw new NullReferenceException("There is no SoundInSource configured for the recorder.");
            }

            fileName = $"{fileName}.{codec.ToString().ToLower()}";

            // keep the converted source so it can be read in the DataAvailable handler
            // below (the _waveStream field is assumed to be an IWaveSource; the original
            // discarded the converted source and kept only its WaveFormat)
            switch (channels)
            {
            case Channels.Mono:
                _waveStream = _soundInSource.ToMono();
                break;

            case Channels.Stereo:
                _waveStream = _soundInSource.ToStereo();
                break;

            default:
                throw new ArgumentException("The selected channel option could not be found.");
            }

            WaveFormat waveSource = _waveStream.WaveFormat;

            switch (codec)
            {
            case AvailableCodecs.MP3:
                _writer = MediaFoundationEncoder.CreateMP3Encoder(waveSource, fileName, bitRate);
                break;

            case AvailableCodecs.AAC:
                _writer = MediaFoundationEncoder.CreateAACEncoder(waveSource, fileName, bitRate);
                break;

            case AvailableCodecs.WMA:
                _writer = MediaFoundationEncoder.CreateWMAEncoder(waveSource, fileName, bitRate);
                break;

            case AvailableCodecs.WAV:
                _writer = new WaveWriter(fileName, waveSource);
                break;

            default:
                throw new ArgumentException("The specified codec was not found.");
            }

            byte[] buffer = new byte[waveSource.BytesPerSecond];

            _soundInSource.DataAvailable += (s, e) =>
            {
                int read = _waveStream.Read(buffer, 0, buffer.Length);
                _writer.Write(buffer, 0, read);
            };

            // Start recording
            _soundInSource.SoundIn.Start();
            _state = RecordingState.Recording;
        }
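The example above only starts a capture; a stop counterpart is not shown. The sketch below is a minimal, hypothetical one: it assumes the same _writer, _soundInSource and _state fields used above, and a RecordingState.Stopped member (neither is part of the original).

        public void StopCapture()
        {
            // stop the capture device first so no more DataAvailable events fire
            _soundInSource.SoundIn.Stop();

            // dispose the writer to flush and finalize the output file
            // (both WaveWriter and MediaFoundationEncoder are disposable)
            (_writer as IDisposable)?.Dispose();
            _writer = null;

            _state = RecordingState.Stopped; // assumed enum member
        }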
Example #4
        public Class1()
        {
            //create a new soundIn instance
            var soundIn = new WasapiCapture();

            //optional: set some properties
            //soundIn.Device = ...
            //...

            soundIn.Device = new DeviceService().InputDevices().First();

            //initialize the soundIn instance
            soundIn.Initialize();

            //create a SoundSource around the soundIn instance
            //this SoundSource will provide data, captured by the soundIn instance
            SoundInSource soundInSource = new SoundInSource(soundIn)
            {
                FillWithZeros = false
            };

            //create a source that converts the data provided by the
            //soundInSource to any other format
            //in this case the "Fluent"-extension methods are being used
            IWaveSource convertedSource = soundInSource
                                          .ToStereo()             //2 channels (for example)
                                          .ChangeSampleRate(8000) // 8kHz sample rate
                                          .ToSampleSource()
                                          .ToWaveSource(16);      //16 bit pcm

            //register an event handler for the DataAvailable event of
            //the soundInSource
            //Important: use the DataAvailable of the SoundInSource
            //If you use the DataAvailable event of the ISoundIn itself
            //the data recorded by that event might not be available at the
            //soundInSource yet
            soundInSource.DataAvailable += (s, e) =>
            {
                //read data from the convertedSource
                //important: don't use the e.Data here
                //the e.Data contains the raw data provided by the
                //soundInSource which won't have your target format
                byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                int    read;

                //keep reading as long as we still get some data
                //if you're using such a loop, make sure that soundInSource.FillWithZeros is set to false
                while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    //your logic follows here
                    //for example: stream.Write(buffer, 0, read);
                }
            };

            //we've set everything we need -> start capturing data
            soundIn.Start();
        }
Example #5
        public static async Task RecordSample()
        {
            //create a new soundIn instance
            var soundIn = new WasapiCapture();

            //optional: set some properties
            //soundIn.Device = ...
            //...
            soundIn.Device = new DeviceService().InputDevices().First();
            soundIn.Initialize();

            var waveWriter = new WaveWriter(@"C:\Users\Cedric Lampron\Desktop\Test Record\dump.wav", soundIn.WaveFormat);

            await Task.Run(() =>
            {
                //create a SoundSource around the soundIn instance
                //this SoundSource will provide data, captured by the soundIn instance
                var soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };

                //create a source that converts the data provided by the
                //soundInSource to any other format
                //in this case the "Fluent"-extension methods are being used
                IWaveSource convertedSource = soundInSource
                                              .ToStereo()             //2 channels (for example)
                                              .ChangeSampleRate(8000) // 8kHz sample rate
                                              .ToSampleSource()
                                              .ToWaveSource(16);      //16 bit pcm

                //register an event handler for the DataAvailable event of
                //the soundInSource
                //Important: use the DataAvailable of the SoundInSource
                //If you use the DataAvailable event of the ISoundIn itself
                //the data recorded by that event might not be available at the
                //soundInSource yet
                //note: this handler writes the raw captured data (e.Data) in
                //soundIn.WaveFormat, which matches the WaveWriter created above;
                //the convertedSource defined earlier is not used in this example
                soundInSource.DataAvailable += (s, e) =>
                {
                    waveWriter.Write(e.Data, e.Offset, e.ByteCount);
                };

                //we've set everything we need -> start capturing data
                soundIn.Start();
            });

            await Task.Delay(5000);

            soundIn.Stop();
            waveWriter.Dispose();
            waveWriter = null;
            soundIn.Dispose();
            soundIn = null;
        }
Example #6
        public static void StartRecording(String fileName, int bitRate = 192000)
        {
            capture = new WasapiLoopbackCapture();

            capture.Initialize();

            wasapiCaptureSource = new SoundInSource(capture);
            stereoSource        = wasapiCaptureSource.ToStereo();

            switch (System.IO.Path.GetExtension(fileName))
            {
            case ".mp3":
                encoderWriter = MediaFoundationEncoder.CreateMP3Encoder(stereoSource.WaveFormat, fileName, bitRate);
                writerType    = WriterType.EncoderWriter;
                break;

            case ".wma":
                encoderWriter = MediaFoundationEncoder.CreateWMAEncoder(stereoSource.WaveFormat, fileName, bitRate);
                writerType    = WriterType.EncoderWriter;
                break;

            case ".aac":
                encoderWriter = MediaFoundationEncoder.CreateAACEncoder(stereoSource.WaveFormat, fileName, bitRate);
                writerType    = WriterType.EncoderWriter;
                break;

            case ".wav":
                waveWriter = new WaveWriter(fileName, capture.WaveFormat);
                writerType = WriterType.WaveWriter;
                break;
            default:
                throw new ArgumentException("Unsupported file extension.", nameof(fileName));
            }

            switch (writerType)
            {
            case WriterType.EncoderWriter:
            {
                // read through the converted stereo source so the written data matches the
                // WaveFormat the encoder was created with; subscribe to the SoundInSource's
                // DataAvailable event because we read from the converted chain (see Examples #1/#2)
                byte[] buffer = new byte[stereoSource.WaveFormat.BytesPerSecond];
                wasapiCaptureSource.DataAvailable += (s, e) =>
                {
                    int read = stereoSource.Read(buffer, 0, buffer.Length);
                    encoderWriter.Write(buffer, 0, read);
                };
                break;
            }

            case WriterType.WaveWriter:
                // the raw captured data is in capture.WaveFormat, which the WaveWriter uses
                capture.DataAvailable += (s, e) =>
                {
                    waveWriter.Write(e.Data, e.Offset, e.ByteCount);
                };
                break;
            }

            // Start recording
            capture.Start();
        }
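The method above only starts recording; the matching stop routine is not shown. A possible counterpart, assuming the static capture, encoderWriter, waveWriter and writerType fields used above:

        public static void StopRecording()
        {
            // stop capturing first so no further DataAvailable events are raised
            capture.Stop();

            // dispose whichever writer was created, flushing and closing the file
            switch (writerType)
            {
            case WriterType.EncoderWriter:
                encoderWriter.Dispose();
                break;

            case WriterType.WaveWriter:
                waveWriter.Dispose();
                break;
            }

            capture.Dispose();
        }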
Example #7
        public void Initialize(MMDevice captureDevice)
        {
            //BLARG 01.14.2020: Don't need the default when we're given an Audio Endpoint
            //MMDevice captureDevice = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            WaveFormat deviceFormat = captureDevice.DeviceFormat;

            _audioEndpointVolume = AudioEndpointVolume.FromDevice(captureDevice);

            //DarthAffe 07.02.2018: This is a really stupid workaround to (hopefully) finally fix the surround driver issues
            for (int i = 1; i < 13; i++)
            {
                try { _capture = new WasapiLoopbackCapture(100, new WaveFormat(deviceFormat.SampleRate, deviceFormat.BitsPerSample, i)); } catch { /* We're just trying ... */ }
            }

            if (_capture == null)
            {
                throw new NullReferenceException("Failed to initialize WasapiLoopbackCapture");
            }

            //BLARG: Actually setting the Device
            _capture.Device = captureDevice;
            _capture.Initialize();

            _soundInSource = new SoundInSource(_capture)
            {
                FillWithZeros = false
            };
            _source = _soundInSource.WaveFormat.SampleRate == 44100
                          ? _soundInSource.ToStereo()
                          : _soundInSource.ChangeSampleRate(44100).ToStereo();

            _stream = new SingleBlockNotificationStream(_source.ToSampleSource());
            _stream.SingleBlockRead += StreamOnSingleBlockRead;

            _source = _stream.ToWaveSource();

            byte[] buffer = new byte[_source.WaveFormat.BytesPerSecond / 2];
            _soundInSource.DataAvailable += (s, aEvent) =>
            {
                while (_source.Read(buffer, 0, buffer.Length) > 0)
                {
                    // data is read only to keep the notification stream pumping
                }
            };

            _capture.Start();
        }
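The StreamOnSingleBlockRead handler registered above is not included in the example. A minimal sketch of what such a handler receives (CSCore's SingleBlockReadEventArgs exposes the current left/right sample pair):

        private void StreamOnSingleBlockRead(object sender, SingleBlockReadEventArgs e)
        {
            // e.Left and e.Right hold the current sample pair (32-bit float);
            // forward them to whatever analysis is needed (FFT, level metering, ...)
        }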
Example #8
        public MP3Recorder(string filename)
        {
            if (File.Exists(filename))
            {
                File.Delete(filename);
            }
            wasapiCapture_ = new WasapiCapture();
            wasapiCapture_.Initialize();
            var wasapiCaptureSource = new SoundInSource(wasapiCapture_);

            stereoSource_ = wasapiCaptureSource.ToStereo();
            writer_       = MediaFoundationEncoder.CreateMP3Encoder(stereoSource_.WaveFormat, filename);
            byte[] buffer = new byte[stereoSource_.WaveFormat.BytesPerSecond];
            wasapiCaptureSource.DataAvailable += (s, e) =>
            {
                int read = stereoSource_.Read(buffer, 0, buffer.Length);
                writer_.Write(buffer, 0, read);
            };
            wasapiCapture_.Start();
        }
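The constructor above starts capturing, but the class also needs a way to stop and flush the MP3 file. A possible cleanup method (hypothetical, not shown in the original) using the same fields:

        public void Dispose()
        {
            // stop the capture so no further data is pushed into the encoder
            wasapiCapture_.Stop();

            writer_.Dispose();        // flush and finalize the MP3 file
            stereoSource_.Dispose();
            wasapiCapture_.Dispose();
        }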
Example #9
        public void Initialize()
        {
            MMDevice   captureDevice = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Console);
            WaveFormat deviceFormat  = captureDevice.DeviceFormat;

            _audioEndpointVolume = AudioEndpointVolume.FromDevice(captureDevice);

            //DarthAffe 07.02.2018: This is a really stupid workaround to (hopefully) finally fix the surround driver issues
            for (int i = 1; i < 13; i++)
            {
                try
                {
                    _capture = new WasapiLoopbackCapture(100, new WaveFormat(deviceFormat.SampleRate, deviceFormat.BitsPerSample, i));
                }
                catch
                { }
            }

            if (_capture == null)
            {
                throw new NullReferenceException("Failed to initialize WasapiLoopbackCapture");
            }

            _capture.Initialize();
            _soundInSource = new SoundInSource(_capture)
            {
                FillWithZeros = false
            };

            _stream = _soundInSource.WaveFormat.SampleRate == 44100
                ? new SingleBlockNotificationStream(_soundInSource.ToStereo().ToSampleSource())
                : new SingleBlockNotificationStream(_soundInSource.ChangeSampleRate(44100).ToStereo().ToSampleSource());

            _soundInSource.DataAvailable += OnSoundDataAvailable;

            _capture.Start();
        }
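The OnSoundDataAvailable handler is not included in the example. Since _stream is a SingleBlockNotificationStream (an ISampleSource), the handler presumably just drains it so that its per-block notifications keep firing; a minimal sketch under that assumption:

        private void OnSoundDataAvailable(object sender, DataAvailableEventArgs e)
        {
            // read and discard the converted samples; reading is what drives
            // the SingleBlockNotificationStream's SingleBlockRead event
            float[] buffer = new float[_stream.WaveFormat.BytesPerSecond / 4];
            while (_stream.Read(buffer, 0, buffer.Length) > 0)
            {
            }
        }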
Example #10
        /** Initializes the realtime audio processing handlers */
        public void BeginRecording()
        {
            // records output data from the WASAPI loopback sound card.
            using (wasapiCapture = new WasapiLoopbackCapture())
            {
                wasapiCapture.Initialize();
                wasapiCaptureSource = new SoundInSource(wasapiCapture);

                // TODO: Stereo or Mono?
                using (var stereoSource = wasapiCaptureSource.ToStereo())
                {
                    // creates the spectrum Provider (Our own FFTProvider)
                    provider = new BasicSpectrumProvider(stereoSource.WaveFormat.Channels, stereoSource.WaveFormat.SampleRate, fftSize);
                    // creates the handler that uses the SpectrumProvider.
                    var handler = new FFTHandler(FftSize.Fft4096)
                    {
                        SpectrumProvider = provider,
                        UseAverage       = true,
                        height           = 100,
                        BarCount         = 50,
                        BarSpacing       = 2,
                        IsXLogScale      = true,
                        ScalingStrategy  = ScalingStrategy.Sqrt
                    };

                    // notifies the spectrum provider each block read
                    var notificationSource = new SingleBlockNotificationStream(wasapiCaptureSource.ToSampleSource());
                    notificationSource.SingleBlockRead += (s, a) => provider.Add(a.Left, a.Right);
                    var wsrc = notificationSource.ToWaveSource();

                    // reads through the wave source as it is playing
                    // This is the key to getting the realtime music.
                    byte[] buffer = new byte[wsrc.WaveFormat.BytesPerSecond];
                    wasapiCaptureSource.DataAvailable += (s, e) =>
                    {
                        int read = wsrc.Read(buffer, 0, buffer.Length);
                    };

                    // starts the listening.
                    wasapiCapture.Start();

                    // gathers the data and sends it to the handler in a loop;
                    // loop until a key is pressed so the Stop call below is reachable
                    var fftBuffer = new float[(int)fftSize];
                    while (!Console.KeyAvailable)
                    {
                        if (provider.GetFftData(fftBuffer))
                        {
                            Console.Clear();
                            handler.CreateSpectrumLineInternal(fftBuffer, 100);
                        }
                    }

                    // Stops listening.
                    wasapiCapture.Stop();
                }
            }


            //    bool Data_Available = false;
            //    Double[] Audio_Samples = new Double[0];


            //    var waveIn = new WasapiLoopbackCapture();
            //    waveIn.DataAvailable += ( sender, e) =>
            //{
            //    Int32 sample_count = e.ByteCount / (waveIn.WaveFormat.BitsPerSample / 8);
            //    Single[] data = new Single[sample_count];

            //    for (int i = 0; i < sample_count; ++i)
            //    {
            //        data[i] = BitConverter.ToSingle(e.Data, i * 4);
            //    }

            //    int j = 0;
            //    Audio_Samples = new Double[sample_count / 2];
            //    for (int sample = 0; sample < data.Length; sample += 2)
            //    {
            //        Audio_Samples[j] = (Double)data[sample];
            //        Audio_Samples[j] += (Double)data[sample + 1];
            //        ++j;

            //        Console.WriteLine(Audio_Samples[j].ToString());
            //    }

            //    Data_Available = true;
            //};
            //    waveIn.Initialize();
            //    //waveIn.Stopped += OnRecordingStopped;
            //    waveIn.Start();

            //    while (true)
            //    {
            //        if (Data_Available)
            //        {
            //            Data_Available = false;
            //            //Console.WriteLine(Audio_Samples.ToString());
            //        }
            //    }



            //using (WasapiCapture capture = new WasapiLoopbackCapture())
            //{
            //    //if necessary, you can choose a device here
            //    //to do so, simply set the device property of the capture to any MMDevice
            //    //to choose a device, take a look at the sample here: http://cscore.codeplex.com/

            //    //initialize the selected device for recording
            //    capture.Initialize();


            //    var eq = new Equalizer(new SoundInSource(capture));

            //    var fft = new FftProvider(3, FftSize.Fft1024);


            //    var tenb = Equalizer.Create10BandEqualizer(new SoundInSource(capture));


            //create a wavewriter to write the data to
            //using (WaveWriter w = new WaveWriter("dump.wav", capture.WaveFormat))
            //{


            //    //setup an eventhandler to receive the recorded data
            //    capture.DataAvailable += (s, e) =>
            //    {
            //            //save the recorded audio
            //            w.Write(e.Data, e.Offset, e.ByteCount);

            //    };

            //    Console.WriteLine("starting...");

            //    //start recording
            //    capture.Start();

            //    Console.ReadKey();

            //    capture.Stop();

            //}
        }
Example #11
        private void buttonCheck_Click(object sender, EventArgs e)
        {
            if (listViewSources.SelectedItems.Count > 0)
            {
                if (_soundIn == null)
                {
                    if (SelectedDevice == null)
                    {
                        return;
                    }

                    buttonCheck.Enabled = false;
                    if (captureMode == CaptureMode.Capture)
                    {
                        _soundIn = new WasapiCapture();
                    }
                    else
                    {
                        _soundIn = new WasapiLoopbackCapture(100, new WaveFormat(48000, 24, 2));
                    }

                    _soundIn.Device = SelectedDevice;
                    _soundIn.Initialize();

                    var soundInSource = new SoundInSource(_soundIn)
                    {
                        FillWithZeros = SelectedDevice.DeviceFormat.Channels <= 1
                    };

                    var singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToStereo().ToSampleSource());
                    _finalSource = singleBlockNotificationStream.ToWaveSource();

                    singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStreamOnSingleBlockRead;

                    _soundIn.Start();
                    if (captureMode == CaptureMode.Capture)
                    {
                        if (SelectedDevice.DeviceFormat.Channels <= 1)
                        {
                            _soundOut = new WasapiOut()
                            {
                                Device = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Communications)
                            };
                        }
                        else
                        {
                            _soundOut = new WasapiOut();
                        }
                        _soundOut.Initialize(_finalSource);
                        _soundOut.Play();
                    }
                    else
                    {
                        byte[] buffer = new byte[_finalSource.WaveFormat.BytesPerSecond / 2];
                        soundInSource.DataAvailable += (s, ex) =>
                        {
                            int read;
                            // drain the source; the data is consumed only so the
                            // notification stream keeps raising SingleBlockRead
                            while ((read = _finalSource.Read(buffer, 0, buffer.Length)) > 0)
                            {
                            }
                        };
                    }
                    buttonCheck.Enabled = true;
                    buttonRefreshMicro0phone.Enabled = false;
                    listViewSources.Enabled          = false;
                    checkBoxLoopback.Enabled         = false;
                    buttonCheck.Text = "Stop";
                }
                else
                {
                    buttonCheck.Enabled = false;
                    if (_soundOut != null)
                    {
                        _soundOut.Stop();
                        _soundOut.Dispose();
                        _soundOut = null;
                    }
                    if (_soundIn != null)
                    {
                        _soundIn.Stop();
                        _soundIn.Dispose();
                        _soundIn = null;
                    }
                    buttonCheck.Enabled              = true;
                    listViewSources.Enabled          = true;
                    buttonRefreshMicro0phone.Enabled = true;
                    checkBoxLoopback.Enabled         = true;
                    buttonCheck.Text = "Start";
                }
            }
            else
            {
                MessageBox.Show("Reload & Select a Device");
            }
        }