using System.IO;
using NAudio.Wave;

// Example 1: record mono 44.1 kHz audio from the default input device to output.wav
// using the NAudio package.
var waveFormat = new WaveFormat(44100, 1); // 44.1kHz, mono (16-bit PCM by default)
using var waveIn = new WaveInEvent { WaveFormat = waveFormat };

var outputFilePath = "output.wav";
var writer = new WaveFileWriter(outputFilePath, waveFormat);

waveIn.DataAvailable += (sender, e) =>
{
    // Only e.BytesRecorded bytes of e.Buffer are valid for this callback.
    writer.Write(e.Buffer, 0, e.BytesRecorded);
};

// FIX: StopRecording() is asynchronous — DataAvailable can still fire after it
// returns. Disposing the writer inline (as the original did) risks writing to a
// disposed stream. Dispose it here instead, once the device has fully stopped.
waveIn.RecordingStopped += (sender, e) =>
{
    writer.Dispose();
    if (e.Exception is not null)
    {
        Console.WriteLine($"Recording stopped due to error: {e.Exception.Message}");
    }
};

waveIn.StartRecording();
Console.WriteLine("Recording... press ENTER to stop.");
Console.ReadLine();
waveIn.StopRecording();
using NAudio.Wave;

// Example 2: live input level meter — prints the peak 16-bit sample magnitude
// of each captured buffer in real time (nothing is written to disk).
var waveFormat = new WaveFormat(44100, 1); // 44.1kHz, mono (16-bit PCM by default)
using var waveIn = new WaveInEvent { WaveFormat = waveFormat };

waveIn.DataAvailable += (sender, e) =>
{
    var maxAmplitude = 0; // peak |sample| in this buffer; samples are Int16
    // FIX: scan only e.BytesRecorded bytes — e.Buffer is a reused array that is
    // usually larger than the valid data, so iterating buffer.Length (as the
    // original did) would include stale bytes from earlier callbacks.
    // Stride is 2 bytes per 16-bit mono sample; the i + 1 bound guards against
    // reading half a sample past the end.
    for (int i = 0; i + 1 < e.BytesRecorded; i += 2)
    {
        var amplitude = Math.Abs((int)BitConverter.ToInt16(e.Buffer, i));
        if (amplitude > maxAmplitude)
        {
            maxAmplitude = amplitude;
        }
    }
    Console.WriteLine($"Amplitude: {maxAmplitude}");
};

waveIn.StartRecording();
Console.WriteLine("Recording... press ENTER to stop.");
Console.ReadLine();
waveIn.StopRecording();

// NOTE(review): the original trailing text claimed this example "uses only the
// built-in C# libraries" — that is incorrect: like the first example it depends
// on the NAudio package (WaveInEvent, WaveFormat). The real difference between
// the two examples is what they do with the captured buffers: the first writes
// them to a WAV file, while this one computes each buffer's peak amplitude
// in real time.