/// <summary>
/// Runs the audio spectrum animation on the led strip until the task is stopped:
/// captures audio, computes the FFT and renders it every frame.
/// </summary>
public static void Spectrum()
{
    // Initialize the led strip
    Criteria.Spectrum = true;
    Util.Setup();
    int taskId = Util.StartTask();

    byte[] samples = new byte[256];
    using AudioCapture capture = new AudioCapture(AudioCapture.AvailableDevices[1], 22000, ALFormat.Mono8, samples.Length);
    capture.Start();

    int frame = 0;
    int titleOffset = -20;
    while (Util.TaskWork(taskId))
    {
        double[] fft = Capture(capture, samples);
        float[] fftData = SetFFT(samples, fft);
        Spectrum(fftData, 0.05);
        titleOffset = AffTitre(frame, titleOffset);
        AffHeure();
        Util.SetLeds();
        Spectrum(frame++);
    }
}
/// <summary>
/// Polls the capture device and writes raw samples to <c>Stream</c> until
/// <c>AudioHandler.Running</c> goes false.
/// </summary>
/// <param name="device">Name of the capture device to open.</param>
public void CaptureLoop(string device)
{
    using (AudioContext context = new AudioContext())
    {
        Console.WriteLine("Starting capture loop.");
        // FIX: the original never stopped or disposed the capture device (leak).
        using (AudioCapture capture = new AudioCapture(device, Frequency, Format, BufferSize))
        {
            capture.Start();
            byte[] buffer = new byte[BufferSize];
            Console.WriteLine("Started capture loop.");
            while (AudioHandler.Running)
            {
                int samples = capture.AvailableSamples;
                if (samples > 0)
                {
                    // Never read more samples than the buffer can hold.
                    if (samples > BufferSize / SampleSize)
                    {
                        samples = BufferSize / SampleSize;
                    }
                    capture.ReadSamples(buffer, samples);
                    Stream.Write(buffer, 0, samples * SampleSize);
                }
            }
            capture.Stop();
        }
        Console.WriteLine("Finished capture loop.");
        AudioHandler.Running = false;
    }
}
/// <summary>
/// Starts the voice session: connects the transfer client, begins local
/// playback, then starts capturing from the microphone.
/// </summary>
public void Start()
{
    _transferClient.Connect();   // open the network connection first
    _audioPlayer.Play();         // start playback before capture begins
    _audioCapture.Start();       // finally begin capturing microphone data
}
/// <summary>
/// The main entry point: continuously captures 8 kHz mono audio, runs a
/// 256-point FFT on each buffer and pushes the magnitude data to all
/// registered writers, ~50 frames per second.
/// </summary>
/// <param name="args">
/// The command-line args (unused).
/// </param>
static void Main(string[] args)
{
    var audioBuffer = new byte[256];
    var fftData = new byte[256];
    var fft = new double[256];
    float amplitude = 10.0f;
    var fftTransformer = new LomontFFT();

    var writers = new List<IWriter>();
    writers.Add(new KeyboardWriter());
    writers.Add(new ConsoleWriter());

    var audioCapture = new AudioCapture(AudioCapture.DefaultDevice, 8000, ALFormat.Mono8, 256);
    audioCapture.Start();
    audioCapture.ReadSamples(audioBuffer, 256);

    while (true)
    {
        for (int j = 0; j < 92; j++)
        {
            // Reset the working buffers for this frame.
            for (int i = 0; i < 256; i++)
            {
                audioBuffer[i] = 0;
                fftData[i] = 0;
                fft[i] = 0;
            }

            audioCapture.ReadSamples(audioBuffer, 256);

            // Center the unsigned 8-bit samples around zero and amplify.
            for (int i = 0; i < 256; i++)
            {
                fft[i] = (audioBuffer[i] - 128) * amplitude;
            }

            fftTransformer.TableFFT(fft, true);

            // Collapse each complex pair into one magnitude byte, written twice
            // so fftData keeps the same stride as the complex array.
            // (FIX: removed the dead `fftavg` accumulator — it was summed and
            // divided every frame but never read, and never reset.)
            for (int i = 0; i < 256; i += 2)
            {
                double fftmag = Math.Sqrt((fft[i] * fft[i]) + (fft[i + 1] * fft[i + 1]));
                fftData[i] = (byte)fftmag;
                fftData[i + 1] = fftData[i];
            }

            writers.ForEach(x => x.Write(j, fftData));
            Thread.Sleep(20);
        }
    }
}
/// <summary>
/// Allows the game to perform any initialization it needs to before starting to run.
/// This is where it can query for any required services and load any non-graphic
/// related content. Calling base.Initialize will enumerate through any components
/// and initialize them as well.
/// </summary>
protected override void Initialize()
{
    // Playback instance the capture callback feeds (48 kHz mono).
    _dynamicSound = new DynamicSoundEffectInstance(48000, AudioChannels.Mono, 32);
    _audioCapture = new AudioCapture();
    // Subscribe before starting the capture so no buffers are missed.
    _audioCapture.BufferReady += BufferReady;
    _audioCapture.Start();
    base.Initialize();
}
/// <summary>
/// Starts echoing the microphone: tears down any running session, creates a
/// playback source and a 16-bit mono capture device, and begins capturing.
/// </summary>
public void StartEcho()
{
    // Restart cleanly if an echo session is already active.
    if (Capture != null)
    {
        StopEcho();
    }
    PlaybackSrc = AL.GenSource();
    Capture = new AudioCapture(AudioCapture.DefaultDevice, SampleRate, ALFormat.Mono16, 8192);
    Capture.Start();
    // Source-relative positioning so the echo always plays at the listener.
    AL.Source(PlaybackSrc, ALSourceb.SourceRelative, true);
}
/// <summary>
/// Starts capturing unless the device is missing or already running, and
/// schedules the one-shot recording callback.
/// </summary>
public void Start()
{
    bool canStart = capture != null && !capture.IsRunning;
    if (!canStart)
    {
        return;
    }

    lock (syncObj)
    {
        capture.Start();
        systemTimer = new Timer(OnRecording, null, GetTimerTimeOut(), -1);
    }
}
/// <summary>
/// Start recording from the Microphone
/// </summary>
public void StartRecording()
{
    if (!IsMicrophoneValid)
    {
        return;
    }

    // Begin capturing data from the device.
    audio_capture.Start();
    continuePolling = true;

    // Spin up the worker only when none is already processing mic data.
    bool workerIdle = workerThread == null || !workerThread.IsAlive;
    if (workerIdle)
    {
        workerThread = new Thread(PollMicrophoneForData);
        workerThread.Start();
    }
}
/// <summary>
/// Starts the capture device and lazily creates the recording timer.
/// No-op when the device is missing or already running.
/// </summary>
public void Start()
{
    bool ready = _capture != null && !_capture.IsRunning;
    if (!ready)
    {
        return;
    }

    lock (_syncObj)
    {
        _capture.Start();
        if (_captureTimer == null)
        {
            _captureTimer = new Timer(OnRecording, null, GetTimerTimeOut(), -1);
        }
    }
}
/// <summary>
/// Start encoder threads & capture devices for each job.
/// </summary>
private void InnerStartup()
{
    try
    {
        EncoderRunning = false;
        synchronised = false;

        // Start a new thread for each job
        coreloops = new List <Thread>();
        foreach (var pkg in Packages)
        {
            coreloops.Add(NewEncoderThread(pkg));
        }

        // Propagate the clock mode to both devices before starting them.
        if (cam != null)
        {
            cam.UseAbsoluteTime = this.UseAbsoluteTime;
        }
        if (mic != null)
        {
            mic.UseAbsoluteTime = this.UseAbsoluteTime;
        }

        // Start capturing! (buffers should start to fill from now on)
        if (cam != null)
        {
            cam.Start(); // cameras usually take longer to start than microphones, so start in serial and cameras first.
        }
        if (mic != null)
        {
            mic.Start();
        }

        FrameCount = 0;

        // Sync frames:
        AdjustFrameSleep();
        EncoderRunning = true;
        WaitAndCompensateCaptureTimes();
    }
    catch
    {
        // NOTE(review): any startup failure is swallowed here and only surfaced
        // via EncoderRunning = false — consider logging the exception.
        EncoderRunning = false;
    }
}
/// <summary>
/// Runs the audio graph animation on the led strip until the task is stopped.
/// </summary>
public static void Graph()
{
    // Initialize the led strip
    Util.Setup();
    int taskId = Util.StartTask();

    byte[] samples = new byte[256];
    using AudioCapture capture = new AudioCapture(AudioCapture.AvailableDevices[1], 22000, ALFormat.Mono8, samples.Length);
    capture.Start();

    int frame = 0;
    int titleOffset = -20;
    while (Util.TaskWork(taskId))
    {
        Graph(capture, samples);
        AffHeure();
        titleOffset = AffTitre(frame++, titleOffset);
        Util.SetLeds();
        Util.Context.Pixels.Reset();
    }
}
/// <summary>
/// Runs the spectrograph animation on the led strip until the task is stopped:
/// captures audio, computes the FFT and scrolls it as a spectrogram.
/// </summary>
public static void Spectrograph()
{
    // Initialize the led strip
    Util.Setup();
    int taskId = Util.StartTask();

    byte[] samples = new byte[256];
    using AudioCapture capture = new AudioCapture(AudioCapture.AvailableDevices[1], 22000, ALFormat.Mono8, samples.Length);
    capture.Start();

    int frame = 0;
    while (Util.TaskWork(taskId))
    {
        double[] fft = Capture(capture, samples);
        float[] magnitudes = SetFFT(samples, fft);
        Spectrograph(magnitudes);
        Spectrograph(frame++);
        Util.SetLeds();
    }
}
/// <summary>
/// Opens the playback context and the selected recording device, then starts
/// capturing and polling samples on a timer. Exits the app if no playback
/// device is available; silently returns if the recorder cannot be opened.
/// </summary>
void StartRecording()
{
    try
    {
        audio_context = new AudioContext();
    }
    catch (AudioException ae)
    {
        MessageBox.Show("Fatal: Cannot continue without a playback device.\nException caught when opening playback device.\n" + ae.Message);
        Application.Exit();
    }

    AL.Listener(ALListenerf.Gain, (float)numericUpDown_PlaybackGain.Value);
    src = AL.GenSource();

    int sampling_rate = (int)numericUpDown_Frequency.Value;
    double buffer_length_ms = (double)numericUpDown_BufferLength.Value;
    // Convert the requested buffer length (ms) into a sample count for the device.
    int buffer_length_samples = (int)((double)numericUpDown_BufferLength.Value * sampling_rate * 0.001 / BlittableValueType.StrideOf(buffer));

    try
    {
        audio_capture = new AudioCapture((string)comboBox_RecorderSelection.SelectedItem, sampling_rate, ALFormat.Mono16, buffer_length_samples);
    }
    catch (AudioDeviceException ade)
    {
        MessageBox.Show("Exception caught when opening recording device.\n" + ade.Message);
        audio_capture = null;
    }

    if (audio_capture == null)
    {
        return;
    }

    audio_capture.Start();
    timer_GetSamples.Start();
    timer_GetSamples.Interval = (int)(buffer_length_ms / 2 + 0.5); // Tick when half the buffer is full.
}
/// <summary>
/// Toggles audio capture: when checked, opens the device selected in the list
/// at 8 kHz mono and starts capturing; when unchecked, stops and disposes the
/// current capturer.
/// </summary>
private void ChEnable_CheckedChanged(object sender, EventArgs e)
{
    if (ChEnable.Checked)
    {
        try
        {
            AudioCapturer = new AudioCapture(AudioCapture.AvailableDevices[LsAudioDevices.SelectedIndex], 8000, ALFormat.Mono8, 256);
            AudioCapturer.Start();
        }
        catch
        {
            // NOTE(review): device-open failures are silently ignored here;
            // consider surfacing the error to the user.
        }
    }
    else
    {
        if (AudioCapturer != null)
        {
            AudioCapturer.Stop();
            AudioCapturer.Dispose();
            AudioCapturer = null;
            // Presumably lets the driver release the device before a re-open — TODO confirm.
            System.Threading.Thread.Sleep(100);
        }
    }
}
/// <summary>
/// Enables the audio analysis pipeline: opens the capture device, allocates
/// per-channel sample buffers, and wires up the FFT and intensity detector
/// before starting capture. No-op if already enabled.
/// </summary>
/// <exception cref="Exception">Wraps any <see cref="AudioCaptureException"/> from the device.</exception>
public override void Enable()
{
    if (enabled)
    {
        return;
    }

    // Open audio device
    try
    {
        audioCapture = new AudioCapture();
        // Subscribe before starting so the first full buffer is handled.
        audioCapture.onProcessAudio += this.AudioBufferFull;

        leftch = new float[audioCapture.NumSamples];
        rightch = new float[audioCapture.NumSamples];

        fft = new DSP.FastFourierTransform(fftWidth, audioCapture.SampleRate);
        fft.NumBands = 6;

        intensityDetector = new DSP.IntensityDetector();
        audioCapture.Start();
    }
    catch (AudioCaptureException ex)
    {
        throw new Exception("Could not open audio device", ex);
    }
    enabled = true;
}
/// <summary>
/// Opens the playback context and the selected recording device, then starts
/// capturing and polling samples on a timer. Exits the app if no playback
/// device is available; silently returns if the recorder cannot be opened.
/// </summary>
void StartRecording()
{
    try
    {
        audio_context = new AudioContext();
    }
    catch (AudioException ae)
    {
        MessageBox.Show("Fatal: Cannot continue without a playback device.\nException caught when opening playback device.\n" + ae.Message);
        Application.Exit();
    }

    AL.Listener(ALListenerf.Gain, (float)numericUpDown_PlaybackGain.Value);
    src = AL.GenSource();

    int sampling_rate = (int)numericUpDown_Frequency.Value;
    double buffer_length_ms = (double)numericUpDown_BufferLength.Value;
    // Convert the requested buffer length (ms) into a sample count for the device.
    int buffer_length_samples = (int)((double)numericUpDown_BufferLength.Value * sampling_rate * 0.001 / BlittableValueType.StrideOf(buffer));

    try
    {
        audio_capture = new AudioCapture((string)comboBox_RecorderSelection.SelectedItem, sampling_rate, ALFormat.Mono16, buffer_length_samples);
    }
    catch (AudioDeviceException ade)
    {
        MessageBox.Show("Exception caught when opening recording device.\n" + ade.Message);
        audio_capture = null;
    }

    // FIX: braced the single-statement if for consistency with the other copy
    // of this method in the codebase.
    if (audio_capture == null)
    {
        return;
    }

    audio_capture.Start();
    timer_GetSamples.Start();
    timer_GetSamples.Interval = (int)(buffer_length_ms / 2 + 0.5); // Tick when half the buffer is full.
}
/// <summary>
/// VuMeter: draws an analog VU meter on the led strip and moves its needle
/// with the peak level of the captured audio, until the task is stopped.
/// </summary>
public static void VuMeter()
{
    // Initialize the led strip
    Util.Setup();
    int task = Util.StartTask();

    // Dial colour: dark blue by default, or light blue on a dark background
    // (chosen at random with a 1-in-2 chance).
    Couleur couleur = Couleur.Get(0, 0, 8);
    Random ra = new Random();
    bool whiteBgColor = true;
    if (ra.Next(1, 3) == 1)
    {
        couleur = Couleur.Get(63, 63, 127);
        whiteBgColor = false;
    }

    double max = 0;
    CaractereList caracteres = new CaractereList(Util.Context.Largeur);

    byte[] audioBuffer = new byte[256];
    using AudioCapture audioCapture = new AudioCapture(AudioCapture.AvailableDevices[1], 22000, ALFormat.Mono8, audioBuffer.Length);
    audioCapture.Start();

    while (Util.TaskWork(task))
    {
        // Let the needle fall back slowly, then track the new audio peak.
        max -= 0.5;
        double[] fft = Capture(audioCapture, audioBuffer);
        if (fft.Max(a => Math.Abs(a)) > max)
        {
            max = fft.Max(a => Math.Abs(a));
        }

        // Paint a white background when that variant was selected.
        if (whiteBgColor)
        {
            foreach (Pixel pixel in Util.Context.Pixels)
            {
                pixel.Set(127, 127, 127);
            }
        }

        caracteres.SetText("VU");
        Util.Context.Pixels.Print(caracteres.GetCaracteres(), 5, 12, couleur);

        Couleur couleurMax = couleur;
        //lumiere max — the overload lamp turns red above a peak of 75
        if (max > 75)
        {
            couleurMax = Couleur.Get(127, 0, 0);
        }
        Util.Context.Pixels.GetCoordonnee(17, 13).SetColor(couleurMax);
        Util.Context.Pixels.GetCoordonnee(18, 13).SetColor(couleurMax);
        Util.Context.Pixels.GetCoordonnee(17, 14).SetColor(couleurMax);
        Util.Context.Pixels.GetCoordonnee(18, 14).SetColor(couleurMax);

        //dessin — the dial arc; the right-hand end is drawn in red
        Util.Context.Pixels.GetCoordonnee(1, 10).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(2, 10).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(3, 10).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(4, 9).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(5, 9).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(6, 9).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(7, 9).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(8, 8).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(9, 8).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(10, 8).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(11, 8).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(12, 9).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(13, 9).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(14, 9).SetColor(Couleur.Get(127, 0, 0));
        Util.Context.Pixels.GetCoordonnee(15, 9).SetColor(Couleur.Get(127, 0, 0));
        Util.Context.Pixels.GetCoordonnee(16, 10).SetColor(Couleur.Get(127, 0, 0));
        Util.Context.Pixels.GetCoordonnee(17, 10).SetColor(Couleur.Get(127, 0, 0));
        Util.Context.Pixels.GetCoordonnee(18, 10).SetColor(Couleur.Get(127, 0, 0));

        //Moins — scale tick marks ("minus" side of the dial)
        Util.Context.Pixels.GetCoordonnee(1, 4).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(2, 4).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(3, 4).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(2, 8).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(2, 9).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(6, 7).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(6, 8).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(9, 6).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(9, 7).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(11, 6).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(11, 7).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(13, 7).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(13, 8).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(15, 7).SetColor(Couleur.Get(127, 0, 0));
        Util.Context.Pixels.GetCoordonnee(15, 8).SetColor(Couleur.Get(127, 0, 0));
        Util.Context.Pixels.GetCoordonnee(17, 8).SetColor(Couleur.Get(127, 0, 0));
        Util.Context.Pixels.GetCoordonnee(17, 9).SetColor(Couleur.Get(127, 0, 0));

        //Plus — the red "+" marking
        Util.Context.Pixels.GetCoordonnee(17, 3).SetColor(Couleur.Get(127, 0, 0));
        Util.Context.Pixels.GetCoordonnee(16, 4).SetColor(Couleur.Get(127, 0, 0));
        Util.Context.Pixels.GetCoordonnee(17, 4).SetColor(Couleur.Get(127, 0, 0));
        Util.Context.Pixels.GetCoordonnee(18, 4).SetColor(Couleur.Get(127, 0, 0));
        Util.Context.Pixels.GetCoordonnee(17, 5).SetColor(Couleur.Get(127, 0, 0));

        //base — the needle pivot at the bottom of the dial
        Util.Context.Pixels.GetCoordonnee(8, 18).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(9, 18).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(10, 18).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(11, 18).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(7, 19).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(8, 19).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(9, 19).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(10, 19).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(11, 19).SetColor(couleur);
        Util.Context.Pixels.GetCoordonnee(12, 19).SetColor(couleur);

        //aiguille — the needle: a radial line at angle (max + 315)
        for (int r = 2; r < 18; r++)
        {
            Util.Context.Pixels.GetCoordonnee(GetCercleCoord(max + 315, r)).SetColor(couleur);
        }

        Util.SetLeds();
        Util.Context.Pixels.Reset();
    }
}
/// <summary>
/// Opens the audio context, creates a playback source and begins capturing
/// 16-bit mono samples. No-op when already started.
/// </summary>
public void Start()
{
    if (_audioContext != null)
    {
        return;
    }

    _audioContext = new AudioContext();
    AL.Listener(ALListenerf.Gain, 1.0f);
    _audioSource = AL.GenSource();

    _audioCapture = new AudioCapture(String.Empty, _samplingRate, OpenTK.Audio.OpenAL.ALFormat.Mono16, _readBuffer.Length);
    _audioCapture.Start();
}
/// <summary>Starts capturing on the underlying device.</summary>
public void start() => myDevice.Start();
/// <summary>
/// Probes the default capture device: opens it at 16 kHz mono, records briefly,
/// reads samples back, and derives min/max/all-zero statistics, checking for AL
/// errors after every step.
/// </summary>
public RecorderDiagnostic()
{
    Trace.WriteLine("--- AudioCapture related errors ---");
    IsDeviceAvailable = false;
    try
    {
        r = new AudioCapture(AudioCapture.DefaultDevice, 16000, ALFormat.Mono16, 4096);
    }
    catch (AudioDeviceException ade)
    {
        // No device — leave IsDeviceAvailable false and bail out.
        Trace.WriteLine("AudioCapture Exception caught: " + ade.Message);
        return;
    }
    IsDeviceAvailable = true;
    DeviceName = r.CurrentDevice;
    CheckRecorderError("Alc.CaptureOpenDevice");

    // Record for ~100 ms, checking for errors at each transition.
    r.Start();
    CheckRecorderError("Alc.CaptureStart");
    Thread.Sleep(100);
    r.Stop();
    CheckRecorderError("Alc.CaptureStop");

    byte[] Buffer = new byte[8192];
    Thread.Sleep(10); // Wait for a few samples to become available.
    int SamplesBefore = r.AvailableSamples;
    CheckRecorderError("Alc.GetInteger(...CaptureSamples...)");
    // Read at most 4096 samples (the buffer holds 4096 * 2 bytes).
    r.ReadSamples(Buffer, (SamplesBefore > 4096 ? 4096 : SamplesBefore));
    CheckRecorderError("Alc.CaptureSamples");
    int SamplesCaptured = SamplesBefore - r.AvailableSamples;

    // Count zero bytes to detect a completely silent/dead device.
    uint ZeroCounter = 0;
    for (int i = 0; i < SamplesCaptured * 2; i++)
    {
        if (Buffer[i] == 0)
            ZeroCounter++;
    }
    // Track the extreme 16-bit sample values actually captured.
    for (int i = 0; i < SamplesCaptured; i++)
    {
        short sample = BitConverter.ToInt16(Buffer, i * 2);
        if (sample > MaxSample)
            MaxSample = sample;
        if (sample < MinSample)
            MinSample = sample;
    }
    if (ZeroCounter < SamplesCaptured * 2 && SamplesCaptured > 0)
        BufferContentsAllZero = false;
    else
        BufferContentsAllZero = true;

    r.Dispose();
    CheckRecorderError("Alc.CaptureCloseDevice");
    // no playback test needed due to Parrot test app.
}
/// <summary>
/// Probes the default capture device: opens it at 16 kHz mono, records briefly,
/// reads samples back, and derives min/max/all-zero statistics, checking for AL
/// errors after every step.
/// </summary>
public RecorderDiagnostic()
{
    Trace.WriteLine("--- AudioCapture related errors ---");
    IsDeviceAvailable = false;
    try
    {
        r = new AudioCapture(AudioCapture.DefaultDevice, 16000, ALFormat.Mono16, 4096);
    }
    catch (AudioDeviceException ade)
    {
        // No device — leave IsDeviceAvailable false and bail out.
        Trace.WriteLine("AudioCapture Exception caught: " + ade.Message);
        return;
    }
    IsDeviceAvailable = true;
    DeviceName = r.CurrentDevice;
    CheckRecorderError("Alc.CaptureOpenDevice");

    // Record for ~100 ms, checking for errors at each transition.
    r.Start();
    CheckRecorderError("Alc.CaptureStart");
    Thread.Sleep(100);
    r.Stop();
    CheckRecorderError("Alc.CaptureStop");

    byte[] Buffer = new byte[8192];
    Thread.Sleep(10); // Wait for a few samples to become available.
    int SamplesBefore = r.AvailableSamples;
    CheckRecorderError("Alc.GetInteger(...CaptureSamples...)");
    // Read at most 4096 samples (the buffer holds 4096 * 2 bytes).
    r.ReadSamples(Buffer, (SamplesBefore > 4096 ? 4096 : SamplesBefore));
    CheckRecorderError("Alc.CaptureSamples");
    int SamplesCaptured = SamplesBefore - r.AvailableSamples;

    // Count zero bytes to detect a completely silent/dead device.
    uint ZeroCounter = 0;
    for (int i = 0; i < SamplesCaptured * 2; i++)
    {
        if (Buffer[i] == 0)
        {
            ZeroCounter++;
        }
    }
    // Track the extreme 16-bit sample values actually captured.
    for (int i = 0; i < SamplesCaptured; i++)
    {
        short sample = BitConverter.ToInt16(Buffer, i * 2);
        if (sample > MaxSample)
        {
            MaxSample = sample;
        }
        if (sample < MinSample)
        {
            MinSample = sample;
        }
    }
    if (ZeroCounter < SamplesCaptured * 2 && SamplesCaptured > 0)
    {
        BufferContentsAllZero = false;
    }
    else
    {
        BufferContentsAllZero = true;
    }

    r.Dispose();
    CheckRecorderError("Alc.CaptureCloseDevice");
    // no playback test needed due to Parrot test app.
}
/// <summary>
/// Aiv.Audio example: enumerates devices, plays clips on key presses, streams
/// background music with positional audio, echoes microphone input, and moves
/// the listener with a sprite.
/// </summary>
static void Main(string[] args)
{
    // List playback and capture devices.
    foreach (string device in AudioDevice.Devices)
    {
        Console.WriteLine(device);
    }
    foreach (string device in AudioDevice.CaptureDevices)
    {
        Console.WriteLine(device);
    }

    AudioDevice playerEar = new AudioDevice();
    Console.WriteLine(AudioDevice.CurrentDevice.Name);

    AudioClip clip = new AudioClip("Assets/jumping.ogg");
    AudioClip laser = new AudioClip("Assets/laser.wav");
    AudioClip backgroundMusic = new AudioClip("Assets/test_wikipedia_mono.ogg");
    Console.WriteLine(clip.Channels);
    Console.WriteLine(clip.Frequency);
    Console.WriteLine(clip.Samples);
    Console.WriteLine(clip.Duration);

    AudioSource source = new AudioSource();
    source.Play(clip);

    // Microphone capture: 22050 Hz, 1 channel, 5-second buffer.
    AudioCapture microphone = new AudioCapture(22050, 1, 5f);
    AudioBuffer micBuffer = new AudioBuffer();
    microphone.Start();

    AudioSource background = new AudioSource();

    Window window = new Window(1024, 576, "Aiv.Audio Example");
    // Positional audio setup: the background source sits at screen center.
    background.Position = new OpenTK.Vector3(window.Width / 2, window.Height / 2, 0);
    background.ReferenceDistance = 50;
    background.MaxDistance = 100;
    background.RolloffFactor = 1f;

    Sprite sprite = new Sprite(100, 100);

    while (window.opened)
    {
        background.Stream(backgroundMusic, window.deltaTime);
        if (window.GetKey(KeyCode.Space))
        {
            source.Play(clip);
        }
        if (window.GetKey(KeyCode.Return))
        {
            source.Play(laser);
        }
        // Echo the microphone while right shift is held.
        if (window.GetKey(KeyCode.ShiftRight))
        {
            microphone.Read(micBuffer);
            source.Play(micBuffer);
        }
        // Arrow keys move the sprite at 100 px/s.
        if (window.GetKey(KeyCode.Right))
        {
            sprite.position.X += 100 * window.deltaTime;
        }
        if (window.GetKey(KeyCode.Left))
        {
            sprite.position.X -= 100 * window.deltaTime;
        }
        if (window.GetKey(KeyCode.Up))
        {
            sprite.position.Y -= 100 * window.deltaTime;
        }
        if (window.GetKey(KeyCode.Down))
        {
            sprite.position.Y += 100 * window.deltaTime;
        }
        // The listener follows the sprite; the fx source sits on the listener.
        playerEar.Position = new OpenTK.Vector3(sprite.position.X, sprite.position.Y, 0);
        source.Position = playerEar.Position;
        sprite.DrawSolidColor(1f, 0, 0);
        window.Update();
    }
}
/// <summary>
/// Records a configured number of seconds from the first available capture
/// device and writes the samples to a new 16-bit mono 44.1 kHz PCM WAV file.
/// </summary>
/// <returns>A completed task holding the path of the WAV file written.</returns>
public Task<string> RecordToWav()
{
    Directory.CreateDirectory($"./{_configuration.WavFilesFolderName}");
    var wavFile = $"./{_configuration.WavFilesFolderName}/{Guid.NewGuid()}.wav";

    var recorders = AudioCapture.AvailableDevices;
    for (int i = 0; i < recorders.Count; i++)
    {
        Console.WriteLine(recorders[i]);
    }
    Console.WriteLine("-----");

    const int samplingRate = 44100;   // Samples per second
    const ALFormat alFormat = ALFormat.Mono16;
    const ushort bitsPerSample = 16;  // Mono16 has 16 bits per sample
    const ushort numChannels = 1;     // Mono16 has 1 channel

    using (var f = File.OpenWrite(wavFile))
    using (var sw = new BinaryWriter(f))
    {
        // Read This: http://soundfile.sapp.org/doc/WaveFormat/
        sw.Write(new char[] { 'R', 'I', 'F', 'F' });
        sw.Write(0); // overall size, will fill in later
        sw.Write(new char[] { 'W', 'A', 'V', 'E' });

        // "fmt " chunk (Google: WAVEFORMATEX structure)
        sw.Write(new char[] { 'f', 'm', 't', ' ' });
        sw.Write(16); // chunkSize (in bytes)
        sw.Write((ushort)1); // wFormatTag (PCM = 1)
        sw.Write(numChannels); // wChannels
        sw.Write(samplingRate); // dwSamplesPerSec
        sw.Write(samplingRate * numChannels * (bitsPerSample / 8)); // dwAvgBytesPerSec
        sw.Write((ushort)(numChannels * (bitsPerSample / 8))); // wBlockAlign
        sw.Write(bitsPerSample); // wBitsPerSample

        // "data" chunk
        sw.Write(new char[] { 'd', 'a', 't', 'a' });
        sw.Write(0); // data size, will fill in later

        // 10 seconds of buffer space: overblown, but it gets the job done.
        const int bufferLength = samplingRate * 10;
        int samplesWrote = 0;
        Console.WriteLine($"Recording from: {recorders[0]}");
        using (var audioCapture = new AudioCapture(
                   recorders[0], samplingRate, alFormat, bufferLength))
        {
            var buffer = new short[bufferLength];
            audioCapture.Start();
            for (int i = 0; i < _configuration.SecondsToRecord; ++i)
            {
                Thread.Sleep(1000); // give it some time to collect samples
                var samplesAvailable = audioCapture.AvailableSamples;
                // FIX: clamp so a device backlog can never overrun the managed buffer.
                if (samplesAvailable > bufferLength)
                {
                    samplesAvailable = bufferLength;
                }
                audioCapture.ReadSamples(buffer, samplesAvailable);
                for (var x = 0; x < samplesAvailable; ++x)
                {
                    sw.Write(buffer[x]);
                }
                samplesWrote += samplesAvailable;
                Console.WriteLine($"Wrote {samplesAvailable}/{samplesWrote} samples...");
            }
            audioCapture.Stop();
        }

        // Back-patch the RIFF and data chunk sizes now that the count is known.
        sw.Seek(4, SeekOrigin.Begin); // seek to overall size
        sw.Write(36 + samplesWrote * (bitsPerSample / 8) * numChannels);
        sw.Seek(40, SeekOrigin.Begin); // seek to data size position
        sw.Write(samplesWrote * (bitsPerSample / 8) * numChannels);
    }

    return Task.FromResult(wavFile);
}