/// <summary>
/// Start recording sound to the WAV file.
/// </summary>
public void StartRecording()
{
    if (_isRecording)
    {
        return;
    }

    _state = RecorderState.RecordingStopped;

    if (_captureInput == null || _waveWriter == null)
    {
        _Init();
    }

    try
    {
        _captureInput?.StartRecording();
        _isRecording = true;
        _state = RecorderState.Recording;
        Debug.WriteLine("Recording started.\n\tFile=" + _tempWavFilePath + "\n");
    }
    catch (Exception e)
    {
        Debug.WriteLine("Error occurred while starting recording: " + e.Message);
    }
}
/// <summary>
/// Change the audio source option.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void AudioInputMenuItem_Checked(object sender, RoutedEventArgs e)
{
    if (!(sender is MenuItem item))
    {
        return;
    }

    switch (item.Header.ToString())
    {
        case "None":
            _waveIn?.StopRecording();
            _wasapiLoopbackCapture?.StopRecording();
            _typeOfInput = TypeOfInput.None;
            break;
        case "Microphone":
            _typeOfInput = TypeOfInput.Microphone;
            try
            {
                _waveIn?.StartRecording();
                _wasapiLoopbackCapture?.StopRecording();
            }
            catch (Exception exp)
            {
                MessageBox.Show($"Cannot start capturing audio from the input device.\n{exp.Message}",
                    "Error", MessageBoxButton.OK, MessageBoxImage.Error);
            }
            break;
        case "System":
            _waveIn?.StopRecording();
            _wasapiLoopbackCapture?.StartRecording();
            _typeOfInput = TypeOfInput.System;
            break;
        default:
            throw new ArgumentOutOfRangeException();
    }

    UpdateSettings(_typeOfInput);
    ChangeChecked(item.Header.ToString(), item.Parent);
}
public void Start()
{
    _capture = new WasapiLoopbackCapture(SelectedDevice);
    _capture.DataAvailable += DataAvailable;
    _capture.StartRecording();
}
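This Start() only wires the capture up; nothing in the snippet tears it down. A minimal counterpart might look like the sketch below (a hypothetical Stop() for the same class, assuming the _capture field and DataAvailable handler shown above):

// Sketch: hypothetical counterpart to Start() above. Detaches the handler,
// stops the capture, and releases the underlying WASAPI device.
public void Stop()
{
    if (_capture == null)
    {
        return;
    }
    _capture.DataAvailable -= DataAvailable;
    _capture.StopRecording();   // RecordingStopped fires once the capture thread exits
    _capture.Dispose();
    _capture = null;
}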
public void start()
{
    source_stream.StartRecording();
}
static void Main(string[] args)
{
    var name = "Swish Draws Animals";
    var id = -1;
    var offset = TimeSpan.Zero;
    var startTime = DateTime.UtcNow;

    // Name
    if (args.Length > 0)
    {
        name = args[0];
    }

    // Id
    if (args.Length > 1)
    {
        if (!int.TryParse(args[1], out id))
        {
            Console.WriteLine($"Invalid id: {args[1]}");
        }
    }

    // Time offset
    if (args.Length > 2)
    {
        if (!TimeSpan.TryParse(args[2], out offset))
        {
            Console.WriteLine($"Invalid offset: {args[2]}");
        }
    }

    Console.WriteLine($"{name} {offset}");

    var clientId = ConfigurationManager.AppSettings["ClientId"];
    var clientIdTag = ConfigurationManager.AppSettings["ClientIdTag"];
    var license = ConfigurationManager.AppSettings["License"];
    var libPath = ConfigurationManager.AppSettings["SDKLibraryPath"];
    var appVersion = Assembly.GetCallingAssembly().GetName().Version.ToString();

    try
    {
        var gracenote = new Gracenote(clientId, clientIdTag, license, libPath, appVersion);
        gracenote.Initialize();
        var user = gracenote.User;

        var musicIDStream = new GnMusicIdStream(user, GnMusicIdStreamPreset.kPresetMicrophone, new StreamEventDelgate());
        //musicIDStream.Options().ResultSingle(true);
        musicIDStream.Options().LookupData(GnLookupData.kLookupDataContent, true);

        WasapiLoopbackCapture capture = null;
        while (capture == null)
        {
            try
            {
                // Do audio magic (https://github.com/naudio/NAudio/blob/master/Docs/WasapiLoopbackCapture.md)
                capture = new WasapiLoopbackCapture() { ShareMode = AudioClientShareMode.Shared };
            }
            catch (Exception e)
            {
                Console.WriteLine($"Unable to begin capture: {e.Message}");
                Thread.Sleep(5);
            }
        }

        var cFormat = capture.WaveFormat;
        // musicIDStream can't handle the 32-bit floats from WasapiLoopbackCapture
        var format = new WaveFormat(cFormat.SampleRate, 16, cFormat.Channels);
        musicIDStream.AudioProcessStart((uint)format.SampleRate, (uint)format.BitsPerSample, (uint)format.Channels);

        //var waveOut = new WaveFileWriter("output.wav", new WaveFormat());
        capture.DataAvailable += (_, waveInEvent) =>
        {
            var source = waveInEvent.Buffer;
            var sourceLen = waveInEvent.BytesRecorded;
            if (sourceLen > 0)
            {
                // One 16-bit sample out for every 32-bit float sample in
                var dest = new byte[sourceLen / 2];
                for (var i = 0; i < dest.Length / 2; ++i)
                {
                    var temp = BitConverter.ToSingle(source, i * 4);
                    var scaled = (short)(temp * 32768); // scale to 16 bits
                    var scaledBytes = BitConverter.GetBytes(scaled);
                    Array.Copy(scaledBytes, 0, dest, i * 2, scaledBytes.Length);
                }
                musicIDStream.AudioProcess(dest, (uint)dest.Length);
                //waveOut.Write(dest, 0, dest.Length);
                //waveOut.Flush();
            }
        };

        capture.RecordingStopped += (_, stoppedEventArgs) =>
        {
            musicIDStream.AudioProcessStop();
        };

        var stop = false;
        Console.CancelKeyPress += (_, e) =>
        {
            switch (capture.CaptureState)
            {
                case CaptureState.Capturing:
                case CaptureState.Starting:
                    stop = true;
                    break;
                default:
                    break;
            }
            e.Cancel = true;
        };

        var random = new Random();
        capture.StartRecording();

        while (!stop)
        {
            Console.WriteLine("Identifying");
            DidMatch = false;
            musicIDStream.IdentifyAlbum();
            Thread.Sleep(1000);

            if (DidMatch)
            {
                var sleep = random.Next(60, 90);
                Console.WriteLine($"Sleeping for {sleep} seconds.");
                for (var i = 0; i < sleep && !stop; ++i)
                {
                    //Console.Write((i % 15 == 0) ? i.ToString() : ".");
                    Thread.Sleep(1000);
                    if (Console.KeyAvailable && Console.ReadKey(true).Key == ConsoleKey.I)
                    {
                        break;
                    }
                }
                Console.WriteLine();
            }
            else
            {
                var sleep = random.Next(23, 43);
                Console.WriteLine($"No match. Sleeping {sleep} seconds.");
                for (var i = 0; i < sleep && !stop; ++i)
                {
                    //Console.Write(".");
                    Thread.Sleep(1000);
                    if (Console.KeyAvailable && Console.ReadKey(true).Key == ConsoleKey.I)
                    {
                        break;
                    }
                }
                Console.WriteLine();
            }
        }

        capture.StopRecording();
        Thread.Sleep(500);
        capture.Dispose();
        //waveOut.Close();
    }
    catch (GnException e)
    {
        Console.WriteLine("GnException: " + e.ErrorDescription + " Code " + e.ErrorCode.ToString("X") + " API: " + e.ErrorAPI);
        Console.ReadKey();
    }
}
private void RecordTrack()
{
    // Start recording
    if (whichFile == 1)
    {
        elapsedTime += thisTrack1.DurationMs;
        captureInstance1 = new WasapiLoopbackCapture();
        //format = new WaveFormat(44100, 2);
        recordedAudioWriter1 = new WaveFileWriter(WAVPATH + (whichFile == 1 ? "thisTrack1.wav" : "thisTrack2.wav"), captureInstance1.WaveFormat);

        // Handle data not yet available
        Stopwatch dataTimer = new Stopwatch();
        dataTimer.Start();
        captureInstance1.DataAvailable += (s, a) =>
        {
            recordedAudioWriter1.Write(a.Buffer, 0, a.BytesRecorded);
        };

        // Start recording!
        Stopwatch timer1 = new Stopwatch();
        timer1.Start();
        captureInstance1.StartRecording();

        // Record this track
        while (timer1.ElapsedMilliseconds < thisTrack1.DurationMs)
        {
            // Wait for this track to finish
        }

        error = spotify.PausePlayback();
        timer1.Stop();

        // Finish recording; start the next track
        Stopwatch processTimer1 = new Stopwatch();
        captureInstance1.StopRecording();
        try
        {
            recordedAudioWriter1.Dispose();
            recordedAudioWriter1 = null;
            captureInstance1.Dispose();
        }
        catch (Exception err)
        {
        }

        fileToCopy = 1;
        whichFile = 2;
        CopyLastTrack();
        Thread tMP3 = new Thread(CopyToMP3);
        tMP3.Start();
    }
    else
    {
        elapsedTime += thisTrack2.DurationMs;
        captureInstance2 = new WasapiLoopbackCapture();
        //format = new WaveFormat(44100, 2);
        recordedAudioWriter2 = new WaveFileWriter(WAVPATH + (whichFile == 1 ? "thisTrack1.wav" : "thisTrack2.wav"), captureInstance2.WaveFormat);

        captureInstance2.DataAvailable += (s, a) =>
        {
            recordedAudioWriter2.Write(a.Buffer, 0, a.BytesRecorded);
        };

        // Start recording!
        Stopwatch timer2 = new Stopwatch();
        timer2.Start();
        captureInstance2.StartRecording();

        // Record this track
        while (timer2.ElapsedMilliseconds < thisTrack2.DurationMs)
        {
            // Wait for this track to finish
        }

        // Finish recording; start the next track
        error = spotify.PausePlayback();
        timer2.Stop();
        captureInstance2.StopRecording();
        try
        {
            recordedAudioWriter2.Dispose();
            recordedAudioWriter2 = null;
            captureInstance2.Dispose();
        }
        catch (Exception err)
        {
        }

        fileToCopy = 2;
        whichFile = 1;
        CopyLastTrack();
        Thread tMP3 = new Thread(CopyToMP3);
        tMP3.Start();
    }
}
/// <summary>
/// Start recording.
/// </summary>
public void Start()
{
    _silenceOut.Play();
    _capture.StartRecording();
}
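The _silenceOut.Play() call works around a WASAPI loopback quirk: when no application is rendering audio, the loopback capture raises no DataAvailable events, so rendering silence keeps data flowing. A minimal sketch of how such a silence output could be wired up with NAudio's SilenceProvider (the field names mirror the snippet above and are otherwise illustrative):

// Sketch: keep loopback data flowing by rendering silence on the default output device.
_capture = new WasapiLoopbackCapture();
_silenceOut = new WasapiOut();                              // shared mode, default device
_silenceOut.Init(new SilenceProvider(_capture.WaveFormat)); // endless stream of zero samples
// Start() above then calls _silenceOut.Play() before _capture.StartRecording().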
static void Main(string[] args)
{
    // First inject a DLL that hooks the WndProc of the window we will draw on and swallows
    // WM_PAINT messages; otherwise Task Manager flickers.
    var dllInjectionContext = DllInjection.Injection("Taskmgr", Path.Combine(Environment.CurrentDirectory, "HookWinProc.dll"));
    controlCtrlDelegate = type =>
    {
        switch (type)
        {
            case 0: // CTRL_C_EVENT
            case 2: // CTRL_CLOSE_EVENT
            case 5: // CTRL_LOGOFF_EVENT
            case 6: // CTRL_SHUTDOWN_EVENT
                // Unload the injected DLL when the console closes
                DllInjection.FreeLibrary(dllInjectionContext);
                break;
        }
        return true;
    };
    SetConsoleCtrlHandler(controlCtrlDelegate, true);

    // Get the CPU core count (the code below effectively assumes the default 4-core case)
    var kernelCount = Environment.ProcessorCount;

    // "任务管理器" is the Task Manager window title on a Chinese Windows install
    var hwnd = WinApi.FindWindow("TaskManagerWindow", "任务管理器");
    var root = new Window(hwnd);
    if (root.Childs.Count == 1 && root.Childs[0].ClassName == "NativeHWNDHost")
    {
        root = root.Childs[0];
    }
    else
    {
        throw new Exception("Window not found");
    }
    if (root.Childs.Count == 1 && root.Childs[0].ClassName == "DirectUIHWND")
    {
        root = root.Childs[0];
    }
    else
    {
        throw new Exception("Window not found");
    }

    // Grab the four child chart windows (their order is not yet known)
    var drawWindows = root.Childs
        .Where(w => w.ClassName == "CtrlNotifySink" && w.Childs.Count == 1 && w.Childs[0].ClassName == "CvChartWindow")
        .Select(w => w.Childs[0])
        .OrderByDescending(w => w.Rect.Width * w.Rect.Height)
        .Take(kernelCount)
        .ToArray();

    // The code below captures audio via WASAPI loopback
    var screenWidth = WinApi.GetScreenWidth();
    FixedQueueArray<float> leftWaveQueue = new FixedQueueArray<float>(screenWidth),
                           rightWaveQueue = new FixedQueueArray<float>(screenWidth);
    IWaveIn loopbackCapture = new WasapiLoopbackCapture();
    var observer = new StreamObserver<float>(FFTSize, FFTSize / 2, 2);
    float[] leftFixedWaveData = new float[FFTSize], rightFixedWaveData = new float[FFTSize];

    // Capture audio: leftFixedWaveData and rightFixedWaveData are later consumed
    // by the spectrum-drawing threads
    observer.Completed += newData =>
    {
        lock (fftLock)
        {
            for (int i = 0; i < leftFixedWaveData.Length; i++)
            {
                leftFixedWaveData[i] = newData[2 * i + 0];
                rightFixedWaveData[i] = newData[2 * i + 1];
            }
        }
    };

    // Capture audio: leftWaveQueue and rightWaveQueue are later consumed
    // by the waveform-drawing threads
    loopbackCapture.DataAvailable += (_, e) =>
    {
        var waveData = MemoryMarshal.Cast<byte, float>(new ReadOnlySpan<byte>(e.Buffer, 0, e.BytesRecorded));
        int copyLength = Math.Min(waveData.Length / 2, screenWidth);
        if (copyLength == 0)
        {
            return;
        }
        Span<float> leftNextData = stackalloc float[copyLength], rightNextData = stackalloc float[copyLength];
        for (int i = 0; i < copyLength; i++)
        {
            leftNextData[i] = waveData[2 * i + 0];
            rightNextData[i] = waveData[2 * i + 1];
        }
        leftWaveQueue.Write(leftNextData);
        rightWaveQueue.Write(rightNextData);
        observer.Write(waveData);
    };
    loopbackCapture.StartRecording();
    var sampleRate = loopbackCapture.WaveFormat.SampleRate;

    bool polylineStyle = false;

    // Work out which child window sits in which corner
    var leftTopWin = drawWindows.OrderBy(w => w.Rect.X + w.Rect.Y).First();
    var rightTopWin = drawWindows.OrderBy(w => -w.Rect.X + w.Rect.Y).First();
    var leftBottomWin = drawWindows.OrderBy(w => w.Rect.X - w.Rect.Y).First();
    var rightBottomWin = drawWindows.OrderBy(w => -w.Rect.X - w.Rect.Y).First();

    void StartThread(Action action)
    {
        new Thread(new ThreadStart(action)) { Priority = ThreadPriority.Highest }.Start();
    }

    // Start the drawing threads, one per window
    StartThread(() => DrawWave(leftTopWin, leftWaveQueue, polylineStyle, 1));
    StartThread(() => DrawWave(rightTopWin, rightWaveQueue, polylineStyle, 2));
    StartThread(() => DrawSpectrum(leftBottomWin, leftFixedWaveData, sampleRate, polylineStyle, 3));
    StartThread(() => DrawSpectrum(rightBottomWin, rightFixedWaveData, sampleRate, polylineStyle, 4));

    Thread.Sleep(Timeout.Infinite);
}
static void Main(string[] args) { Console.WriteLine("Please insert path to save the sound file:"); string filepath = Console.ReadLine(); if (filepath.Length <= 0) { filepath = "output.wav"; } Console.Clear(); Console.WriteLine("Starting audio capture engine..."); WasapiLoopbackCapture captureInstance = new WasapiLoopbackCapture(); WaveFileWriter audioFileWriter = new WaveFileWriter(filepath, captureInstance.WaveFormat); captureInstance.DataAvailable += (s, a) => { audioFileWriter.Write(a.Buffer, 0, a.BytesRecorded); }; captureInstance.RecordingStopped += (s, a) => { audioFileWriter.Dispose(); audioFileWriter = null; captureInstance.Dispose(); }; captureInstance.StartRecording(); Console.WriteLine("Audio capture engine started succesfully!"); Console.WriteLine("Press esc or del to stop recording..."); while (true) { ConsoleKeyInfo keypress = Console.ReadKey(true); if (keypress.Key == ConsoleKey.Escape || keypress.Key == ConsoleKey.Delete) { captureInstance.StopRecording(); break; } } Console.WriteLine("Do you want to playback the recorded audio? (Y/N)"); ConsoleKeyInfo consoleKey = Console.ReadKey(); switch (consoleKey.KeyChar) { case 'y': case 'Y': WaveFileReader waveFileReader = new WaveFileReader(filepath); WaveOutEvent waveOut = new WaveOutEvent(); waveOut.Init(waveFileReader); waveOut.Play(); Console.WriteLine("Playing sound, press esc to abort."); while (true) { ConsoleKeyInfo keypress = Console.ReadKey(true); if (keypress.Key == ConsoleKey.Escape) { captureInstance.StopRecording(); return; } } } }
void doMagic()
{
    Logger.mix.a("doMagic");
    string lq = "";
    recCap = null;
    micCap = null;
    recRe = micRe = null;
    ISampleProvider recProv;

    format = WaveFormat.CreateIeeeFloatWaveFormat(settings.samplerate, 2);
    //mixer = new MixingSampleProvider(format);
    mixa = new NPatch.Mixa(format);

    Logger.mix.a("create rec");
    recCap = new WasapiLoopbackCapture(settings.devRec.mm);
    recCap.DataAvailable += recDev_DataAvailable_03;
    recIn = new BufferedWaveProvider(recCap.WaveFormat);
    if (recCap.WaveFormat.SampleRate != settings.samplerate)
    {
        Logger.mix.a("create rec resampler");
        recRe = new MediaFoundationResampler(recIn, settings.samplerate);
        recRe.ResamplerQuality = 60;
        lq += "Incorrect samplerate on music device, resampling\n" +
              settings.devRec.mm.DeviceFriendlyName + "\n" +
              settings.devRec.mm.FriendlyName + "\n" +
              settings.devRec.id + "\n" +
              LSDevice.stringer(settings.devRec.wf) + "\n" +
              LSDevice.stringer(recCap.WaveFormat) + "\n\n";
    }
    recProv = new WaveToSampleProvider((IWaveProvider)recRe ?? (IWaveProvider)recIn);
    recVol = new NPatch.VolumeSlider();
    recVol.SetSource(recProv);
    mixa.AddMixerInput(recVol);
    Logger.mix.a("rec done");

    killmic = new System.Windows.Forms.Timer();
    killmic.Interval = 1000;
    killmic.Tick += killmic_Tick;

    micVol = new NPatch.VolumeSlider();
    lq += micAdd();

    //mixer.ReadFully = true;
    fork = new NPatch.Fork(mixa, 2);
    lameOutlet = fork.providers[1];
    outVol = new NPatch.VolumeSlider();
    outVol.SetSource(fork.providers[0]);
    muxer = new SampleToWaveProvider(outVol);

    Logger.mix.a("init mixer vol");
    recVol.SetVolume((float)settings.mixer.vRec);
    micVol.SetVolume((float)settings.mixer.vMic);
    outVol.SetVolume((float)settings.mixer.vOut);
    recVol.boostLock = (float)settings.mixer.yRec;
    micVol.boostLock = (float)settings.mixer.yMic;
    recVol.boost = (float)settings.mixer.xRec;
    micVol.boost = (float)settings.mixer.xMic;
    recVol.muted = !settings.mixer.bRec;
    micVol.muted = !settings.mixer.bMic;
    outVol.muted = !settings.mixer.bOut;

    Logger.mix.a("create mixOut");
    mixOut = new WasapiOut(settings.devOut.mm, AudioClientShareMode.Shared, false, 100);
    Logger.mix.a("init mixOut");
    mixOut.Init(outVol);

    Logger.mix.a("rec.startRec");
    recCap.StartRecording();
    //System.Threading.Thread.Sleep(100);
    if (micCap != null)
    {
        Logger.mix.a("mic.startRec");
        micCap.StartRecording();
    }
    Logger.mix.a("mixOut.play (ready)");
    mixOut.Play();

    if (settings.vu)
    {
        recVol.enVU = true;
        micVol.enVU = true;
        outVol.enVU = true;
        bars[0].src = recVol;
        bars[1].src = micVol;
        bars[2].src = outVol;
    }
    if (!string.IsNullOrEmpty(lq))
    {
        isLQ = lq;
    }

    /*
    byte[] buffer = new byte[outVol.WaveFormat.AverageBytesPerSecond * 10];
    while (true)
    {
        int i = wp16.Read(buffer, 0, fork.providers[1].avail());
        waver.Write(buffer, 0, i);
        System.Threading.Thread.Sleep(10);
        System.Windows.Forms.Application.DoEvents();
    }
    */
}
static void Main(string[] sysargs)
{
    WaveFormat format = WaveFormat.CreateIeeeFloatWaveFormat(1000000, 2);
    StartupArgs args = Initialize(sysargs);

    if (args.Host)
    {
        if (!int.TryParse(args.Port, out int port) || port < 0)
        {
            ErrorExit(-1, "Invalid port specified.");
        }

        using WasapiLoopbackCapture capture = new WasapiLoopbackCapture();
        EventedListener listener = new EventedListener(IPAddress.Any, port);
        List<EventedClient> clients = new List<EventedClient>();

        try
        {
            listener.Start();
        }
        catch (Exception e)
        {
            ErrorExit(-1, $"{e.GetType().Name}: {e.Message}");
        }

        listener.StartAcceptClient();
        listener.ClientConnected += (s, args) =>
        {
            lock (clients)
            {
                EventedClient client = args.Client;
                clients.Add(client);
                Console.WriteLine($"Client connected: {client.BaseSocket.RemoteEndPoint}");
            }
        };
        capture.DataAvailable += (sender, args) =>
        {
            lock (clients)
            {
                List<EventedClient> clientsToRemove = new List<EventedClient>();
                foreach (var client in clients)
                {
                    try
                    {
                        client.SendData(args.Buffer, 0, args.BytesRecorded);
                    }
                    catch
                    {
                        clientsToRemove.Add(client);
                        Console.WriteLine($"Client disconnected: {client.BaseSocket.RemoteEndPoint}");
                    }
                }
                foreach (var client in clientsToRemove)
                {
                    clients.Remove(client);
                }
            }
        };

        capture.StartRecording();
        Console.WriteLine("Syncing audio as host...");
        while (capture.CaptureState != NAudio.CoreAudioApi.CaptureState.Stopped)
        {
            ; // spin until the capture stops
        }
    }
    else if (args.Sync)
    {
        if (!TryGetAddress(args.Address, out IPAddress address))
        {
            ErrorExit(-1, "Invalid address specified.");
        }
        if (!int.TryParse(args.Port, out int port) || port < 0)
        {
            ErrorExit(-1, "Invalid port specified.");
        }

        EventedClient client = new EventedClient();
        try
        {
            client.Connect(address, port);
        }
        catch
        {
            ErrorExit(-2, "Cannot connect to host");
        }

        NetSampleProvider src = new NetSampleProvider(client);
        client.StartReceiveData();

        WaveOut wout = new WaveOut();
        wout.Init(src);
        wout.Play();
        Console.WriteLine("Syncing audio as client...");
        while (wout.PlaybackState != PlaybackState.Stopped)
        {
            ; // spin until playback stops
        }
    }
    else if (args.Help)
    {
        Console.WriteLine(
            $@"Null.AudioSync : Sync audio with another computer

Null.AudioSync Command Arguments

Commands:
  Host : Build an AudioSync server.
  Sync : Connect to an AudioSync server.

Arguments:
  Address : Should be specified when syncing audio from a server.
  Port    : Port to listen on or connect to. Default is 10001.
");
    }
    else
    {
        Console.WriteLine("Unknown command, use 'Null.AudioSync Help' for help");
    }
}
private void btnStartGrab_Click(object sender, EventArgs e)
{
    // Redefine the capturer instance with a new instance of the LoopbackCapture class
    captureInstance = new WasapiLoopbackCapture();

    // When the capturer receives audio, compute per-channel peaks, update the chart,
    // and forward the levels to the Arduino
    captureInstance.DataAvailable += (s, a) =>
    {
        BeginInvoke((Action)(() =>
        {
            float maxLeft = 0;
            float maxRight = 0;
            float currMaxLeft = 0;
            float currMaxRight = 0;
            int j = 0;
            for (int i = 0; i < a.BytesRecorded / 4; i += 8)
            {
                float leftSample = BitConverter.ToSingle(a.Buffer, i);
                float rightSample = BitConverter.ToSingle(a.Buffer, i + 4);
                if (j > sampleRate)
                {
                    j = 0;
                    waveChart.Series["LeftChannel"].Points.Add(maxLeft);
                    if (waveChart.Series["LeftChannel"].Points.Count > 100)
                    {
                        waveChart.Series["LeftChannel"].Points.RemoveAt(0);
                    }
                    waveChart.Series["RightChannel"].Points.Add(maxRight);
                    if (waveChart.Series["RightChannel"].Points.Count > 100)
                    {
                        waveChart.Series["RightChannel"].Points.RemoveAt(0);
                    }

                    // Rewrite on BackgroundWorker
                    // https://docs.microsoft.com/en-us/aspnet/core/fundamentals/host/hosted-services?view=aspnetcore-2.1
                    if (arduinoPort != null && arduinoPort.IsOpen)
                    {
                        try
                        {
                            currMaxLeft = maxLeft * 1023;
                            currMaxRight = maxRight * 1023;
                            arduinoPort.Write("<" + currMaxLeft.ToString("F0") + "," + currMaxRight.ToString("F0") + ">");
                            Debug.WriteLine("<" + currMaxLeft.ToString("F0") + "," + currMaxRight.ToString("F0") + ">");
                        }
                        catch (Exception ex)
                        {
                            Debug.WriteLine("Error while writing to Arduino: " + ex.Message);
                        }
                    }
                }
                else
                {
                    maxLeft = Math.Max(maxLeft, leftSample);
                    maxRight = Math.Max(maxRight, rightSample);
                    j++;
                }
            }
        }));
    };

    // When the capturer stops
    captureInstance.RecordingStopped += (s, a) =>
    {
        captureInstance.Dispose();
    };

    // Start recording!
    captureInstance.StartRecording();

    // Enable the "Stop" button and disable the "Start" button
    btnStartGrab.Enabled = false;
    btnStopGrab.Enabled = true;
}
private void StartButton_Click(object sender, EventArgs e)
{
    StartButton.Enabled = false;
    StopButton.Enabled = true;
    stopwatch.Start();

    //-------------------- System sound recording --------------------
    var dialog = new SaveFileDialog();
    dialog.Filter = "Wave files | *.wav";
    if (dialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }
    outputFileNameSounds = dialog.FileName;

    capture = new WasapiLoopbackCapture();
    var writer = new WaveFileWriter(outputFileNameSounds, capture.WaveFormat);
    capture.DataAvailable += async (s, ee) =>
    {
        if (writer != null)
        {
            await writer.WriteAsync(ee.Buffer, 0, ee.BytesRecorded);
            await writer.FlushAsync();
        }
    };
    capture.RecordingStopped += (s, ee) =>
    {
        if (writer != null)
        {
            writer.Dispose();
            writer = null;
        }
        StartButton.Enabled = true;
        capture.Dispose();
    };
    capture.StartRecording();

    //-------------------- Microphone recording --------------------
    var micFileDialog = new SaveFileDialog();
    micFileDialog.Filter = "Wave files | *.wav";
    if (micFileDialog.ShowDialog() != DialogResult.OK)
    {
        return;
    }
    outputFileNameMic = micFileDialog.FileName;

    wave = new WaveIn();
    wave.WaveFormat = new WaveFormat(8000, 1); // 44100, 1
    wave.DeviceNumber = InputDeviceCombo.SelectedIndex;
    wave.DataAvailable += Wave_DataAvailable;
    wave.RecordingStopped += Wave_RecordingStopped;
    writerMic = new WaveFileWriter(outputFileNameMic, wave.WaveFormat);
    wave.StartRecording();
}