// Begins capturing system playback audio via WASAPI loopback and wires up the
// pipeline: raw capture -> SoundInSource -> SingleBlockNotificationStream ->
// wave source, with per-block callbacks going to SingleBlockNotificationStream_SingleBlockRead.
// Fix: the capture is started only AFTER the stream chain and both event
// handlers are attached. The original called Start() mid-setup, so DataAvailable
// could fire before SingleBlockRead was subscribed and those first blocks
// would be silently dropped.
void StartListen()
{
    loopbackCapture = new WasapiLoopbackCapture();
    loopbackCapture.Initialize();

    soundInSource = new SoundInSource(loopbackCapture);
    singleBlockNotificationStream = new SingleBlockNotificationStream(soundInSource.ToSampleSource());
    realTimeSource = singleBlockNotificationStream.ToWaveSource();

    // Subscribe before starting so no early data is missed.
    soundInSource.DataAvailable += DataAvailable;
    singleBlockNotificationStream.SingleBlockRead += SingleBlockNotificationStream_SingleBlockRead;

    loopbackCapture.Start();
}
// Form entry point: hooks application shutdown, builds the beat detector,
// starts a low-latency (10 ms) WASAPI loopback capture, and sizes the chart
// axes to the detector's analysis window.
public FormMain()
{
    Application.ApplicationExit += Application_ApplicationExit;
    InitializeComponent();

    detector = new SoundEnergyDetector();
    detector.BeatDetected += Detector_BeatDetected;

    wasapi = new WasapiLoopbackCapture(10);
    wasapi.DataAvailable += Wasapi_DataAvailable;
    wasapi.Initialize();
    wasapi.Start();

    // X axis spans one detector window; Y is normalized to [0, 1].
    var area = chart1.ChartAreas[0];
    area.AxisY.Maximum = 1;
    area.AxisX.Minimum = 0;
    area.AxisX.Maximum = detector.WindowSize;
}
// Builds the audio-reactive LED effect: captures system playback via WASAPI
// loopback, feeds every block of samples into an FFT spectrum provider, and
// configures an LedSpectrum from the supplied SoundEffectConfig.
public SoundEffect(SoundEffectConfig config) : base(config)
{
    _soundIn = new WasapiLoopbackCapture();
    _soundIn.Initialize();

    var captureSource = new SoundInSource(_soundIn);
    var samples = captureSource.ToSampleSource();

    const FftSize fftSize = FftSize.Fft1024;
    _fftBuffer = new float[(int)fftSize];
    _spectrumProvider = new SpectrumProvider(samples.WaveFormat.Channels, samples.WaveFormat.SampleRate, fftSize);

    // Every block read through the chain is pushed into the FFT provider.
    var notifier = new DataNotificationSource(samples);
    notifier.DataRead += (s, e) => _spectrumProvider.Add(e.Data, e.Data.Length);

    var pump = notifier.ToWaveSource(16);
    var pumpBuffer = new byte[pump.WaveFormat.BytesPerSecond / 2];

    // Draining on DataAvailable is what drives the DataRead notifications above.
    captureSource.DataAvailable += (s, e) =>
    {
        while (pump.Read(pumpBuffer, 0, pumpBuffer.Length) > 0)
        {
        }
    };

    _spectrum = new LedSpectrum(GenerateColor)
    {
        FftSize = fftSize,
        SpectrumProvider = _spectrumProvider,
        UseAverage = Config.UseAverage,
        MinimumFrequency = Config.MinimumFrequency,
        MaximumFrequency = Config.MaximumFrequency,
        ScalingStrategy = Config.ScalingStrategy,
        ScalingFactor = Config.ScalingFactor,
        IsXLogScale = false,
        SpectrumResolution = (int)fftSize
    };
    _spectrum.UpdateFrequencyMapping();

    _soundIn.Start();
}
/// <summary>
/// Captures system playback (loopback) until the first block of audio data
/// arrives, or until <paramref name="ms"/> milliseconds elapse (ms &lt;= 0 waits
/// indefinitely for the first block).
/// Fixes: the original never used the <paramref name="ms"/> parameter, kept an
/// unused local ('time'), and spun a 100%-CPU busy-wait. The actual disk write
/// is still commented out, exactly as in the original (the WaveWriter
/// therefore produces an empty file).
/// </summary>
public static void listen(long ms)
{
    using (WasapiCapture capture = new WasapiLoopbackCapture())
    {
        //if nessesary, you can choose a device here
        //to do so, simply set the device property of the capture to any MMDevice
        //to choose a device, take a look at the sample here: http://cscore.codeplex.com/
        //initialize the selected device for recording
        capture.Initialize();
        //create a wavewriter to write the data to
        using (WaveWriter w = new WaveWriter("dance_r.wav", capture.WaveFormat))
        {
            bool caught = false;
            //setup an eventhandler to receive the recorded data
            capture.DataAvailable += (s, e) =>
            {
                //save the recorded audio
                Console.WriteLine(e.ByteCount);
                caught = true;
                //w.Write(e.Data, e.Offset, e.ByteCount);
            };
            //start recording
            capture.Start();
            // Wait (without spinning) for the first data block, bounded by ms.
            var sw = Stopwatch.StartNew();
            while (!caught && (ms <= 0 || sw.ElapsedMilliseconds < ms))
            {
                Thread.Sleep(10);
            }
            //stop recording
            capture.Stop();
        }
    }
}
// Starts WASAPI loopback capture and streams one spectrum frame per buffer
// read to the _receiveAudio callback. Reading _realtimeSource is what fires
// the SingleBlockRead notifications that feed _basicSpectrumProvider.
public void StartListen()
{
    _loopbackCapture = new WasapiLoopbackCapture();
    _loopbackCapture.Initialize();
    _soundInSource = new SoundInSource(_loopbackCapture);
    _basicSpectrumProvider = new BasicSpectrumProvider(
        _soundInSource.WaveFormat.Channels,
        _soundInSource.WaveFormat.SampleRate,
        CFftSize);
    _lineSpectrum = new LineSpectrum(CFftSize)
    {
        SpectrumProvider = _basicSpectrumProvider,
        BarCount = _spectrumSize,
        UseAverage = true,
        IsXLogScale = true,
        ScalingStrategy = _scalingStrategy
    };
    _loopbackCapture.Start();
    _singleBlockNotificationStream = new SingleBlockNotificationStream(_soundInSource.ToSampleSource());
    _realtimeSource = _singleBlockNotificationStream.ToWaveSource();

    byte[] drainBuffer = new byte[_realtimeSource.WaveFormat.BytesPerSecond / 2];
    _soundInSource.DataAvailable += (s, ea) =>
    {
        while (_realtimeSource.Read(drainBuffer, 0, drainBuffer.Length) > 0)
        {
            // Publish the current spectrum frame if there is one and anyone listens.
            float[] frame = _lineSpectrum.GetSpectrumData(MaxAudioValue);
            if (frame == null || _receiveAudio == null)
            {
                continue;
            }
            _receiveAudio(frame);
        }
    };
    _singleBlockNotificationStream.SingleBlockRead += singleBlockNotificationStream_SingleBlockRead;
}
// Starts simultaneous MP3 recording of the microphone and a playback
// (loopback) device into two separate files (micFileName / speakFileName).
// NOTE(review): each DataAvailable handler performs a single Read per event.
// If SoundInSource.FillWithZeros is false and more than one buffer of data is
// queued, audio could be left unread; if it is true, Read pads with silence
// (which may be intentional to keep the two streams time-aligned). Confirm
// which behavior is intended before changing this.
public void startRecording(MMDevice micDevice, MMDevice speakDevice)
{
    isRecording = true;
    window.LockUI();
    playSilence();
    makeFileNames();
    // Microphone capture device.
    micCapture = new WasapiCapture();
    micCapture.Device = micDevice;
    micCapture.Initialize();
    // Speaker (loopback) capture device.
    speakCapture = new WasapiLoopbackCapture();
    speakCapture.Device = speakDevice;
    speakCapture.Initialize();
    // Microphone -> MP3 encoder; buffer sized to one second of audio.
    micSource = new SoundInSource(micCapture);
    micWriter = MediaFoundationEncoder.CreateMP3Encoder(micSource.WaveFormat, micFileName);
    byte[] micBuffer = new byte[micSource.WaveFormat.BytesPerSecond];
    micSource.DataAvailable += (s, e) =>
    {
        int read = micSource.Read(micBuffer, 0, micBuffer.Length);
        micWriter.Write(micBuffer, 0, read);
    };
    micCapture.Start();
    // Speakers -> MP3 encoder; mirrors the microphone path above.
    speakSource = new SoundInSource(speakCapture);
    speakWriter = MediaFoundationEncoder.CreateMP3Encoder(speakSource.WaveFormat, speakFileName);
    byte[] speakBuffer = new byte[speakSource.WaveFormat.BytesPerSecond];
    speakSource.DataAvailable += (s, e) =>
    {
        int read = speakSource.Read(speakBuffer, 0, speakBuffer.Length);
        speakWriter.Write(speakBuffer, 0, read);
    };
    speakCapture.Start();
}
// Enumerates every active audio *render* endpoint and returns one initialized
// WasapiLoopbackCapture per device; devices whose capture fails to initialize
// are disposed and skipped.
// These WasapiCapture objects _and_ their WasapiCapture.Device members must be
// Dispose()d when you're done with them.
static Queue<WasapiCapture> GetAudioCaptures()
{
    // This is run on another thread; it should not use UnityEngine APIs
    var captures = new Queue<WasapiCapture>();
    using (var deviceEnumerator = new MMDeviceEnumerator())
    using (var activeDevices = deviceEnumerator.EnumAudioEndpoints(DataFlow.Render, DeviceState.Active))
    {
        foreach (MMDevice device in activeDevices)
        {
            var capture = new WasapiLoopbackCapture { Device = device };
            try
            {
                capture.Initialize();
                captures.Enqueue(capture);
            }
            catch (CSCore.CoreAudioAPI.CoreAudioAPIException)
            {
                // Initialization failed: release both the capture and the
                // device handle we attached to it, then move on.
                capture.Device.Dispose();
                capture.Dispose();
            }
        }
    }
    return (captures);
}
/// <summary>
/// Console entry point: records system playback (WASAPI loopback) to a WAV
/// file converted to 48 kHz / 24-bit stereo.
/// args[0] = output file (default "record.wav");
/// args[1] = recording time in milliseconds (missing/invalid = record until keypress).
/// Fix: the time argument is parsed with Int32.TryParse instead of a
/// swallowed FormatException.
/// </summary>
static int Main(string[] args)
{
    int time;
    string output_file;
    switch (args.Length)
    {
        case 1:
            if (args[0] == "-h")
            {
                System.Console.WriteLine("Usage:");
                System.Console.WriteLine(" LoopbackCapture.exe <output/wav> <time/milliseconds>");
                return (1);
            }
            output_file = args[0];
            time = 0;
            break;
        case 2:
            output_file = args[0];
            // Invalid numbers fall back to "record until keypress".
            if (!Int32.TryParse(args[1], out time))
            {
                time = 0;
            }
            break;
        default:
            time = 0;
            output_file = "record.wav";
            break;
    }
    int sampleRate = 48000;
    int bitsPerSample = 24;
    //create a new soundIn instance
    using (WasapiCapture soundIn = new WasapiLoopbackCapture())
    {
        //initialize the soundIn instance
        soundIn.Initialize();
        //create a SoundSource around the the soundIn instance
        //FillWithZeros = false so the drain loop below terminates when empty
        SoundInSource soundInSource = new SoundInSource(soundIn) { FillWithZeros = false };
        //create a source, that converts the data provided by the soundInSource to any other format
        IWaveSource convertedSource = soundInSource
            .ChangeSampleRate(sampleRate) // sample rate
            .ToSampleSource()
            .ToWaveSource(bitsPerSample); //bits per sample
        //channels...
        using (convertedSource = convertedSource.ToStereo())
        {
            //create a new wavefile
            using (WaveWriter waveWriter = new WaveWriter(output_file, convertedSource.WaveFormat))
            {
                //register an event handler for the DataAvailable event of the soundInSource
                soundInSource.DataAvailable += (s, e) =>
                {
                    //read data from the converedSource
                    byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                    int read;
                    //keep reading as long as we still get some data
                    while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        //write the read data to a file
                        waveWriter.Write(buffer, 0, read);
                    }
                };
                //start recording
                soundIn.Start();
                //record for the requested duration, or until a key is pressed
                if (time != 0)
                {
                    Thread.Sleep(time);
                }
                else
                {
                    Console.ReadKey();
                }
                //stop recording
                soundIn.Stop();
            }
        }
    }
    return (0);
}
/// <summary>
/// Entry point: reads (or interactively creates via /config) the
/// configuration, then starts WASAPI loopback capture and a background
/// watchdog thread (NoSound) that reacts when no audio is detected.
/// Fixes: non-short-circuiting '&amp;' operators replaced with '&amp;&amp;' — the
/// original 'a != null &amp; a.Length > 0' still evaluated a.Length when a was
/// null (NullReferenceException) — and Console.ReadLine() results are
/// null-guarded; the mangled two-line string literal is repaired.
/// </summary>
static void Main(string[] args)
{
    myConfig = System.Reflection.Assembly.GetExecutingAssembly().GetName().Name + ".cfg";
    if (!ReadConfig() && args.Length == 0)
    {
        WriteLog("ERROR! (Main): Can't read a configuration file. Please, create a new one.");
        // Force interactive configuration on a missing/unreadable config.
        Array.Resize(ref args, 1);
        args[0] = "/config";
    }
    string myFullName = System.Reflection.Assembly.GetExecutingAssembly().GetName().FullName;
    WriteLog("(Main): Program started. (" + myFullName + ")");
    string sArgs = string.Join(" ", args);
    WriteLog("(Main): Arguments: " + sArgs);
    if (sArgs.Contains(@"/?"))
    {
        Console.WriteLine("");
        Console.WriteLine("/? - Print this message and exit.");
        Console.WriteLine("/config - Configure program and exit.");
        Console.WriteLine("\n Press any key to exit...");
        Console.ReadKey();
        Exit(0);
    }
    if (sArgs.Contains(@"/config"))
    {
        Console.WriteLine("");
        Console.WriteLine($"Please, enter a full path of media player application (default is: {appPath}): ");
        string a = Console.ReadLine() ?? "";
        if (a.Length == 0)
        {
            a = appPath;
        }
        if (!IsValidFullPath(a))
        {
            Console.WriteLine("ERROR! Wrong application path.");
            a = appPath;
        }
        appPath = a;
        appName = string.Join(".", Pop(System.IO.Path.GetFileName(appPath).Split('.')));
        Console.WriteLine($"Please, enter arguments for application if needed (default is: {appARGV}): ");
        a = Console.ReadLine();
        // Fix: was 'a != null & a.Length > 0', which dereferenced a null.
        if (!string.IsNullOrEmpty(a))
        {
            appARGV = a;
        }
        Console.WriteLine($"Please, enter a value of delay (in seconds >=5 ) before application will be restarted (default is: {delayBeforeRestartProgram}): ");
        a = Console.ReadLine() ?? "";
        bool e = false;
        if (a.Length == 0)
        {
            a = delayBeforeRestartProgram.ToString();
        }
        int b = StrToInt(a, ref e);
        if (b >= 5 && !e)
        {
            delayBeforeRestartProgram = b;
        }
        else
        {
            Console.WriteLine("ERROR! Wrong delay value!\nShould be >=5.");
        }
        while (true)
        {
            Console.WriteLine("Is configuration below correct?");
            Console.WriteLine($"\nappPath: {appPath}\nappName: {appName}\nappARGV: {appARGV}\ndelayBeforeRestartProgram: {delayBeforeRestartProgram}");
            Console.Write("(Y/N): ");
            a = Console.ReadLine();
            if (a == "Y" || a == "y")
            {
                WriteConfig();
                break;
            }
            else if (a == "N" || a == "n")
            {
                Console.WriteLine("Please, rerun this application with /config argument to try again.");
                break;
            }
        }
        Console.WriteLine("\nPress any key to exit...");
        Console.ReadKey();
        Exit(0);
    }
    var soundIn = new WasapiLoopbackCapture();
    WriteLog("(Main): Working with: " + soundIn.Device.FriendlyName);
    try
    {
        soundIn.Initialize();
    }
    catch
    {
        WriteLog("ERROR! (Main): Error while initializing device(39). Exiting.");
        Exit(1);
    }
    var soundInSource = new SoundInSource(soundIn);
    try
    {
        ISampleSource source = soundInSource.ToSampleSource();
        soundInSource.DataAvailable += (s, aEvent) => NewData(source);
    }
    catch
    {
        WriteLog("ERROR! (Main): Error while initializing device(50). Exiting.");
        Exit(1);
    }
    WriteLog("(Main): Trying to start sound capturing...");
    try
    {
        soundIn.Start();
        Thread.Sleep(2000);
        if (!newDataIsRunning && !noSoundIsRunning)
        {
            Thread noSound = new Thread(NoSound);
            noSound.IsBackground = true;
            noSound.Start();
        }
    }
    catch
    {
        WriteLog("ERROR! (Main): Error while sound capturing. Exiting.");
        Exit(1);
    }
    WriteLog("(Main): Started.");
}
// Records the currently playing track to a WAV file, one file per song, until
// the window title stops changing (player closed/paused). Track changes are
// detected by polling the player's window title; "Advertisement" and
// "Spotify" titles are treated as non-song states and skipped.
void loopRecord()
{
    // Get current window title of active window
    string title = GetWindowTitle();
    // Wait for the title to change, check 10 times per second
    while (title == GetWindowTitle() || GetWindowTitle() == "Advertisement" || GetWindowTitle() == "Spotify")
    {
        Thread.Sleep(100);
    }
    updateWindowNameDisplay();
    // Switch the record button to its "recording" look (marshalled to the UI thread).
    btn_toggleRecord.Invoke((MethodInvoker) delegate
    {
        btn_toggleRecord.Text = "Recording...";
        btn_toggleRecord.BackColor = Color.LightGreen;
        btn_toggleRecord.ForeColor = Color.White;
    });
    while (!stopRecording)
    {
        using (WasapiCapture capture = new WasapiLoopbackCapture())
        {
            // Poll until we know what is playing — presumably queries the
            // player's web API; confirm get_Currently_Playing's behavior.
            currentlyplaying cp = null;
            while (cp == null)
            {
                cp = get_Currently_Playing();
            }
            if (cp.item != null)
            {
                // The track id doubles as the output file name.
                string filename = cp.item.id; // GetWindowTitle();
                //rtxt_songlist.Invoke((MethodInvoker)delegate {
                //    // Running on the UI thread
                //    rtxt_songlist.Text += filename + "\n";
                //});
                // rtxt_songlist.Text += filename + "\n";
                //foreach (char c in System.IO.Path.GetInvalidFileNameChars())
                //{
                //    filename = filename.Replace(c, '_');
                //}
                //initialize the selected device for recording
                capture.Initialize();
                if (!Directory.Exists(path))
                {
                    Directory.CreateDirectory(path);
                }
                //create a wavewriter to write the data to
                using (WaveWriter w = new WaveWriter(path + "\\" + filename + ".wav", capture.WaveFormat))
                {
                    //setup an eventhandler to receive the recorded data
                    capture.DataAvailable += (s, E) =>
                    {
                        //save the recorded audio
                        w.Write(E.Data, E.Offset, E.ByteCount);
                    };
                    //start recording
                    capture.Start();
                    //for (int i = 0; i < 100; i++)
                    //{
                    //    Thread.Sleep(time / 100);
                    //    prog_recording.Value = 1 * i;
                    //}
                    // Get current window title of active window
                    string newTitle = GetWindowTitle();
                    // Wait for the title to change, check 10 times per second
                    // (a title change means the song ended or was skipped).
                    while (newTitle == GetWindowTitle())
                    {
                        Thread.Sleep(100);
                        updateWindowNameDisplay();
                    }
                    //stop recording
                    capture.Stop();
                    updateWindowNameDisplay();
                    // Wait out ads / idle state before tagging the file.
                    while (GetWindowTitle() == "Advertisement" || GetWindowTitle() == "Spotify")
                    {
                        Thread.Sleep(100);
                        updateWindowNameDisplay();
                    }
                    // Convert/tag the finished recording asynchronously.
                    convertTagAsynch(path, filename, cp);
                    // Thread.Sleep(time);
                }
            }
        }
        // Title unchanged since loopRecord started => nothing new is playing.
        if (title == GetWindowTitle())
        {
            stopRecording = true;
        }
    }
    // Restore the record button's idle look.
    btn_toggleRecord.Invoke((MethodInvoker) delegate
    {
        btn_toggleRecord.Text = "Record";
        btn_toggleRecord.BackColor = Color.FromArgb(30, 30, 30);
        btn_toggleRecord.ForeColor = Color.White;
    });
}
/// <summary>
/// Connects to the given voice channel and streams the PC's loopback audio
/// into it, resampled to the requested format, until a key is pressed.
/// Fixes: the original dereferenced 'channel' even though it defaults to
/// null (NullReferenceException) — it now fails fast with a clear exception —
/// and the PCM stream returned by CreatePCMStream is now disposed.
/// </summary>
public async Task AudioStreamCommand([ Summary("Voice Channel name")] IVoiceChannel channel = null, [Summary("Number of audio channels, 1 for mono, 2 for stereo (Default)")] int nAudioChannels = 2, [Summary("Sample rate in hertz, 48000 (Default)")] int sampleRate = 48000, [Summary("Number of bits per sample, 16 (Default)")] int bitsPerSample = 16)
{
    if (channel == null)
    {
        throw new ArgumentNullException(nameof(channel), "A voice channel is required.");
    }
    var connection = await channel.ConnectAsync();
    using (var dstream = connection.CreatePCMStream(AudioApplication.Mixed))
    using (WasapiCapture soundIn = new WasapiLoopbackCapture())
    {
        //initialize the soundIn instance
        soundIn.Initialize();
        //create a SoundSource around the the soundIn instance
        //this SoundSource will provide data, captured by the soundIn instance
        SoundInSource soundInSource = new SoundInSource(soundIn) { FillWithZeros = false };
        //create a source, that converts the data provided by the
        //soundInSource to any other format
        //in this case the "Fluent"-extension methods are being used
        IWaveSource convertedSource = soundInSource
            .ChangeSampleRate(sampleRate) // sample rate
            .ToSampleSource()
            .ToWaveSource(bitsPerSample); //bits per sample
        //channels...
        using (convertedSource = nAudioChannels == 1 ? convertedSource.ToMono() : convertedSource.ToStereo())
        {
            //register an event handler for the DataAvailable event of
            //the soundInSource
            //Important: use the DataAvailable of the SoundInSource
            //If you use the DataAvailable event of the ISoundIn itself
            //the data recorded by that event might won't be available at the
            //soundInSource yet
            soundInSource.DataAvailable += (s, e) =>
            {
                //read data from the converedSource
                //important: don't use the e.Data here
                //the e.Data contains the raw data provided by the
                //soundInSource which won't have your target format
                byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                int read;
                //keep reading as long as we still get some data
                //if you're using such a loop, make sure that soundInSource.FillWithZeros is set to false
                while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    //write the read data to the voice stream
                    // ReSharper disable once AccessToDisposedClosure
                    dstream.Write(buffer, 0, read);
                }
            };
            //we've set everything we need -> start capturing data
            soundIn.Start();
            Console.WriteLine("Capturing started ... press any key to stop.");
            Console.ReadKey();
            soundIn.Stop();
        }
    }
}
// Starts WASAPI loopback capture and streams three spectrum resolutions
// (50/200/1000 bars) to the _receiveAudio callback on every buffer read.
// Refactor: the three byte-identical LineSpectrum initializer blocks are
// collapsed into a single local factory that varies only BarCount.
public void StartListen()
{
    _isRunning = true;
    _loopbackCapture = new WasapiLoopbackCapture();
    _loopbackCapture.Initialize();
    _soundInSource = new SoundInSource(_loopbackCapture);
    _basicSpectrumProvider = new BasicSpectrumProvider(_soundInSource.WaveFormat.Channels, _soundInSource.WaveFormat.SampleRate, CFftSize);

    // All three spectrums share the provider and scaling; only BarCount differs.
    LineSpectrum CreateSpectrum(int barCount) => new LineSpectrum(CFftSize)
    {
        SpectrumProvider = _basicSpectrumProvider,
        BarCount = barCount,
        UseAverage = true,
        IsXLogScale = true,
        ScalingStrategy = ScalingStrategy.Linear
    };
    LineSpectrum lineSpectrum50 = CreateSpectrum(50);
    LineSpectrum lineSpectrum200 = CreateSpectrum(200);
    LineSpectrum lineSpectrum1000 = CreateSpectrum(1000);

    _loopbackCapture.Start();
    _singleBlockNotificationStream = new SingleBlockNotificationStream(_soundInSource.ToSampleSource());
    _realtimeSource = _singleBlockNotificationStream.ToWaveSource();
    byte[] buffer = new byte[_realtimeSource.WaveFormat.BytesPerSecond / 2];
    _soundInSource.DataAvailable += (s, ea) =>
    {
        int read;
        // Reading drives SingleBlockRead, which feeds _basicSpectrumProvider.
        while (_isRunning && (read = _realtimeSource.Read(buffer, 0, buffer.Length)) > 0)
        {
            float[] audioData50 = lineSpectrum50.GetSpectrumData(MaxAudioValue);
            float[] audioData200 = lineSpectrum200.GetSpectrumData(MaxAudioValue);
            float[] audioData1000 = lineSpectrum1000.GetSpectrumData(MaxAudioValue);
            if (audioData50 != null && audioData200 != null && audioData1000 != null && _receiveAudio != null)
            {
                _receiveAudio(audioData50, audioData200, audioData1000);
            }
        }
    };
    _singleBlockNotificationStream.SingleBlockRead += singleBlockNotificationStream_SingleBlockRead;
}
// Sets up WASAPI loopback capture and runs impulse (pulse) detection on the
// deinterlaced per-channel audio, publishing "mono|lfe|angle" detections to
// the given outlet when a publisher and outlet are supplied.
// NOTE(review): capture/captureSource/notificationSource are 'using'
// declarations, so they are disposed when this constructor returns — this is
// only safe because StartCapturingAndHold(capture) holds until capture is
// finished; confirm that method blocks.
public AudioProcessor(Publisher publisher, string outlet = null)
{
    using WasapiCapture capture = new WasapiLoopbackCapture(CAPTURE_LATENCY);
    capture.Initialize();
    channelNum = capture.WaveFormat.Channels;
    systemSampleRate = capture.WaveFormat.SampleRate;
    // FillWithZeros = false so the Read loop below terminates when drained.
    using SoundInSource captureSource = new SoundInSource(capture) { FillWithZeros = false };
    using SimpleNotificationSource notificationSource = new SimpleNotificationSource(FluentExtensions.ToSampleSource(captureSource)) { Interval = PROCESS_WINDOW_LENGTH };
    InitializeMonoBuffers(monoBuffers, channelNum, notificationSource.BlockCount);
    blockBuffer = new float[notificationSource.BlockCount * channelNum];
    // Low-pass filter at LFE_CUTOFF, handed to the mono detector as its biquad.
    lpf = new LowpassFilter(systemSampleRate, LFE_CUTOFF);
    MonoPulseDetector = new SimplePulseDetector(monoBuffers, lfeProvided: false, biQuadFilter: lpf);
    localisationer = new Localisationer(monoBuffers);
    if (channelNum > 2)
    {
        // More than stereo: a dedicated LFE-based detector is also created.
        LFEPulseDetector = new SimplePulseDetector(monoBuffers, lfeProvided: true);
    }
    capture.DataAvailable += (s, e) =>
    {
        // Consume every complete processing window currently buffered.
        while (notificationSource.Read(blockBuffer, 0, notificationSource.BlockCount * channelNum) > 0)
        {
            monoBuffers = Deinterlacing(monoBuffers, blockBuffer, channelNum);
            if (LFEPulseDetector != null)
            {
                // Surround path: combine mono and LFE detections.
                bool m = MonoPulseDetector.Predict();
                bool l = LFEPulseDetector.Predict();
                if (m || l)
                {
                    double angle = localisationer.GetLoudestAngle();
#if DEBUG
                    Console.Clear();
                    Console.WriteLine($"LFE Level: {LFEPulseDetector.CurrentReading:F3}, LFE Threshold: {LFEPulseDetector.CurrentThreshold:F3}");
                    Console.WriteLine($"Mixed Level: {MonoPulseDetector.CurrentReading:F3}, Mixed Threshold: {MonoPulseDetector.CurrentThreshold:F3}");
                    Console.WriteLine($"Impulse Detected - Mono:{m}, LFE:{l}, Angle: {angle:F3}, Hit Count:{hitCount}");
#endif
                    if (publisher != null && outlet != null)
                    {
                        publisher.Publish(outlet, $"{m}|{l}|{angle:F3}");
                    }
                    hitCount++;
                }
            }
            else
            {
                // Stereo/mono path: only the mono detector is available.
                if (MonoPulseDetector.Predict())
                {
                    double angle = localisationer.GetLoudestAngle();
#if DEBUG
                    Console.Clear();
                    Console.WriteLine($"Level: {MonoPulseDetector.CurrentReading:F3}, Threshold: {MonoPulseDetector.CurrentThreshold:F3}");
                    Console.WriteLine($"Impulse Detected - Mono, Angle:{angle:F3}, Hit Count:{hitCount}");
#endif
                    if (publisher != null && outlet != null)
                    {
                        publisher.Publish(outlet, $"True|False|{angle:F3}");
                    }
                    hitCount++;
                }
            }
        }
    };
    StartCapturingAndHold(capture);
}
/// <summary>
/// Interactively records a chosen render device (loopback) to a WAV file and
/// optionally converts the result to MP3.
/// Fix: an out-of-range device digit no longer throws KeyNotFoundException —
/// selection now uses TryGetValue and re-prompts.
/// </summary>
/// <param name="waveFileName">Target WAV path.</param>
/// <param name="mp3FileName">Target MP3 path (used when includeMp3 is true).</param>
/// <param name="includeMp3">Whether to also produce an MP3.</param>
public void Record(string waveFileName, string mp3FileName, bool includeMp3)
{
    int latency = 5;
    // NOTE(review): 320000 looks like a bitrate (320 kbps) mistaken for a
    // sample rate — typical rates are 44100/48000 Hz. Kept as-is; confirm.
    int sampleRate = 320000;//44100;
    int bits = 32;
    int channels = 2;
    //var encoding = AudioEncoding.MpegLayer3;
    WaveFormat waveFormat = new WaveFormat(sampleRate, bits, channels);
    using (WasapiCapture capture = new WasapiLoopbackCapture(latency, waveFormat, ThreadPriority.Highest))
    {
        // Build a 1-based menu of all active render devices.
        Dictionary<int, MMDevice> devices = new Dictionary<int, MMDevice>();
        int i = 1;
        foreach (MMDevice device in MMDeviceEnumerator.EnumerateDevices(DataFlow.Render))
        {
            devices.Add(i, device);
            i++;
        }
        ColorConsole.WriteLine("Available devices:", ConsoleColor.Blue);
        foreach (var x in devices)
        {
            if (x.Value.FriendlyName == capture.Device.FriendlyName)
            {
                ColorConsole.WriteLine(x.Key + ". " + x.Value.FriendlyName + " [default]", ConsoleColor.Cyan);
            }
            else
            {
                ColorConsole.WriteLine(x.Key + ". " + x.Value.FriendlyName, ConsoleColor.DarkMagenta);
            }
        }
        bool optionSelected = false;
        while (!optionSelected)
        {
            ColorConsole.Write("Select which device above to record from 1,2,3... (Enter for default - ", ConsoleColor.White, capture.Device.FriendlyName);
            ColorConsole.Write("{0}", ConsoleColor.Cyan, capture.Device.FriendlyName);
            ColorConsole.Write(") $", ConsoleColor.White, capture.Device.FriendlyName);
            ConsoleKeyInfo key = Console.ReadKey();
            string keystring = key.KeyChar.ToString();
            if (key.Key == ConsoleKey.Enter)
            {
                optionSelected = true;
            }
            // Guard against digits outside the menu range (was devices[result],
            // which threw for e.g. "9" with only 3 devices listed).
            else if (int.TryParse(keystring, out var result) && devices.TryGetValue(result, out var chosen))
            {
                capture.Device = chosen;
                optionSelected = true;
            }
        }
        Console.WriteLine();
        ColorConsole.WriteLine("Recording initialising", ConsoleColor.Blue);
        capture.Initialize();
        using (WaveRecorder waveRecorder = new WaveRecorder(waveFileName, capture))
        {
            ColorConsole.Write("Press ENTER start recording $", ConsoleColor.White);
            WaitForEnter();
            ColorConsole.WriteLine("Writing wave to file '{0}'", ConsoleColor.Blue, waveFileName);
            waveRecorder.StartRecording();
            ColorConsole.Write("Recording... Press ENTER to end recording $", ConsoleColor.White);
            WaitForEnter();
            waveRecorder.EndRecording();
        }
        ColorConsole.WriteLine("Finished recording", ConsoleColor.Blue, Path.GetFullPath(waveFileName));
        ColorConsole.WriteLine("{0} written to disk.", ConsoleColor.Blue, Path.GetFullPath(waveFileName));
        if (includeMp3)
        {
            ToMp3 toMp3 = new ToMp3();
            ColorConsole.WriteLine("Creating mp3 from wav...", ConsoleColor.Blue, Path.GetFullPath(mp3FileName));
            toMp3.ConvertFromWave(waveFileName, mp3FileName);
            ColorConsole.WriteLine("Finished creating mp3", ConsoleColor.Blue, Path.GetFullPath(mp3FileName));
            ColorConsole.WriteLine("{0} written to disk.", ConsoleColor.Blue, Path.GetFullPath(mp3FileName));
            ColorConsole.Write("Press ENTER to exit application $", ConsoleColor.White, Path.GetFullPath(mp3FileName));
            WaitForEnter();
        }
    }
}
// Streams PC loopback audio to an Android client over TCP (port ServerPort).
// When an adb device is attached the connection goes through localhost with
// "adb forward"; otherwise the user is asked for the client's IP. Runs a
// reconnect loop forever, throttled to one attempt per second.
static async Task MainAsync()
{
    Console.Title = "Audio Streamer - PC to Android";
    IPAddress IPAddr;
    bool UseAdb = false;
    // Detect an attached adb device: "adb devices" prints a header line first,
    // then one line per device — a non-empty second line means one is present.
    try
    {
        var AdbDevices = Process.Start(new ProcessStartInfo()
        {
            FileName = "adb",
            Arguments = "devices",
            UseShellExecute = false,
            RedirectStandardOutput = true
        });
        await AdbDevices.StandardOutput.ReadLineAsync();
        UseAdb = !string.IsNullOrWhiteSpace(await AdbDevices.StandardOutput.ReadLineAsync());
    }
    catch (System.ComponentModel.Win32Exception)
    {
        // adb is not installed / not on PATH — fall back to manual IP entry.
    }
    if (UseAdb)
    {
        IPAddr = IPAddress.Loopback;
    }
    else
    {
        Console.Write("IP: ");
        IPAddr = IPAddress.Parse(Console.ReadLine());
    }
    Process.GetCurrentProcess().PriorityClass = ProcessPriorityClass.High;
    using (Capture = new WasapiLoopbackCapture(0, new CSCore.WaveFormat(), ThreadPriority.Highest))
    {
        while (true)
        {
            // Throttle reconnect attempts to at most one per second.
            var NoSpamDelay = Task.Delay(1000);
            if (UseAdb)
            {
                // Forward the client's port over USB so loopback reaches it.
                Process.Start(new ProcessStartInfo()
                {
                    FileName = "adb",
                    Arguments = "forward tcp:1420 tcp:1420",
                    UseShellExecute = false
                });
            }
            using (var Conn = new TcpClient()
            {
                NoDelay = true,
                ReceiveBufferSize = 64,
                SendBufferSize = 1 << 12 //2^12 = ~4000 so 1000 floats
            })
            {
                try
                {
                    await Conn.ConnectAsync(IPAddr, ServerPort);
                    Stream = Conn.GetStream();
                    // The first byte from the client must be 1 before streaming starts.
                    if (Stream.ReadByte() == 1)
                    {
                        Console.WriteLine("Connected to " + IPAddr.ToString());
                        Capture.Initialize();
                        using (Source = new SoundInSource(Capture))
                        {
                            int SampleRateServer = Source.WaveFormat.SampleRate;
                            // Client reports its sample rate as 3 little-endian bytes.
                            int SampleRateClient = Stream.ReadByte() | Stream.ReadByte() << 8 | Stream.ReadByte() << 16;
                            if (SampleRateClient != SampleRateServer)
                            {
                                Console.WriteLine($"Sample rate mismatch, PC was {SampleRateServer} Hz but client was {SampleRateClient} Hz");
                                Console.WriteLine("Adjust your PC's sample rate then press any key to try again");
                                Console.ReadKey();
                                Console.Clear();
                            }
                            else
                            {
                                // Start Capturing
                                Source.DataAvailable += DataAvailable;
                                Capture.Start();
                                Console.WriteLine($"Started recording audio at {SampleRateServer} Hz");
                                Window.SetWindowShown(false);
                                // Stop Capturing — waits until DisconnectWaiter is completed,
                                // presumably by DataAvailable on a send failure; confirm.
                                await(DisconnectWaiter = new TaskCompletionSource<bool>()).Task;
                                await Task.Run(() => Capture.Stop());
                                Window.SetWindowShown(true);
                                Console.WriteLine("Disconnected, stopped recording audio");
                            }
                        }
                    }
                }
                catch { }
                await NoSpamDelay;
            }
        }
    }
}
// Lazily sets up WASAPI loopback capture plus the FFT spectrum pipeline,
// throttling re-initialization attempts to at most one per second.
// Returns true once the capture pipeline is running.
private bool Initialize()
{
    if (_initialized)
    {
        return (true);
    }

    // Throttle: bail out if the last attempt was under a second ago.
    var currentTicks = Environment.TickCount;
    if (currentTicks - _lastInitializeTickCount < 1000)
    {
        return (false);
    }
    _lastInitializeTickCount = currentTicks;

    try
    {
        _soundIn = new WasapiLoopbackCapture();
        _soundIn.Initialize();
    }
    catch (Exception e)
    {
        Logger.Debug(e, "Failed to initialize WasapiLoopbackCapture!");
        return (false);
    }
    Logger.Debug($"Initialized WasapiLoopbackCapture on \"{_soundIn.Device.FriendlyName}\"");

    var captureSource = new SoundInSource(_soundIn);
    var samples = captureSource.ToSampleSource();

    const FftSize fftSize = FftSize.Fft1024;
    _fftBuffer = new float[(int)fftSize];
    _spectrumProvider = new SpectrumProvider(samples.WaveFormat.Channels, samples.WaveFormat.SampleRate, fftSize);

    // Every block read through the chain is forwarded to the FFT provider.
    var notifier = new DataNotificationSource(samples);
    notifier.DataRead += (s, e) => _spectrumProvider.Add(e.Data, e.Data.Length);

    var pump = notifier.ToWaveSource(16);
    var pumpBuffer = new byte[pump.WaveFormat.BytesPerSecond / 2];
    // Draining on DataAvailable is what drives the DataRead notifications.
    captureSource.DataAvailable += (s, e) =>
    {
        while (pump.Read(pumpBuffer, 0, pumpBuffer.Length) > 0)
        {
        }
    };

    _spectrum = new LedSpectrum(Config.ColorGradient)
    {
        FftSize = fftSize,
        SpectrumProvider = _spectrumProvider,
        UseAverage = Config.UseAverage,
        MinimumFrequency = Config.MinimumFrequency,
        MaximumFrequency = Config.MaximumFrequency,
        ScalingStrategy = Config.ScalingStrategy,
        ScalingFactor = Config.ScalingFactor,
        IsXLogScale = false
    };

    _soundIn.Start();
    _initialized = true;
    return (true);
}
// Captures system audio via WASAPI loopback, deinterlaces it into per-channel
// buffers, and periodically prints the FFT magnitude spectrum of channel 0.
// Fix: the sample-copy loop iterated over pcmBuffers.Count (the CHANNEL
// count, e.g. 2) instead of the sample count, so only the first couple of
// entries of 'data' were ever populated before the FFT.
public AudioProcessor(Publisher publisher, string outlet = null)
{
    WasapiCapture capture = new WasapiLoopbackCapture(CAPTURE_LATENCY);
    capture.Initialize();
    channelNum = capture.WaveFormat.Channels;
    systemSampleRate = capture.WaveFormat.SampleRate;
    // FillWithZeros = false so the Read loop below terminates when drained.
    SoundInSource captureSource = new SoundInSource(capture) { FillWithZeros = false };
    SimpleNotificationSource notificationSource = new SimpleNotificationSource(FluentExtensions.ToSampleSource(captureSource)) { Interval = PROCESS_WINDOW_LENGTH };
    InitializeMonoBuffers(pcmBuffers, channelNum, notificationSource.BlockCount);
    blockBuffer = new float[notificationSource.BlockCount * channelNum];
    capture.DataAvailable += (s, e) =>
    {
        while (notificationSource.Read(blockBuffer, 0, notificationSource.BlockCount * channelNum) > 0)
        {
            // Extracted audio signal
            pcmBuffers = Deinterlacing(pcmBuffers, blockBuffer, channelNum);
            CSCore.Utils.Complex[] data = new CSCore.Utils.Complex[pcmBuffers[0].Length];
            // Copy every sample of channel 0 into the real part (imaginary = 0).
            for (int i = 0; i < data.Length; i++)
            {
                data[i].Real = pcmBuffers[0][i];
                data[i].Imaginary = 0;
            }
            //DetectPitch(pcmBuffers);
            counter++;
            if (counter % 30 == 0)
            {
                Console.Clear();
                // NOTE(review): Fft(data, 8) implies 2^8 = 256 points; confirm
                // notificationSource.BlockCount actually matches this size.
                FastFourierTransformation.Fft(data, 8);
                //Console.WriteLine(data.Length);
                foreach (var d in data)
                {
                    Console.Write(Math.Round(d.Value, 4) + " ");
                }
                Console.WriteLine();
            }
            // TODO: Implement your model
            //publisher.Publish(outlet, "OUTPUT MESSAGE TO NEXT OUTLET");
        }
    };
    StartCapturingAndHold(capture);
}