/// <summary>
/// Sets up WASAPI capture for the given device — loopback capture for render
/// (playback) devices, direct capture otherwise — resampled toward Discord's
/// wave format, and optionally mirrors captured audio to a local output.
/// </summary>
/// <param name="device">Wrapper around the MMDevice to capture from.</param>
/// <param name="settings">Sound settings; PlayLocally enables local monitoring.</param>
public SoundPlayer(SoundDevice device, SoundSettings settings)
{
    _soundSettings = settings;
    _inputDevice = device.Device;

    if (_inputDevice.DataFlow == DataFlow.Render)
    {
        // Render devices need loopback capture to record what is being played.
        _capture = new WasapiLoopbackCapture(_inputDevice) { ShareMode = AudioClientShareMode.Shared };
        _log.Debug($"Initialized WasapiLoopbackCapture for device {_inputDevice.FriendlyName} of type {_inputDevice.DataFlow}. ShareMode: {_capture.ShareMode}, State: {_inputDevice.State}");
        _log.Debug($"Capturing in format: {_capture.WaveFormat} {_capture.WaveFormat.BitsPerSample}bit {_capture.WaveFormat.SampleRate}Hz {_capture.WaveFormat.Channels} channels");

        _captureProvider = new WaveInProvider(_capture);
        _remoteOutput = new DefaultResampler(_capture.WaveFormat).Resample(_captureProvider);
        _remoteBuffer = new BufferedWaveProvider(SoundSettings.DiscordFormat);
        _capture.DataAvailable += (s, e) =>
        {
            // NOTE(review): assumes the resampled stream is exactly half the
            // captured byte count (e.g. 32-bit float -> 16-bit PCM) and that
            // the `buf` field is large enough for any callback — confirm.
            _remoteOutput.Read(buf, 0, e.BytesRecorded / 2);
            _remoteBuffer.AddSamples(buf, 0, e.BytesRecorded / 2);
        };
    }
    else
    {
        _capture = new WasapiCapture(_inputDevice) { ShareMode = AudioClientShareMode.Shared };
        // FIX: the original log message contained a raw line break inside a
        // non-verbatim interpolated string, which does not compile; joined onto
        // one line with the same content.
        _log.Debug($"Initialized WasapiCapture for device {_inputDevice.FriendlyName} of type {_inputDevice.DataFlow}. ShareMode: {_capture.ShareMode}, State: {_inputDevice.State}.");
        _log.Debug($"Capturing in format: {_capture.WaveFormat} {_capture.WaveFormat.BitsPerSample}bit {_capture.WaveFormat.SampleRate}Hz {_capture.WaveFormat.Channels} channels");

        _captureProvider = new WaveInProvider(_capture);
        _remoteOutput = new DefaultResampler(_capture.WaveFormat).Resample(_captureProvider);

        var captureRate = _capture.WaveFormat.AverageBytesPerSecond;
        var outputRate = SoundSettings.DiscordFormat.AverageBytesPerSecond;
        _log.Info($"Capture rate is {captureRate}, output rate is {outputRate}, that gives a ratio of {(float)captureRate / (float)outputRate}");
        _log.Debug($"Outputting in format: {_remoteOutput.WaveFormat} {_remoteOutput.WaveFormat.BitsPerSample}bit {_remoteOutput.WaveFormat.SampleRate}Hz {_remoteOutput.WaveFormat.Channels} channels");

        // Bytes-out per byte-in, used to size reads from the resampled stream.
        var rate = (float)SoundSettings.DiscordFormat.AverageBytesPerSecond / _capture.WaveFormat.AverageBytesPerSecond;
        _remoteBuffer = new BufferedWaveProvider(_remoteOutput.WaveFormat);
        _capture.DataAvailable += (s, e) =>
        {
            _remoteOutput.Read(buf, 0, (int)Math.Round(e.BytesRecorded * rate));
            _remoteBuffer.AddSamples(buf, 0, (int)Math.Round(e.BytesRecorded * rate));
        };
    }

    if (_soundSettings.PlayLocally)
    {
        // Mirror the captured (resampled) audio to the default local output.
        _localOutput = new WasapiOut();
        _localBuffer = new BufferedWaveProvider(_remoteOutput.WaveFormat);
        _localOutput.Init(_localBuffer);
        _capture.DataAvailable += (s, e) =>
        {
            // NOTE(review): reuses the shared `buf` filled by the other
            // DataAvailable handler; relies on handler invocation order — verify.
            _localBuffer.AddSamples(buf, 0, e.BytesRecorded / 2);
        };
    }
}
/// <summary>
/// Stops capture and releases the loopback stream; safe to call repeatedly
/// or before recording ever started.
/// </summary>
public void shutDown()
{
    // FIX: the original dereferenced _waveInStream unconditionally, so a
    // second call (or a call before initialization) threw NullReferenceException.
    if (_waveInStream != null)
    {
        _waveInStream.StopRecording();
        _waveInStream.Dispose();
        _waveInStream = null;
    }

    // Drop the provider chain so it can be collected.
    _waveIn16Stream = null;
    _waveTemp = null;
}
/// <summary>
/// Stop all playback and recording of the mixer. Switch to the new input device and start recording.
/// </summary>
public void AttachInputMicrophone()
{
    InitializeWaveOutWaveIn();
    // Give the old route time to drain; prevents looping of sound when
    // switching to the virtual audio cable.
    System.Threading.Thread.Sleep(300);

    // Read the selected input index, marshalling to the UI thread if needed.
    int deviceNumber = -1;
    if (!cbInputDevices.Dispatcher.CheckAccess())
    {
        Action action = () => deviceNumber = cbInputDevices.SelectedIndex;
        cbInputDevices.Dispatcher.Invoke(action);
    }
    else
    {
        deviceNumber = cbInputDevices.SelectedIndex;
    }

    if (deviceNumber == -1)
    {
        // Nothing selected: fall back to the first listed device.
        // NOTE(review): direct (MMDevice) cast assumes Items holds MMDevice,
        // but the branch below unwraps ComboboxItem — confirm item type.
        sourceStream = new WasapiCapture((MMDevice)cbInputDevices.Items[0]);
    }
    else
    {
        if (!cbInputDevices.Dispatcher.CheckAccess())
        {
            Action action = () => sourceStream = new WasapiCapture((MMDevice)(cbInputDevices.SelectedItem as ComboboxItem).Value);
            cbInputDevices.Dispatcher.Invoke(action);
        }
        else
        {
            // FIX: this branch previously created a WasapiLoopbackCapture from a
            // direct (MMDevice) cast of SelectedItem, inconsistent with the
            // dispatcher branch above (WasapiCapture + ComboboxItem unwrap);
            // unified on the dispatcher branch's behavior.
            sourceStream = new WasapiCapture((MMDevice)(cbInputDevices.SelectedItem as ComboboxItem).Value);
        }
    }

    // Route the selected input through to the selected output device.
    WaveInProvider waveIn = new WaveInProvider(sourceStream);
    if (!cbOutputDevices.Dispatcher.CheckAccess())
    {
        // FIX: removed a stray double semicolon in the original lambda.
        Action action = () => waveOut.DeviceNumber = cbOutputDevices.SelectedIndex;
        cbOutputDevices.Dispatcher.Invoke(action);
    }
    else
    {
        waveOut.DeviceNumber = cbOutputDevices.SelectedIndex;
    }

    waveOut.DesiredLatency = 120;
    waveOut.Init(waveIn);
    waveOut.Play();
    sourceStream.StartRecording();
}
/// <summary>
/// Routes audio captured (via loopback) from the configured recording device
/// straight to the configured output device.
/// </summary>
void doMagic02_WORKS()
{
    var capture = new WasapiLoopbackCapture(settings.devRec.mm);
    var playback = new WasapiOut(settings.devOut.mm, AudioClientShareMode.Shared, false, 100);
    playback.Init(new WaveInProvider(capture));
    capture.StartRecording();
    playback.Play();
}
/// <summary>
/// Connects to the remote peer over TCP (port 8092) and starts capturing
/// audio from the default input device using the configured sample rate,
/// bit depth and channel count.
/// </summary>
public void Enable()
{
    // Establish the connection to the peer named in the settings.
    try
    {
        ipep = new IPEndPoint(IPAddress.Parse(Micobject.settings.sourcename), 8092);
        client = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
        client.Connect(ipep);
        logger.Info("与对方服务器{0}通信连接成功。", Micobject.settings.sourcename);
    }
    catch (SocketException se)
    {
        logger.Error("Socket异常:{0}", se.Message);
        throw;
    }
    catch (Exception ex)
    {
        // FIX: the original passed ex.Message as a format argument to a
        // template with no placeholder, so the detail never reached the log.
        logger.Error("通信异常:{0}", ex.Message);
    }

    // Build the capture pipeline:
    // WaveInEvent -> WaveInProvider -> SampleChannel -> MeteringSampleProvider.
    try
    {
        _sampleRate = Micobject.settings.samples;
        _bitsPerSample = Micobject.settings.bits;
        _channels = Micobject.settings.channels;
        RecordingFormat = new WaveFormat(_sampleRate, _bitsPerSample, _channels);
        _waveIn = new WaveInEvent
        {
            BufferMilliseconds = 40,
            DeviceNumber = 0,
            WaveFormat = RecordingFormat
        };
        _waveIn.DataAvailable += WaveInDataAvailable;
        _waveIn.RecordingStopped += WaveInRecordingStopped;
        _waveProvider = new WaveInProvider(_waveIn);
        _sampleChannel = new SampleChannel(_waveProvider);
        // Level metering is wired up but the StreamVolume handler is
        // deliberately not subscribed.
        _meteringProvider = new MeteringSampleProvider(_sampleChannel);
    }
    catch (Exception exc)
    {
        logger.Error(exc.Message);
    }

    // Start capturing; failures are logged but not rethrown (best effort).
    try
    {
        _waveIn.StartRecording();
        logger.Info("开始接收语音信号。");
    }
    catch (Exception ex)
    {
        logger.Error(ex.Message);
    }
}
/// <summary>
/// Start audio source.
/// </summary>
///
/// <remarks>Starts audio source and return execution to caller. audio source
/// object creates background thread and notifies about new frames with the
/// help of <see cref="DataAvailable"/> event.</remarks>
///
/// <exception cref="ArgumentException">audio source is not specified.</exception>
///
public void Start()
{
    if (string.IsNullOrEmpty(_source))
    {
        throw new ArgumentException("Audio source is not specified.");
    }

    // Cheap pre-check outside the lock; re-checked inside (double-checked start).
    if (_started)
    {
        return;
    }

    lock (_lock)
    {
        if (_started)
        {
            return;
        }

        // Resolve the configured product name to a device index.
        // FIX: the original kept a second counter `i` that always mirrored the
        // loop index `n`; use `n` directly. Last matching device still wins.
        int selind = -1;
        for (var n = 0; n < WaveIn.DeviceCount; n++)
        {
            if (WaveIn.GetCapabilities(n).ProductName == _source)
            {
                selind = n;
            }
        }

        if (selind == -1)
        {
            // Device no longer connected: report and bail without starting.
            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
            return;
        }

        _started = true;
        _waveIn = new WaveInEvent
        {
            BufferMilliseconds = 200,
            DeviceNumber = selind,
            WaveFormat = RecordingFormat
        };
        _waveIn.DataAvailable += WaveInDataAvailable;
        _waveIn.RecordingStopped += WaveInRecordingStopped;
        _waveProvider = new WaveInProvider(_waveIn);
        _sampleChannel = new SampleChannel(_waveProvider);
        _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
        _waveIn.StartRecording();
    }
}
/// <summary>
/// Captures system playback audio via WASAPI loopback, converts the float
/// stream to 16-bit PCM, and prepares a UDP server for streaming it out.
/// Recording does not start here; _recording begins false.
/// </summary>
public Speakers()
{
    _recording = false;
    _soundServer = new UDPServer(0);

    // Loopback capture -> wave provider -> float-to-16bit conversion chain.
    _waveInStream = new WasapiLoopbackCapture();
    _waveTemp = new WaveInProvider(_waveInStream);
    _waveIn16Stream = new WaveFloatTo16Provider(_waveTemp);
}
/// <summary>
/// Start audio source.
/// </summary>
///
/// <remarks>Starts audio source and return execution to caller. audio source
/// object creates background thread and notifies about new frames with the
/// help of <see cref="DataAvailable"/> event.</remarks>
///
/// <exception cref="ArgumentException">audio source is not specified.</exception>
///
public void Start()
{
    if (IsRunning)
    {
        return;
    }

    // Resolve the configured product name to a device index.
    // FIX: dropped the redundant secondary counter `i` (always equal to `n`).
    int selind = -1;
    for (int n = 0; n < WaveIn.DeviceCount; n++)
    {
        if (WaveIn.GetCapabilities(n).ProductName == _source)
        {
            selind = n;
        }
    }

    if (selind == -1)
    {
        // Device no longer connected or not configured: fall back to the
        // first available device; if there are none, report the loss and bail.
        if (WaveIn.DeviceCount > 0)
        {
            selind = 0;
        }
        else
        {
            AudioFinished?.Invoke(this, ReasonToFinishPlaying.DeviceLost);
            return;
        }
    }

    _waveIn = new WaveIn
    {
        BufferMilliseconds = 200,
        DeviceNumber = selind,
        WaveFormat = RecordingFormat
    };
    _waveIn.DataAvailable += WaveInDataAvailable;
    _waveIn.RecordingStopped += WaveInRecordingStopped;
    _waveProvider = new WaveInProvider(_waveIn);
    _sampleChannel = new SampleChannel(_waveProvider);

    // Only attach the meter when somebody is listening for level changes.
    if (LevelChanged != null)
    {
        _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    }

    _waveIn.StartRecording();
}
/// <summary>
/// Begins local monitoring of the input stream: feeds the capture through
/// the equalizer into a DirectSound output. No-op unless this is an input
/// stream.
/// </summary>
private void StartLoopback()
{
    if (!isInputStream)
    {
        return;
    }

    loopbackStream = new DirectSoundOut();
    waveInProvider = new WaveInProvider(sourceInStream);
    equalizer = new Equalizer(waveInProvider.ToSampleProvider(), eqBand);
    loopbackStream.Init(equalizer);
    loopbackStream.Play();
}
/// <summary>
/// Starts live monitoring of the input device selected in comboBoxAudio,
/// playing it back through a DirectSound output at 44.1 kHz.
/// </summary>
private void Button8_Click(object sender, EventArgs e)
{
    int deviceNumber = comboBoxAudio.SelectedIndex;
    // FIX: bail out when nothing is selected; the original passed -1 straight
    // to WaveIn.GetCapabilities, which throws.
    if (deviceNumber < 0)
    {
        return;
    }

    sourceStream = new WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
    WaveInProvider waveIn = new WaveInProvider(sourceStream);
    waveOut = new DirectSoundOut();
    waveOut.Init(waveIn);
    sourceStream.StartRecording();
    waveOut.Play();
}
/// <summary>
/// Records five seconds from the first Media Foundation capture device and
/// encodes the result to a WMA file.
/// </summary>
public Record()
{
    // Materialize the device enumeration via the collection constructor
    // instead of a manual foreach/Add loop.
    List<IMFActivate> devices = new List<IMFActivate>(MediaFoundationApi.EnumDeviceSources());
    MediaFoundationCapturer capturer = new MediaFoundationCapturer(devices[0]);
    WaveInProvider waveIn = new WaveInProvider(capturer);

    capturer.StartRecording();
    Thread.Sleep(5000);
    capturer.StopRecording();

    // FIX: the output is WMA-encoded, but the original wrote it to a ".mp3"
    // path; anything keying off the extension would misparse the file.
    MediaFoundationEncoder.EncodeToWma(waveIn, @"C:\record.wma");
}
/// <summary>
/// Stops and disposes the local loopback playback and clears the associated
/// provider/equalizer references. Safe to call when loopback is not running.
/// </summary>
private void StopLoopback()
{
    if (loopbackStream != null)
    {
        loopbackStream.Stop();
        loopbackStream.Dispose();
        loopbackStream = null;
    }

    // FIX: the original wrapped these assignments in redundant null checks;
    // assigning null unconditionally is equivalent and simpler.
    waveInProvider = null;
    equalizer = null;
}
/// <summary>
/// Starts monitoring input device 0 through a DirectSound output at 44.1 kHz,
/// provided the sources list is non-empty.
/// </summary>
private void StartSourcesButton_Click(object sender, RoutedEventArgs e)
{
    if (SourcesList.Items.Count == 0)
    {
        return;
    }

    // NOTE(review): always captures device 0 regardless of which list entry
    // is selected — confirm this is intentional.
    sourceStream = new WaveIn
    {
        DeviceNumber = 0,
        WaveFormat = new WaveFormat(44100, WaveIn.GetCapabilities(0).Channels)
    };
    waveOut = new DirectSoundOut();
    waveOut.Init(new WaveInProvider(sourceStream));
    sourceStream.StartRecording();
    waveOut.Play();
}
/// <summary>
/// Starts monitoring the input device selected in listView1 through a
/// DirectSound output at 44.1 kHz. Does nothing when no item is selected.
/// </summary>
private void Button2_Click(object sender, EventArgs e)
{
    if (listView1.SelectedItems.Count == 0)
    {
        return;
    }

    int selectedDevice = listView1.SelectedItems[0].Index;
    sourceStream = new WaveIn
    {
        DeviceNumber = selectedDevice,
        WaveFormat = new WaveFormat(44100, WaveIn.GetCapabilities(selectedDevice).Channels)
    };
    waveOut = new DirectSoundOut();
    waveOut.Init(new WaveInProvider(sourceStream));
    sourceStream.StartRecording();
    waveOut.Play();
}
/// <summary>
/// Start audio source.
/// </summary>
///
/// <remarks>Starts audio source and return execution to caller. audio source
/// object creates background thread and notifies about new frames with the
/// help of <see cref="DataAvailable"/> event.</remarks>
///
/// <exception cref="ArgumentException">audio source is not specified.</exception>
///
public void Start()
{
    if (IsRunning)
    {
        return;
    }

    if (string.IsNullOrEmpty(_source))
    {
        throw new ArgumentException("Audio source is not specified.");
    }

    // Resolve the configured product name to a device index.
    // FIX: dropped the redundant secondary counter `i` (always equal to `n`).
    int selind = -1;
    for (int n = 0; n < WaveIn.DeviceCount; n++)
    {
        if (WaveIn.GetCapabilities(n).ProductName == _source)
        {
            selind = n;
        }
    }

    if (selind == -1)
    {
        // Device no longer connected.
        AudioSourceError?.Invoke(this, new AudioSourceErrorEventArgs("not connected"));
        return;
    }

    _waveIn = new WaveIn
    {
        BufferMilliseconds = 200,
        DeviceNumber = selind,
        WaveFormat = RecordingFormat
    };
    _waveIn.DataAvailable += WaveInDataAvailable;
    _waveIn.RecordingStopped += WaveInRecordingStopped;
    _waveProvider = new WaveInProvider(_waveIn);
    _sampleChannel = new SampleChannel(_waveProvider);
    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    _waveIn.StartRecording();
}
/// <summary>
/// Starts a local mic-to-speaker preview: 8 kHz 16-bit mono capture,
/// upsampled to 44.1 kHz and passed through the radio effect filter.
/// Exits the process if audio initialization fails.
/// </summary>
/// <param name="mic">WaveIn device number for the microphone.</param>
/// <param name="speakers">WaveOut device number for playback.</param>
public void StartPreview(int mic, int speakers)
{
    try
    {
        _waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback())
        {
            BufferMilliseconds = 100,
            DeviceNumber = mic,
            WaveFormat = new WaveFormat(8000, 16, 1)
        };

        var micProvider = new WaveInProvider(_waveIn);
        Volume = new SampleChannel(micProvider);
        var upsampled = new WdlResamplingSampleProvider(Volume, 44100);
        var radioEffect = new RadioFilter(upsampled);

        // 100 ms of latency in the output buffer.
        _waveOut = new WaveOut
        {
            DesiredLatency = 100,
            DeviceNumber = speakers
        };
        _waveOut.Init(radioEffect);

        _waveIn.StartRecording();
        _waveOut.Play();
    }
    catch (Exception ex)
    {
        Logger.Error(ex, "Error starting audio Quitting!");
        Environment.Exit(1);
    }
}
/// <summary>
/// Starts monitoring the input device selected in sourceList through the
/// shared DirectSound output at 44.1 kHz. Does nothing when no item is
/// selected.
/// </summary>
private void loopback_Click(object sender, EventArgs e)
{
    if (sourceList.SelectedItems.Count == 0)
    {
        return;
    }

    int selectedDevice = sourceList.SelectedItems[0].Index;
    mother.mainWaveIn = new WaveIn
    {
        DeviceNumber = selectedDevice,
        WaveFormat = new WaveFormat(44100, WaveIn.GetCapabilities(selectedDevice).Channels)
    };
    mother.mainDirectSoundOut = new DirectSoundOut();
    mother.mainDirectSoundOut.Init(new WaveInProvider(mother.mainWaveIn));
    mother.mainWaveIn.StartRecording();
    mother.mainDirectSoundOut.Play();
}
/// <summary>
/// Creates a wave-input attachment: a WaveInEvent with default settings,
/// exposed through a WaveInProvider for downstream consumers.
/// Recording is not started here.
/// </summary>
public WaveInputDeviceAttachment()
{
    device = new WaveInEvent();
    provider = new WaveInProvider(device);
}
/// <summary>
/// Enables the local microphone source (audioMode 0 only): resolves the
/// configured device by product name, wires up the capture and metering
/// pipeline, and starts recording. Updates NoSource and
/// Micobject.settings.active to reflect the outcome.
/// </summary>
public void Enable()
{
    if (audioMode != 0)
    {
        return;
    }

    _processing = true;
    _sampleRate = Micobject.settings.samples;
    _bitsPerSample = Micobject.settings.bits;
    _channels = Micobject.settings.channels;
    RecordingFormat = new WaveFormat(_sampleRate, _bitsPerSample, _channels);

    // Resolve the configured product name to a local WaveIn device index.
    // FIX: dropped the redundant secondary counter (always equal to the loop index).
    int selind = -1;
    for (int n = 0; n < WaveIn.DeviceCount; n++)
    {
        if (WaveIn.GetCapabilities(n).ProductName == Micobject.settings.sourcename)
        {
            selind = n;
        }
    }

    if (selind == -1)
    {
        // Device no longer connected: mark the source inactive and bail.
        Micobject.settings.active = false;
        NoSource = true;
        _processing = false;
        return;
    }

    _waveIn = new WaveIn
    {
        BufferMilliseconds = 40,
        DeviceNumber = selind,
        WaveFormat = RecordingFormat
    };
    _waveIn.DataAvailable += WaveInDataAvailable;
    _waveIn.RecordingStopped += WaveInRecordingStopped;
    _waveProvider = new WaveInProvider(_waveIn);
    _sampleChannel = new SampleChannel(_waveProvider);
    _meteringProvider = new MeteringSampleProvider(_sampleChannel);
    _meteringProvider.StreamVolume += _meteringProvider_StreamVolume;

    try
    {
        _waveIn.StartRecording();
    }
    catch (Exception)
    {
        // Deliberate best-effort: a failed start leaves the source disabled
        // without surfacing an error (logging here was removed upstream).
        _processing = false;
        return;
    }

    NoSource = false;
    Micobject.settings.active = true;
    Invalidate();
    _processing = false;
}