private static void Main()
{
    DefaultMediaDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    DefaultMediaDevice.AudioEndpointVolume.OnVolumeNotification += AudioEndpointVolume_OnVolumeNotification;

    TrayIcon = new NotifyIcon();
    TrayIcon.Icon = IconFromVolume();
    TrayIcon.Text = ToolTipFromVolume();
    TrayIcon.MouseClick += TrayIcon_MouseClick;
    TrayIcon.MouseDoubleClick += TrayIcon_MouseDoubleClick;
    TrayIcon.Visible = true;

    TrayIcon.ContextMenu = new ContextMenu();
    TrayIcon.ContextMenu.MenuItems.Add(new MenuItem("Open Volume Mixer", (o, e) => { Process.Start(SystemDir + "sndvol.exe"); }));
    TrayIcon.ContextMenu.MenuItems.Add(new MenuItem("-"));
    TrayIcon.ContextMenu.MenuItems.Add(new MenuItem("Playback devices", (o, e) => { Process.Start(SystemDir + "rundll32.exe", @"Shell32.dll,Control_RunDLL mmsys.cpl,,playback"); }));
    TrayIcon.ContextMenu.MenuItems.Add(new MenuItem("Recording devices", (o, e) => { Process.Start(SystemDir + "rundll32.exe", @"Shell32.dll,Control_RunDLL mmsys.cpl,,recording"); }));
    TrayIcon.ContextMenu.MenuItems.Add(new MenuItem("Sounds", (o, e) => { Process.Start(SystemDir + "rundll32.exe", @"Shell32.dll,Control_RunDLL mmsys.cpl,,sounds"); }));
    TrayIcon.ContextMenu.MenuItems.Add(new MenuItem("-"));
    TrayIcon.ContextMenu.MenuItems.Add(new MenuItem("Volume control options", (o, e) => { Process.Start(SystemDir + "sndvol.exe", "-p"); }));

    // Defer the single-click action for one double-click interval so a
    // double-click can cancel it.
    SingleClickWindow = new Timer();
    SingleClickWindow.Interval = SystemInformation.DoubleClickTime;
    SingleClickWindow.Tick += (o, e) =>
    {
        SingleClickWindow.Stop();
        StartVolControl();
    };

    Application.Run();
}
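// The IconFromVolume/ToolTipFromVolume helpers referenced above are not shown
// in the source. A minimal sketch of what ToolTipFromVolume might look like,
// assuming it reads the default endpoint's scalar volume (this is an
// illustration, not the original implementation):
private static string ToolTipFromVolume()
{
    int percent = (int)Math.Round(DefaultMediaDevice.AudioEndpointVolume.MasterVolumeLevelScalar * 100);
    return DefaultMediaDevice.AudioEndpointVolume.Mute ? "Muted" : "Volume: " + percent + "%";
}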
/// <summary>
/// Creates a new WASAPI Output
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode">Share mode to use</param>
/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
/// <param name="latency">Desired latency in milliseconds</param>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
{
    audioClient = device.AudioClient;
    this.shareMode = shareMode;
    isUsingEventSync = useEventSync;
    latencyMilliseconds = latency;
}
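// A minimal usage sketch for the constructor above (the file path and 200 ms
// latency are illustrative, not from the original source): play a file on
// the default render device in shared mode with event-driven sync.
using System;
using NAudio.CoreAudioApi;
using NAudio.Wave;

class WasapiOutExample
{
    static void Main()
    {
        var device = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
        using (var output = new WasapiOut(device, AudioClientShareMode.Shared, true, 200))
        using (var reader = new AudioFileReader("example.wav")) // hypothetical file
        {
            output.Init(reader);
            output.Play();
            Console.ReadKey(); // play until a key is pressed
        }
    }
}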
private void button2_Click(object sender, EventArgs e)
{
    NAudio.CoreAudioApi.MMDevice currentDevice = null;
    foreach (MMDevice d in new MMDeviceEnumerator().EnumerateAudioEndPoints(DataFlow.Render, DeviceState.Active))
    {
        if (d.FriendlyName == cDevice.Text)
        {
            currentDevice = d;
        }
    }
    if (currentDevice == null)
    {
        MessageBox.Show("Error: Device not found.");
        return;
    }

    // Validate the output folder before creating the recorder, so we don't
    // construct a capture we never use.
    if (!System.IO.Directory.Exists(txtOutput.Text))
    {
        MessageBox.Show("Error: Please choose a valid output path!");
        return;
    }
    FixOutputTextbox();

    recorder = new PlaybackRecorder(currentDevice);
    btnRecord.Enabled = false;
    btnStop.Enabled = true;
    currentOutput = GetOutputName();
    recorder.Record(currentOutput + "_TEMP.wav");
    this.Text = "[REC] audio-output-capture";
}
private void initAudioDev()
{
    NAudio.CoreAudioApi.MMDeviceEnumerator MMDE = new NAudio.CoreAudioApi.MMDeviceEnumerator();
    // Get all active render and capture devices
    NAudio.CoreAudioApi.MMDeviceCollection DevCol = MMDE.EnumerateAudioEndPoints(NAudio.CoreAudioApi.DataFlow.All, NAudio.CoreAudioApi.DeviceState.Active);
    // Loop through the devices until we find headphones or speakers
    foreach (NAudio.CoreAudioApi.MMDevice dev in DevCol)
    {
        try
        {
            if (dev.FriendlyName.Contains("Headphone") || dev.FriendlyName.Contains("Speakers"))
            {
                // Log its current master volume
                System.Diagnostics.Debug.Print("Volume of " + dev.FriendlyName + " is " + dev.AudioEndpointVolume.MasterVolumeLevel.ToString());
                audioDev = dev;
                return;
            }
        }
        catch (Exception ex)
        {
            // Some endpoints throw when their volume interface is queried
            System.Diagnostics.Debug.Print(dev.FriendlyName + " could not be queried: " + ex.Message);
        }
    }
}
public static void Connect(IPEndPoint endpoint, MMDevice device, ICodec codec)
{
    var config = new NetPeerConfiguration("airgap");
    _client = new NetClient(config);
    _client.RegisterReceivedCallback(MessageReceived);
    _client.Start();

    _waveIn = new WasapiLoopbackCapture(device);
    _codec = codec;
    _sourceFormat = _waveIn.WaveFormat;
    _targetFormat = new WaveFormat(_codec.SampleRate, _codec.Channels); // format to convert to
    _waveIn.DataAvailable += SendData;
    _waveIn.RecordingStopped += (sender, args) => Console.WriteLine("Stopped");
    // TODO: RecordingStopped is called when you change the audio device settings, should recover from that

    NetOutgoingMessage formatMsg = _client.CreateMessage();
    formatMsg.Write(_targetFormat.Channels);
    formatMsg.Write(_targetFormat.SampleRate);
    formatMsg.Write(codec.Name);
    _client.Connect(endpoint, formatMsg);
}
/// <summary>
/// Constructor for session panel creation.
/// </summary>
/// <param name="device">Selected device.</param>
/// <param name="session">Current session of device.</param>
public VolumePanel(MMDevice device, AudioSessionControl session)
{
    this.devicePanel = false;
    this.device = device;
    this.session = session;
    InitializeComponent();
    cmbDevice.Visible = false;
    if (session.IsSystemSoundsSession)
    {
        lblName.Text = "System Sounds";
        pbProcessIcon.Visible = false;
        btnSoundProperties.Visible = true;
        // IconPath has the form "path,resourceIndex"
        var iconAddress = session.IconPath.Split(',');
        var icon = IconExtractor.Extract(iconAddress[0], int.Parse(iconAddress[1]), true);
        if (icon != null)
            btnSoundProperties.Image = icon.ToBitmap();
        tooltip.SetToolTip(btnSoundProperties, lblName.Text);
    }
    else
    {
        // Only resolve the owning process for real application sessions;
        // the system sounds session has no meaningful process ID.
        Process process = Process.GetProcessById((int)session.GetProcessID);
        pbProcessIcon.Image = Icon.ExtractAssociatedIcon(process.MainModule.FileName).ToBitmap();
        lblName.Text = process.MainWindowTitle != "" ? process.MainWindowTitle : process.ProcessName;
        pbProcessIcon.Visible = true;
        btnSoundProperties.Visible = false;
        tooltip.SetToolTip(pbProcessIcon, lblName.Text);
    }
    tooltip.SetToolTip(lblName, lblName.Text);
    session.RegisterEventClient(this);
    UpdateVolume();
    UpdateMuted();
}
/// <summary>
/// Create an input
/// </summary>
/// <param name="ID">The ID of the input to be created</param>
public Input(string ID)
{
    // Set the device ID
    deviceID = ID;

    // Get Device from specified ID
    MMDeviceEnumerator devices = new MMDeviceEnumerator();
    device = devices.GetDevice(ID);

    // Set wave in to WASAPI capture of the specified device
    waveIn = new WasapiCapture(device);

    // Set the number of bytes used by each sample
    sampleByteSize = waveIn.WaveFormat.BitsPerSample / 8;

    // Add event handler to retrieve samples from the device
    waveIn.DataAvailable += waveIn_DataAvailable;

    // Create buffered wave provider
    bufferedWaveProvider = new BufferedWaveProvider(waveIn.WaveFormat);
    bufferedWaveProvider.DiscardOnBufferOverflow = true;

    // Create sample channel
    sampleChannel = new SampleChannel(bufferedWaveProvider);

    // Attach a stream-volume meter ahead of the volume stage
    sampleChannel.PreVolumeMeter += sampleProvider_StreamVolume;

    // Start recording
    try
    {
        waveIn.StartRecording();
    }
    catch
    {
        throw new ArgumentException("This input device is not supported.");
    }
}
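// A hypothetical caller for the Input class above: list the active capture
// endpoints and construct an Input from the first one's ID.
var enumerator = new MMDeviceEnumerator();
MMDevice first = null;
foreach (var dev in enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active))
{
    Console.WriteLine(dev.ID + " : " + dev.FriendlyName);
    if (first == null) first = dev;
}
if (first != null)
{
    var input = new Input(first.ID);
}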
/// <summary>
/// Creates a new WASAPI Output
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode">Share mode to use</param>
/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
/// <param name="latency">Desired latency in milliseconds</param>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
{
    this.audioClient = device.AudioClient;
    this.shareMode = shareMode;
    this.isUsingEventSync = useEventSync;
    this.latencyMilliseconds = latency;
    this.syncContext = SynchronizationContext.Current;
}
public static void VolumeDown(int parDownPercent)
{
    NAudio.CoreAudioApi.MMDeviceEnumerator MMDE = new NAudio.CoreAudioApi.MMDeviceEnumerator();
    // Get the default render device for the Communications role
    NAudio.CoreAudioApi.MMDevice dev = MMDE.GetDefaultAudioEndpoint(NAudio.CoreAudioApi.DataFlow.Render, NAudio.CoreAudioApi.Role.Communications);
    // Note: VolumeStepDown lowers the volume by one system-defined step;
    // parDownPercent is not used by this implementation.
    dev.AudioEndpointVolume.VolumeStepDown();
}
public static void SwitchMute()
{
    NAudio.CoreAudioApi.MMDeviceEnumerator MMDE = new NAudio.CoreAudioApi.MMDeviceEnumerator();
    // Get the default render device for the Communications role
    NAudio.CoreAudioApi.MMDevice dev = MMDE.GetDefaultAudioEndpoint(NAudio.CoreAudioApi.DataFlow.Render, NAudio.CoreAudioApi.Role.Communications);
    // Toggle the mute state
    dev.AudioEndpointVolume.Mute = !dev.AudioEndpointVolume.Mute;
}
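// A matching VolumeUp helper under the same assumptions (default
// Communications render device); it is a companion sketch, not part of the
// original source. VolumeStepUp raises the volume by one system-defined
// step, mirroring VolumeStepDown above.
public static void VolumeUp()
{
    var dev = new NAudio.CoreAudioApi.MMDeviceEnumerator()
        .GetDefaultAudioEndpoint(NAudio.CoreAudioApi.DataFlow.Render, NAudio.CoreAudioApi.Role.Communications);
    dev.AudioEndpointVolume.VolumeStepUp();
}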
/// <summary>
/// Constructor for device panel creation.
/// </summary>
public VolumePanel()
{
    this.devicePanel = true;
    var deviceEnumerator = new MMDeviceEnumerator();
    device = deviceEnumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    InitializeComponent();
}
public string md5VolumeLabel; // Name used to register volume label

#endregion Fields

#region Constructors

public AudioDevice(MMDevice _device)
{
    this.device = _device;
    md5DeviceID = Utilities.Md5(_device.ID);
    md5PanelName = "panel" + md5DeviceID;
    md5ProgbarName = "progbar" + md5DeviceID;
    md5VolumeLabel = "volumeLabel" + md5DeviceID;
    md5FriendlyLabel = "friendlyLabel" + md5DeviceID;
}
/// <summary>
/// Creates a new WASAPI Output
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode">Share mode to use</param>
/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
/// <param name="latency">Desired latency in milliseconds</param>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
{
    audioClient = device.AudioClient;
    mmDevice = device;
    this.shareMode = shareMode;
    isUsingEventSync = useEventSync;
    latencyMilliseconds = latency;
    syncContext = SynchronizationContext.Current;
    outputFormat = audioClient.MixFormat; // allow the user to query the default format for shared mode streams
}
public RealTimeSoundData()
{
    var enumerator = new MMDeviceEnumerator();
    var captureDevices = enumerator.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active).ToArray();
    var defaultDevice = enumerator.GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);
    device = captureDevices.FirstOrDefault(c => c.ID == defaultDevice.ID);
    capture = new WasapiCapture(device);
    context = SynchronizationContext.Current;
    capture.DataAvailable += Capture_DataAvailable;
}
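// The Capture_DataAvailable handler wired above is not shown in the source.
// A sketch of what it might do, assuming the shared-mode mix format delivers
// 32-bit IEEE float samples; OnPeak is a hypothetical consumer.
private void Capture_DataAvailable(object sender, WaveInEventArgs e)
{
    float peak = 0f;
    for (int i = 0; i + 4 <= e.BytesRecorded; i += 4)
    {
        float sample = Math.Abs(BitConverter.ToSingle(e.Buffer, i));
        if (sample > peak) peak = sample;
    }
    // Marshal the value back to the captured synchronization context
    context?.Post(_ => OnPeak(peak), null); // OnPeak is hypothetical
}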
/// <summary>
/// Initializes a new instance of the <see cref="WasapiCapture"/> class.
/// </summary>
/// <param name="captureDevice">The capture device.</param>
/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
public WasapiCapture(MMDevice captureDevice, bool useEventSync)
{
    syncContext = SynchronizationContext.Current;
    audioClient = captureDevice.AudioClient;
    ShareMode = AudioClientShareMode.Shared;
    isUsingEventSync = useEventSync;
    waveFormat = audioClient.MixFormat;
}
public void Initalize(MMDevice audioDevice, int fftLength, int bufferLenght)
{
    this.audioDevice = audioDevice;
    this.bufferLenght = bufferLenght;
    Capture = new WasapiLoopbackCapture(audioDevice);
    SampleAggregator = new SampleAggregator(fftLength);
    SampleAggregator.FftCalculated += new EventHandler<FftEventArgs>(FftCalculated);
    SampleAggregator.PerformFFT = true;
    //capture.ShareMode = AudioClientShareMode.Shared;
}
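// The FftCalculated handler wired above is not shown. A sketch, assuming the
// widely copied NAudio SampleAggregator sample where FftEventArgs exposes a
// Complex[] Result with float X/Y components (an assumption about this code):
private void FftCalculated(object sender, FftEventArgs e)
{
    // Only the first half of the FFT bins carry unique spectrum information
    for (int i = 0; i < e.Result.Length / 2; i++)
    {
        double magnitude = Math.Sqrt(e.Result[i].X * e.Result[i].X + e.Result[i].Y * e.Result[i].Y);
        double db = 20 * Math.Log10(magnitude + 1e-12); // epsilon avoids log(0)
        // ... feed 'db' into the visualization (not shown in the source)
    }
}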
public SoundCardRecorder(MMDevice device, string filePath, string song)
{
    Device = device;
    FilePath = filePath;
    Song = song;
    _waveIn = new WasapiCapture(Device);
    _writer = new WaveFileWriter(FilePath, _waveIn.WaveFormat);
    _waveIn.DataAvailable += OnDataAvailable;
}
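// Hypothetical Start/Stop companions for the recorder above (the originals
// are not shown). StopRecording is asynchronous, so the writer is disposed
// in RecordingStopped to ensure the WAV header is finalized after the last
// buffer arrives; a typical OnDataAvailable body is also sketched.
public void Start()
{
    _waveIn.RecordingStopped += (s, e) => { _writer.Dispose(); _waveIn.Dispose(); };
    _waveIn.StartRecording();
}

public void Stop()
{
    _waveIn.StopRecording();
}

private void OnDataAvailable(object sender, WaveInEventArgs e)
{
    // Append the captured bytes to the WAV file
    _writer.Write(e.Buffer, 0, e.BytesRecorded);
}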
/// <summary>
/// Creates a new WASAPI Output device
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode">Share mode to use</param>
/// <param name="latency">Latency in milliseconds</param>
public WasapiOutGuiThread(MMDevice device, AudioClientShareMode shareMode, int latency)
{
    audioClient = device.AudioClient;
    outputFormat = audioClient.MixFormat;
    this.shareMode = shareMode;
    latencyMilliseconds = latency;
    // Refill the buffer twice per latency period to avoid underruns
    timer = new Timer();
    timer.Tick += TimerOnTick;
    timer.Interval = latency / 2;
}
/// <summary>
/// Initializes a new instance of the <see cref="WasapiCapture" /> class.
/// </summary>
/// <param name="captureDevice">The capture device.</param>
/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
/// <param name="audioBufferMillisecondsLength">Length of the audio buffer in milliseconds. A lower value means lower latency but increased CPU usage.</param>
public WasapiCapture(MMDevice captureDevice, bool useEventSync, int audioBufferMillisecondsLength)
{
    syncContext = SynchronizationContext.Current;
    audioClient = captureDevice.AudioClient;
    ShareMode = AudioClientShareMode.Shared;
    isUsingEventSync = useEventSync;
    this.audioBufferMillisecondsLength = audioBufferMillisecondsLength;
    waveFormat = audioClient.MixFormat;
}
public AudioRecordingService(ISettings settings)
{
    this._settings = settings;
    this._actualDevice = new MMDeviceEnumerator()
        .EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active)
        .FirstOrDefault(f => f.DeviceFriendlyName == this._settings.RecorderDeviceName);
    if (this._actualDevice == null)
        throw new SpotifyRecorderException("Recording device not found.");
}
public AudioRecorder(ISettings settings, MMDevice device, Song song)
{
    this._settings = settings;
    this.Song = song;
    this._capture = new WasapiCapture(device);
    this._capture.DataAvailable += this.CaptureOnDataAvailable;
    this._fileName = Path.GetTempFileName();
    this._writer = new WaveFileWriter(this._fileName, this._capture.WaveFormat);
}
public VolumeControl()
{
    InitializeComponent();
    MMDeviceEnumerator deviceEnumerator = new MMDeviceEnumerator();
    device = deviceEnumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    tbVolume.Value = (int)Math.Round(device.AudioEndpointVolume.MasterVolumeLevelScalar * 100);
    if (device.AudioEndpointVolume.Mute)
    {
        btnMuteUnmute.Image = Properties.Resources.Mute;
    }
}
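// A sketch of the slider handler this control presumably wires up
// (tbVolume_Scroll is hypothetical, not from the source): map the 0-100
// trackbar value back to the 0.0-1.0 scalar used by AudioEndpointVolume.
private void tbVolume_Scroll(object sender, EventArgs e)
{
    device.AudioEndpointVolume.MasterVolumeLevelScalar = tbVolume.Value / 100f;
}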
private void Form1_Load(object sender, EventArgs e)
{
    Program.NewKey += new Program.KeyHandler(keyEvent);
    // Anchor the window to the bottom-right corner of the working area
    Rectangle workingArea = Screen.GetWorkingArea(this);
    this.Location = new Point(workingArea.Right - Size.Width, workingArea.Bottom - Size.Height);
    device = new MMDeviceEnumerator().EnumerateAudioEndPoints(DataFlow.Render, DeviceState.Active).FirstOrDefault(d => d.ID == "{0.0.0.00000000}.{a14f7b68-1bb8-4de3-bfd6-38fc5fc43b2d}");
    device2 = new MMDeviceEnumerator().EnumerateAudioEndPoints(DataFlow.Render, DeviceState.Active).FirstOrDefault(d => d.ID == "{0.0.0.00000000}.{4ac56367-03a9-474e-b1e3-c6dfb1f36aad}"); //{0.0.0.00000000}.{234779d1-4c87-4bc3-88c9-e03ea41dfbd0}
    update();
    showFile();
    tmrHide.Enabled = true;
}
/// <summary>
/// Initializes the default audio device
/// </summary>
private void InitDevice()
{
    try
    {
        var devEnum = new MMDeviceEnumerator();
        _audioDevice = devEnum.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    }
    catch (Exception e)
    {
        MessageBox.Show("Error: " + e.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
public Mp3Player()
{
    _playlist = new LinkedList<Mp3FileReader>();
    _playbackTimer = new DispatcherTimer();
    _playbackTimer.Interval = new TimeSpan(1); // one tick (100 ns): fire as often as the dispatcher allows
    _playbackTimer.Tick += PlaybackTimer_Tick;
    _playbackDevice = GetPlaybackDevice();
    _playbackDevice.AudioEndpointVolume.OnVolumeNotification += AudioEndpointVolume_OnVolumeNotification;
    _playbackDevice.AudioSessionManager.OnSessionCreated += AudioSessionManager_OnSessionCreated;
    _isMuted = _playbackDevice.AudioEndpointVolume.Mute;
}
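// GetPlaybackDevice is not shown in the source. A plausible implementation,
// assuming the player targets the default multimedia render endpoint:
private static MMDevice GetPlaybackDevice()
{
    return new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
}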
public AudioType()
{
    _device = new MMDeviceEnumerator()
        .EnumerateAudioEndPoints(DataFlow.All, DeviceState.Active).FirstOrDefault();
    _sampleAggregator.FftCalculated += FftCalculated;
    _sampleAggregator.PerformFFT = true;

    // Start listening for sound data. Note: the parameterless
    // WasapiLoopbackCapture always captures the default render device,
    // regardless of the _device looked up above.
    _waveIn = new WasapiLoopbackCapture();
    _waveIn.DataAvailable += OnDataAvailable;
    _waveIn.StartRecording();
}
public static void Update()
{
    if (device == null)
    {
        MMDeviceEnumerator de = new MMDeviceEnumerator();
        device = de.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    }
    if (ts == null)
    {
        ts = new ThreadStart(getVolume);
    }
    if (!threadRunning)
    {
        thread = new Thread(ts);
        threadRunning = true;
        thread.Start();
    }
}
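// The getVolume worker started above is not shown. A sketch of what it might
// do: poll the endpoint's peak meter while the thread is allowed to run (the
// polling interval and consumer are assumptions).
private static void getVolume()
{
    while (threadRunning)
    {
        float peak = device.AudioMeterInformation.MasterPeakValue; // 0.0 to 1.0
        // ... publish 'peak' to whatever consumes it (not shown in the source)
        Thread.Sleep(50);
    }
}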
private void btnUseDevice_Click(object sender, EventArgs e)
{
    foreach (MMDevice device in devices)
    {
        if (device.FriendlyName == cmbDeviceList.SelectedItem.ToString())
        {
            activeDevice = device;
            break;
        }
    }
    if (activeDevice == null)
    {
        MessageBox.Show("Could not load selected audio device", "Error loading audio device", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
        return;
    }
    tmrRefreshVolumeBar.Enabled = true;
}
/// <summary>
/// Creates a new WASAPI Output
/// </summary>
/// <param name="device">Device to use</param>
/// <param name="shareMode">Share mode to use</param>
/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
/// <param name="latency">Desired latency in milliseconds</param>
/// <param name="pcm">PCM configuration (sample rate, bit depth, channel count)</param>
public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency, AudioPCMConfig pcm)
{
    this.audioClient = device.AudioClient;
    this.shareMode = shareMode;
    this.isUsingEventSync = useEventSync;
    this.latencyMilliseconds = latency;
    this.pcm = pcm;
    this.outputFormat = new NAudio.Wave.WaveFormatExtensible(pcm.SampleRate, pcm.BitsPerSample, pcm.ChannelCount);
    NAudio.Wave.WaveFormatExtensible closestSampleRateFormat;
    if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
        throw new NotSupportedException("PCM format mismatch");
    Init();
    bufferFrameCount = audioClient.BufferSize;
    readBuffers = new AudioBuffer[2];
    readBuffers[0] = new AudioBuffer(pcm, bufferFrameCount);
    readBuffers[1] = new AudioBuffer(pcm, bufferFrameCount);
    //if (this.shareMode == AudioClientShareMode.Exclusive)
    //    this.latencyMilliseconds = (int)(this.audioClient.DefaultDevicePeriod / 10000);
}
public static void Init(string id)
{
    try
    {
        if (id != string.Empty)
        {
            // Use the supplied device id
            _id = id;
            _recorddevice = new MMDeviceEnumerator().GetDevice(id);
        }
        else
        {
            // Fall back to the device id stored in the settings file
            _id = Core.Settings.IniReadValue("DEVICE", "device_id");
            _recorddevice = new MMDeviceEnumerator().GetDevice(_id);
        }
        GC.Collect();
    }
    catch (Exception)
    {
        // Invalid or missing id: fall back to the default render device
        _recorddevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    }
}
/// <summary>
/// Initialises a new instance of the WASAPI capture class
/// </summary>
/// <param name="captureDevice">Capture device to use</param>
public WasapiCapture(MMDevice captureDevice)
{
    syncContext = SynchronizationContext.Current;
    audioClient = captureDevice.AudioClient;
    ShareMode = AudioClientShareMode.Shared;
    waveFormat = audioClient.MixFormat;
    var wfe = waveFormat as WaveFormatExtensible;
    if (wfe != null)
    {
        try
        {
            waveFormat = wfe.ToStandardWaveFormat();
        }
        catch (InvalidOperationException)
        {
            // couldn't convert to a standard format
        }
    }
}
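// Why the conversion above helps (an illustrative note, not from the source):
// the shared-mode mix format is usually a WaveFormatExtensible wrapping IEEE
// float, and converting it to the equivalent standard WaveFormat keeps
// downstream consumers that only understand plain format tags happy.
var dev = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);
using (var capture = new WasapiCapture(dev))
{
    Console.WriteLine(capture.WaveFormat); // e.g. 32-bit IEEE float, 2 channels
}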
public void Init(frmCUEPlayer parent)
{
    MdiParent = parent;
    _device = WasapiOut.GetDefaultAudioEndpoint();
    _device.AudioEndpointVolume.OnVolumeNotification += new AudioEndpointVolumeNotificationDelegate(AudioEndpointVolume_OnVolumeNotification);
    mediaSliderVolume.Value = (int)(_device.AudioEndpointVolume.MasterVolumeLevelScalar * 100);
    //mediaSliderVolume.Maximum = (int)(_device.AudioEndpointVolume.VolumeRange);
    Show();

    int delay = 100;
    // Try 44.1 kHz output first; if that fails, fall back to 48 kHz and resample
    try
    {
        _player = new WasapiOut(_device, NAudio.CoreAudioApi.AudioClientShareMode.Shared, true, delay, new AudioPCMConfig(32, 2, 44100));
    }
    catch
    {
        _player = null;
    }
    if (_player == null)
    {
        try
        {
            _player = new WasapiOut(_device, NAudio.CoreAudioApi.AudioClientShareMode.Shared, true, delay, new AudioPCMConfig(32, 2, 48000));
            SOXResamplerConfig cfg;
            cfg.Quality = SOXResamplerQuality.Very;
            cfg.Phase = 50;
            cfg.AllowAliasing = false;
            cfg.Bandwidth = 0;
            _resampler = new SOXResampler(parent.Mixer.PCM, _player.PCM, cfg);
            resampled = new AudioBuffer(_player.PCM, parent.Mixer.BufferSize * 2 * parent.Mixer.PCM.SampleRate / _player.PCM.SampleRate);
        }
        catch (Exception ex)
        {
            _player = null;
            Trace.WriteLine(ex.Message);
        }
    }
    parent.Mixer.AudioRead += new EventHandler<AudioReadEventArgs>(Mixer_AudioRead);
    if (_player != null)
        _player.Play();
}
void waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    // Split the buffer into 8 segments and find the peak 16-bit sample in each
    float[] volume = new float[8];
    for (int i = 0; i < 8; i++)
    {
        volume[i] = 0f;
        for (int index = e.BytesRecorded * i / 8; index < e.BytesRecorded * (i + 1) / 8; index += 2)
        {
            short sample = (short)((e.Buffer[index + 1] << 8) | e.Buffer[index + 0]);
            float val = Math.Abs(sample / 32768f);
            if (val > volume[i])
            {
                volume[i] = val;
            }
        }
    }
    // Alternative: read the endpoint's meter instead of the captured buffer.
    // Creating an MMDeviceEnumerator on every callback is expensive and the
    // value was immediately overwritten below, so it is left disabled here.
    //var devEnum = new NAudio.CoreAudioApi.MMDeviceEnumerator();
    //var defaultDevice = devEnum.GetDefaultAudioEndpoint(NAudio.CoreAudioApi.DataFlow.Render, NAudio.CoreAudioApi.Role.Multimedia);
    //VolumeBar.Value = (100 - (defaultDevice.AudioMeterInformation.MasterPeakValue * 100f));
    VolumeBar.Value = (100 - (volume[3] * 100));
    Provider.AddSamples(e.Buffer, 0, e.BytesRecorded);
}
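// Worked example of the little-endian decode above: buffer bytes 0x34 then
// 0x12 combine to 0x1234 = 4660, which normalizes to 4660 / 32768 ≈ 0.142.
short sample = (short)((0x12 << 8) | 0x34); // 4660
float val = Math.Abs(sample / 32768f);      // ≈ 0.142f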
/// <summary>
/// Initialises a new instance of the WASAPI capture class
/// </summary>
/// <param name="captureDevice">Capture device to use</param>
public WasapiCapture(MMDevice captureDevice)
{
    this.audioClient = captureDevice.AudioClient;
    this.waveFormat = audioClient.MixFormat;
}
/// <summary>
/// Initialises a new instance of the WASAPI capture class
/// </summary>
/// <param name="captureDevice">Capture device to use</param>
public WasapiCapture(MMDevice captureDevice)
{
    this.audioClient = captureDevice.AudioClient;
    WaveFormat = audioClient.MixFormat;
}
/// <summary>
/// Initialises a new instance of the WASAPI capture class
/// </summary>
/// <param name="captureDevice">Capture device to use</param>
public WasapiCapture(MMDevice captureDevice)
    : this(captureDevice, 100)
{
}
public OutputDeviceModel(NAudio.CoreAudioApi.MMDevice device)
    : base(device)
{
}
/// <summary>
/// Initialises a new instance of the WASAPI capture class
/// </summary>
/// <param name="captureDevice">Capture device to use</param>
public WasapiCapture(MMDevice captureDevice)
    : this(captureDevice, false)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="WasapiCapture"/> class.
/// </summary>
/// <param name="captureDevice">The capture device.</param>
/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
public WasapiCapture(MMDevice captureDevice, bool useEventSync)
    : this(captureDevice, useEventSync, 100)
{
}
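// Putting the constructor chain above together (the 50 ms buffer and output
// path are illustrative, not from the source): event-driven capture written
// to a WAV file until recording is stopped.
var device = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Capture, Role.Console);
var capture = new WasapiCapture(device, true, 50);
var writer = new WaveFileWriter("capture.wav", capture.WaveFormat); // hypothetical path
capture.DataAvailable += (s, e) => writer.Write(e.Buffer, 0, e.BytesRecorded);
capture.RecordingStopped += (s, e) => { writer.Dispose(); capture.Dispose(); };
capture.StartRecording();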