/// <summary>
/// Sets up the capture→render loopback pipeline: resolves the default capture
/// and render endpoints for <c>Role</c>, wires the capture stream's wave
/// provider into the renderer, and subscribes to stream-stopped and
/// default-device-changed notifications.
/// Raises <c>OnStateChanged</c> with <c>EMicState.InitializeFailed</c> when
/// either default endpoint is missing, or <c>EMicState.Initialized</c> on
/// success. Idempotent: returns immediately if already initialized.
/// </summary>
public void Initialize()
{
    if (Initialized)
    {
        return;
    }

    // Resolve the current default endpoints for the configured role.
    var deviceCapture = deviceEnumerator.GetDefaultAudioEndpoint(EDataFlow.eCapture, Role);
    var deviceRender = deviceEnumerator.GetDefaultAudioEndpoint(EDataFlow.eRender, Role);
    if (deviceCapture == null || deviceRender == null)
    {
        OnStateChanged?.Invoke(EMicState.InitializeFailed);
        return;
    }

    capture = new WasapiCapture(deviceCapture);                  // prepare the capture device
    render = new WasapiRender(deviceRender, ShareMode, true, 0); // prepare the render device

    capture.Initialize();
    render.Initialize(capture.WaveProvider); // route captured audio into the renderer

    // NOTE(review): both streams funnel into OnCaptureStopped — presumably a
    // stop on either side tears down the whole pipeline; confirm the handler
    // copes with being raised by the render stream as well.
    capture.StoppedEvent += OnCaptureStopped;
    render.StoppedEvent += OnCaptureStopped;

    Debug.WriteLine($"capture format:{capture.WaveFormat}");
    Debug.WriteLine($"render format:{render.WaveFormat}");

    // React when the system default device changes (handled in DeviceChanged).
    deviceEnumerator.OnDefaultDeviceChanged += DeviceChanged;

    Initialized = true;
    OnStateChanged?.Invoke(EMicState.Initialized);
}
/// <summary>
/// Debug/smoke-test entry point: builds a fresh capture→render loopback on the
/// default console-role endpoints, plays it for two seconds on a background
/// task, then stops both streams. Unlike <c>Initialize</c>, this does not set
/// <c>Initialized</c> or raise <c>OnStateChanged</c> events.
/// </summary>
public void Start()
{
    const int LoopbackDurationMs = 2000; // how long the test loopback runs

    var deviceEnumerator = MMDeviceEnumerator.GetInstance();

    // Resolve the default console-role endpoints; bail out if either is
    // missing (mirrors the guard in Initialize).
    var deviceCapture = deviceEnumerator.GetDefaultAudioEndpoint(EDataFlow.eCapture, ERole.eConsole);
    var deviceRender = deviceEnumerator.GetDefaultAudioEndpoint(EDataFlow.eRender, ERole.eConsole);
    if (deviceCapture == null || deviceRender == null)
    {
        Debug.WriteLine("Start: default capture or render endpoint not found.");
        return;
    }

    PutDeviceInfo(deviceCapture);

    capture = new WasapiCapture(deviceCapture);                                     // prepare the capture device
    render = new WasapiRender(deviceRender, EAudioClientShareMode.Shared, true, 0); // prepare the render device

    capture.Initialize();
    render.Initialize(capture.WaveProvider); // route captured audio into the renderer

    // Fire-and-forget: run the loopback briefly, then stop. The try/catch
    // keeps a failure on the background task from becoming an unobserved
    // task exception.
    Task.Run(async () =>
    {
        try
        {
            capture.Start();
            render.Play();

            Debug.WriteLine($"capture:{capture.WaveFormat}");
            Debug.WriteLine($"render :{render.WaveFormat}");

            await Task.Delay(LoopbackDurationMs);

            render.Stop();
            capture.Stop();
        }
        catch (Exception ex)
        {
            Debug.WriteLine($"Start loopback failed: {ex}");
        }
    });
}