public void Setup(AudioCaptureDevice captDeviceSettings)
        {
            CaptDeviceSettings = captDeviceSettings;

            captureProps = (WasapiCaptureProperties)CaptDeviceSettings.Properties;

            exclusiveModeCheckBox.Checked = captureProps.ExclusiveMode;

            var bufferMsec = captureProps.BufferMilliseconds;

            if (bufferMsec > bufferSizeNumeric.Maximum)
            {
                bufferMsec = (int)bufferSizeNumeric.Maximum;
            }
            else if (bufferMsec < bufferSizeNumeric.Minimum)
            {
                bufferMsec = (int)bufferSizeNumeric.Minimum;
            }
            bufferSizeNumeric.Value = bufferMsec;


            eventSyncModeCheckBox.Checked = captureProps.EventSyncMode;
            //showCaptureBorderCheckBox.Checked = captureProps.ShowDebugBorder;
        }
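
The Setup above only copies WasapiCaptureProperties into the form controls. A hypothetical counterpart that reads the edited values back out might look like the sketch below (it assumes the same control names and that the properties have public setters; it is not part of the original class):

        public WasapiCaptureProperties GetProperties()
        {
            // Sketch only: mirror the control values back into a new properties object
            return new WasapiCaptureProperties
            {
                ExclusiveMode = exclusiveModeCheckBox.Checked,
                BufferMilliseconds = (int)bufferSizeNumeric.Value,
                EventSyncMode = eventSyncModeCheckBox.Checked,
            };
        }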
Example #2
        //public void Setup(string DeviceId, bool useEventSync = false, int audioBufferMillisecondsLength = 100, bool exclusiveMode = false)
        public void Setup(string deviceId, object captureProperties = null)
        {
            logger.Debug("AudioSourceEx::Setup(...) " + deviceId);

            if (captureState != CaptureState.Closed)
            {
                throw new InvalidOperationException("Invalid audio capture state " + captureState);
            }

            WasapiCaptureProperties wasapiCaptureProperties = captureProperties as WasapiCaptureProperties ?? new WasapiCaptureProperties();

            using (var deviceEnum = new MMDeviceEnumerator())
            {
                var mmDevices = deviceEnum.EnumerateAudioEndPoints(DataFlow.All, DeviceState.Active);

                // Keep the endpoint whose ID matches; dispose every other device we enumerated
                for (int i = 0; i < mmDevices.Count; i++)
                {
                    var d = mmDevices[i];
                    if (d.ID == deviceId)
                    {
                        captureDevice = d;
                        continue;
                    }
                    d.Dispose();
                }
            }

            if (captureDevice == null)
            {
                throw new Exception("MMDevice not found: " + deviceId);
            }

            this.isUsingEventSync = wasapiCaptureProperties.EventSyncMode;
            this.audioBufferMillisecondsLength = wasapiCaptureProperties.BufferMilliseconds;

            this.audioClient = captureDevice.AudioClient;
            this.ShareMode   = wasapiCaptureProperties.ExclusiveMode ? AudioClientShareMode.Exclusive : AudioClientShareMode.Shared;

            this.waveFormat = audioClient.MixFormat;

            long requestedDuration = ReftimesPerMillisec * audioBufferMillisecondsLength;

            if (!audioClient.IsFormatSupported(ShareMode, waveFormat))
            {
                throw new ArgumentException("Unsupported Wave Format");
            }

            try
            {
                var streamFlags = AudioClientStreamFlags.None;
                if (captureDevice.DataFlow != DataFlow.Capture)
                {
                    // Render endpoints are captured via WASAPI loopback
                    streamFlags = AudioClientStreamFlags.Loopback;
                }

                // If using event sync, initialization depends on the share mode
                if (isUsingEventSync)
                {
                    var flags = AudioClientStreamFlags.EventCallback | streamFlags;

                    // Init Shared or Exclusive
                    if (ShareMode == AudioClientShareMode.Shared)
                    {
                        // With EventCallback and Shared, the periodicity must be 0
                        audioClient.Initialize(ShareMode, flags, requestedDuration, 0, waveFormat, Guid.Empty);
                    }
                    else
                    {
                        // With EventCallback and Exclusive, both latencies must be equal
                        audioClient.Initialize(ShareMode, flags, requestedDuration, requestedDuration, waveFormat, Guid.Empty);
                    }

                    // Create the Wait Event Handle
                    frameEventWaitHandle = new AutoResetEvent(false);
                    audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
                }
                else
                {
                    // Normal setup for both share modes
                    audioClient.Initialize(ShareMode, streamFlags, requestedDuration, 0, waveFormat, Guid.Empty);
                }

                int bufferFrameCount = audioClient.BufferSize;
                bytesPerFrame = waveFormat.Channels * waveFormat.BitsPerSample / 8;
                recordBuffer  = new byte[bufferFrameCount * bytesPerFrame];

                captureState = CaptureState.Initialized;
            }
            catch (Exception ex)
            {
                logger.Error(ex);

                CleanUp();

                throw;
            }
        }
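
A minimal usage sketch for the second Setup overload, assuming the containing class is named AudioSourceEx and that NAudio's CoreAudioApi types are in scope (the helper name and the default-endpoint choice are assumptions, not part of the original code):

        // using NAudio.CoreAudioApi;
        static void ConfigureLoopbackCapture(AudioSourceEx source)
        {
            // Pick the default render endpoint; Setup enables loopback for non-capture devices
            string deviceId;
            using (var enumerator = new MMDeviceEnumerator())
            using (var device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Console))
            {
                deviceId = device.ID;
            }

            var props = new WasapiCaptureProperties
            {
                EventSyncMode = true,        // event-driven capture
                BufferMilliseconds = 100,    // requested buffer length
                ExclusiveMode = false,       // shared-mode WASAPI
            };

            // Throws if the device is missing or the mix format is not supported
            source.Setup(deviceId, props);
        }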