/// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually equal to audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It says: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Check from best Format to worst format ( Float32, Int24, Int16 )
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still null, fall back to 16-bit PCM, 2 channels
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // create the resampler up front so we know the conversion is possible
                resamplerDmoStream = new ResamplerDmoStream(waveProvider, outputFormat);
                sourceProvider     = resamplerDmoStream;
            }
            else
            {
                sourceProvider = waveProvider;
            }

            // Normal setup for both share modes
            audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                   outputFormat, Guid.Empty);


            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;

            // set up the read buffer
            bufferFrameCount = audioClient.BufferSize;
            bytesPerFrame    = outputFormat.Channels * outputFormat.BitsPerSample / 8;
            readBuffer       = new byte[bufferFrameCount * bytesPerFrame];
        }
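A quick usage sketch for context: this is roughly how an Init method like the one above is driven from calling code, assuming an NAudio-style WasapiOut (constructor taking a share mode and latency, plus Play/Dispose) and an AudioFileReader as the source. The names here are illustrative and not part of the listing itself.

        // Illustrative only: assumes NAudio-style WasapiOut / AudioFileReader APIs.
        using (var reader = new AudioFileReader("test.wav"))
        using (var output = new WasapiOut(AudioClientShareMode.Shared, 200))
        {
            output.Init(reader);   // runs the format negotiation shown above
            output.Play();
            while (output.PlaybackState == PlaybackState.Playing)
            {
                System.Threading.Thread.Sleep(100);
            }
        }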
        public void SelectDevice(string devId)
        {
            _capDevice = _devices.GetDevice(devId.Trim());

            if (_capDevice == null)
            {
                _audioClient = null;
                _capClient = null;
                return;
            }

            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Exclusive;
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;

            if (_audioClient != null)
                _capDevice.ReleaseAudioClient();

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat = _audioClient.MixFormat;

                _capFormat.wFormatTag = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels = 2;
                _capFormat.nSamplesPerSec = 16000;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialization
                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                        _capFormat = tmpFmt;
                    _audioClient.Initialize(shareMode, streamFlags,
                        tmp2, tmp2,
                        _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    try
                    {
                        AudioClientError error = (AudioClientError)ex.ErrorCode;
                        switch (error)
                        {
                            case AudioClientError.BufferSizeNotAligned:
                                uint bufSize = _audioClient.BufferSize;
                                tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                                _audioClient.Initialize(shareMode,
                                    streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                                break;

                            case AudioClientError.UnsupportedFormat:

                                break;
                        }
                    }
                    catch (InvalidCastException)
                    {

                    }
                }

                _capClient = _audioClient.AudioCaptureClient;

            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
                _audioClient = null;
                _capClient = null;
                throw;
            }
        }
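The capture example above recovers from BufferSizeNotAligned by recomputing the requested device period from the buffer size WASAPI actually allocated. The arithmetic (1 ms = 10,000 REFERENCE_TIME units of 100 ns each) can be kept in a small helper; a sketch using only the formula already present in the catch block:

        // Aligned buffer duration in REFERENCE_TIME (100-ns) units:
        // 10,000,000 * bufferFrameCount / samplesPerSec, rounded to nearest.
        private static long AlignedBufferDuration(uint bufferFrameCount, uint samplesPerSec)
        {
            return (long)((10000.0 * 1000 / samplesPerSec * bufferFrameCount) + 0.5);
        }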
Example #3
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually equal to audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It says: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    outputFormat = GetFallbackFormat();
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                try
                {
                    // just check that we can make it.
                    using (new ResamplerDmoStream(waveProvider, outputFormat))
                    {
                    }
                }
                catch (Exception)
                {
                    // On Windows 10, some poorly coded drivers return a bad format in closestSampleRateFormat.
                    // In that case, try to fall back as if no closest format had been provided (i.e. force the mix format).
                    outputFormat = GetFallbackFormat();
                    using (new ResamplerDmoStream(waveProvider, outputFormat))
                    {
                    }
                }
                dmoResamplerNeeded = true;
            }
            else
            {
                dmoResamplerNeeded = false;
            }
            sourceProvider = waveProvider;

            // If using EventSync, the setup depends on the shareMode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallBack and Shared, both latencies must be set to 0 (update - not sure this is true anymore)
                    //
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                           outputFormat, Guid.Empty);

                    // Windows 10 returns 0 from stream latency, resulting in maxing out CPU usage later
                    var streamLatency = audioClient.StreamLatency;
                    if (streamLatency != 0)
                    {
                        // Get back the effective latency from AudioClient
                        latencyMilliseconds = (int)(streamLatency / 10000);
                    }
                }
                else
                {
                    try
                    {
                        // With EventCallBack and Exclusive, both latencies must be equal
                        audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                               outputFormat, Guid.Empty);
                    }
                    catch (COMException ex)
                    {
                        // Starting with Windows 7, Initialize can return AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED for a render device.
                        // We need to initialize again.
                        if (ex.ErrorCode != ErrorCodes.AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED)
                        {
                            throw;
                        }

                        // Calculate the new latency.
                        long newLatencyRefTimes = (long)(10000000.0 /
                                                         (double)this.outputFormat.SampleRate *
                                                         (double)this.audioClient.BufferSize + 0.5);

                        this.audioClient.Dispose();
                        this.audioClient = this.mmDevice.AudioClient;
                        this.audioClient.Initialize(this.shareMode, AudioClientStreamFlags.EventCallback,
                                                    newLatencyRefTimes, newLatencyRefTimes, this.outputFormat, Guid.Empty);
                    }
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                // Normal setup for both sharedMode
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);
            }

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
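Example #3 calls a GetFallbackFormat helper that is not included in this listing. A plausible sketch, reconstructed from the inline fallback chain in the first example (mix format first, then 32/24/16-bit at the source sample rate, then 16-bit stereo); this is an assumption, not the verbatim library code:

        private WaveFormat GetFallbackFormat()
        {
            // Hypothetical reconstruction based on the inline fallback logic shown earlier.
            WaveFormat correctSampleRateFormat = audioClient.MixFormat;
            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
            {
                return correctSampleRateFormat;
            }

            // Try the source sample rate at decreasing bit depths (Float32, Int24, Int16)
            foreach (var bitDepth in new[] { 32, 24, 16 })
            {
                var candidate = new WaveFormatExtensible(outputFormat.SampleRate, bitDepth, outputFormat.Channels);
                if (audioClient.IsFormatSupported(shareMode, candidate))
                {
                    return candidate;
                }
            }

            // Last chance: 16-bit PCM, 2 channels
            var lastChance = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
            if (audioClient.IsFormatSupported(shareMode, lastChance))
            {
                return lastChance;
            }
            throw new NotSupportedException("Can't find a supported format to use");
        }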
Example #4
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually equal to audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It says: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    /*WaveFormat.CreateIeeeFloatWaveFormat(
                     * audioClient.MixFormat.SampleRate,
                     * audioClient.MixFormat.Channels);*/

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Check from best Format to worst format ( Float32, Int24, Int16 )
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still null, fall back to 16-bit PCM, 2 channels
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // just check that we can make it.
                using (new ResamplerDmoStream(waveProvider, outputFormat))
                {
                }
                this.dmoResamplerNeeded = true;
            }
            else
            {
                dmoResamplerNeeded = false;
            }
            this.sourceProvider = waveProvider;

            // If using EventSync, the setup depends on the shareMode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallBack and Shared, both latencies must be set to 0
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, 0, 0,
                                           outputFormat, Guid.Empty);

                    // Get back the effective latency from AudioClient
                    latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
                }
                else
                {
                    // With EventCallBack and Exclusive, both latencies must be equal
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                           outputFormat, Guid.Empty);
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                // Normal setup for both share modes
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);
            }

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
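Examples #3 and #4 only record dmoResamplerNeeded here; the resampler itself is typically created later, on the playback thread. A minimal sketch of that hand-off, assuming the field names used in these snippets:

        // Sketch: wrap the source in a DMO resampler when the negotiated outputFormat
        // differs from the provider's own format (dmoResamplerNeeded == true).
        IWaveProvider playbackProvider = sourceProvider;
        ResamplerDmoStream resamplerDmoStream = null;
        if (dmoResamplerNeeded)
        {
            resamplerDmoStream = new ResamplerDmoStream(sourceProvider, outputFormat);
            playbackProvider = resamplerDmoStream;
        }
        // ...fill the render buffer from playbackProvider, and dispose resamplerDmoStream
        // when playback stops.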
        private void InitializeInternal()
        {
            var defaultFormat = _waveFormat;

            _audioClient = AudioClient.FromMMDevice(Device);

            /*if (_shareMode == AudioClientShareMode.Exclusive)
             * {
             *  _waveFormat = _waveFormat ?? _audioClient.MixFormat;
             * }
             * else
             * {
             *  _waveFormat = _waveFormat ?? _audioClient.MixFormat;
             * }*/
            _waveFormat = _waveFormat ?? _audioClient.MixFormat;

            _waveFormat = SetupWaveFormat(_waveFormat, _audioClient);

            if (!_eventSync)
            {
                _audioClient.Initialize(_shareMode, AudioClientStreamFlags.None | GetStreamFlags(), _latency * ReftimesPerMillisecond, 0, _waveFormat, Guid.Empty);
            }
            else
            {
                if (_shareMode == AudioClientShareMode.Exclusive)
                {
                    try
                    {
                        _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), _latency * ReftimesPerMillisecond, _latency * ReftimesPerMillisecond, _waveFormat, Guid.Empty);
                    }
                    catch (CoreAudioAPIException e)
                    {
                        if (e.ErrorCode == unchecked ((int)0x88890019)) //AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                        {
                            int bufferSize = _audioClient.BufferSize;
                            _audioClient.Dispose();
                            long hnsRequestedDuration = (long)(((double)ReftimesPerMillisecond * 1000 / _waveFormat.SampleRate * bufferSize) + 0.5);
                            _audioClient = AudioClient.FromMMDevice(Device);
                            if (defaultFormat == null)
                            {
                                _waveFormat = _audioClient.MixFormat;
                            }
                            _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), hnsRequestedDuration, hnsRequestedDuration, _waveFormat, Guid.Empty);
                        }
                        else
                        {
                            // any other error should surface instead of being swallowed
                            throw;
                        }
                    }
                }
                else
                {
                    _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), 0, 0, _waveFormat, Guid.Empty);
                    if (_audioClient.StreamLatency > 0)
                    {
                        _latency = (int)(_audioClient.StreamLatency / ReftimesPerMillisecond);
                    }
                }

                _eventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                _audioClient.SetEventHandle(_eventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }

            _audioCaptureClient = AudioCaptureClient.FromAudioClient(_audioClient);
        }
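This InitializeInternal (and the render variant in Example #9 below) relies on a SetupWaveFormat helper that is not part of this listing. A rough sketch of what such a helper might do, following the IsFormatSupported/mix-format fallback pattern used throughout these examples; the signature and behaviour are assumptions, not the actual library code:

        // Hypothetical sketch of SetupWaveFormat, not the actual library helper.
        private WaveFormat SetupWaveFormat(WaveFormat requestedFormat, AudioClient audioClient)
        {
            WaveFormat format = requestedFormat ?? audioClient.MixFormat;
            if (!audioClient.IsFormatSupported(_shareMode, format))
            {
                // "In shared mode, the audio engine always supports the mix format"
                format = audioClient.MixFormat;
                if (!audioClient.IsFormatSupported(_shareMode, format))
                {
                    throw new NotSupportedException("Can't find a supported format to use");
                }
            }
            return format;
        }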
Example #6
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually equal to audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It says: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    /*WaveFormat.CreateIeeeFloatWaveFormat(
                     * audioClient.MixFormat.SampleRate,
                     * audioClient.MixFormat.Channels);*/

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Check from best Format to worst format ( Float32, Int24, Int16 )
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still null, fall back to 16-bit PCM, 2 channels
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // just check that we can make it.
                using (new ResamplerDmoStream(waveProvider, outputFormat))
                {
                }
                dmoResamplerNeeded = true;
            }
            else
            {
                dmoResamplerNeeded = false;
            }
            sourceProvider = waveProvider;

            // If using EventSync, the setup depends on the shareMode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallBack and Shared, both latencies must be set to 0 (update - not sure this is true anymore)
                    //
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                           outputFormat, Guid.Empty);

                    // Windows 10 returns 0 from stream latency, resulting in maxing out CPU usage later
                    var streamLatency = audioClient.StreamLatency;
                    if (streamLatency != 0)
                    {
                        // Get back the effective latency from AudioClient
                        latencyMilliseconds = (int)(streamLatency / 10000);
                    }
                }
                else
                {
                    try
                    {
                        // With EventCallBack and Exclusive, both latencies must be equal
                        audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                               outputFormat, Guid.Empty);
                    }
                    catch (COMException ex)
                    {
                        // Starting with Windows 7, Initialize can return AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED for a render device.
                        // We need to initialize again.
                        if (ex.ErrorCode != ErrorCodes.AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED)
                        {
                            throw;
                        }

                        // Calculate the new latency.
                        long newLatencyRefTimes = (long)(10000000.0 /
                                                         (double)this.outputFormat.SampleRate *
                                                         (double)this.audioClient.BufferSize + 0.5);

                        this.audioClient.Dispose();
                        this.audioClient = this.mmDevice.AudioClient;
                        this.audioClient.Initialize(this.shareMode, AudioClientStreamFlags.EventCallback,
                                                    newLatencyRefTimes, newLatencyRefTimes, this.outputFormat, Guid.Empty);
                    }
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                // Normal setup for both share modes
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);
            }

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
Example #7
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        private IWaveProvider Init()
        {
            var  waveProvider    = waveProviderFunc();
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually equal to audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It says: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    /*WaveFormat.CreateIeeeFloatWaveFormat(
                     * audioClient.MixFormat.SampleRate,
                     * audioClient.MixFormat.Channels);*/

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Check from best Format to worst format ( Float32, Int24, Int16 )
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still null, fall back to 16-bit PCM, 2 channels
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

            // just check that we can make it.
            //using (new MediaFoundationResampler(waveProvider, outputFormat))
            //{
            //}
                this.resamplerNeeded = true;
            }
            else
            {
                resamplerNeeded = false;
            }

            // Init Shared or Exclusive
            if (shareMode == AudioClientShareMode.Shared)
            {
                // With EventCallBack and Shared, the periodicity must be 0
                audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);

                // Get back the effective latency from AudioClient
                latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
            }
            else
            {
                // With EventCallBack and Exclusive, both latencies must be equal
                audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                       outputFormat, Guid.Empty);
            }

            // Create the Wait Event Handle
            frameEventWaitHandle = NativeMethods.CreateEventEx(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
            audioClient.SetEventHandle(frameEventWaitHandle);

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
            return waveProvider;
        }
Example #8
        private void selectDeviceImpl(string devId)
        {
            if (_capDevice != null && _capDevice.Id == devId)
            {
                return;
            }

            releaseDeviceImpl();

            _capDevice = _devices.GetDevice(devId.Trim());
            int idx = _deviceInfos.FindIndex((di) => { return di.DeviceId == devId; });
            if (_capDevice == null)
            {
            #warning Exception
                _audioClient = null;
                _capClient = null;
                return;
            }
            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Shared;

            // Determine the initialization flags appropriate for the device
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;
            switch (shareMode)
            {
                case AudioClientShareMode.Shared:
                    switch (_capDevice.DataFlow)
                    {
                        case EDataFlow.eCapture:
                            streamFlags = 0;
                            break;
                        case EDataFlow.eRender:
                            streamFlags = AudioClientStreamFlags.Loopback;
                            break;
                    }
                    break;
                case AudioClientShareMode.Exclusive:
                    streamFlags = AudioClientStreamFlags.NoPersist;
                    break;
            }

            // Format
            if (_audioClient != null) _capDevice.ReleaseAudioClient();

            // Volume
            _masterVolume = 0;
            _channelVolumes = new double[_capDevice.AudioMeterInformation.PeakValues.Count];
            var h = VolumeChanged;
            if (h != null)
            {
                h(this, new VolumeChangedEventArgs(_capDeviceId, _masterVolume, _channelVolumes));
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat = _audioClient.MixFormat;

                _capFormat.wFormatTag = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels = 1;
                _capFormat.nSamplesPerSec = 44100;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialization

                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt)) _capFormat = tmpFmt;
                    _audioClient.Initialize(shareMode,
                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    if ((uint)ex.ErrorCode == 0x88890019) // AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                    {
                        uint bufSize = _audioClient.BufferSize;
                        tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                        _audioClient.Initialize(shareMode,
                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                    }
                }
                clearBuffer();

                _capClient = _audioClient.AudioCaptureClient;

                // Fire the event
                var del = DeviceSelected;
                if (del != null)
                {
                    del.Invoke(this, new DeviceSelectedEventArgs(_capDevice, idx));
                }
            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
                _audioClient = null;
                _capClient = null;
                throw;
            }
        }
Example #9
        private void InitializeInternal()
        {
            const int reftimesPerMillisecond = 10000;

            _audioClient  = AudioClient.FromMMDevice(Device);
            _outputFormat = SetupWaveFormat(_source, _audioClient);

            long latency = _latency * reftimesPerMillisecond;

AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED_TRY_AGAIN:
            try
            {
                if (!_eventSync)
                {
                    _audioClient.Initialize(_shareMode, AudioClientStreamFlags.None, latency, 0, _outputFormat,
                                            Guid.Empty);
                }
                else //event sync
                {
                    if (_shareMode == AudioClientShareMode.Exclusive) //exclusive
                    {
                        _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback, latency,
                                                latency, _outputFormat, Guid.Empty);
                    }
                    else //shared
                    {
                        _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback, 0, 0,
                                                _outputFormat, Guid.Empty);
                        //latency = (int)(_audioClient.StreamLatency / reftimesPerMillisecond);
                    }
                }
            }
            catch (CoreAudioAPIException exception)
            {
                if (exception.ErrorCode == unchecked ((int)0x88890019)) //AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                {
                    const long reftimesPerSec = 10000000;
                    int        framesInBuffer = _audioClient.GetBufferSize();
                    // ReSharper disable once PossibleLossOfFraction
                    latency = (int)(reftimesPerSec * framesInBuffer / _outputFormat.SampleRate + 0.5);
                    goto AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED_TRY_AGAIN;
                }
                throw;
            }

            if (_audioClient.StreamLatency != 0) //windows 10 returns zero, got no idea why => https://github.com/filoe/cscore/issues/11
            {
                Latency = (int)(_audioClient.StreamLatency / reftimesPerMillisecond);
            }

            if (_eventSync)
            {
                _eventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                _audioClient.SetEventHandle(_eventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }

            _renderClient = AudioRenderClient.FromAudioClient(_audioClient);

            if (_streamSwitchEvent == null)
            {
                _streamSwitchEvent = new AutoResetEvent(false);
            }

            InitializeStreamRouting();
        }
Example #10
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually equal to audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It says: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    outputFormat = GetFallbackFormat();
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                if (!windowsN)
                {
                    try
                    {
                        // just check that we can make it.
                        using (new ResamplerDmoStream(waveProvider, outputFormat))
                        {
                        }
                    }
                    catch (Exception)
                    {
                        // On Windows 10, some poorly coded drivers return a bad format in closestSampleRateFormat.
                        // In that case, try to fall back as if no closest format had been provided (i.e. force the mix format).
                        outputFormat = GetFallbackFormat();
                        try
                        {
                            using (new ResamplerDmoStream(waveProvider, outputFormat))
                            {
                            }
                        }
                        catch (Exception)
                        {
                            // still something wrong - assume Windows N, where the DMO resampler is missing or broken
                            windowsN = true;
                        }
                    }
                }

                dmoResamplerNeeded = true;
            }
            else
            {
                dmoResamplerNeeded = false;
            }
            sourceProvider = waveProvider;

            // If using EventSync, the setup depends on the shareMode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallBack and Shared, both latencies must be set to 0 (update - not sure this is true anymore)
                    //
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                           outputFormat, Guid.Empty);

                    // Windows 10 returns 0 from stream latency, resulting in maxing out CPU usage later
                    var streamLatency = audioClient.StreamLatency;
                    if (streamLatency != 0)
                    {
                        // Get back the effective latency from AudioClient
                        latencyMilliseconds = (int)(streamLatency / 10000);
                    }
                }
                else
                {
                    // With EventCallBack and Exclusive, both latencies must be equal
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                           outputFormat, Guid.Empty);
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                // Normal setup for both share modes
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);
            }

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
Example #11
        public void SelectDevice(string devId)
        {
            _capDevice = _devices.GetDevice(devId.Trim());

            if (_capDevice == null)
            {
                _audioClient = null;
                _capClient   = null;
                return;
            }

            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode   shareMode   = AudioClientShareMode.Exclusive;
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;

            if (_audioClient != null)
            {
                _capDevice.ReleaseAudioClient();
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat   = _audioClient.MixFormat;

                _capFormat.wFormatTag     = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels      = 2;
                _capFormat.nSamplesPerSec = 16000;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat      = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign         = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec     = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialization
                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                    {
                        _capFormat = tmpFmt;
                    }
                    _audioClient.Initialize(shareMode, streamFlags,
                                            tmp2, tmp2,
                                            _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    try
                    {
                        AudioClientError error = (AudioClientError)ex.ErrorCode;
                        switch (error)
                        {
                        case AudioClientError.BufferSizeNotAligned:
                            uint bufSize = _audioClient.BufferSize;
                            tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                            _audioClient.Initialize(shareMode,
                                                    streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                            break;

                        case AudioClientError.UnsupportedFormat:

                            break;
                        }
                    }
                    catch (InvalidCastException)
                    {
                    }
                }

                _capClient = _audioClient.AudioCaptureClient;
            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
                _audioClient = null;
                _capClient   = null;
                throw;
            }
        }
Example #12
        private void selectDeviceImpl(string devId)
        {
            if (_capDevice != null && _capDevice.Id == devId)
            {
                return;
            }

            releaseDeviceImpl();

            _capDevice = _devices.GetDevice(devId.Trim());
            int idx = _deviceInfos.FindIndex((di) => { return(di.DeviceId == devId); });

            if (_capDevice == null)
            {
#warning Exception
                _audioClient = null;
                _capClient   = null;
                return;
            }
            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Shared;

            // Determine the initialization flags appropriate for the device
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;
            switch (shareMode)
            {
            case AudioClientShareMode.Shared:
                switch (_capDevice.DataFlow)
                {
                case EDataFlow.eCapture:
                    streamFlags = 0;
                    break;

                case EDataFlow.eRender:
                    streamFlags = AudioClientStreamFlags.Loopback;
                    break;
                }
                break;

            case AudioClientShareMode.Exclusive:
                streamFlags = AudioClientStreamFlags.NoPersist;
                break;
            }

            // Format
            if (_audioClient != null)
            {
                _capDevice.ReleaseAudioClient();
            }

            // Volume
            _masterVolume   = 0;
            _channelVolumes = new double[_capDevice.AudioMeterInformation.PeakValues.Count];
            var h = VolumeChanged;
            if (h != null)
            {
                h(this, new VolumeChangedEventArgs(_capDeviceId, _masterVolume, _channelVolumes));
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat   = _audioClient.MixFormat;

                _capFormat.wFormatTag     = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels      = 1;
                _capFormat.nSamplesPerSec = 44100;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat      = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign         = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec     = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialization

                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                    {
                        _capFormat = tmpFmt;
                    }
                    _audioClient.Initialize(shareMode,
                                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    if ((uint)ex.ErrorCode == 0x88890019) // AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                    {
                        uint bufSize = _audioClient.BufferSize;
                        tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                        _audioClient.Initialize(shareMode,
                                                streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                    }
                }
                clearBuffer();

                _capClient = _audioClient.AudioCaptureClient;

                // Fire the event
                var del = DeviceSelected;
                if (del != null)
                {
                    del.Invoke(this, new DeviceSelectedEventArgs(_capDevice, idx));
                }
            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
                _audioClient = null;
                _capClient   = null;
                throw;
            }
        }
Example #13
        //public void Setup(string DeviceId, bool useEventSync = false, int audioBufferMillisecondsLength = 100, bool exclusiveMode = false)
        public void Setup(string deviceId, object captureProperties = null)
        {
            logger.Debug("AudioSourceEx::Setup(...) " + deviceId);

            if (captureState != CaptureState.Closed)
            {
                throw new InvalidOperationException("Invalid audio capture state " + captureState);
            }

            WasapiCaptureProperties wasapiCaptureProperties = captureProperties as WasapiCaptureProperties ?? new WasapiCaptureProperties();

            using (var deviceEnum = new MMDeviceEnumerator())
            {
                var mmDevices = deviceEnum.EnumerateAudioEndPoints(DataFlow.All, DeviceState.Active);

                for (int i = 0; i < mmDevices.Count; i++)
                {
                    var d = mmDevices[i];
                    if (d.ID == deviceId)
                    {
                        captureDevice = d;
                        continue;
                    }
                    d.Dispose();
                }
            }

            if (captureDevice == null)
            {
                throw new Exception("MMDevice not found...");
            }

            this.isUsingEventSync = wasapiCaptureProperties.EventSyncMode;
            this.audioBufferMillisecondsLength = wasapiCaptureProperties.BufferMilliseconds;

            this.audioClient = captureDevice.AudioClient;
            this.ShareMode   = wasapiCaptureProperties.ExclusiveMode ? AudioClientShareMode.Exclusive : AudioClientShareMode.Shared;

            this.waveFormat = audioClient.MixFormat;

            long requestedDuration = ReftimesPerMillisec * audioBufferMillisecondsLength;

            if (!audioClient.IsFormatSupported(ShareMode, waveFormat))
            {
                throw new ArgumentException("Unsupported Wave Format");
            }

            try
            {
                var streamFlags = AudioClientStreamFlags.None;
                if (captureDevice.DataFlow != DataFlow.Capture)
                {
                    streamFlags = AudioClientStreamFlags.Loopback;
                }

                // If using EventSync, the setup depends on the shareMode
                if (isUsingEventSync)
                {
                    var flags = AudioClientStreamFlags.EventCallback | streamFlags;

                    // Init Shared or Exclusive
                    if (ShareMode == AudioClientShareMode.Shared)
                    {
                        // With EventCallBack and Shared, both latencies must be set to 0
                        audioClient.Initialize(ShareMode, flags, requestedDuration, 0, waveFormat, Guid.Empty);
                    }
                    else
                    {
                        // With EventCallBack and Exclusive, both latencies must be equal
                        audioClient.Initialize(ShareMode, flags, requestedDuration, requestedDuration, waveFormat, Guid.Empty);
                    }

                    // Create the Wait Event Handle
                    frameEventWaitHandle = new AutoResetEvent(false);
                    audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
                }
                else
                {
                    // Normal setup for both share modes
                    audioClient.Initialize(ShareMode, streamFlags, requestedDuration, 0, waveFormat, Guid.Empty);
                }

                int bufferFrameCount = audioClient.BufferSize;
                bytesPerFrame = waveFormat.Channels * waveFormat.BitsPerSample / 8;
                recordBuffer  = new byte[bufferFrameCount * bytesPerFrame];

                captureState = CaptureState.Initialized;
            }
            catch (Exception ex)
            {
                logger.Error(ex);

                CleanUp();

                throw;
            }
        }
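Example #13 only initializes the capture client; for context, a minimal sketch of the event-driven read loop that would typically follow, assuming NAudio-style AudioCaptureClient members (GetNextPacketSize, GetBuffer, ReleaseBuffer) and a hypothetical CaptureState.Capturing value. Error handling and the hand-off of captured bytes are omitted:

        private void CaptureLoopSketch()
        {
            var captureClient = audioClient.AudioCaptureClient;
            audioClient.Start();
            while (captureState == CaptureState.Capturing)   // assumed state value
            {
                frameEventWaitHandle.WaitOne(audioBufferMillisecondsLength);
                int framesAvailable = captureClient.GetNextPacketSize();
                while (framesAvailable > 0)
                {
                    // Copy one packet of frames out of the shared capture buffer.
                    IntPtr buffer = captureClient.GetBuffer(out int framesRead, out var flags);
                    int byteCount = framesRead * bytesPerFrame;
                    System.Runtime.InteropServices.Marshal.Copy(buffer, recordBuffer, 0, byteCount);
                    captureClient.ReleaseBuffer(framesRead);
                    // ...hand recordBuffer[0..byteCount) to the consumer here.
                    framesAvailable = captureClient.GetNextPacketSize();
                }
            }
            audioClient.Stop();
        }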