Example #1
        private void InitializeCaptureDevice(IAudioClient audioClientInterface)
        {
            var audioClient = new AudioClient(audioClientInterface);

            if (waveFormat == null)
            {
                this.waveFormat = audioClient.MixFormat;
            }

            long requestedDuration = REFTIMES_PER_MILLISEC * 100;


            if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, WaveFormat))
            {
                throw new ArgumentException("Unsupported Wave Format");
            }

            var streamFlags = GetAudioClientStreamFlags();

            audioClient.Initialize(AudioClientShareMode.Shared,
                                   streamFlags,
                                   requestedDuration,
                                   0,
                                   this.waveFormat,
                                   Guid.Empty);


            int bufferFrameCount = audioClient.BufferSize;

            this.bytesPerFrame = this.waveFormat.Channels * this.waveFormat.BitsPerSample / 8;
            this.recordBuffer  = new byte[bufferFrameCount * bytesPerFrame];
            Debug.WriteLine(string.Format("record buffer size = {0}", this.recordBuffer.Length));

            // Get back the effective latency from AudioClient
            latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
        }
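Note: REFTIMES_PER_MILLISEC is used above but not defined in the snippet. WASAPI expresses durations in REFERENCE_TIME units of 100 nanoseconds, so the constant is conventionally 10,000 units per millisecond; a minimal sketch of the assumed definition:

        // Assumed constant (not part of the example above):
        // WASAPI durations are REFERENCE_TIME units of 100 ns, so 10,000 units
        // equal one millisecond; the call above therefore requests a 100 ms buffer.
        private const long REFTIMES_PER_MILLISEC = 10000;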
Example #2
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually audioClient.MixFormat)
                // See the documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It states: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    /*WaveFormat.CreateIeeeFloatWaveFormat(
                     * audioClient.MixFormat.SampleRate,
                     * audioClient.MixFormat.Channels);*/

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Try each format from best to worst (Float32, Int24, Int16)
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still nothing was accepted, fall back to 16-bit PCM stereo
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // Probe that the DMO resampler can handle this conversion.
                using (new ResamplerDmoStream(waveProvider, outputFormat))
                {
                }
                this.dmoResamplerNeeded = true;
            }
            else
            {
                dmoResamplerNeeded = false;
            }
            this.sourceProvider = waveProvider;

            // If using EventSync, setup is specific with shareMode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallBack and Shared, both latencies must be set to 0
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, 0, 0,
                                           outputFormat, Guid.Empty);

                    // Get back the effective latency from AudioClient
                    latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
                }
                else
                {
                    // With EventCallback and Exclusive, both latencies must be equal
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                           outputFormat, Guid.Empty);
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                // Normal setup for both share modes
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);
            }

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
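The empty using block above is a deliberate probe: constructing a ResamplerDmoStream throws if the DMO resampler cannot convert from waveProvider's format to outputFormat, and disposing it immediately releases the COM object until playback actually needs one. The same pattern in isolation (names taken from the example):

            // Probe only: throws if the conversion is unsupported, then releases the DMO.
            using (new ResamplerDmoStream(waveProvider, outputFormat))
            {
            }
            dmoResamplerNeeded = true; // a real resampler is created later, at play time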
Example #3
File: WasapiOut.cs Project: skpaul/cscore
        private WaveFormat SetupWaveFormat(IWaveSource source, AudioClient audioClient)
        {
            WaveFormat waveFormat = source.WaveFormat;
            WaveFormat closestMatch;
            WaveFormat finalFormat = waveFormat;

            if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
            {
                if (closestMatch == null)
                {
                    WaveFormat mixformat = audioClient.GetMixFormat();
                    if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
                    {
                        WaveFormatExtensible[] possibleFormats =
                        {
                            new WaveFormatExtensible(waveFormat.SampleRate,    32, waveFormat.Channels,
                                                     AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate,    24, waveFormat.Channels,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,    16, waveFormat.Channels,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,     8, waveFormat.Channels,
                                                     AudioSubTypes.Pcm),

                            new WaveFormatExtensible(waveFormat.SampleRate,    32,                   2,
                                                     AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate,    24,                   2,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,    16,                   2,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,     8,                   2,
                                                     AudioSubTypes.Pcm),

                            new WaveFormatExtensible(waveFormat.SampleRate,    32,                   1,
                                                     AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate,    24,                   1,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,    16,                   1,
                                                     AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate,     8,                   1,
                                                     AudioSubTypes.Pcm)
                        };

                        if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                        {
                            throw new NotSupportedException("Could not find a supported format.");
                        }
                    }

                    finalFormat = mixformat;
                }
                else
                {
                    finalFormat = closestMatch;
                }

                //todo: test channel matrix conversion
                ChannelMatrix channelMatrix = null;
                if (UseChannelMixingMatrices)
                {
                    try
                    {
                        channelMatrix = ChannelMatrix.GetMatrix(_source.WaveFormat, finalFormat);
                    }
                    catch (Exception)
                    {
                        Debug.WriteLine("No channelmatrix was found.");
                    }
                }
                DmoResampler resampler = channelMatrix != null
                                        ? new DmoChannelResampler(_source, channelMatrix, finalFormat)
                                        : new DmoResampler(_source, finalFormat);
                resampler.Quality = 60;

                _source           = resampler;
                _createdResampler = true;

                return(finalFormat);
            }

            return(finalFormat);
        }
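CheckForSupportedFormat is called above but not shown. A plausible sketch, assuming it simply probes each candidate in order and reports the first one the client accepts (not the project's actual implementation):

        // Hypothetical helper inferred from the call site above:
        private bool CheckForSupportedFormat(AudioClient audioClient,
                                             WaveFormatExtensible[] waveFormats,
                                             out WaveFormat foundMatch)
        {
            foreach (var format in waveFormats)
            {
                if (audioClient.IsFormatSupported(_shareMode, format))
                {
                    foundMatch = format; // first supported candidate wins
                    return true;
                }
            }
            foundMatch = null;
            return false;
        }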
Example #4
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        private IWaveProvider Init()
        {
            var  waveProvider    = waveProviderFunc();
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually audioClient.MixFormat)
                // See the documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It states: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    /*WaveFormat.CreateIeeeFloatWaveFormat(
                     * audioClient.MixFormat.SampleRate,
                     * audioClient.MixFormat.Channels);*/

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Try each format from best to worst (Float32, Int24, Int16)
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still nothing was accepted, fall back to 16-bit PCM stereo
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // Resampler probe disabled in this variant; a MediaFoundationResampler
                // is created later if resampling turns out to be needed.
                //using (new MediaFoundationResampler(waveProvider, outputFormat))
                //{
                //}
                this.resamplerNeeded = true;
            }
            else
            {
                resamplerNeeded = false;
            }

            // Init Shared or Exclusive
            if (shareMode == AudioClientShareMode.Shared)
            {
                // With EventCallback and Shared mode, only the buffer duration is specified
                audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);

                // Get back the effective latency from AudioClient. On Windows 10 it can be 0
                if (audioClient.StreamLatency > 0)
                {
                    latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
                }
            }
            else
            {
                // With EventCallback and Exclusive, both latencies must be equal
                audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                       outputFormat, Guid.Empty);
            }

            // Create the Wait Event Handle
            frameEventWaitHandle = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
            audioClient.SetEventHandle(frameEventWaitHandle);

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
            return(waveProvider);
        }
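NativeMethods.CreateEventExW and EventAccess are not shown in this snippet. CreateEventExW is the kernel32 API; a declaration matching the call above (null security attributes, unnamed event, flags 0 for an auto-reset event, full access) might look like this:

        // Hypothetical P/Invoke declaration matching the call site above:
        [DllImport("kernel32.dll", ExactSpelling = true)]
        internal static extern IntPtr CreateEventExW(IntPtr lpEventAttributes, IntPtr lpName,
                                                     int dwFlags, EventAccess dwDesiredAccess);

        internal enum EventAccess : uint
        {
            EVENT_ALL_ACCESS = 0x1F0003 // STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x3
        }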
Example #5
        private WaveFormat SetupWaveFormat(IWaveSource source, AudioClient audioClient)
        {
            WaveFormat waveFormat = source.WaveFormat;
            WaveFormat closestMatch;
            WaveFormat finalFormat = waveFormat;

            //check whether initial format is supported
            if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
            {
                //initial format is not supported -> maybe there was some kind of close match ...
                if (closestMatch == null)
                {
                    //no match ... check whether the format of the Windows audio mixer is supported
                    //(this path is executed for both shared and exclusive mode streams)
                    WaveFormat mixformat = audioClient.GetMixFormat();
                    if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
                    {
                        //mixformat is not supported
                        //start generating possible formats

                        mixformat = null;
                        WaveFormatExtensible[] possibleFormats;
                        if (_shareMode == AudioClientShareMode.Exclusive)
                        {
                            //for exclusive mode streams use the DeviceFormat of the initialized MMDevice
                            //as base for further possible formats
                            var deviceFormat = Device.DeviceFormat;

                            //generate some possible formats based on the samplerate of the DeviceFormat
                            possibleFormats = GetPossibleFormats(deviceFormat.SampleRate, deviceFormat.Channels);
                            if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                            {
                                //none of the tested formats were supported
                                //try some different samplerates
                                List <WaveFormatExtensible> waveFormats = new List <WaveFormatExtensible>();
                                foreach (var sampleRate in new[] { 44100, 48000, 96000, 192000 })
                                {
                                    waveFormats.AddRange(GetPossibleFormats(sampleRate, deviceFormat.Channels));
                                }

                                //assign the generated formats with samplerates 44.1kHz, 48kHz, 96kHz and 192kHz to
                                //the possibleFormats array which will be used below
                                possibleFormats = waveFormats.ToArray();
                            }
                        }
                        else
                        {
                            //for shared mode streams, generate some formats based on the initial waveFormat
                            possibleFormats = GetPossibleFormats(waveFormat.SampleRate, waveFormat.Channels);
                        }

                        if (mixformat == null)
                        {
                            if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                            {
                                throw new NotSupportedException("Could not find a supported format.");
                            }
                        }
                    }

                    finalFormat = mixformat;
                }
                else
                {
                    finalFormat = closestMatch;
                }

                //todo: test channel matrix conversion
                ChannelMatrix channelMatrix = null;
                if (UseChannelMixingMatrices)
                {
                    try
                    {
                        channelMatrix = ChannelMatrix.GetMatrix(_source.WaveFormat, finalFormat);
                    }
                    catch (Exception)
                    {
                        Debug.WriteLine("No channelmatrix was found.");
                    }
                }
                DmoResampler resampler = channelMatrix != null
                    ? new DmoChannelResampler(_source, channelMatrix, finalFormat)
                    : new DmoResampler(_source, finalFormat);
                resampler.Quality = 60;

                _source           = resampler;
                _createdResampler = true;

                return(finalFormat);
            }

            return(finalFormat);
        }
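GetPossibleFormats is not shown either. Judging from the candidate table in example #3, it likely enumerates bit depths and channel counts for a fixed sample rate; a sketch under that assumption:

        // Hypothetical helper mirroring the candidate table in example #3
        // (duplicate entries are harmless: they are merely probed twice):
        private static WaveFormatExtensible[] GetPossibleFormats(int sampleRate, int channels)
        {
            var formats = new List<WaveFormatExtensible>();
            foreach (int ch in new[] { channels, 2, 1 })
            {
                formats.Add(new WaveFormatExtensible(sampleRate, 32, ch, AudioSubTypes.IeeeFloat));
                formats.Add(new WaveFormatExtensible(sampleRate, 24, ch, AudioSubTypes.Pcm));
                formats.Add(new WaveFormatExtensible(sampleRate, 16, ch, AudioSubTypes.Pcm));
                formats.Add(new WaveFormatExtensible(sampleRate,  8, ch, AudioSubTypes.Pcm));
            }
            return formats.ToArray();
        }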
Example #6
        public void SelectDevice(string devId)
        {
            _capDevice = _devices.GetDevice(devId.Trim());

            if (_capDevice == null)
            {
                _audioClient = null;
                _capClient = null;
                return;
            }

            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Exclusive;
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;

            if (_audioClient != null)
                _capDevice.ReleaseAudioClient();

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat = _audioClient.MixFormat;

                _capFormat.wFormatTag = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels = 2;
                _capFormat.nSamplesPerSec = 16000;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize
                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                        _capFormat = tmpFmt;
                    _audioClient.Initialize(shareMode, streamFlags,
                        tmp2, tmp2,
                        _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    try
                    {
                        AudioClientError error = (AudioClientError)ex.ErrorCode;
                        switch (error)
                        {
                            case AudioClientError.BufferSizeNotAligned:
                                uint bufSize = _audioClient.BufferSize;
                                tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                                _audioClient.Initialize(shareMode,
                                    streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                                break;

                            case AudioClientError.UnsupportedFormat:
                                // No recovery attempted for an unsupported format.
                                break;
                        }
                    }
                    catch (InvalidCastException)
                    {

                    }
                }

                _capClient = _audioClient.AudioCaptureClient;

            }
            catch (System.Runtime.InteropServices.COMException)
            {
                _audioClient = null;
                _capClient = null;
                throw; // rethrow, preserving the original stack trace
            }
        }
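The BufferSizeNotAligned branch above recomputes the requested period so that it corresponds exactly to the buffer size the device reported: period in 100-ns units = 10,000,000 / nSamplesPerSec * BufferSize, rounded. At 48,000 Hz with a 480-frame buffer that gives 100,000 units, i.e. 10 ms. The arithmetic in isolation:

        // Sketch of the realignment arithmetic used above:
        // e.g. 48000 Hz with a 480-frame buffer -> 100,000 units of 100 ns = 10 ms.
        private static long AlignedPeriod(uint bufferFrames, uint samplesPerSec)
        {
            return (long)(10000.0 * 1000 / samplesPerSec * bufferFrames + 0.5);
        }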
Example #7
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually audioClient.MixFormat)
                // See the documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It states: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    /*WaveFormat.CreateIeeeFloatWaveFormat(
                     * audioClient.MixFormat.SampleRate,
                     * audioClient.MixFormat.Channels);*/

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Try each format from best to worst (Float32, Int24, Int16)
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still null and we're using exclusive mode, try to get the device format property.
                        if (correctSampleRateFormat == null && shareMode == AudioClientShareMode.Exclusive)
                        {
                            // Based on https://stackoverflow.com/questions/22616924/wasapi-choosing-a-wave-format-for-exclusive-output
                            byte[] waveFormatBytes = (byte[])mmDevice.Properties[PropertyKeys.PKEY_AudioEngine_DeviceFormat].Value;
                            if (waveFormatBytes != null)
                            {
                                GCHandle handle = GCHandle.Alloc(waveFormatBytes, GCHandleType.Pinned);
                                try
                                {
                                    correctSampleRateFormat = (WaveFormatExtensible)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(WaveFormatExtensible));
                                }
                                finally
                                {
                                    handle.Free();
                                }
                            }
                        }

                        // If still nothing was accepted, fall back to 16-bit PCM stereo
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // Probe that the DMO resampler can handle this conversion.
                using (new ResamplerDmoStream(waveProvider, outputFormat))
                {
                }
                dmoResamplerNeeded = true;
            }
            else
            {
                dmoResamplerNeeded = false;
            }
            sourceProvider = waveProvider;

            // If using EventSync, setup is specific with shareMode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallback and Shared, both latencies were historically set to 0
                    // (update: not sure this is still required)
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                           outputFormat, Guid.Empty);

                    // Windows 10 returns 0 from stream latency, resulting in maxing out CPU usage later
                    var streamLatency = audioClient.StreamLatency;
                    if (streamLatency != 0)
                    {
                        // Get back the effective latency from AudioClient
                        latencyMilliseconds = (int)(streamLatency / 10000);
                    }
                }
                else
                {
                    // With EventCallback and Exclusive, both latencies must be equal
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                           outputFormat, Guid.Empty);
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                // Normal setup for both share modes
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);
            }

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
Example #8
 private bool IsFormatSupported(WaveFormat waveFormat, AudioClientShareMode sharemode, AudioClient audioClient)
 {
     return(audioClient.IsFormatSupported(sharemode, waveFormat));
 }
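A hypothetical call to this thin wrapper (the WaveFormat constructor arguments here are illustrative, not taken from the original project):

 // Hypothetical usage of the wrapper above:
 var format = new WaveFormat(44100, 16, 2); // 44.1 kHz, 16-bit, stereo
 bool ok = IsFormatSupported(format, AudioClientShareMode.Shared, audioClient);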
Example #9
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually audioClient.MixFormat)
                // See the documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It states: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Try each format from best to worst (Float32, Int24, Int16)
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still nothing was accepted, fall back to 16-bit PCM stereo
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // Create the resampler now and use it as the source provider.
                resamplerDmoStream = new ResamplerDmoStream(waveProvider, outputFormat);
                sourceProvider     = resamplerDmoStream;
            }
            else
            {
                sourceProvider = waveProvider;
            }

            // Normal setup for both share modes
            audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                   outputFormat, Guid.Empty);


            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;

            // set up the read buffer
            bufferFrameCount = audioClient.BufferSize;
            bytesPerFrame    = outputFormat.Channels * outputFormat.BitsPerSample / 8;
            readBuffer       = new byte[bufferFrameCount * bytesPerFrame];
        }
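The buffer sizing at the end is the usual block-align arithmetic; a worked instance under assumed values:

        // Worked example of the buffer sizing above, for 16-bit stereo:
        int channels = 2, bitsPerSample = 16, frames = 480; // assumed values
        int frameBytes = channels * bitsPerSample / 8;      // 4 bytes per frame
        var buffer = new byte[frames * frameBytes];         // 1920 bytes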
Example #10
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually audioClient.MixFormat)
                // See the documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It states: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    /*WaveFormat.CreateIeeeFloatWaveFormat(
                     * audioClient.MixFormat.SampleRate,
                     * audioClient.MixFormat.Channels);*/

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Try each format from best to worst (Float32, Int24, Int16)
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still nothing was accepted, fall back to 16-bit PCM stereo
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // Probe that the DMO resampler can handle this conversion.
                using (new ResamplerDmoStream(waveProvider, outputFormat))
                {
                }
                dmoResamplerNeeded = true;
            }
            else
            {
                dmoResamplerNeeded = false;
            }
            sourceProvider = waveProvider;

            // If using EventSync, setup is specific with shareMode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallback and Shared, both latencies were historically set to 0
                    // (update: not sure this is still required)
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                           outputFormat, Guid.Empty);

                    // Windows 10 returns 0 from stream latency, resulting in maxing out CPU usage later
                    var streamLatency = audioClient.StreamLatency;
                    if (streamLatency != 0)
                    {
                        // Get back the effective latency from AudioClient
                        latencyMilliseconds = (int)(streamLatency / 10000);
                    }
                }
                else
                {
                    try
                    {
                        // With EventCallback and Exclusive, both latencies must be equal
                        audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                               outputFormat, Guid.Empty);
                    }
                    catch (COMException ex)
                    {
                        // Starting with Windows 7, Initialize can return AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED for a render device.
                        // We should initialize again.
                        if (ex.ErrorCode != ErrorCodes.AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED)
                        {
                            throw; // rethrow anything else, preserving the stack trace
                        }

                        // Calculate the new latency.
                        long newLatencyRefTimes = (long)(10000000.0 /
                                                         (double)this.outputFormat.SampleRate *
                                                         (double)this.audioClient.BufferSize + 0.5);

                        this.audioClient.Dispose();
                        this.audioClient = this.mmDevice.AudioClient;
                        this.audioClient.Initialize(this.shareMode, AudioClientStreamFlags.EventCallback,
                                                    newLatencyRefTimes, newLatencyRefTimes, this.outputFormat, Guid.Empty);
                    }
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                // Normal setup for both share modes
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);
            }

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
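ErrorCodes.AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED is referenced above but not defined in the snippet. The underlying HRESULT is the well-known WASAPI value 0x88890019 (examples #11 and #13 compare against it directly); a sketch of the assumed constant:

        // Assumed constant: the WASAPI HRESULT AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED.
        internal static class ErrorCodes
        {
            public const int AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED = unchecked((int)0x88890019);
        }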
Example #11
        private void selectDeviceImpl(string devId)
        {
            if (_capDevice != null && _capDevice.Id == devId)
            {
                return;
            }

            releaseDeviceImpl();

            _capDevice = _devices.GetDevice(devId.Trim());
            int idx = _deviceInfos.FindIndex((di) => { return di.DeviceId == devId; });
            if (_capDevice == null)
            {
            #warning TODO: throw an exception here
                _audioClient = null;
                _capClient = null;
                return;
            }
            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Shared;

            // Choose initialization flags appropriate for the device
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;
            switch (shareMode)
            {
                case AudioClientShareMode.Shared:
                    switch (_capDevice.DataFlow)
                    {
                        case EDataFlow.eCapture:
                            streamFlags = 0;
                            break;
                        case EDataFlow.eRender:
                            streamFlags = AudioClientStreamFlags.Loopback;
                            break;
                    }
                    break;
                case AudioClientShareMode.Exclusive:
                    streamFlags = AudioClientStreamFlags.NoPersist;
                    break;
            }

            // Format
            if (_audioClient != null) _capDevice.ReleaseAudioClient();

            // Volume
            _masterVolume = 0;
            _channelVolumes = new double[_capDevice.AudioMeterInformation.PeakValues.Count];
            var h = VolumeChanged;
            if (h != null)
            {
                h(this, new VolumeChangedEventArgs(_capDeviceId, _masterVolume, _channelVolumes));
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat = _audioClient.MixFormat;

                _capFormat.wFormatTag = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels = 1;
                _capFormat.nSamplesPerSec = 44100;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize

                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt)) _capFormat = tmpFmt;
                    _audioClient.Initialize(shareMode,
                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    if ((uint)ex.ErrorCode == 0x88890019) // AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                    {
                        uint bufSize = _audioClient.BufferSize;
                        tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                        _audioClient.Initialize(shareMode,
                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                    }
                }
                clearBuffer();

                _capClient = _audioClient.AudioCaptureClient;

                // Fire the DeviceSelected event
                var del = DeviceSelected;
                if (del != null)
                {
                    del.Invoke(this, new DeviceSelectedEventArgs(_capDevice, idx));
                }
            }
            catch (System.Runtime.InteropServices.COMException)
            {
                _audioClient = null;
                _capClient = null;
                throw; // rethrow, preserving the original stack trace
            }
        }
Example #12
        public void SelectDevice(string devId)
        {
            _capDevice = _devices.GetDevice(devId.Trim());

            if (_capDevice == null)
            {
                _audioClient = null;
                _capClient   = null;
                return;
            }

            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode   shareMode   = AudioClientShareMode.Exclusive;
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;

            if (_audioClient != null)
            {
                _capDevice.ReleaseAudioClient();
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat   = _audioClient.MixFormat;

                _capFormat.wFormatTag     = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels      = 2;
                _capFormat.nSamplesPerSec = 16000;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat      = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign         = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec     = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize
                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                    {
                        _capFormat = tmpFmt;
                    }
                    _audioClient.Initialize(shareMode, streamFlags,
                                            tmp2, tmp2,
                                            _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    try
                    {
                        AudioClientError error = (AudioClientError)ex.ErrorCode;
                        switch (error)
                        {
                        case AudioClientError.BufferSizeNotAligned:
                            uint bufSize = _audioClient.BufferSize;
                            tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                            _audioClient.Initialize(shareMode,
                                                    streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                            break;

                        case AudioClientError.UnsupportedFormat:
                            // No recovery attempted for an unsupported format.
                            break;
                        }
                    }
                    catch (InvalidCastException)
                    {
                    }
                }

                _capClient = _audioClient.AudioCaptureClient;
            }
            catch (System.Runtime.InteropServices.COMException)
            {
                _audioClient = null;
                _capClient   = null;
                throw; // rethrow, preserving the original stack trace
            }
        }
Example #13
        private void selectDeviceImpl(string devId)
        {
            if (_capDevice != null && _capDevice.Id == devId)
            {
                return;
            }

            releaseDeviceImpl();

            _capDevice = _devices.GetDevice(devId.Trim());
            int idx = _deviceInfos.FindIndex((di) => { return(di.DeviceId == devId); });

            if (_capDevice == null)
            {
#warning TODO: throw an exception here
                _audioClient = null;
                _capClient   = null;
                return;
            }
            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Shared;

            // Choose initialization flags appropriate for the device
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;
            switch (shareMode)
            {
            case AudioClientShareMode.Shared:
                switch (_capDevice.DataFlow)
                {
                case EDataFlow.eCapture:
                    streamFlags = 0;
                    break;

                case EDataFlow.eRender:
                    streamFlags = AudioClientStreamFlags.Loopback;
                    break;
                }
                break;

            case AudioClientShareMode.Exclusive:
                streamFlags = AudioClientStreamFlags.NoPersist;
                break;
            }

            // Format
            if (_audioClient != null)
            {
                _capDevice.ReleaseAudioClient();
            }

            // Volume
            _masterVolume   = 0;
            _channelVolumes = new double[_capDevice.AudioMeterInformation.PeakValues.Count];
            var h = VolumeChanged;
            if (h != null)
            {
                h(this, new VolumeChangedEventArgs(_capDeviceId, _masterVolume, _channelVolumes));
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat   = _audioClient.MixFormat;

                _capFormat.wFormatTag     = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels      = 1;
                _capFormat.nSamplesPerSec = 44100;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat      = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign         = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec     = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize

                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                    {
                        _capFormat = tmpFmt;
                    }
                    _audioClient.Initialize(shareMode,
                                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    if ((uint)ex.ErrorCode == 0x88890019) // AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                    {
                        uint bufSize = _audioClient.BufferSize;
                        tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                        _audioClient.Initialize(shareMode,
                                                streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                    }
                }
                clearBuffer();

                _capClient = _audioClient.AudioCaptureClient;

                // Fire the DeviceSelected event
                var del = DeviceSelected;
                if (del != null)
                {
                    del.Invoke(this, new DeviceSelectedEventArgs(_capDevice, idx));
                }
            }
            catch (System.Runtime.InteropServices.COMException)
            {
                _audioClient = null;
                _capClient   = null;
                throw; // rethrow, preserving the original stack trace
            }
        }
Example #14
        //public void Setup(string DeviceId, bool useEventSync = false, int audioBufferMillisecondsLength = 100, bool exclusiveMode = false)
        public void Setup(string deviceId, object captureProperties = null)
        {
            logger.Debug("AudioSourceEx::Setup(...) " + deviceId);

            if (captureState != CaptureState.Closed)
            {
                throw new InvalidOperationException("Invalid audio capture state " + captureState);
            }

            WasapiCaptureProperties wasapiCaptureProperties = captureProperties as WasapiCaptureProperties ?? new WasapiCaptureProperties();

            using (var deviceEnum = new MMDeviceEnumerator())
            {
                var mmDevices = deviceEnum.EnumerateAudioEndPoints(DataFlow.All, DeviceState.Active);

                for (int i = 0; i < mmDevices.Count; i++)
                {
                    var d = mmDevices[i];
                    if (d.ID == deviceId)
                    {
                        captureDevice = d;
                        continue;
                    }
                    d.Dispose();
                }
            }

            if (captureDevice == null)
            {
                throw new Exception("MMDevice not found...");
            }

            this.isUsingEventSync = wasapiCaptureProperties.EventSyncMode;
            this.audioBufferMillisecondsLength = wasapiCaptureProperties.BufferMilliseconds;

            this.audioClient = captureDevice.AudioClient;
            this.ShareMode   = wasapiCaptureProperties.ExclusiveMode ? AudioClientShareMode.Exclusive : AudioClientShareMode.Shared;

            this.waveFormat = audioClient.MixFormat;

            long requestedDuration = ReftimesPerMillisec * audioBufferMillisecondsLength;

            if (!audioClient.IsFormatSupported(ShareMode, waveFormat))
            {
                throw new ArgumentException("Unsupported Wave Format");
            }

            try
            {
                var streamFlags = AudioClientStreamFlags.None;
                if (captureDevice.DataFlow != DataFlow.Capture)
                {
                    streamFlags = AudioClientStreamFlags.Loopback;
                }

                // If using EventSync, setup is specific with shareMode
                if (isUsingEventSync)
                {
                    var flags = AudioClientStreamFlags.EventCallback | streamFlags;

                    // Init Shared or Exclusive
                    if (ShareMode == AudioClientShareMode.Shared)
                    {
                        // With EventCallBack and Shared, both latencies must be set to 0
                        audioClient.Initialize(ShareMode, flags, requestedDuration, 0, waveFormat, Guid.Empty);
                    }
                    else
                    {
                        // With EventCallBack and Exclusive, both latencies must equals
                        audioClient.Initialize(ShareMode, flags, requestedDuration, requestedDuration, waveFormat, Guid.Empty);
                    }

                    // Create the Wait Event Handle
                    frameEventWaitHandle = new AutoResetEvent(false);
                    audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
                }
                else
                {
                    // Normal setup for both sharedMode
                    audioClient.Initialize(ShareMode, streamFlags, requestedDuration, 0, waveFormat, Guid.Empty);
                }

                int bufferFrameCount = audioClient.BufferSize;
                bytesPerFrame = waveFormat.Channels * waveFormat.BitsPerSample / 8;
                recordBuffer  = new byte[bufferFrameCount * bytesPerFrame];

                captureState = CaptureState.Initialized;
            }
            catch (Exception ex)
            {
                logger.Error(ex);

                CleanUp();

                throw;
            }
        }
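WasapiCaptureProperties and ReftimesPerMillisec are consumed above but not shown. From the three properties read and the 100-ns REFERENCE_TIME convention (see example #1), a minimal sketch:

        // Hypothetical shapes inferred from the usage above:
        public class WasapiCaptureProperties
        {
            public bool EventSyncMode { get; set; }              // event-driven buffering
            public int  BufferMilliseconds { get; set; } = 100;  // requested buffer length
            public bool ExclusiveMode { get; set; }              // exclusive vs. shared stream
        }

        private const long ReftimesPerMillisec = 10000; // 100-ns REFERENCE_TIME units per ms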