Code example #1
File: AudioClock.cs Project: hoangduit/cscore
        /// <summary>
        ///     Creates a new <see cref="AudioClock" /> by calling the <see cref="AudioClient.GetService" /> method of the
        ///     specified <paramref name="audioClient" />.
        /// </summary>
        /// <param name="audioClient">
        ///     The <see cref="AudioClient" /> that should be used to create the <see cref="AudioClock" /> instance.
        /// </param>
        /// <returns>A new <see cref="AudioClock" /> instance.</returns>
        public static AudioClock FromAudioClient(AudioClient audioClient)
        {
            if (audioClient == null)
                throw new ArgumentNullException("audioClient");

            return new AudioClock(audioClient.GetService(typeof(AudioClock).GUID));
        }
Code example #2
        public static SimpleAudioVolume FromAudioClient(AudioClient audioClient)
        {
            if (audioClient == null)
                throw new ArgumentNullException("audioClient");

            return new SimpleAudioVolume(audioClient.GetService(IID_SimpleAudioVolume));
        }
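A minimal usage sketch for the factory above, assuming an AudioClient that has already been created via AudioClient.FromMMDevice and initialized (as in the WasapiOut examples further down); the variable name audioClient is illustrative.

        // Obtain the session volume control for an already initialized AudioClient
        // and set the master volume to 50% (the MasterVolume property is also used
        // in code example #13 below).
        using (SimpleAudioVolume volume = SimpleAudioVolume.FromAudioClient(audioClient))
        {
            volume.MasterVolume = 0.5f;
        }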
Code example #3
        /// <summary>
        ///     Creates a new <see cref="AudioCaptureClient" /> by calling the <see cref="AudioClient.GetService" /> method of the
        ///     specified <paramref name="audioClient" />.
        /// </summary>
        /// <param name="audioClient">
        ///     The <see cref="AudioClient" /> that should be used to create the <see cref="AudioCaptureClient" /> instance.
        /// </param>
        /// <returns>A new instance of the <see cref="AudioCaptureClient"/> class.</returns>
        public static AudioCaptureClient FromAudioClient(AudioClient audioClient)
        {
            if (audioClient == null)
                throw new ArgumentNullException("audioClient");

            return new AudioCaptureClient(audioClient.GetService(IID_IAudioCaptureClient));
        }
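For context, a minimal sketch of how this factory is typically reached on the capture side: create an AudioClient from a device, initialize it, then request the capture client. The captureDevice variable is assumed to be an MMDevice resolved elsewhere; the buffer duration and stream flags are illustrative only.

        using (AudioClient audioClient = AudioClient.FromMMDevice(captureDevice))
        {
            WaveFormat format = audioClient.MixFormat;
            // 100 ms buffer, expressed in 100-ns REFERENCE_TIME units (100 * 10000).
            audioClient.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.None,
                100 * 10000, 0, format, Guid.Empty);

            using (AudioCaptureClient captureClient = AudioCaptureClient.FromAudioClient(audioClient))
            {
                // reading packets from captureClient is omitted here
            }
        }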
Code example #4
File: WasapiOut.cs Project: opcon/cscore
        private WaveFormat SetupWaveFormat(IWaveSource source, AudioClient audioClient)
        {
            WaveFormat waveFormat = source.WaveFormat;
            WaveFormat closestMatch;
            WaveFormat finalFormat = waveFormat;
            if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
            {
                if (closestMatch == null)
                {
                    WaveFormat mixformat = audioClient.GetMixFormat();
                    if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
                    {
                        WaveFormatExtensible[] possibleFormats =
                        {
                            new WaveFormatExtensible(waveFormat.SampleRate, 32, waveFormat.Channels,
                                AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate, 24, waveFormat.Channels,
                                AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 16, waveFormat.Channels,
                                AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 8, waveFormat.Channels,
                                AudioSubTypes.Pcm),

                            new WaveFormatExtensible(waveFormat.SampleRate, 32, 2,
                                AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate, 24, 2,
                                AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 16, 2,
                                AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 8, 2,
                                AudioSubTypes.Pcm),

                            new WaveFormatExtensible(waveFormat.SampleRate, 32, 1,
                                AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate, 24, 1,
                                AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 16, 1,
                                AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 8, 1,
                                AudioSubTypes.Pcm)
                        };

                        if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                        {
                            throw new NotSupportedException("Could not find a supported format.");
                        }
                    }

                    finalFormat = mixformat;
                }
                else
                    finalFormat = closestMatch;

                //todo: test channel matrix conversion
                ChannelMatrix channelMatrix = null;
                if (UseChannelMixingMatrices)
                {
                    try
                    {
                        channelMatrix = ChannelMatrix.GetMatrix(_source.WaveFormat, finalFormat);
                    }
                    catch (Exception)
                    {
                        Debug.WriteLine("No channelmatrix was found.");
                    }
                }
                DmoResampler resampler = channelMatrix != null
                    ? new DmoChannelResampler(_source, channelMatrix, finalFormat)
                    : new DmoResampler(_source, finalFormat);
                resampler.Quality = 60;

                _source = resampler;
                _createdResampler = true;

                return finalFormat;
            }

            return finalFormat;
        }
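The method above negotiates a format in a fixed preference order: the requested format, then the closest match reported by IsFormatSupported, then the device mix format, and finally a hard-coded fallback list that prefers 32-bit float over 24/16/8-bit PCM and the source channel count over stereo and mono. A hedged sketch of how that candidate list could be built programmatically; the helper name BuildFallbackFormats is hypothetical and not part of cscore.

        // Hypothetical helper: reproduces the fallback order used in the array above
        // (32-bit float first, then 24/16/8-bit PCM) for each requested channel count.
        private static IEnumerable<WaveFormatExtensible> BuildFallbackFormats(int sampleRate, params int[] channelCounts)
        {
            foreach (int channels in channelCounts)
            {
                yield return new WaveFormatExtensible(sampleRate, 32, channels, AudioSubTypes.IeeeFloat);
                yield return new WaveFormatExtensible(sampleRate, 24, channels, AudioSubTypes.Pcm);
                yield return new WaveFormatExtensible(sampleRate, 16, channels, AudioSubTypes.Pcm);
                yield return new WaveFormatExtensible(sampleRate, 8, channels, AudioSubTypes.Pcm);
            }
        }

Because CheckForSupportedFormat (code example #7) accepts an IEnumerable<WaveFormatExtensible>, such a helper could be passed to it directly, for example with the channel counts waveFormat.Channels, 2 and 1.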
Code example #5
File: WasapiOut.cs Project: opcon/cscore
        private void InitializeInternal()
        {
            const int reftimesPerMillisecond = 10000;

            _audioClient = AudioClient.FromMMDevice(Device);
            _outputFormat = SetupWaveFormat(_source, _audioClient);

            long latency = _latency * reftimesPerMillisecond;
            AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED_TRY_AGAIN:
            try
            {

                if (!_eventSync)
                    _audioClient.Initialize(_shareMode, AudioClientStreamFlags.None, latency, 0, _outputFormat,
                        Guid.Empty);
                else //event sync
                {
                    if (_shareMode == AudioClientShareMode.Exclusive) //exclusive
                    {
                        _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback, latency,
                            latency, _outputFormat, Guid.Empty);
                    }
                    else //shared
                    {
                        _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback, 0, 0,
                            _outputFormat, Guid.Empty);
                        //latency = (int)(_audioClient.StreamLatency / reftimesPerMillisecond);
                    }
                }
            }
            catch (CoreAudioAPIException exception)
            {
                if (exception.ErrorCode == unchecked((int)0x88890019)) //AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                {
                    const long reftimesPerSec = 10000000;
                    int framesInBuffer = _audioClient.GetBufferSize();
                    // ReSharper disable once PossibleLossOfFraction
                    latency = (int)(reftimesPerSec * framesInBuffer / _outputFormat.SampleRate + 0.5);
                    goto AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED_TRY_AGAIN;
                }
                throw;
            }

            if (_audioClient.StreamLatency != 0) //windows 10 returns zero, got no idea why => https://github.com/filoe/cscore/issues/11
            {
                Latency = (int)(_audioClient.StreamLatency / reftimesPerMillisecond);
            }

            if (_eventSync)
            {
                _eventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                _audioClient.SetEventHandle(_eventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }

            _renderClient = AudioRenderClient.FromAudioClient(_audioClient);
        }
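The latency arithmetic above works in WASAPI REFERENCE_TIME units of 100 ns: 10,000 units per millisecond and 10,000,000 per second. The AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED retry therefore converts the aligned buffer size in frames back into a duration; for example, 441 frames at 44100 Hz give 10,000,000 * 441 / 44100 = 100,000 units, i.e. 10 ms. A small sketch of the two conversions, with illustrative helper names:

        // REFERENCE_TIME is expressed in 100-ns units: 10,000 per millisecond.
        private static long MillisecondsToReferenceTime(int milliseconds)
        {
            return milliseconds * 10000L;
        }

        // Converts an aligned buffer size in frames into a REFERENCE_TIME duration,
        // rounding to the nearest unit as the retry path above does.
        private static long FramesToReferenceTime(int frames, int sampleRate)
        {
            return (long)(10000000.0 * frames / sampleRate + 0.5);
        }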
Code example #6
File: WasapiOut.cs Project: opcon/cscore
        private void CleanupResources()
        {
            if (_createdResampler && _source is DmoResampler)
            {
                ((DmoResampler)_source).DisposeResamplerOnly();
                _source = null;
            }

            if (_renderClient != null)
            {
                _renderClient.Dispose();
                _renderClient = null;
            }
            if (_audioClient != null && _audioClient.BasePtr != IntPtr.Zero)
            {
                try
                {
                    _audioClient.StopNative();
                    _audioClient.Reset();
                }
                catch (CoreAudioAPIException ex)
                {
                    if (ex.ErrorCode != unchecked((int)0x88890001)) //AUDCLNT_E_NOT_INITIALIZED
                        throw;
                }
                _audioClient.Dispose();
                _audioClient = null;
            }
            if (_eventWaitHandle != null)
            {
                _eventWaitHandle.Close();
                _eventWaitHandle = null;
            }

            _isInitialized = false;
        }
Code example #7
File: WasapiOut.cs Project: opcon/cscore
        private bool CheckForSupportedFormat(AudioClient audioClient, IEnumerable<WaveFormatExtensible> waveFormats,
            out WaveFormat foundMatch)
        {
            foundMatch = null;
            foreach (WaveFormatExtensible format in waveFormats)
            {
                if (audioClient.IsFormatSupported(_shareMode, format))
                {
                    foundMatch = format;
                    return true;
                }
            }
            return false;
        }
Code example #8
File: WasapiCapture.cs Project: EQ4/cscore
        private void UninitializeAudioClients()
        {
            if (_audioClient != null)
            {
                _audioClient.Dispose();
                _audioClient = null;
            }
            if (_audioCaptureClient != null)
            {
                _audioCaptureClient.Dispose();
                _audioCaptureClient = null;
            }
            if(_eventWaitHandle != null)
            {
                _eventWaitHandle.Close();
                _eventWaitHandle = null;
            }

            _isInitialized = false;
        }
Code example #9
File: WasapiCapture.cs Project: EQ4/cscore
        private WaveFormat SetupWaveFormat(WaveFormat waveFormat, AudioClient audioClient)
        {
            WaveFormat closestMatch;
            WaveFormat finalFormat = waveFormat;
            if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
            {
                if (closestMatch == null)
                {
                    WaveFormat mixformat = audioClient.GetMixFormat();
                    if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
                    {
                        WaveFormatExtensible[] possibleFormats =
                        {
                            new WaveFormatExtensible(waveFormat.SampleRate, 32, waveFormat.Channels, AudioSubTypes.IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate, 24, waveFormat.Channels, AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 16, waveFormat.Channels, AudioSubTypes.Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 8,  waveFormat.Channels, AudioSubTypes.Pcm)
                        };

                        if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                        {
                            //no format found...
                            possibleFormats = new[]
                            {
                                new WaveFormatExtensible(waveFormat.SampleRate, 32, 2, AudioSubTypes.IeeeFloat),
                                new WaveFormatExtensible(waveFormat.SampleRate, 24, 2, AudioSubTypes.Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 16, 2, AudioSubTypes.Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 8,  2, AudioSubTypes.Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 32, 1, AudioSubTypes.IeeeFloat),
                                new WaveFormatExtensible(waveFormat.SampleRate, 24, 1, AudioSubTypes.Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 16, 1, AudioSubTypes.Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 8,  1, AudioSubTypes.Pcm)
                            };

                            if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                            {
                                throw new NotSupportedException("Could not find a supported format.");
                            }
                        }
                    }

                    finalFormat = mixformat;
                }
                else
                {
                    finalFormat = closestMatch;
                }
            }

            return finalFormat;
        }
Code example #10
File: WasapiCapture.cs Project: EQ4/cscore
        private void InitializeInternal()
        {
            var defaultFormat = _waveFormat;

            _audioClient = AudioClient.FromMMDevice(Device);
            _waveFormat = _waveFormat ?? _audioClient.MixFormat;

            _waveFormat = SetupWaveFormat(_waveFormat, _audioClient);

            if (!_eventSync)
            {
                _audioClient.Initialize(_shareMode, AudioClientStreamFlags.None | GetStreamFlags(), _latency * ReftimesPerMillisecond, 0, _waveFormat, Guid.Empty);
            }
            else
            {
                if (_shareMode == AudioClientShareMode.Exclusive)
                {
                    try
                    {
                        _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), _latency * ReftimesPerMillisecond, _latency * ReftimesPerMillisecond, _waveFormat, Guid.Empty);
                    }
                    catch (CoreAudioAPIException e)
                    {
                        if (e.ErrorCode == unchecked((int)0x88890019)) //AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED
                        {
                            int bufferSize = _audioClient.BufferSize;
                            _audioClient.Dispose();
                            long hnsRequestedDuration = (long)(((double)ReftimesPerMillisecond * 1000 / _waveFormat.SampleRate * bufferSize) + 0.5);
                            _audioClient = AudioClient.FromMMDevice(Device);
                            if (defaultFormat == null)
                                _waveFormat = _audioClient.MixFormat;
                            _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), hnsRequestedDuration, hnsRequestedDuration, _waveFormat, Guid.Empty);
                        }
                        else
                        {
                            throw; //rethrow unexpected errors instead of swallowing them
                        }
                    }
                }
                else
                {
                    _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlagsEventCallback | GetStreamFlags(), 0, 0, _waveFormat, Guid.Empty);
                    if(_audioClient.StreamLatency > 0)
                    {
                        _latency = (int) (_audioClient.StreamLatency / ReftimesPerMillisecond);
                    }
                }

                _eventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                _audioClient.SetEventHandle(_eventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }

            _audioCaptureClient = AudioCaptureClient.FromAudioClient(_audioClient);
        }
Code example #11
File: AudioClientTests.cs Project: hoangduit/cscore
        private bool IsFormatSupported(WaveFormat waveFormat, AudioClientShareMode sharemode, AudioClient audioClient)
        {
            return audioClient.IsFormatSupported(sharemode, waveFormat);
        }
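A minimal call sketch for the wrapper above, probing whether 16-bit, 44.1 kHz stereo PCM is accepted in shared mode. The candidate format and share mode are illustrative, and audioClient is assumed to come from AudioClient.FromMMDevice as in the other examples on this page.

        WaveFormat candidate = new WaveFormatExtensible(44100, 16, 2, AudioSubTypes.Pcm);
        bool supported = IsFormatSupported(candidate, AudioClientShareMode.Shared, audioClient);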
Code example #12
File: WasapiOut.cs Project: CheViana/AudioLab
        private WaveFormat SetupWaveFormat(WaveFormat waveFormat, AudioClient audioClient)
        {
            WaveFormatExtensible closestMatch;
            WaveFormat finalFormat = waveFormat;
            if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
            {
                if (closestMatch == null)
                {
                    WaveFormat mixformat = audioClient.GetMixFormat();
                    if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
                    {
                        WaveFormatExtensible[] possibleFormats = new WaveFormatExtensible[]
                        {
                            new WaveFormatExtensible(waveFormat.SampleRate, 32, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate, 24, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 16, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 8,  waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm)
                        };

                        if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                        {
                            //no format found...
                            possibleFormats = new WaveFormatExtensible[]
                            {
                                new WaveFormatExtensible(waveFormat.SampleRate, 32, 2, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                                new WaveFormatExtensible(waveFormat.SampleRate, 24, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 16, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 8,  2, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 32, 1, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                                new WaveFormatExtensible(waveFormat.SampleRate, 24, 1, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 16, 1, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 8,  1, DMO.MediaTypes.MEDIATYPE_Pcm)
                            };

                            if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                            {
                                throw new NotSupportedException("Could not find a supported format.");
                            }
                        }
                    }

                    finalFormat = mixformat;
                    //todo: implement channel matrix
                    DmoResampler resampler = new DmoResampler(_source, finalFormat);
                    resampler.Quality = 60;
                    _source = resampler;
                    _createdResampler = true;
                }
                else
                {
                    finalFormat = closestMatch;
                }
            }

            return finalFormat;
        }
Code example #13
File: WasapiOut.cs Project: CheViana/AudioLab
        private void InitializeInternal()
        {
            _audioClient = AudioClient.FromMMDevice(Device);
            _outputFormat = SetupWaveFormat(_source.WaveFormat, _audioClient);

            long latency = _latency * 10000;

            if (!_eventSync)
            {
                _audioClient.Initialize(_shareMode, AudioClientStreamFlags.None, latency, 0, _outputFormat, Guid.Empty);
            }
            else //event sync
            {
                if (_shareMode == AudioClientShareMode.Exclusive) //exclusive
                {
                    _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlags_EventCallback, latency, latency, _outputFormat, Guid.Empty);
                }
                else //shared
                {
                    _audioClient.Initialize(_shareMode, AudioClientStreamFlags.StreamFlags_EventCallback, 0, 0, _outputFormat, Guid.Empty);
                    _latency = (int)(_audioClient.StreamLatency / 10000);
                }

                _eventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                _audioClient.SetEventHandle(_eventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }

            _renderClient = AudioRenderClient.FromAudioClient(_audioClient);
            _simpleAudioVolume = SimpleAudioVolume.FromAudioClient(_audioClient);
            _simpleAudioVolume.MasterVolume = 1f;
        }