Example #1
        /// <summary>
        /// Initializes the audio client, audio meter and endpoint volume for the current audio device.
        /// </summary>
        private void InitializeAudioClient()
        {
            //Get Audio Client from device
            COMResult result = _audioDevice.Activate(typeof(IAudioClient).GUID, 0, IntPtr.Zero, out object obj);

            _audioClient = (IAudioClient)obj;
            //Get Audio Meter from device
            result      = _audioDevice.Activate(typeof(IAudioMeterInformation).GUID, 0, IntPtr.Zero, out obj);
            _audioMeter = (IAudioMeterInformation)obj;
            //Get Audio End Point
            result = _audioDevice.Activate(typeof(IAudioEndpointVolume).GUID, 0, IntPtr.Zero, out obj);
            _audioEndpointVolume = (IAudioEndpointVolume)obj;
            _audioEndpointVolume.RegisterControlChangeNotify(classCallBack);
            //Initialize Audio Client.
            _sessionGuid = new Guid();
            result       = _audioClient.GetMixFormat(out waveFormat);
            AudioClientStreamFlags streamFlag = AudioClientStreamFlags.None;

            if (_audioDataFlow == AudioDataFlow.eRender)
            {
                streamFlag = AudioClientStreamFlags.Loopback;
            }
            result = _audioClient.Initialize(AudioClientMode.Shared, streamFlag, 10000000, 0, waveFormat, ref _sessionGuid);
            result = _audioClient.Start();
            //Change wave format here
            SetupWaveFormat(waveFormat);

            result = _audioEndpointVolume.GetChannelCount(out _channelCount);
        }
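The sample above asks Initialize for a 10,000,000-unit buffer. IAudioClient works in REFERENCE_TIME, i.e. 100-nanosecond units, so that request is exactly one second. A minimal, self-contained sketch of the conversion (the ReferenceTime helper is illustrative and not part of the sample above):

        // Minimal sketch: converting between milliseconds and the 100-nanosecond
        // REFERENCE_TIME units that IAudioClient.Initialize expects.
        internal static class ReferenceTime
        {
            private const long UnitsPerMillisecond = 10_000; // 1 ms = 10,000 * 100 ns

            public static long FromMilliseconds(long ms) => ms * UnitsPerMillisecond;

            public static double ToMilliseconds(long hns) => hns / (double)UnitsPerMillisecond;
        }

        // ReferenceTime.FromMilliseconds(1000) == 10_000_000, the value passed above;
        // ReferenceTime.ToMilliseconds(10_000_000) == 1000.0.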
Example #2
 /// <summary>
 ///     Initializes the audio stream.
 /// </summary>
 /// <param name="shareMode">
 ///     The sharing mode for the connection. Through this parameter, the client tells the audio engine
 ///     whether it wants to share the audio endpoint device with other clients.
 /// </param>
 /// <param name="streamFlags">Flags to control creation of the stream.</param>
 /// <param name="hnsBufferDuration">
 ///     The buffer capacity as a time value (expressed in 100-nanosecond units). This parameter
 ///     contains the buffer size that the caller requests for the buffer that the audio application will share with the
 ///     audio engine (in shared mode) or with the endpoint device (in exclusive mode). If the call succeeds, the method
 ///     allocates a buffer that is at least this large.
 /// </param>
 /// <param name="hnsPeriodicity">
 ///     The device period. This parameter can be nonzero only in exclusive mode. In shared mode,
 ///     always set this parameter to 0. In exclusive mode, this parameter specifies the requested scheduling period for
 ///     successive buffer accesses by the audio endpoint device. If the requested device period lies outside the range that
 ///     is set by the device's minimum period and the system's maximum period, then the method clamps the period to that
 ///     range. If this parameter is 0, the method sets the device period to its default value. To obtain the default device
 ///     period, call the <see cref="GetDevicePeriodNative" /> method. If the
 ///     <see cref="AudioClientStreamFlags.StreamFlagsEventCallback" /> stream flag is set and
 ///     <see cref="AudioClientShareMode.Exclusive" /> is set as the <paramref name="shareMode" />, then
 ///     <paramref name="hnsPeriodicity" /> must be nonzero and equal to <paramref name="hnsBufferDuration" />.
 /// </param>
 /// <param name="waveFormat">
 ///     The format descriptor. For more information, see
 ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
 /// </param>
 /// <param name="audioSessionGuid">
 ///     A value that identifies the audio session that the stream belongs to. If the
 ///     <see cref="Guid" /> identifies a session that has been previously opened, the method adds the stream to that
 ///     session. If the GUID does not identify an existing session, the method opens a new session and adds the stream to
 ///     that session. The stream remains a member of the same session for its lifetime. Use <see cref="Guid.Empty" /> to
 ///     use the default session.
 /// </param>
 /// <remarks>
 ///     For more information, see
 ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
 /// </remarks>
 public void Initialize(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags,
                        long hnsBufferDuration, long hnsPeriodicity, WaveFormat waveFormat, Guid audioSessionGuid)
 {
     CoreAudioAPIException.Try(
         InitializeNative(shareMode, streamFlags, hnsBufferDuration, hnsPeriodicity, waveFormat, audioSessionGuid),
         InterfaceName, "Initialize");
 }
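The remarks above encode two rules that are easy to get wrong at the call site: in shared mode the periodicity must be 0, while in exclusive mode with StreamFlagsEventCallback it must be nonzero and equal to the buffer duration. A hedged sketch of both calls, assuming the CSCore-style AudioClient wrapper whose Initialize is shown above; client, mixFormat and devicePeriod are supplied by the caller:

 // Sketch only - maps the shareMode/hnsPeriodicity rules from the remarks above
 // onto concrete Initialize calls, assuming the CSCore-style wrapper shown above.
 using System;
 using CSCore;
 using CSCore.CoreAudioAPI;

 internal static class InitializeExamples
 {
     public static void InitializeShared(AudioClient client, WaveFormat mixFormat)
     {
         // Shared mode: hnsPeriodicity must be 0; the engine uses the device period.
         client.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.None,
                           10_000_000 /* 1 s in 100-ns units */, 0, mixFormat, Guid.Empty);
     }

     public static void InitializeExclusiveEventDriven(AudioClient client, WaveFormat format, long devicePeriod)
     {
         // Exclusive + event callback: hnsPeriodicity must be nonzero and equal
         // to hnsBufferDuration (see the hnsPeriodicity remarks above).
         client.Initialize(AudioClientShareMode.Exclusive,
                           AudioClientStreamFlags.StreamFlagsEventCallback,
                           devicePeriod, devicePeriod, format, Guid.Empty);
     }
 }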
Example #3
        private void InitializeAudio(AudioDataFlow audioFlow, IMMDeviceEnumerator deviceEnumerator)
        {
            //Get Audio Device
            COMResult result = deviceEnumerator.GetDefaultAudioEndpoint(audioFlow, EndPointRole.eMultimedia, out _audioDevice);

            //Register End point notification
            _notifyClient = new MMNotificationClient();
            result        = deviceEnumerator.RegisterEndpointNotificationCallback(_notifyClient);
            //Get Audio Client from device
            result       = _audioDevice.Activate(typeof(IAudioClient).GUID, 0, IntPtr.Zero, out object obj);
            _audioClient = (IAudioClient)obj;
            //Get Audio Meter from device
            result      = _audioDevice.Activate(typeof(IAudioMeterInformation).GUID, 0, IntPtr.Zero, out obj);
            _audioMeter = (IAudioMeterInformation)obj;
            //Initialize Audio Client.
            _sessionGuid = new Guid();
            result       = _audioClient.GetMixFormat(out waveFormat);
            AudioClientStreamFlags streamFlag = AudioClientStreamFlags.None;

            if (audioFlow == AudioDataFlow.eRender)
            {
                streamFlag = AudioClientStreamFlags.Loopback;
            }
            result = _audioClient.Initialize(AudioClientMode.Shared, streamFlag, 10000000, 0, waveFormat, ref _sessionGuid);
            //Get Capture Client.
            result = _audioClient.GetService(typeof(IAudioCaptureClient).GUID, out obj);
            Marshal.ThrowExceptionForHR((int)result);
            _audioCaptureClient = (IAudioCaptureClient)obj;
            result = _audioClient.Start();
            //Change wave format here
            SetupWaveFormat(waveFormat);
        }
Example #4
        /// <summary>
        /// Initializes the Audio Client
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="streamFlags">Stream Flags</param>
        /// <param name="bufferDuration">Buffer Duration</param>
        /// <param name="periodicity">Periodicity</param>
        /// <param name="waveFormat">Wave Format</param>
        /// <param name="audioSessionGuid">Audio Session GUID (use Guid.Empty for the default session)</param>
        public void Initialize(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags, long bufferDuration, long periodicity, WaveFormat waveFormat, Guid audioSessionGuid)
        {
            this.shareMode = shareMode;
            int errorCode = this.audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, waveFormat, ref audioSessionGuid);

            Marshal.ThrowExceptionForHR(errorCode);
            this.mixFormat = null;
        }
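The sample above funnels the raw HRESULT through Marshal.ThrowExceptionForHR. A tiny self-contained sketch of that pattern, using the real AUDCLNT_E_UNSUPPORTED_FORMAT code (0x88890008) as the failing HRESULT:

        // Self-contained sketch of the HRESULT-to-exception pattern used above:
        // a failing HRESULT with no special mapping becomes a COMException.
        using System;
        using System.Runtime.InteropServices;

        internal static class HresultSketch
        {
            private const int AUDCLNT_E_UNSUPPORTED_FORMAT = unchecked((int)0x88890008);

            private static void Main()
            {
                try
                {
                    Marshal.ThrowExceptionForHR(AUDCLNT_E_UNSUPPORTED_FORMAT);
                }
                catch (COMException ex)
                {
                    Console.WriteLine($"Initialize failed: 0x{ex.HResult:X8}"); // 0x88890008
                }
            }
        }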
Example #5
 /// <summary>
 ///     Initializes the audio stream.
 /// </summary>
 /// <param name="shareMode">
 ///     The sharing mode for the connection. Through this parameter, the client tells the audio engine
 ///     whether it wants to share the audio endpoint device with other clients.
 /// </param>
 /// <param name="streamFlags">Flags to control creation of the stream.</param>
 /// <param name="hnsBufferDuration">
 ///     The buffer capacity as a time value (expressed in 100-nanosecond units). This parameter
 ///     contains the buffer size that the caller requests for the buffer that the audio application will share with the
 ///     audio engine (in shared mode) or with the endpoint device (in exclusive mode). If the call succeeds, the method
 ///     allocates a buffer that is at least this large.
 /// </param>
 /// <param name="hnsPeriodicity">
 ///     The device period. This parameter can be nonzero only in exclusive mode. In shared mode,
 ///     always set this parameter to 0. In exclusive mode, this parameter specifies the requested scheduling period for
 ///     successive buffer accesses by the audio endpoint device. If the requested device period lies outside the range that
 ///     is set by the device's minimum period and the system's maximum period, then the method clamps the period to that
 ///     range. If this parameter is 0, the method sets the device period to its default value. To obtain the default device
 ///     period, call the <see cref="GetDevicePeriodNative" /> method. If the
 ///     <see cref="AudioClientStreamFlags.StreamFlagsEventCallback" /> stream flag is set and
 ///     <see cref="AudioClientShareMode.Exclusive" /> is set as the <paramref name="shareMode" />, then
 ///     <paramref name="hnsPeriodicity" /> must be nonzero and equal to <paramref name="hnsBufferDuration" />.
 /// </param>
 /// <param name="waveFormat">
 ///     Pointer to the format descriptor. For more information, see
 ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
 /// </param>
 /// <param name="audioSessionGuid">
 ///     A value that identifies the audio session that the stream belongs to. If the
 ///     <see cref="Guid" /> identifies a session that has been previously opened, the method adds the stream to that
 ///     session. If the GUID does not identify an existing session, the method opens a new session and adds the stream to
 ///     that session. The stream remains a member of the same session for its lifetime. Use <see cref="Guid.Empty" /> to
 ///     use the default session.
 /// </param>
 /// <returns>HRESULT</returns>
 /// <remarks>
 ///     For more information, see
 ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
 /// </remarks>
 public unsafe int InitializeNative(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags,
                                    long hnsBufferDuration, long hnsPeriodicity,
                                    IntPtr waveFormat, Guid audioSessionGuid)
 {
     return(InteropCalls.CallI(UnsafeBasePtr, shareMode, streamFlags, hnsBufferDuration, hnsPeriodicity,
                               waveFormat.ToPointer(), audioSessionGuid,
                               ((void **)(*(void **)UnsafeBasePtr))[3]));
 }
Example #6
        /// <summary>
        /// The Initialize method initializes the audio stream.
        /// </summary>
        public unsafe int InitializeNative(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags, long hnsBufferDuration, long hnsPeriodicity,
                                           WaveFormat waveFormat, Guid audioSessionGuid)
        {
            var hWaveFormat = GCHandle.Alloc(waveFormat, GCHandleType.Pinned);
            int result      = -1;

            result = InteropCalls.CallI(_basePtr, shareMode, streamFlags, hnsBufferDuration, hnsPeriodicity, hWaveFormat.AddrOfPinnedObject().ToPointer(), audioSessionGuid, ((void **)(*(void **)_basePtr))[3]);
            hWaveFormat.Free();
            return(result);
        }
Example #7
 /// <summary>
 /// Initialize the Audio Client
 /// </summary>
 /// <param name="shareMode">Share Mode</param>
 /// <param name="streamFlags">Stream Flags</param>
 /// <param name="bufferDuration">Buffer Duration</param>
 /// <param name="periodicity">Periodicity</param>
 /// <param name="waveFormat">Wave Format</param>
 /// <param name="audioSessionGuid">Audio Session GUID (use Guid.Empty for the default session)</param>
 public void Initialize(AudioClientShareMode shareMode,
                        AudioClientStreamFlags streamFlags,
                        long bufferDuration,
                        long periodicity,
                        WaveFormat waveFormat,
                        Guid audioSessionGuid)
 {
     audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, waveFormat, ref audioSessionGuid);
     // may have changed the mix format so reset it
     mixFormat = null;
 }
Example #8
 /// <summary>
 /// Initialize the Audio Client
 /// </summary>
 /// <param name="shareMode">Share Mode</param>
 /// <param name="streamFlags">Stream Flags</param>
 /// <param name="bufferDuration">Buffer Duration</param>
 /// <param name="periodicity">Periodicity</param>
 /// <param name="waveFormat">Wave Format</param>
 /// <param name="audioSessionGuid">Audio Session GUID (use Guid.Empty for the default session)</param>
 public void Initialize(AudioClientShareMode shareMode,
     AudioClientStreamFlags streamFlags,
     long bufferDuration,
     long periodicity,
     WaveFormat waveFormat,
     Guid audioSessionGuid)
 {
     audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, waveFormat, ref audioSessionGuid);
     // may have changed the mix format so reset it
     mixFormat = null;
 }
Example #9
 /// <summary>
 /// Initializes the Audio Client
 /// </summary>
 /// <param name="shareMode">Share Mode</param>
 /// <param name="streamFlags">Stream Flags</param>
 /// <param name="bufferDuration">Buffer Duration</param>
 /// <param name="periodicity">Periodicity</param>
 /// <param name="waveFormat">Wave Format</param>
 /// <param name="audioSessionGuid">Audio Session GUID (use Guid.Empty for the default session)</param>
 public void Initialize(AudioClientShareMode shareMode,
     AudioClientStreamFlags streamFlags,
     long bufferDuration,
     long periodicity,
     WaveFormat waveFormat,
     Guid audioSessionGuid)
 {
     this.shareMode = shareMode;
     int hresult = audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, waveFormat, ref audioSessionGuid);
     Marshal.ThrowExceptionForHR(hresult);
     // may have changed the mix format so reset it
     mixFormat = null;
 }
Example #10
        /// <summary>
        /// Initializes the Audio Client
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="streamFlags">Stream Flags</param>
        /// <param name="bufferDuration">Buffer Duration</param>
        /// <param name="periodicity">Periodicity</param>
        /// <param name="waveFormat">Wave Format</param>
        /// <param name="audioSessionGuid">Audio Session GUID (use Guid.Empty for the default session)</param>
        public void Initialize(AudioClientShareMode shareMode,
                               AudioClientStreamFlags streamFlags,
                               long bufferDuration,
                               long periodicity,
                               WaveFormatProvider waveFormat,
                               Guid audioSessionGuid)
        {
            int hresult = audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, waveFormat, ref audioSessionGuid);

            Marshal.ThrowExceptionForHR(hresult);
            // may have changed the mix format so reset it
            mixFormat = null;
        }
Example #11
        private void selectDeviceImpl(string devId)
        {
            releaseDevice();

            _capDevice = _devices.GetDevice(devId.Trim());
            int idx = _deviceInfos.FindIndex((di) => { return(di.DeviceId == devId); });

            if (_capDevice == null)
            {
#warning Missing exception handling
            }
            _capDeviceId = _capDevice.Id;

            // Decide on an initialization method suited to the device
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;
            if (_capDevice.DataFlow == EDataFlow.eRender)
            {
                streamFlags = AudioClientStreamFlags.Loopback |
                              AudioClientStreamFlags.EventCallback; // in practice this event never fires
            }
            // Format
            if (_audioClient != null)
            {
                _capDevice.ReleaseAudioClient();
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat   = _audioClient.MixFormat;
                _pitchAnalyzer.SampleFrequency = (double)(_capFormat.nSamplesPerSec);

                // Initialize
                _audioClient.Initialize(AudioClientShareMode.Shared,
                                        streamFlags, 300 /*ms*/ * 10000, 0, _capFormat, Guid.Empty);
                _capClient = _audioClient.AudioCaptureClient;

                // Fire the DeviceSelected event
                DeviceSelectedEventHandler del = DeviceSelected;
                if (del != null)
                {
                    del.Invoke(this, new DeviceSelectedEventArgs(_capDevice, idx));
                }
            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
#warning Error handling missing
                _audioClient = null;
                _capClient   = null;
            }
        }
Example #12
        /// <summary>
        ///     Initializes the audio stream.
        /// </summary>
        /// <param name="shareMode">
        ///     The sharing mode for the connection. Through this parameter, the client tells the audio engine
        ///     whether it wants to share the audio endpoint device with other clients.
        /// </param>
        /// <param name="streamFlags">Flags to control creation of the stream.</param>
        /// <param name="hnsBufferDuration">
        ///     The buffer capacity as a time value (expressed in 100-nanosecond units). This parameter
        ///     contains the buffer size that the caller requests for the buffer that the audio application will share with the
        ///     audio engine (in shared mode) or with the endpoint device (in exclusive mode). If the call succeeds, the method
        ///     allocates a buffer that is at least this large.
        /// </param>
        /// <param name="hnsPeriodicity">
        ///     The device period. This parameter can be nonzero only in exclusive mode. In shared mode,
        ///     always set this parameter to 0. In exclusive mode, this parameter specifies the requested scheduling period for
        ///     successive buffer accesses by the audio endpoint device. If the requested device period lies outside the range that
        ///     is set by the device's minimum period and the system's maximum period, then the method clamps the period to that
        ///     range. If this parameter is 0, the method sets the device period to its default value. To obtain the default device
        ///     period, call the <see cref="GetDevicePeriodNative" /> method. If the
        ///     <see cref="AudioClientStreamFlags.StreamFlagsEventCallback" /> stream flag is set and
        ///     <see cref="AudioClientShareMode.Exclusive" /> is set as the <paramref name="shareMode" />, then
        ///     <paramref name="hnsPeriodicity" /> must be nonzero and equal to <paramref name="hnsBufferDuration" />.
        /// </param>
        /// <param name="waveFormat">
        ///     The format descriptor. For more information, see
        ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
        /// </param>
        /// <param name="audioSessionGuid">
        ///     A value that identifies the audio session that the stream belongs to. If the
        ///     <see cref="Guid" /> identifies a session that has been previously opened, the method adds the stream to that
        ///     session. If the GUID does not identify an existing session, the method opens a new session and adds the stream to
        ///     that session. The stream remains a member of the same session for its lifetime. Use <see cref="Guid.Empty" /> to
        ///     use the default session.
        /// </param>
        /// <returns>HRESULT</returns>
        /// <remarks>
        ///     For more information, see
        ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
        /// </remarks>
        public unsafe int InitializeNative(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags,
                                           long hnsBufferDuration, long hnsPeriodicity,
                                           WaveFormat waveFormat, Guid audioSessionGuid)
        {
            GCHandle hWaveFormat = GCHandle.Alloc(waveFormat, GCHandleType.Pinned);

            try
            {
                return(InteropCalls.CallI(UnsafeBasePtr, shareMode, streamFlags, hnsBufferDuration, hnsPeriodicity,
                                          hWaveFormat.AddrOfPinnedObject().ToPointer(), audioSessionGuid,
                                          ((void **)(*(void **)UnsafeBasePtr))[3]));
            }
            finally
            {
                hWaveFormat.Free();
            }
        }
Example #13
        /// <summary>
        /// Initialize the Audio Client
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="streamFlags">Stream Flags</param>
        /// <param name="bufferDuration">Buffer Duration</param>
        /// <param name="periodicity">Periodicity</param>
        /// <param name="waveFormat">Wave Format</param>
        /// <param name="audioSessionGuid">Audio Session GUID (use Guid.Empty for the default session)</param>
        public int Initialize(AudioClientShareMode shareMode,
                              AudioClientStreamFlags streamFlags,
                              long bufferDuration,
                              long periodicity,
                              ref WaveFormatExtensible waveFormat,
                              Guid audioSessionGuid)
        {
            int hresult = 0;

            hresult = audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, ref waveFormat, ref audioSessionGuid);

            if (hresult != 0)
            {
                this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01, "Error Code in AudioClient::Initialize: " + hresult);
            }
            // may have changed the mix format so reset it
            mixFormat = new WaveFormatExtensible();
            return(hresult);
        }
Example #14
        private void InitializeCaptureDevice()
        {
            if (this.initialized)
            {
                return;
            }
            long num = 1000000L;

            if (!this.audioClient.IsFormatSupported(this.ShareMode, this.waveFormat))
            {
                throw new ArgumentException("Unsupported Wave Format");
            }
            AudioClientStreamFlags audioClientStreamFlags = this.GetAudioClientStreamFlags();

            if (this.isUsingEventSync)
            {
                if (this.ShareMode == AudioClientShareMode.Shared)
                {
                    this.audioClient.Initialize(this.ShareMode, AudioClientStreamFlags.EventCallback, num, 0L, this.waveFormat, Guid.Empty);
                }
                else
                {
                    this.audioClient.Initialize(this.ShareMode, AudioClientStreamFlags.EventCallback, num, num, this.waveFormat, Guid.Empty);
                }
                this.frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                this.audioClient.SetEventHandle(this.frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                this.audioClient.Initialize(this.ShareMode, audioClientStreamFlags, num, 0L, this.waveFormat, Guid.Empty);
            }
            int bufferSize = this.audioClient.BufferSize;

            this.bytesPerFrame = this.waveFormat.Channels * this.waveFormat.BitsPerSample / 8;
            this.recordBuffer  = new byte[bufferSize * this.bytesPerFrame];
            this.initialized   = true;
        }
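The buffer sizing at the end of the sample above is worth calling out: BufferSize is a frame count, and one frame holds one sample per channel. A small sketch of the same arithmetic (helper name illustrative):

        // Sketch of the record-buffer sizing used above: bytes per frame is
        // channels * bitsPerSample / 8, and the buffer holds BufferSize frames.
        internal static class CaptureBufferMath
        {
            public static int RecordBufferBytes(int bufferSizeFrames, int channels, int bitsPerSample)
            {
                int bytesPerFrame = channels * bitsPerSample / 8;
                return bufferSizeFrames * bytesPerFrame;
            }
        }

        // e.g. 48,000 frames of 16-bit stereo: 48,000 * (2 * 16 / 8) = 192,000 bytes.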
Example #15
File: AudioClient.cs Project: tihilv/Vkm
        public void Initialize(
            AudioClientShareMode shareMode,
            AudioClientStreamFlags streamFlags,
            long bufferDuration, long periodicity,
            WAVEFORMATEXTENSIBLE format, Guid audioSessionGuid)
        {
            int hr = _RealClient.Initialize(shareMode, streamFlags, bufferDuration, periodicity, format, ref audioSessionGuid);

            Marshal.ThrowExceptionForHR(hr);

            if ((streamFlags & AudioClientStreamFlags.EventCallback) != 0)
            {
                _audioSampleReady = new AutoResetEvent(false);
                IntPtr eventHandle = CreateEventEx(IntPtr.Zero, "audioSampleReady", CreateEventFlags.None, AccessRight.Synchronize | AccessRight.EventModifyState);
                _audioSampleReady.SafeWaitHandle = new Microsoft.Win32.SafeHandles.SafeWaitHandle(eventHandle, true);

                hr = _RealClient.SetEventHandle(eventHandle);
                Marshal.ThrowExceptionForHR(hr);

                _audioSampleReadyRegistered = ThreadPool.RegisterWaitForSingleObject(
                    _audioSampleReady, new WaitOrTimerCallback(sampleReady), null, -1, false);
            }
            _isInitialized = true;
        }
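The event-callback branch above registers a kernel event with the audio engine and then hands it to the thread pool. The same wait/callback wiring can be sketched with purely managed types and no audio APIs (names illustrative; in the real code the engine signals the handle after SetEventHandle):

        // Self-contained sketch of the wait/callback pattern used above.
        using System;
        using System.Threading;

        internal static class EventCallbackSketch
        {
            private static void Main()
            {
                var sampleReady = new AutoResetEvent(false);

                // Counterpart of RegisterWaitForSingleObject(_audioSampleReady, sampleReady, ...).
                RegisteredWaitHandle registration = ThreadPool.RegisterWaitForSingleObject(
                    sampleReady,
                    (state, timedOut) => Console.WriteLine("buffer ready - drain the capture client here"),
                    null, Timeout.Infinite, executeOnlyOnce: false);

                sampleReady.Set();   // the audio engine does this in the real code
                Thread.Sleep(100);   // give the thread-pool callback a moment to run

                registration.Unregister(sampleReady);
            }
        }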
Example #16
        public void SelectDevice(string devId)
        {
            _capDevice = _devices.GetDevice(devId.Trim());

            if (_capDevice == null)
            {
                _audioClient = null;
                _capClient   = null;
                return;
            }

            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode   shareMode   = AudioClientShareMode.Exclusive;
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;

            if (_audioClient != null)
            {
                _capDevice.ReleaseAudioClient();
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat   = _audioClient.MixFormat;

                _capFormat.wFormatTag     = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels      = 2;
                _capFormat.nSamplesPerSec = 16000;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat      = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign         = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec     = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize
                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                    {
                        _capFormat = tmpFmt;
                    }
                    _audioClient.Initialize(shareMode, streamFlags,
                                            tmp2, tmp2,
                                            _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    try
                    {
                        AudioClientError error = (AudioClientError)ex.ErrorCode;
                        switch (error)
                        {
                        case AudioClientError.BufferSizeNotAligned:
                            uint bufSize = _audioClient.BufferSize;
                            tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                            _audioClient.Initialize(shareMode,
                                                    streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                            break;

                        case AudioClientError.UnsupportedFormat:

                            break;
                        }
                    }
                    catch (InvalidCastException)
                    {
                    }
                }

                _capClient = _audioClient.AudioCaptureClient;
            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
                _audioClient = null;
                _capClient   = null;
                throw; // rethrow, preserving the original stack trace
            }
        }
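The BufferSizeNotAligned branch above recomputes the period from the frame count the driver actually allocated before retrying Initialize. The arithmetic, isolated (helper name illustrative):

        // Sketch of the exclusive-mode realignment used above: after
        // AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED, derive a new period from BufferSize.
        internal static class AlignedPeriodMath
        {
            public static long AlignedPeriod(uint bufferSizeFrames, uint samplesPerSec)
            {
                // frames / frames-per-second = seconds; multiply by 1e7 for 100-ns units.
                return (long)(10_000_000.0 * bufferSizeFrames / samplesPerSec + 0.5);
            }
        }

        // e.g. 441 frames at 44,100 Hz -> 100,000 units, i.e. exactly 10 ms.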
Example #17
        public void Initialize(
            AudioClientShareMode shareMode,
            AudioClientStreamFlags streamFlags,
            long bufferDuration, long periodicity,
            WAVEFORMATEXTENSIBLE format, Guid audioSessionGuid)
        {
            int hr = _RealClient.Initialize(shareMode, streamFlags, bufferDuration, periodicity, format, ref audioSessionGuid);
            Marshal.ThrowExceptionForHR(hr);

            if ((streamFlags & AudioClientStreamFlags.EventCallback) != 0)
            {
                _audioSampleReady = new AutoResetEvent(false);
                IntPtr eventHandle = CreateEventEx(IntPtr.Zero, "audioSampleReady", CreateEventFlags.None, AccessRight.Synchronize | AccessRight.EventModifyState);
                _audioSampleReady.SafeWaitHandle = new Microsoft.Win32.SafeHandles.SafeWaitHandle(eventHandle, true);

                hr = _RealClient.SetEventHandle(eventHandle);
                Marshal.ThrowExceptionForHR(hr);

                _audioSampleReadyRegistered = ThreadPool.RegisterWaitForSingleObject(
                    _audioSampleReady, new WaitOrTimerCallback(sampleReady), null, -1, false);
            }
            _isInitialized = true;
        }
Example #18
 /// <summary>
 ///     Initializes the audio stream.
 /// </summary>
 /// <param name="shareMode">
 ///     The sharing mode for the connection. Through this parameter, the client tells the audio engine
 ///     whether it wants to share the audio endpoint device with other clients.
 /// </param>
 /// <param name="streamFlags">Flags to control creation of the stream.</param>
 /// <param name="hnsBufferDuration">
 ///     The buffer capacity as a time value (expressed in 100-nanosecond units). This parameter
 ///     contains the buffer size that the caller requests for the buffer that the audio application will share with the
 ///     audio engine (in shared mode) or with the endpoint device (in exclusive mode). If the call succeeds, the method
 ///     allocates a buffer that is at least this large.
 /// </param>
 /// <param name="hnsPeriodicity">
 ///     The device period. This parameter can be nonzero only in exclusive mode. In shared mode,
 ///     always set this parameter to 0. In exclusive mode, this parameter specifies the requested scheduling period for
 ///     successive buffer accesses by the audio endpoint device. If the requested device period lies outside the range that
 ///     is set by the device's minimum period and the system's maximum period, then the method clamps the period to that
 ///     range. If this parameter is 0, the method sets the device period to its default value. To obtain the default device
 ///     period, call the <see cref="GetDevicePeriodNative" /> method. If the
 ///     <see cref="AudioClientStreamFlags.StreamFlagsEventCallback" /> stream flag is set and
 ///     <see cref="AudioClientShareMode.Exclusive" /> is set as the <paramref name="shareMode" />, then
 ///     <paramref name="hnsPeriodicity" /> must be nonzero and equal to <paramref name="hnsBufferDuration" />.
 /// </param>
 /// <param name="waveFormat">
 ///     Pointer to the format descriptor. For more information, see
 ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
 /// </param>
 /// <param name="audioSessionGuid">
 ///     A value that identifies the audio session that the stream belongs to. If the
 ///     <see cref="Guid" /> identifies a session that has been previously opened, the method adds the stream to that
 ///     session. If the GUID does not identify an existing session, the method opens a new session and adds the stream to
 ///     that session. The stream remains a member of the same session for its lifetime. Use <see cref="Guid.Empty" /> to
 ///     use the default session.
 /// </param>
 /// <returns>HRESULT</returns>
 /// <remarks>
 ///     For more information, see
 ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
 /// </remarks>
 public unsafe int InitializeNative(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags,
     long hnsBufferDuration, long hnsPeriodicity,
     IntPtr waveFormat, Guid audioSessionGuid)
 {
     return InteropCalls.CallI(UnsafeBasePtr, shareMode, streamFlags, hnsBufferDuration, hnsPeriodicity,
         waveFormat.ToPointer(), audioSessionGuid,
         ((void**) (*(void**) UnsafeBasePtr))[3]);
 }
Example #19
 /// <summary>
 /// The Initialize method initializes the audio stream.
 /// </summary>
 public void Initialize(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags, long hnsBufferDuration, long hnsPeriodicity, WaveFormat waveFormat, Guid audioSessionGuid)
 {
     CoreAudioAPIException.Try(InitializeNative(shareMode, streamFlags, hnsBufferDuration, hnsPeriodicity, waveFormat, audioSessionGuid), InterfaceName, "Initialize");
 }
Example #20
 internal static unsafe int Calli(void *_basePtr, AudioClientShareMode shareMode,
                                  AudioClientStreamFlags streamFlags, long hnsBufferDuration, long hnsPeriodicity, void *p1,
                                  Guid audioSessionGuid, void *p2)
 {
     throw new NotImplementedException();
 }
Example #21
 /// <summary>
 /// The Initialize method initializes the audio stream.
 /// </summary>
 public unsafe int InitializeNative(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags, long hnsBufferDuration, long hnsPeriodicity,
                       WaveFormat waveFormat, Guid audioSessionGuid)
 {
     var hWaveFormat = GCHandle.Alloc(waveFormat, GCHandleType.Pinned);
     int result = -1;
     result = InteropCalls.CallI(_basePtr, shareMode, streamFlags, hnsBufferDuration, hnsPeriodicity, hWaveFormat.AddrOfPinnedObject().ToPointer(), audioSessionGuid, ((void**)(*(void**)_basePtr))[3]);
     hWaveFormat.Free();
     return result;
 }
Example #22
 internal static unsafe int CallI(void* _basePtr, AudioClientShareMode shareMode,
     AudioClientStreamFlags streamFlags, long hnsBufferDuration, long hnsPeriodicity, void* p1,
     Guid audioSessionGuid, void* p2)
 {
     throw new NotImplementedException();
 }
Example #23
        private void selectDeviceImpl(string devId)
        {
            if (_capDevice != null && _capDevice.Id == devId)
            {
                return;
            }

            releaseDeviceImpl();

            _capDevice = _devices.GetDevice(devId.Trim());
            int idx = _deviceInfos.FindIndex((di) => { return(di.DeviceId == devId); });

            if (_capDevice == null)
            {
#warning Missing exception handling
                _audioClient = null;
                _capClient   = null;
                return;
            }
            _capDeviceId = _capDevice.Id;

            // Mode
            AudioClientShareMode shareMode = AudioClientShareMode.Shared;

            // Decide on an initialization method suited to the device
            AudioClientStreamFlags streamFlags = AudioClientStreamFlags.NoPersist;
            switch (shareMode)
            {
            case AudioClientShareMode.Shared:
                switch (_capDevice.DataFlow)
                {
                case EDataFlow.eCapture:
                    streamFlags = 0;
                    break;

                case EDataFlow.eRender:
                    streamFlags = AudioClientStreamFlags.Loopback;
                    break;
                }
                break;

            case AudioClientShareMode.Exclusive:
                streamFlags = AudioClientStreamFlags.NoPersist;
                break;
            }

            // Format
            if (_audioClient != null)
            {
                _capDevice.ReleaseAudioClient();
            }

            // Volume
            _masterVolume   = 0;
            _channelVolumes = new double[_capDevice.AudioMeterInformation.PeakValues.Count];
            var h = VolumeChanged;
            if (h != null)
            {
                h(this, new VolumeChangedEventArgs(_capDeviceId, _masterVolume, _channelVolumes));
            }

            try
            {
                _audioClient = _capDevice.AudioClient;
                _capFormat   = _audioClient.MixFormat;

                _capFormat.wFormatTag     = WaveFormatTag.WAVE_FORMAT_EXTENSIBLE;
                _capFormat.nChannels      = 1;
                _capFormat.nSamplesPerSec = 44100;
                _capFormat.wBitsPerSample = 16;
                _capFormat.SubFormat      = CoreAudioApi.AudioMediaSubtypes.MEDIASUBTYPE_PCM;

                _capFormat.wValidBitsPerSample = _capFormat.wBitsPerSample;
                _capFormat.nBlockAlign         = (ushort)(_capFormat.wBitsPerSample / 8.0 * _capFormat.nChannels);
                _capFormat.nAvgBytesPerSec     = _capFormat.nSamplesPerSec * _capFormat.nBlockAlign;

                long tmp1; long tmp2;
                _audioClient.GetDevicePeriod(out tmp1, out tmp2);

                // Initialize

                try
                {
                    WAVEFORMATEXTENSIBLE tmpFmt = new WAVEFORMATEXTENSIBLE();
                    if (!_audioClient.IsFormatSupported(shareMode, _capFormat, ref tmpFmt))
                    {
                        _capFormat = tmpFmt;
                    }
                    _audioClient.Initialize(shareMode,
                                            streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                }
                catch (System.Runtime.InteropServices.COMException ex)
                {
                    if ((uint)ex.ErrorCode == 0x88890019)
                    {
                        uint bufSize = _audioClient.BufferSize;
                        tmp2 = (long)((10000.0 * 1000 / _capFormat.nSamplesPerSec * bufSize) + 0.5);
                        _audioClient.Initialize(shareMode,
                                                streamFlags, tmp2, tmp2, _capFormat, Guid.Empty);
                    }
                }
                clearBuffer();

                _capClient = _audioClient.AudioCaptureClient;

                // Fire the DeviceSelected event
                var del = DeviceSelected;
                if (del != null)
                {
                    del.Invoke(this, new DeviceSelectedEventArgs(_capDevice, idx));
                }
            }
            catch (System.Runtime.InteropServices.COMException ex)
            {
                _audioClient = null;
                _capClient   = null;
                throw; // rethrow, preserving the original stack trace
            }
        }
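Several of the samples above build the same shared-mode loopback setup by hand. A hedged end-to-end sketch of the equivalent using NAudio's NAudio.CoreAudioApi wrappers follows; this is a different wrapper library than the ones shown above, and its API surface is assumed from NAudio rather than taken from the samples, so treat it as an illustration rather than a drop-in replacement:

        // Hedged sketch: shared-mode loopback initialization with NAudio's wrappers.
        using System;
        using NAudio.CoreAudioApi;

        internal static class LoopbackInitSketch
        {
            private static void Main()
            {
                var enumerator = new MMDeviceEnumerator();
                MMDevice device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

                AudioClient audioClient = device.AudioClient;
                var mixFormat = audioClient.MixFormat;

                // Shared mode + loopback, 1-second buffer in 100-ns units, periodicity 0,
                // default audio session (Guid.Empty) - mirroring the samples above.
                audioClient.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.Loopback,
                                       10_000_000, 0, mixFormat, Guid.Empty);

                AudioCaptureClient captureClient = audioClient.AudioCaptureClient;
                audioClient.Start();

                Console.WriteLine($"Capturing {mixFormat}, buffer = {audioClient.BufferSize} frames");

                audioClient.Stop();
                audioClient.Dispose();
            }
        }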