Example #1
        internal AudioClient(IAudioClient client, WaveFormat format = null)
        {
            this.Client = client;

            if (format == null)
            {
                // No format was supplied: fall back to the device mix format,
                // converted to 16-bit PCM with the same channel count and sample rate.
                IntPtr p;
                client.GetMixFormat(out p);
                format = Marshal.PtrToStructure(p, typeof(WaveFormat)) as WaveFormat;
                Marshal.FreeCoTaskMem(p);
                format = new WaveFormat(format.Channels, format.SampleRate, 16);
            }

            this.Format = format;
            try
            {
                var sessionId = Guid.Empty;
                this.Client.Initialize(AudioShareMode.Shared, AudioStreamFlags.EventCallback, 0, 0, format, ref sessionId);
            }
            catch (COMException ex)
            {
                if ((uint)ex.ErrorCode == ResultCodes.AudioClientFormatNotSupported)
                {
                    throw new WaveFormatException("Unsupported wave format.");
                }
                throw;
            }

            this.Initialize();
        }
Example #2
        /// <summary>
        /// Tries to resolve a valid format pointer for the audio client.
        /// </summary>
        /// <param name="audioClient">The audio client to use.</param>
        /// <param name="shareMode">The share mode to use.</param>
        /// <returns>A pointer to a valid format; if none can be found in exclusive mode, the test is marked inconclusive.</returns>
        public static IntPtr GetFormatPointer(IAudioClient audioClient, AUDCLNT_SHAREMODE shareMode)
        {
            var formatPtr = IntPtr.Zero;

            if (shareMode == AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_SHARED)
            {
                // In shared mode the engine mix format is always usable.
                audioClient.GetMixFormat(out formatPtr);
            }
            else
            {
                // Otherwise we need to find a supported format
                foreach (var format in TestWaveFormats)
                {
                    var formatMatch = IntPtr.Zero;
                    var supported   = audioClient.IsFormatSupported(shareMode, format, out formatMatch);

                    if (supported == 0)
                    {
                        // S_OK: the requested format is supported as-is.
                        formatPtr = format;
                        break;
                    }
                    else if (supported == 1)
                    {
                        // S_FALSE: the device proposed a closest-match format instead.
                        formatPtr = formatMatch;
                        break;
                    }
                }

                if (formatPtr == IntPtr.Zero)
                {
                    Assert.Inconclusive("Unable to find a valid format pointer.");
                }
            }

            return(formatPtr);
        }
Example #3
        /// <summary>
        /// Initializes the audio client, audio meter, and endpoint volume for the current device.
        /// </summary>
        private void InitializeAudioClient()
        {
            //Get Audio Client from device
            COMResult result = _audioDevice.Activate(typeof(IAudioClient).GUID, 0, IntPtr.Zero, out object obj);

            _audioClient = (IAudioClient)obj;
            //Get Audio Meter from device
            result      = _audioDevice.Activate(typeof(IAudioMeterInformation).GUID, 0, IntPtr.Zero, out obj);
            _audioMeter = (IAudioMeterInformation)obj;
            //Get Audio End Point
            result = _audioDevice.Activate(typeof(IAudioEndpointVolume).GUID, 0, IntPtr.Zero, out obj);
            _audioEndpointVolume = (IAudioEndpointVolume)obj;
            _audioEndpointVolume.RegisterControlChangeNotify(classCallBack);
            //Initialize Audio Client.
            _sessionGuid = new Guid();
            result       = _audioClient.GetMixFormat(out waveFormat);
            AudioClientStreamFlags streamFlag = AudioClientStreamFlags.None;

            if (_audioDataFlow == AudioDataFlow.eRender)
            {
                streamFlag = AudioClientStreamFlags.Loopback;
            }
            // The buffer duration is in 100-nanosecond units, so 10000000 requests a one-second buffer.
            result = _audioClient.Initialize(AudioClientMode.Shared, streamFlag, 10000000, 0, waveFormat, ref _sessionGuid);
            result = _audioClient.Start();
            //Change wave format here
            SetupWaveFormat(waveFormat);

            result = _audioEndpointVolume.GetChannelCount(out _channelCount);
        }
Example #4
        private void InitializeAudio(AudioDataFlow audioFlow, IMMDeviceEnumerator deviceEnumerator)
        {
            //Get Audio Device
            COMResult result = deviceEnumerator.GetDefaultAudioEndpoint(audioFlow, EndPointRole.eMultimedia, out _audioDevice);

            //Register End point notification
            _notifyClient = new MMNotificationClient();
            result        = deviceEnumerator.RegisterEndpointNotificationCallback(_notifyClient);
            //Get Audio Client from device
            result       = _audioDevice.Activate(typeof(IAudioClient).GUID, 0, IntPtr.Zero, out object obj);
            _audioClient = (IAudioClient)obj;
            //Get Audio Meter from device
            result      = _audioDevice.Activate(typeof(IAudioMeterInformation).GUID, 0, IntPtr.Zero, out obj);
            _audioMeter = (IAudioMeterInformation)obj;
            //Initialize Audio Client.
            _sessionGuid = new Guid();
            result       = _audioClient.GetMixFormat(out waveFormat);
            AudioClientStreamFlags streamFlag = AudioClientStreamFlags.None;

            if (audioFlow == AudioDataFlow.eRender)
            {
                streamFlag = AudioClientStreamFlags.Loopback;
            }
            result = _audioClient.Initialize(AudioClientMode.Shared, streamFlag, 10000000, 0, waveFormat, ref _sessionGuid);
            //Get Capture Client.
            result = _audioClient.GetService(typeof(IAudioCaptureClient).GUID, out obj);
            Marshal.ThrowExceptionForHR((int)result);
            _audioCaptureClient = (IAudioCaptureClient)obj;
            result = _audioClient.Start();
            //Change wave format here
            SetupWaveFormat(waveFormat);
        }
Example #5
        private void StartSilenceGeneration()
        {
            IntPtr mixFormatPtr = IntPtr.Zero;

            try
            {
                Checked(_endpoint.Activate(ref IAudioClientId, ClsCtxEnum.All, IntPtr.Zero, out var obj), "Silence.Activate");
                _audioClientForRendering = (IAudioClient)obj;

                Checked(_audioClientForRendering.GetMixFormat(out mixFormatPtr), "Silence.GetMixFormat");
                WaveFormatEx format = (WaveFormatEx)Marshal.PtrToStructure(mixFormatPtr, typeof(WaveFormatEx));

                CheckFormat(format);

                Checked(_audioClientForRendering.Initialize(AudioClientShareModeEnum.Shared, AudioClientStreamFlagsEnum.None, 10_000_000 * 5, 0, mixFormatPtr, Guid.Empty), "Silence.Initialize");
                Checked(_audioClientForRendering.GetBufferSize(out var bufferSize), "Silence.GetBufferSize");
                Checked(_audioClientForRendering.GetService(IAudioRenderClientId, out var renderObj), "Silence.GetService");

                _audioRenderClient = (IAudioRenderClient)renderObj;

                Checked(_audioClientForRendering.Start(), "Silence.Start");

                _silenceThread      = new Thread(() => SilenceGenerationRoutine(bufferSize, format));
                _silenceThread.Name = "Silence generator";
                _silenceThread.Start();
            }
            catch (Exception e)
            {
                ReleaseComObject(ref _audioClientForRendering);
                ReleaseComObject(ref _audioRenderClient);
                Core.LogError(e, "Faied to StartSilenceGeneration");
            }
            finally
            {
                if (mixFormatPtr != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(mixFormatPtr);
                }
            }
        }
Example #6
 protected void InternalSetup(int dataFlow, int flags, int bufferSize = 0)
 {
     lock (mutex)
     {
         int hr; object obj; Guid guid;
         if (endpoint == null)
         {
             if (enumerator == null)
             {
                 enumerator = Activator.CreateInstance(typeof(MMDeviceEnumerator)) as IMMDeviceEnumerator;
                 if (enumerator == null)
                 {
                     throw new NotSupportedException();
                 }
             }
             hr = enumerator.GetDefaultAudioEndpoint((DataFlowEnum)dataFlow, RoleEnum.Multimedia, out endpoint);
             if (hr != 0)
             {
                 Marshal.ThrowExceptionForHR(hr);
             }
             if (endpoint == null)
             {
                 throw new NotSupportedException();
             }
         }
         if (audioClient != null)
         {
             Marshal.ReleaseComObject(audioClient); audioClient = null;
         }
         guid = IID_IAudioClient;
         hr   = endpoint.Activate(ref guid, ClsCtxEnum.ALL, IntPtr.Zero, out obj);
         if (hr != 0)
         {
             Marshal.ThrowExceptionForHR(hr);
         }
         audioClient = obj as IAudioClient;
         if (audioClient == null)
         {
             throw new NotSupportedException();
         }
         IntPtr mixFormatPtr = IntPtr.Zero;
         try
         {
             hr = audioClient.GetMixFormat(out mixFormatPtr);
             if (hr != 0)
             {
                 Marshal.ThrowExceptionForHR(hr);
             }
             WaveFormat outputFormat = (WaveFormat)Marshal.PtrToStructure(mixFormatPtr, typeof(WaveFormat));
             outputIsFloat = false;
             if (outputFormat.ExtraSize >= WaveFormatEx.WaveFormatExExtraSize)
             {
                 WaveFormatEx ex = (WaveFormatEx)Marshal.PtrToStructure(mixFormatPtr, typeof(WaveFormatEx));
                 if (ex.SubFormat == IeeeFloat)
                 {
                     outputIsFloat = true;
                 }
             }
             sampleRate    = outputFormat.SampleRate;
             channelCount  = outputFormat.Channels;
             bitsPerSample = outputFormat.BitsPerSample;
             if (bufferSize <= 0)
             {
                 bufferSize = 8192;
             }
             latencyMs = (int)(bufferSize * 500 / sampleRate);
             if (latencyMs <= 0)
             {
                 latencyMs = 1;
             }
             hr = audioClient.Initialize(AudioClientShareModeEnum.Shared, (AudioClientStreamFlagsEnum)flags,
                                         latencyMs * 40000, 0, mixFormatPtr, Guid.Empty);
             if (hr != 0)
             {
                 Marshal.ThrowExceptionForHR(hr);
             }
         }
         finally
         {
             if (mixFormatPtr != IntPtr.Zero)
             {
                 Marshal.FreeCoTaskMem(mixFormatPtr);
             }
         }
         hr = audioClient.GetBufferSize(out outputBufferSize);
         if (hr != 0)
         {
             Marshal.ThrowExceptionForHR(hr);
         }
         this.bufferSize = bufferSize;
         buffers         = new float[channelCount][];
         for (int i = 0; i < buffers.Length; i++)
         {
             buffers[i] = new float[bufferSize];
         }
         isInited = true;
     }
 }
Example #7
        internal static unsafe Factory_WindowsCoreApi.IAudioClient CreateClient(IMMDevice IDevice, out WAVEFORMATEX format, out Type dataFormat)
        {
            Factory_WindowsCoreApi.IAudioClient IAudioClient;
            IDevice.Activate(Factory_WindowsCoreApi.IID_IAudioClient, (uint)PlaybackClient_WindowsCoreApi.CLSCTX.CLSCTX_ALL, new IntPtr(0), out IAudioClient);

            IntPtr rawFormatPtr = new IntPtr();

            IAudioClient.GetMixFormat(out rawFormatPtr);
            WAVEFORMATEX *pFormat = (WAVEFORMATEX *)rawFormatPtr.ToPointer();

            if (pFormat->wBitsPerSample % 8 != 0)
            {
                throw new Exception("Unsupported bits per sample value");
            }
            dataFormat = typeof(byte);
            if (pFormat->wFormatTag == 0xFFFE)
            {
                WAVEFORMATEXTENSIBLE *pFormatEx = (WAVEFORMATEXTENSIBLE *)pFormat;
                if (pFormatEx->SubFormat == FormatEx_IEEE)
                {
                    switch (pFormat->wBitsPerSample)
                    {
                    case 0:
                    case 32: dataFormat = typeof(float); break;

                    case 64: dataFormat = typeof(double); break;

                    default: throw new Exception("Unsupported underlying data format");
                    }
                }
                else if (pFormatEx->SubFormat == FormatEx_PCM)
                {
                    switch (pFormat->wBitsPerSample)
                    {
                    case 8: dataFormat = typeof(byte); break;

                    case 16: dataFormat = typeof(Int16); break;

                    case 32: dataFormat = typeof(Int32); break;

                    case 64: dataFormat = typeof(Int64); break;

                    default: throw new Exception("Unsupported underlying data format");
                    }
                }
            }
            else
            {
                switch ((WAVE_FORMAT)pFormat->wFormatTag)
                {
                case WAVE_FORMAT.PCM:
                    switch (pFormat->wBitsPerSample)
                    {
                    case 8: dataFormat = typeof(byte); break;

                    case 16: dataFormat = typeof(Int16); break;

                    case 32: dataFormat = typeof(Int32); break;

                    case 64: dataFormat = typeof(Int64); break;

                    default: throw new Exception("Unsupported underlying data format");
                    }
                    break;

                case WAVE_FORMAT.IEEE:
                    switch (pFormat->wBitsPerSample)
                    {
                    case 0:
                    case 32: dataFormat = typeof(float); break;

                    case 64: dataFormat = typeof(double); break;

                    default: throw new Exception("Unsupported underlying data format");
                    }
                    break;
                }
            }
            try
            {
                IAudioClient.Initialize(Factory_WindowsCoreApi.AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_SHARED, 0, 10000000, 0, new IntPtr(pFormat), Guid.Empty);
            }
            catch (Exception e) { throw new Exception("Unexpected error when creating the client", e); }

            format = *pFormat;
            return(IAudioClient);
        }
Example #8
 private void GetMixFormat()
 {
     Marshal.ThrowExceptionForHR(_realAudioClient.GetMixFormat(out mixFormat));
 }
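
The pointer produced by GetMixFormat in the examples above is allocated by the audio engine with CoTaskMemAlloc, so the caller is expected to copy it into a managed structure and then free it; several of the snippets on this page skip the free step. A minimal sketch of the full pattern, assuming the IAudioClient and WaveFormatEx interop definitions used in these examples and a GetMixFormat signature that returns the HRESULT as an int (as in Example #8); the ReadMixFormat helper is illustrative and not part of the original code:

        // Illustrative helper: read the device mix format and release the native buffer.
        private static WaveFormatEx ReadMixFormat(IAudioClient audioClient)
        {
            IntPtr mixFormatPtr = IntPtr.Zero;
            try
            {
                Marshal.ThrowExceptionForHR(audioClient.GetMixFormat(out mixFormatPtr));
                // Copy the unmanaged WAVEFORMATEX data into a managed structure before freeing it.
                return (WaveFormatEx)Marshal.PtrToStructure(mixFormatPtr, typeof(WaveFormatEx));
            }
            finally
            {
                if (mixFormatPtr != IntPtr.Zero)
                {
                    // GetMixFormat allocates the format buffer with CoTaskMemAlloc; the caller frees it.
                    Marshal.FreeCoTaskMem(mixFormatPtr);
                }
            }
        }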
Example #9
        public WaveFormatEx Open()
        {
            StopSilenceThread();

            Log.Information("Opening DesktopAudio");
            IMMDeviceEnumerator deviceEnumerator = null;
            IntPtr mixFormatPtr = IntPtr.Zero;

            try
            {
                bool render = true;
                deviceEnumerator = Activator.CreateInstance(typeof(MMDeviceEnumerator)) as IMMDeviceEnumerator;
                Checked(deviceEnumerator.GetDefaultAudioEndpoint(
                    render ? DataFlowEnum.Render : DataFlowEnum.Capture,
                    render ? RoleEnum.Console : RoleEnum.Communications,
                    out _endpoint), "GetDefaultAudioEndpoint");

                if (render)
                {
                    StartSilenceGeneration();
                }

                Checked(_endpoint.Activate(ref IAudioClientId, ClsCtxEnum.All, IntPtr.Zero, out var obj), "Activate");
                _audioClient = (IAudioClient)obj;

                Checked(_audioClient.GetMixFormat(out mixFormatPtr), "GetMixFormat");
                WaveFormatEx outputFormat = (WaveFormatEx)Marshal.PtrToStructure(mixFormatPtr, typeof(WaveFormatEx));

                if (!render) // for render the format is already checked in StartSilenceGeneration()
                {
                    CheckFormat(outputFormat);
                }

                _bytesPerFrame = outputFormat.BlockAlign;

                var flags = AudioClientStreamFlagsEnum.StreamFlagsEventCallback | (render ? AudioClientStreamFlagsEnum.StreamFlagsLoopback : AudioClientStreamFlagsEnum.None);
                Checked(_audioClient.Initialize(AudioClientShareModeEnum.Shared, flags, 10_000_000 * 5, 0, mixFormatPtr, Guid.Empty), "Initialize");

                Checked(_audioClient.GetService(IAudioCaptureClientId, out var captureObj), "GetService");
                _audioCaptureClient = (IAudioCaptureClient)captureObj;

#pragma warning disable CS0618 // Type or member is obsolete
                Checked(_audioClient.SetEventHandle(_dataAvailable.Handle), "SetEventHandle");
#pragma warning restore CS0618 // Type or member is obsolete
                Checked(_audioClient.Start(), "Start");

                return(outputFormat);
            }
            catch (Exception e)
            {
                Core.LogError(e, "Open desktop audio failed");
                StopSilenceThread();

                ReleaseComFields();
                throw;
            }
            finally
            {
                if (mixFormatPtr != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(mixFormatPtr);
                }

                ReleaseComObject(ref deviceEnumerator);
            }
        }