Example #1
        /// <summary>
        /// Initializes the capture device. Must be called on the UI (STA) thread.
        /// If not called manually, StartRecording() will call it internally.
        /// </summary>
        public async Task InitAsync()
        {
            if (captureState == WasapiCaptureState.Disposed)
            {
                throw new ObjectDisposedException(nameof(WasapiCaptureRT));
            }
            if (captureState != WasapiCaptureState.Uninitialized)
            {
                throw new InvalidOperationException("Already initialized");
            }

            var icbh = new ActivateAudioInterfaceCompletionHandler(ac2 =>
            {
                if (this.audioClientProperties != null)
                {
                    // marshal the requested AudioClientProperties into unmanaged memory so they
                    // can be applied via IAudioClient2.SetClientProperties before initialization
                    IntPtr p = Marshal.AllocHGlobal(Marshal.SizeOf(this.audioClientProperties.Value));
                    Marshal.StructureToPtr(this.audioClientProperties.Value, p, false);
                    ac2.SetClientProperties(p);
                    Marshal.FreeHGlobal(p);
                    // TODO: consider whether we can marshal this without the need for AllocHGlobal
                }

                InitializeCaptureDevice((IAudioClient2)ac2);
                audioClient = new AudioClient((IAudioClient2)ac2);

                // create the event that the audio engine will signal when a capture buffer is ready
                hEvent = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
                audioClient.SetEventHandle(hEvent);
            });

            IActivateAudioInterfaceAsyncOperation activationOperation;

            // must be called on UI thread
            NativeMethods.ActivateAudioInterfaceAsync(device, IID_IAudioClient2, IntPtr.Zero, icbh, out activationOperation);
            await icbh;

            captureState = WasapiCaptureState.Stopped;
        }
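
A minimal usage sketch for the method above, assuming the surrounding class is WasapiCaptureRT (named in the ObjectDisposedException) and that it exposes the usual NAudio capture members (a parameterless constructor, DataAvailable and StartRecording); those members are assumptions for illustration and are not shown in the example. The only point is that InitAsync is awaited on the UI (STA) thread before recording starts, as ActivateAudioInterfaceAsync requires.

        // Hypothetical caller sketch; everything other than InitAsync/StartRecording is assumed
        // for illustration. Call this from the UI (STA) thread, e.g. a button click handler.
        private async Task StartCaptureFromUiThreadAsync()
        {
            var capture = new WasapiCaptureRT();
            capture.DataAvailable += (s, args) =>
            {
                // args.Buffer / args.BytesRecorded carry the captured audio
            };

            await capture.InitAsync();  // explicit init on the UI thread
            capture.StartRecording();   // would call InitAsync itself if we had skipped it
        }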
Example #2
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        private IWaveProvider Init()
        {
            var  waveProvider    = waveProviderFunc();
            // WASAPI expects latency as REFERENCE_TIME, in 100-nanosecond units
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode it is usually equal to audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It says: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                        {
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 32,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 24,
                                outputFormat.Channels),
                            new WaveFormatExtensible(
                                outputFormat.SampleRate, 16,
                                outputFormat.Channels),
                        };

                        // Try each format from best to worst (Float32, Int24, Int16)
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still null, fall back to 16-bit PCM, 2 channels
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    if (closestSampleRateFormat.Channels == 1 && outputFormat.Channels == 2)
                    {
                        var downmixer = new StereoToMonoProvider16(waveProvider);
                        downmixer.LeftVolume  = 0.5F;
                        downmixer.RightVolume = 0.5F;
                        waveProvider          = downmixer;
                    }

                    outputFormat = closestSampleRateFormat;
                }

                // the provider's format is not supported directly, so playback will need a
                // resampler (e.g. a MediaFoundationResampler) to convert to outputFormat
                this.resamplerNeeded = true;
            }
            else
            {
                resamplerNeeded = false;
            }

            // Init Shared or Exclusive
            if (shareMode == AudioClientShareMode.Shared)
            {
                // With EventCallback and Shared mode, the periodicity must be 0
                audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);

                // Get back the effective latency from AudioClient. On Windows 10 it can be 0
                if (audioClient.StreamLatency > 0)
                {
                    latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
                }
            }
            else
            {
                // With EventCallback and Exclusive mode, buffer duration and periodicity must be equal
                audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                       outputFormat, Guid.Empty);
            }

            // Create the Wait Event Handle
            frameEventWaitHandle = NativeMethods.CreateEventExW(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
            audioClient.SetEventHandle(frameEventWaitHandle);

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
            return waveProvider;
        }
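
As a footnote to the fallback chain above, here is a condensed, standalone sketch of the same selection order; the isSupported delegate is a stand-in (an assumption for illustration) for audioClient.IsFormatSupported(shareMode, format), and the candidates mirror the bestToWorstFormats array: 32-bit, then 24-bit, then 16-bit at the requested sample rate and channel count, with 16-bit stereo as the last resort.

        // Condensed sketch of the format fallback used in Init(); isSupported stands in for
        // audioClient.IsFormatSupported(shareMode, format) and is an assumption for illustration.
        private static WaveFormat PickFallbackFormat(WaveFormat requested, Func<WaveFormat, bool> isSupported)
        {
            var bestToWorst = new WaveFormatExtensible[]
            {
                new WaveFormatExtensible(requested.SampleRate, 32, requested.Channels),
                new WaveFormatExtensible(requested.SampleRate, 24, requested.Channels),
                new WaveFormatExtensible(requested.SampleRate, 16, requested.Channels),
            };

            foreach (var candidate in bestToWorst)
            {
                if (isSupported(candidate))
                {
                    return candidate;
                }
            }

            // last resort: 16-bit PCM, stereo, at the requested sample rate
            var lastChance = new WaveFormatExtensible(requested.SampleRate, 16, 2);
            if (isSupported(lastChance))
            {
                return lastChance;
            }

            throw new NotSupportedException("Can't find a supported format to use");
        }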