IsFormatSupported() public method

Determines whether the specified output format is supported.
public IsFormatSupported ( AudioClientShareMode shareMode, WaveFormat desiredFormat ) : bool
shareMode AudioClientShareMode The share mode (AudioClientShareMode.Shared or AudioClientShareMode.Exclusive).
desiredFormat NAudio.Wave.WaveFormat The wave format to test against the device.
return bool
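
A minimal sketch of a direct call is shown below. The default render device and the 44.1 kHz / 16-bit stereo format are illustrative choices; only IsFormatSupported itself comes from the API above.

using NAudio.CoreAudioApi;
using NAudio.Wave;

// Sketch: ask the default render device whether it accepts 44.1 kHz,
// 16-bit stereo PCM in shared mode. Device and format choices are arbitrary.
public static bool CanPlayCdQualityAudio()
{
    var enumerator = new MMDeviceEnumerator();
    MMDevice device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    var desiredFormat = new WaveFormat(44100, 16, 2);
    return device.AudioClient.IsFormatSupported(AudioClientShareMode.Shared, desiredFormat);
}

If the check fails, the overload that also returns the device's closest-match format (used in Example #1 below) is the usual next step.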
Example #1
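A WasapiOut constructor that builds a WaveFormatExtensible from a PCM configuration and rejects unsupported formats up front: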
        /// <summary>
        /// Creates a new WASAPI Output
        /// </summary>
        /// <param name="device">Device to use</param>
        /// <param name="shareMode">Share mode (shared or exclusive)</param>
        /// <param name="useEventSync">True to synchronize with an event; false to sleep between writes</param>
        /// <param name="latency">Desired latency in milliseconds</param>
        /// <param name="pcm">PCM configuration (sample rate, bit depth, channel count)</param>
        public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency, AudioPCMConfig pcm)
        {
            this.audioClient = device.AudioClient;
            this.shareMode = shareMode;
            this.isUsingEventSync = useEventSync;
            this.latencyMilliseconds = latency;
            this.pcm = pcm;
            this.outputFormat = new NAudio.Wave.WaveFormatExtensible(pcm.SampleRate, pcm.BitsPerSample, pcm.ChannelCount);
            NAudio.Wave.WaveFormatExtensible closestSampleRateFormat;
            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
                throw new NotSupportedException("PCM format mismatch");
            Init();
            bufferFrameCount = audioClient.BufferSize;
            readBuffers = new AudioBuffer[2];
            readBuffers[0] = new AudioBuffer(pcm, bufferFrameCount);
            readBuffers[1] = new AudioBuffer(pcm, bufferFrameCount);
            //if (this.shareMode == AudioClientShareMode.Exclusive)
            //    this.latencyMilliseconds = (int)(this.audioClient.DefaultDevicePeriod / 10000);
        }
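
The three-argument overload used above also returns the device's suggested closest match. A sketch of one possible fallback, reusing outputFormat and audioClient from the constructor above (adopting the closest match is an assumption; the original code throws instead):

NAudio.Wave.WaveFormatExtensible closest;
if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closest))
{
    // Assumed fallback: adopt the driver's suggestion when one is offered
    // (closest can be null, in which case there is nothing to fall back to).
    if (closest == null)
        throw new NotSupportedException("PCM format mismatch");
    outputFormat = closest;
}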
Example #2
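Capture-device initialization that defaults to the device's mix format when none was set, then verifies shared-mode support before initializing: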
        private void InitializeCaptureDevice(IAudioClient audioClientInterface)
        {
            var audioClient = new AudioClient(audioClientInterface);
            if (waveFormat == null)
            {
                // Default to the engine's mix format when the caller did not pick one
                this.waveFormat = audioClient.MixFormat;
            }

            // Requested buffer duration: 100 ms, expressed in 100-nanosecond units
            long requestedDuration = REFTIMES_PER_MILLISEC * 100;

            if (!audioClient.IsFormatSupported(AudioClientShareMode.Shared, WaveFormat))
            {
                throw new ArgumentException("Unsupported Wave Format");
            }

            var streamFlags = GetAudioClientStreamFlags();

            audioClient.Initialize(AudioClientShareMode.Shared,
                streamFlags,
                requestedDuration,
                0,
                this.waveFormat,
                Guid.Empty);

            int bufferFrameCount = audioClient.BufferSize;
            this.bytesPerFrame = this.waveFormat.Channels * this.waveFormat.BitsPerSample / 8;
            this.recordBuffer = new byte[bufferFrameCount * bytesPerFrame];
            Debug.WriteLine(string.Format("record buffer size = {0}", this.recordBuffer.Length));

            // Get back the effective latency from AudioClient
            latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
        }
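
Note the shared-mode specifics here: the engine's mix format is guaranteed to be supported in shared mode, so the IsFormatSupported check mainly guards formats supplied by the caller, and the effective latency is read back from the client rather than assumed from the requested duration.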
Example #3
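A capture initializer that checks format support for either a single client or a pair of clients (mixed mode), with event-driven and polled setup paths: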
        private void InitializeCaptureDevice()
        {
            if (initialized)
            {
                return;
            }

            if (isMixed)
            {
                long requestedDuration = ReftimesPerMillisec * audioBufferMillisecondsLength;

                if (!audioClientList[0].IsFormatSupported(ShareMode, waveFormatList[0]) || !audioClientList[1].IsFormatSupported(ShareMode, waveFormatList[1]))
                {
                    throw new ArgumentException("Unsupported Wave Format");
                }

                var streamFlags = GetAudioClientStreamFlags();

                // Normal setup for both share modes
                audioClientList[0].Initialize(ShareMode,
                                              streamFlags,
                                              requestedDuration,
                                              0,
                                              waveFormatList[0],
                                              Guid.Empty);

                audioClientList[1].Initialize(ShareMode,
                                              AudioClientStreamFlags.None,
                                              requestedDuration,
                                              0,
                                              waveFormatList[1],
                                              Guid.Empty);

                int bufferFrameCount = audioClientList[0].BufferSize;
                bytesPerFrame = waveFormatList[0].Channels * waveFormatList[0].BitsPerSample / 8;
                int bufferFrameCount2 = audioClientList[1].BufferSize;
                bytesPerFrame2 = waveFormatList[1].Channels * waveFormatList[1].BitsPerSample / 8;

                //recordBuffer = new byte[bufferFrameCount * bytesPerFrame + bufferFrameCount2 * bytesPerFrame2];
                recordBuffer  = new byte[bufferFrameCount * bytesPerFrame];
                recordBuffer2 = new byte[bufferFrameCount2 * bytesPerFrame2];
            }
            else
            {
                long requestedDuration = ReftimesPerMillisec * audioBufferMillisecondsLength;

                if (!audioClient.IsFormatSupported(ShareMode, waveFormat))
                {
                    throw new ArgumentException("Unsupported Wave Format");
                }

                var streamFlags = GetAudioClientStreamFlags();

                // If using EventSync, setup depends on the share mode
                if (isUsingEventSync)
                {
                    // Init Shared or Exclusive
                    if (ShareMode == AudioClientShareMode.Shared)
                    {
                        // With EventCallBack and Shared, both latencies must be set to 0
                        audioClient.Initialize(ShareMode, AudioClientStreamFlags.EventCallback | streamFlags, requestedDuration, 0,
                                               waveFormat, Guid.Empty);
                    }
                    else
                    {
                        // With EventCallBack and Exclusive, both latencies must be equal
                        audioClient.Initialize(ShareMode, AudioClientStreamFlags.EventCallback | streamFlags, requestedDuration, requestedDuration,
                                               waveFormat, Guid.Empty);
                    }

                    // Create the Wait Event Handle
                    frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                    audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
                }
                else
                {
                    // Normal setup for both share modes
                    audioClient.Initialize(ShareMode,
                                           streamFlags,
                                           requestedDuration,
                                           0,
                                           waveFormat,
                                           Guid.Empty);
                }

                int bufferFrameCount = audioClient.BufferSize;
                bytesPerFrame = waveFormat.Channels * waveFormat.BitsPerSample / 8;
                recordBuffer  = new byte[bufferFrameCount * bytesPerFrame];
            }

            //Debug.WriteLine(string.Format("record buffer size = {0}", this.recordBuffer.Length));

            initialized = true;
        }