Example 1
 /// <summary>
 /// Dispose
 /// </summary>
 public void Dispose()
 {
     if (audioClientInterface != null)
     {
         if (audioClockClient != null)
         {
             audioClockClient.Dispose();
             audioClockClient = null;
         }
         if (audioRenderClient != null)
         {
             audioRenderClient.Dispose();
             audioRenderClient = null;
         }
         if (audioCaptureClient != null)
         {
             audioCaptureClient.Dispose();
             audioCaptureClient = null;
         }
         if (audioStreamVolume != null)
         {
             audioStreamVolume.Dispose();
             audioStreamVolume = null;
         }
         Marshal.ReleaseComObject(audioClientInterface);
         audioClientInterface = null;
         GC.SuppressFinalize(this);
     }
 }
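This Dispose is idempotent: the outer null check on audioClientInterface makes a second call a no-op, and each child client (clock, render, capture, volume) is released before the COM interface itself. A minimal caller-side sketch, assuming NAudio's MMDeviceEnumerator and MMDevice from NAudio.CoreAudioApi:

    using NAudio.CoreAudioApi;

    var enumerator = new MMDeviceEnumerator();
    var device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
    using (var audioClient = device.AudioClient)
    {
        // ... initialize and use the client ...
    } // Dispose runs once here; calling it again would be a harmless no-op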
Example 2
        /// <summary>
        /// Dispose
        /// </summary>
        public void Dispose()
        {
            if (audioClient != null)
            {
                Stop();

                audioClient.Dispose();
                audioClient = null;
                renderClient = null;
                NativeMethods.CloseHandle(frameEventWaitHandle);
            }

        }
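Unlike Example 1, frameEventWaitHandle here is a raw Win32 handle (created with NativeMethods.CreateEventEx, as Example 3 shows), so it must be released with CloseHandle rather than a managed Dispose. Hypothetical P/Invoke declarations consistent with those calls; the real NativeMethods class may differ in naming and marshalling details:

    using System;
    using System.Runtime.InteropServices;

    internal enum EventAccess : uint
    {
        EVENT_ALL_ACCESS = 0x1F0003 // standard rights | all event-specific rights
    }

    internal static class NativeMethods
    {
        // Returns a raw event handle; IntPtr.Zero is passed for the security
        // attributes and the name, exactly as in Example 3.
        [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
        internal static extern IntPtr CreateEventEx(IntPtr lpEventAttributes,
            IntPtr lpName, int dwFlags, EventAccess dwDesiredAccess);

        // Raw handles are not garbage collected; CloseHandle must be called.
        [DllImport("kernel32.dll", SetLastError = true)]
        internal static extern bool CloseHandle(IntPtr hObject);
    }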
Example 3
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public async Task Init(IWaveProvider waveProvider)
        {
            await Activate();
            long latencyRefTimes = latencyMilliseconds*10000;
            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;
            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode, it usually equals audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It states: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is most likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;
                    /*WaveFormat.CreateIeeeFloatWaveFormat(
                    audioClient.MixFormat.SampleRate,
                    audioClient.MixFormat.Channels);*/

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats =
                            {
                                new WaveFormatExtensible(
                                    outputFormat.SampleRate, 32,
                                    outputFormat.Channels),
                                new WaveFormatExtensible(
                                    outputFormat.SampleRate, 24,
                                    outputFormat.Channels),
                                new WaveFormatExtensible(
                                    outputFormat.SampleRate, 16,
                                    outputFormat.Channels),
                            };

                        // Try each format from best to worst (Float32, Int24, Int16)
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still null, fall back to 16-bit PCM, 2 channels
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // Just check that we could resample to the output format.
                //using (new MediaFoundationResampler(waveProvider, outputFormat))
                //{
                //}
                this.resamplerNeeded = true;
            }
            else
            {
                resamplerNeeded = false;
            }
            this.sourceProvider = waveProvider;

            // Init Shared or Exclusive
            if (shareMode == AudioClientShareMode.Shared)
            {
                // With EventCallback and Shared, only the buffer duration is set (periodicity stays 0)
                audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);

                // Get back the effective latency from AudioClient
                latencyMilliseconds = (int) (audioClient.StreamLatency/10000);
            }
            else
            {
                // With EventCallback and Exclusive, both latencies must be equal
                audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                       outputFormat, Guid.Empty);
            }

            // Create the Wait Event Handle
            frameEventWaitHandle = NativeMethods.CreateEventEx(IntPtr.Zero, IntPtr.Zero, 0, EventAccess.EVENT_ALL_ACCESS);
            audioClient.SetEventHandle(frameEventWaitHandle);

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
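The latencyMilliseconds * 10000 conversion at the top deserves a note: IAudioClient takes buffer durations as REFERENCE_TIME values, which are 100-nanosecond units, so one millisecond is 10,000 units. A quick sketch of the round trip:

    // REFERENCE_TIME is expressed in 100-ns units: 1 ms = 10,000 units.
    const long RefTimesPerMs = 10000;
    long latencyRefTimes = 200 * RefTimesPerMs;               // 200 ms -> 2,000,000 units
    int effectiveMs = (int)(latencyRefTimes / RefTimesPerMs); // back to 200 ms,
    // which is exactly what audioClient.StreamLatency / 10000 computes above.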
Example 4
 /// <summary>
 /// Dispose
 /// </summary>
 public void Dispose()
 {
     if (audioClientInterface != null)
     {
         if (audioRenderClient != null)
         {
             audioRenderClient.Dispose();
             audioRenderClient = null;
         }
         if (audioCaptureClient != null)
         {
             audioCaptureClient.Dispose();
             audioCaptureClient = null;
         }
         Marshal.ReleaseComObject(audioClientInterface);
         audioClientInterface = null;
         GC.SuppressFinalize(this);
     }
 }
Example 5
        /// <summary>
        /// Dispose
        /// </summary>
        public void Dispose()
        {
            if (audioClient != null)
            {
                Stop();

                audioClient.Dispose();
                audioClient = null;
                renderClient = null;
            }
        }
Example 6
        /// <summary>
        /// Dispose
        /// </summary>
        public void Dispose()
        {
            if (audioClient != null)
            {
                Stop();

                audioClient.Dispose();
                audioClient = null;
                renderClient = null;
            }
            if (resamplerDmoStream != null)
            {
                resamplerDmoStream.Dispose();
                resamplerDmoStream = null;
            }

        }
Example 7
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        public void Init(IWaveProvider waveProvider)
        {
            long latencyRefTimes = latencyMilliseconds * 10000;
            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;
            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closestSampleRateFormat (in shared mode, it usually equals audioClient.MixFormat)
                // See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // It states: "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is most likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    WaveFormat correctSampleRateFormat = audioClient.MixFormat;

                    if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                    {
                        // Candidate formats, ordered from best to worst
                        WaveFormatExtensible[] bestToWorstFormats = {
                                                                        new WaveFormatExtensible(
                                                                            outputFormat.SampleRate, 32,
                                                                            outputFormat.Channels),
                                                                        new WaveFormatExtensible(
                                                                            outputFormat.SampleRate, 24,
                                                                            outputFormat.Channels),
                                                                        new WaveFormatExtensible(
                                                                            outputFormat.SampleRate, 16,
                                                                            outputFormat.Channels),
                                                                    };

                        // Try each format from best to worst (Float32, Int24, Int16)
                        for (int i = 0; i < bestToWorstFormats.Length; i++)
                        {
                            correctSampleRateFormat = bestToWorstFormats[i];
                            if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                break;
                            }
                            correctSampleRateFormat = null;
                        }

                        // If still null, fall back to 16-bit PCM, 2 channels
                        if (correctSampleRateFormat == null)
                        {
                            // Last Last Last Chance (Thanks WASAPI)
                            correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
                            if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    outputFormat = correctSampleRateFormat;
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                // Create a resampler to convert to the output format and play from it
                resamplerDmoStream = new ResamplerDmoStream(waveProvider, outputFormat);
                sourceProvider = resamplerDmoStream;
            }
            else
            {
                sourceProvider = waveProvider;
            }

            // Normal setup for both share modes
            audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                    outputFormat, Guid.Empty);
            

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;

            // set up the read buffer
            bufferFrameCount = audioClient.BufferSize;
            bytesPerFrame = outputFormat.Channels * outputFormat.BitsPerSample / 8;
            readBuffer = new byte[bufferFrameCount * bytesPerFrame];
        }
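The buffer sizing at the end is simple frame arithmetic; a worked example, assuming a 16-bit stereo output format and a hypothetical 448-frame device buffer:

    int channels = 2, bitsPerSample = 16;
    int bytesPerFrame = channels * bitsPerSample / 8;               // 2 * 16 / 8 = 4 bytes per frame
    int bufferFrameCount = 448;                                     // from audioClient.BufferSize
    byte[] readBuffer = new byte[bufferFrameCount * bytesPerFrame]; // 448 * 4 = 1792 bytes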
Example 8
		/// <summary>
		/// Initialize for playing the specified wave stream
		/// </summary>
		/// <param name="waveStream">IWaveProvider to play</param>
		public void Init(IWaveProvider waveStream)
		{
			long latencyRefTimes = latencyMilliseconds*10000;
			outputFormat = waveStream.WaveFormat;
			// first attempt uses the WaveFormat from the WaveStream
			WaveFormatExtensible closestSampleRateFormat;
			if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
			{
				// Use closestSampleRateFormat (in shared mode, it usually equals audioClient.MixFormat)
				// See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
				// It states: "In shared mode, the audio engine always supports the mix format"
				// The MixFormat is most likely to be a WaveFormatExtensible.
				if (closestSampleRateFormat == null)
				{
					WaveFormat correctSampleRateFormat = audioClient.MixFormat;
					/*WaveFormat.CreateIeeeFloatWaveFormat(
                        audioClient.MixFormat.SampleRate,
                        audioClient.MixFormat.Channels);*/

					if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
					{
						// Candidate formats, ordered from best to worst
						WaveFormatExtensible[] bestToWorstFormats = {
						                                            	new WaveFormatExtensible(
						                                            		outputFormat.SampleRate, 32,
						                                            		outputFormat.Channels),
						                                            	new WaveFormatExtensible(
						                                            		outputFormat.SampleRate, 24,
						                                            		outputFormat.Channels),
						                                            	new WaveFormatExtensible(
						                                            		outputFormat.SampleRate, 16,
						                                            		outputFormat.Channels),
						                                            };

						// Try each format from best to worst (Float32, Int24, Int16)
						for (int i = 0; i < bestToWorstFormats.Length; i++)
						{
							correctSampleRateFormat = bestToWorstFormats[i];
							if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
							{
								break;
							}
							correctSampleRateFormat = null;
						}

						// If still null, fall back to 16-bit PCM, 2 channels
						if (correctSampleRateFormat == null)
						{
							// Last Last Last Chance (Thanks WASAPI)
							correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
							if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
							{
								throw new NotSupportedException("Can't find a supported format to use");
							}
						}
					}
					outputFormat = correctSampleRateFormat;
				}
				else
				{
					outputFormat = closestSampleRateFormat;
				}

				// Just check that the resampler can be created before committing to it
				using (new ResamplerDmoStream(waveStream, outputFormat))
				{
				}
				dmoResamplerNeeded = true;
			}
			else
			{
				dmoResamplerNeeded = false;
			}
			sourceStream = waveStream;

			// If using EventSync, setup depends on the share mode
			if (isUsingEventSync)
			{
				// Init Shared or Exclusive
				if (shareMode == AudioClientShareMode.Shared)
				{
					// With EventCallback and Shared, both latencies must be set to 0
					audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, 0, 0,
					                       outputFormat, Guid.Empty);

					// Get back the effective latency from AudioClient
					latencyMilliseconds = (int) (audioClient.StreamLatency/10000);
				}
				else
				{
					// With EventCallback and Exclusive, both latencies must be equal
					audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
					                       outputFormat, Guid.Empty);
				}

				// Create the Wait Event Handle
				frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
				audioClient.SetEventHandle(frameEventWaitHandle);
			}
			else
			{
				// Normal setup for both share modes
				audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
				                       outputFormat, Guid.Empty);
			}

			// Get the RenderClient
			renderClient = audioClient.AudioRenderClient;
		}
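These Init variants belong to NAudio's WasapiOut; a typical caller-side sequence, sketched under the assumption of the standard WasapiOut(shareMode, useEventSync, latency) constructor overload and an AudioFileReader source ("input.wav" is a placeholder):

    using NAudio.CoreAudioApi;
    using NAudio.Wave;

    using (var reader = new AudioFileReader("input.wav"))
    using (var output = new WasapiOut(AudioClientShareMode.Shared, true, 200))
    {
        output.Init(reader);   // runs the format negotiation shown above
        output.Play();
        while (output.PlaybackState == PlaybackState.Playing)
        {
            System.Threading.Thread.Sleep(100); // crude wait for end of stream
        }
    }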
Example 9
        private async void PlayThread()
        {
            await Activate();
            var playbackProvider = Init();
            bool isClientRunning = false;
            try
            {
                if (this.resamplerNeeded)
                {
                    var resampler = new WdlResamplingSampleProvider(playbackProvider.ToSampleProvider(), outputFormat.SampleRate);
                    playbackProvider = new SampleToWaveProvider(resampler);
                }

                // fill a whole buffer
                bufferFrameCount = audioClient.BufferSize;
                bytesPerFrame = outputFormat.Channels*outputFormat.BitsPerSample/8;
                readBuffer = new byte[bufferFrameCount*bytesPerFrame];
                FillBuffer(playbackProvider, bufferFrameCount);
                int timeout = 3 * latencyMilliseconds;
                
                while (playbackState != WasapiOutState.Disposed)
                {
                    if (playbackState != WasapiOutState.Playing)
                    {
                        playThreadEvent.WaitOne(500);
                    }
                    
                    // If still playing, ensure the client is started and keep feeding it
                    if (playbackState == WasapiOutState.Playing)
                    {
                        if (!isClientRunning)
                        {
                            audioClient.Start();
                            isClientRunning = true;
                        }
                        // Wait for a buffer-ready notification from the AudioClient
                        var r = NativeMethods.WaitForSingleObjectEx(frameEventWaitHandle, timeout, true);
                        if (r != 0) throw new InvalidOperationException("Timed out waiting for event");
                        // See how much buffer space is available.
                        // In exclusive mode, always ask for the max: bufferFrameCount = audioClient.BufferSize
                        int numFramesPadding = (shareMode == AudioClientShareMode.Shared) ? audioClient.CurrentPadding : 0;

                        int numFramesAvailable = bufferFrameCount - numFramesPadding;
                        if (numFramesAvailable > 0)
                        {
                            FillBuffer(playbackProvider, numFramesAvailable);
                        }
                    }

                    if (playbackState == WasapiOutState.Stopping)
                    {
                        // play the buffer out
                        while (audioClient.CurrentPadding > 0)
                        {
                            await Task.Delay(latencyMilliseconds / 2);
                        }
                        audioClient.Stop();
                        isClientRunning = false;
                        audioClient.Reset();
                        playbackState = WasapiOutState.Stopped;
                        RaisePlaybackStopped(null);
                    }
                    if (playbackState == WasapiOutState.Disposing)
                    {
                        audioClient.Stop();
                        isClientRunning = false;
                        audioClient.Reset();
                        playbackState = WasapiOutState.Disposed;
                        var disposablePlaybackProvider = playbackProvider as IDisposable;
                        if (disposablePlaybackProvider != null)
                            disposablePlaybackProvider.Dispose(); // do everything on this thread, even dispose in case it is Media Foundation
                        RaisePlaybackStopped(null);

                    }

                }
            }
            catch (Exception e)
            {
                RaisePlaybackStopped(e);
            }
            finally
            {
                audioClient.Dispose();
                audioClient = null;
                renderClient = null;
                NativeMethods.CloseHandle(frameEventWaitHandle);

            }
        }
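FillBuffer is not shown in these examples. A plausible sketch of what it does, assuming NAudio's AudioRenderClient (GetBuffer/ReleaseBuffer) and the fields used above; the real implementation may differ:

    private void FillBuffer(IWaveProvider playbackProvider, int frameCount)
    {
        // Ask WASAPI for buffer space, copy PCM data in, then commit whole frames.
        IntPtr buffer = renderClient.GetBuffer(frameCount);
        int bytesRequested = frameCount * bytesPerFrame;
        int bytesRead = playbackProvider.Read(readBuffer, 0, bytesRequested);
        if (bytesRead == 0)
        {
            playbackState = WasapiOutState.Stopping; // source exhausted
        }
        Marshal.Copy(readBuffer, 0, buffer, bytesRead);
        // Report whole frames; a partial final read is rounded down here for simplicity.
        renderClient.ReleaseBuffer(bytesRead / bytesPerFrame, AudioClientBufferFlags.None);
    }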
Example 10
        /// <summary>
        /// Initialize for playing the specified format
        /// </summary>
        private void Init()
        {
			if (inited)
				return;

            long latencyRefTimes = latencyMilliseconds * 10000;
            // first attempt uses the WaveFormat from the WaveStream

            // If using EventSync, setup depends on the share mode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallback and Shared, the requested latency is passed as the buffer duration (periodicity 0)
					audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                        outputFormat, Guid.Empty);

                    // Get back the effective latency from AudioClient
					// This is all wrong! It should be handled differently
					// latencyMilliseconds = (int)(audioClient.StreamLatency / 10000);
                }
                else
                {
                    // With EventCallback and Exclusive, both latencies must be equal
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                        outputFormat, Guid.Empty);
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle);
            }
            else
            {
                // Normal setup for both share modes
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                    outputFormat, Guid.Empty);
            }

			waitHandles = new WaitHandle[] { frameEventWaitHandle };

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
			inited = true;
        }
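The waitHandles array built here suggests the playback loop waits with WaitHandle.WaitAny; a hypothetical sketch of that event-driven pump, using only the fields set up in this Init:

    // Wait for the audio engine to signal that buffer space is free.
    int signalled = WaitHandle.WaitAny(waitHandles, 3 * latencyMilliseconds, false);
    if (signalled == WaitHandle.WaitTimeout)
    {
        // No buffer-ready notification arrived in time; treat as an error or retry.
    }
    else
    {
        int framesAvailable = audioClient.BufferSize - audioClient.CurrentPadding;
        // ... write up to framesAvailable frames through renderClient ...
    }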