Example #1
		/// <summary>
		/// Initialize for playing the specified wave stream
		/// </summary>
		/// <param name="waveStream">IWaveProvider to play</param>
		public void Init(IWaveProvider waveStream)
		{
			// latencyMilliseconds is in milliseconds; a REFERENCE_TIME unit is 100 ns,
			// so multiply by 10,000 to convert
			long latencyRefTimes = latencyMilliseconds * 10000;
			outputFormat = waveStream.WaveFormat;
			// first attempt uses the WaveFormat from the WaveStream
			WaveFormatExtensible closestSampleRateFormat;
			if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
			{
				// Use closestSampleRateFormat (in shared mode it usually equals audioClient.MixFormat)
				// See documentation: http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
				// "In shared mode, the audio engine always supports the mix format"
				// The MixFormat is more likely to be a WaveFormatExtensible.
				if (closestSampleRateFormat == null)
				{
					WaveFormat correctSampleRateFormat = audioClient.MixFormat;
					// Alternative considered: WaveFormat.CreateIeeeFloatWaveFormat(
					//     audioClient.MixFormat.SampleRate,
					//     audioClient.MixFormat.Channels);

					if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
					{
						// Candidate formats, ordered from best to worst (Float32, Int24, Int16)
						WaveFormatExtensible[] bestToWorstFormats =
						{
							new WaveFormatExtensible(outputFormat.SampleRate, 32, outputFormat.Channels),
							new WaveFormatExtensible(outputFormat.SampleRate, 24, outputFormat.Channels),
							new WaveFormatExtensible(outputFormat.SampleRate, 16, outputFormat.Channels),
						};

						// Take the first (best) format the device accepts
						for (int i = 0; i < bestToWorstFormats.Length; i++)
						{
							correctSampleRateFormat = bestToWorstFormats[i];
							if (audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
							{
								break;
							}
							correctSampleRateFormat = null;
						}

						// If still null, fall back to 16-bit PCM, stereo
						if (correctSampleRateFormat == null)
						{
							// Last Last Last Chance (Thanks WASAPI)
							correctSampleRateFormat = new WaveFormatExtensible(outputFormat.SampleRate, 16, 2);
							if (!audioClient.IsFormatSupported(shareMode, correctSampleRateFormat))
							{
								throw new NotSupportedException("Can't find a supported format to use");
							}
						}
					}
					outputFormat = correctSampleRateFormat;
				}
				else
				{
					outputFormat = closestSampleRateFormat;
				}

				// Verify up-front that a DMO resampler can convert from the source
				// format to the negotiated output format
				using (new ResamplerDmoStream(waveStream, outputFormat))
				{
				}
				dmoResamplerNeeded = true;
			}
			else
			{
				dmoResamplerNeeded = false;
			}
			sourceStream = waveStream;

			// When using event sync, initialization depends on the share mode
			if (isUsingEventSync)
			{
				// Init Shared or Exclusive
				if (shareMode == AudioClientShareMode.Shared)
				{
					// With EventCallback and Shared, both latencies must be set to zero
					audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, 0, 0,
					                       outputFormat, Guid.Empty);

					// Read back the effective latency from the AudioClient
					// (StreamLatency is in 100-ns units)
					latencyMilliseconds = (int) (audioClient.StreamLatency/10000);
				}
				else
				{
					// With EventCallback and Exclusive, both latencies must be equal
					audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
					                       outputFormat, Guid.Empty);
				}

				// Create the Wait Event Handle
				frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
				audioClient.SetEventHandle(frameEventWaitHandle);
			}
			else
			{
				// Normal (push-mode) setup for either share mode
				audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
				                       outputFormat, Guid.Empty);
			}

			// Get the RenderClient
			renderClient = audioClient.AudioRenderClient;
		}
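
For context, a minimal usage sketch showing how this Init method is typically reached. It assumes the NAudio library this code comes from; the file path, latency value, and class name PlaybackSketch are illustrative placeholders, not part of the example above.

using System.Threading;
using NAudio.CoreAudioApi;
using NAudio.Wave;

class PlaybackSketch
{
	static void Main()
	{
		// "input.wav" is a placeholder path
		using (var reader = new WaveFileReader("input.wav"))
		// Shared mode, event sync enabled, 200 ms requested latency (illustrative values)
		using (var output = new WasapiOut(AudioClientShareMode.Shared, true, 200))
		{
			output.Init(reader); // runs the format negotiation shown in Example #1
			output.Play();
			while (output.PlaybackState == PlaybackState.Playing)
			{
				Thread.Sleep(100);
			}
		}
	}
}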
Example #2
		/// <summary>
		/// Determines whether the specified format is supported in the given share mode
		/// </summary>
		/// <param name="shareMode">Share mode</param>
		/// <param name="desiredFormat">Desired format</param>
		/// <param name="closestMatchFormat">Output: the closest matching format, if any</param>
		/// <returns>
		/// 	<c>true</c> if the format is supported in the specified share mode; otherwise, <c>false</c>.
		/// </returns>
		public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat desiredFormat,
		                              out WaveFormatExtensible closestMatchFormat)
		{
			int hresult = audioClientInterface.IsFormatSupported(shareMode, desiredFormat, out closestMatchFormat);
			if (hresult == 0) // S_OK
			{
				// The format is supported directly
				return true;
			}
			if (hresult == 1) // S_FALSE
			{
				// Not supported as-is, but closestMatchFormat now holds a suggested near match
				return false;
			}
			if (hresult == (int) AudioClientErrors.UnsupportedFormat)
			{
				// Not supported at all; no closest match is returned
				return false;
			}
			// Any other HRESULT indicates a genuine failure
			Marshal.ThrowExceptionForHR(hresult);
			// shouldn't get here
			throw new NotSupportedException("Unknown hresult " + hresult.ToString());
		}
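
The three-way contract above (S_OK, S_FALSE with a suggestion, or UnsupportedFormat with none) is what drives the fallback cascade in Example #1. A minimal caller-side sketch, assuming the same AudioClient wrapper; audioClient, shareMode, and desiredFormat stand in for whatever instances the caller holds:

// Hypothetical caller distinguishing the three outcomes of IsFormatSupported
WaveFormatExtensible closestMatch;
WaveFormat outputFormat;
if (audioClient.IsFormatSupported(shareMode, desiredFormat, out closestMatch))
{
	// S_OK: the device accepts the desired format as-is
	outputFormat = desiredFormat;
}
else if (closestMatch != null)
{
	// S_FALSE: not supported directly, but the engine suggested a near match
	outputFormat = closestMatch;
}
else
{
	// UnsupportedFormat: no suggestion; fall back explicitly, as Example #1 does
	outputFormat = new WaveFormatExtensible(desiredFormat.SampleRate, 16, 2);
}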