Example #1
		/// <summary>
		/// Creates a new WASAPI Output
		/// </summary>
		/// <param name="device">Device to use</param>
		/// <param name="shareMode"></param>
		/// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
		/// <param name="latency"></param>
		public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
		{
			audioClient = device.AudioClient;
			this.shareMode = shareMode;
			isUsingEventSync = useEventSync;
			latencyMilliseconds = latency;
		}
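A minimal usage sketch for this constructor, assuming NAudio's MMDeviceEnumerator, AudioFileReader and the WasapiOut shown above; the file name is hypothetical.

 using System;
 using System.Threading;
 using NAudio.CoreAudioApi;
 using NAudio.Wave;

 class WasapiOutDemo
 {
     static void Main()
     {
         var enumerator = new MMDeviceEnumerator();
         MMDevice device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);

         // Shared mode, event-driven sync, 50 ms requested latency.
         using var output = new WasapiOut(device, AudioClientShareMode.Shared, useEventSync: true, latency: 50);
         using var reader = new AudioFileReader("example.wav"); // hypothetical file

         output.Init(reader);
         output.Play();

         // Crude wait; production code would subscribe to PlaybackStopped instead.
         while (output.PlaybackState == PlaybackState.Playing)
             Thread.Sleep(100);
     }
 }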
Example #2
 /// <summary>
 /// Creates a new WASAPI Output
 /// </summary>
 /// <param name="device">Device to use</param>
 /// <param name="shareMode"></param>
 /// <param name="latency"></param>
 public WasapiOutRT(string device, AudioClientShareMode shareMode, int latency)
 {
     this.device = device;
     this.shareMode = shareMode;
     this.latencyMilliseconds = latency;
     this.syncContext = SynchronizationContext.Current;
 }
Example #3
 /// <summary>
 /// Creates a new WASAPI Output
 /// </summary>
 /// <param name="device">Device to use</param>
 /// <param name="shareMode"></param>
 /// <param name="latency"></param>
 public WasapiOutRT(string device, AudioClientShareMode shareMode, int latency)
 {
     this.device = device;
     this.shareMode = shareMode;
     this.latencyMilliseconds = latency;
     this.syncContext = SynchronizationContext.Current;
     playThreadEvent = new AutoResetEvent(false);
 }
Example #4
 /// <summary>
 /// Creates a new WASAPI Output
 /// </summary>
 /// <param name="device">Device to use</param>
 /// <param name="shareMode"></param>
 /// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
 /// <param name="latency"></param>
 public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
 {
     this.audioClient = device.AudioClient;
     this.shareMode = shareMode;
     this.isUsingEventSync = useEventSync;
     this.latencyMilliseconds = latency;
     this.syncContext = SynchronizationContext.Current;
 }
Example #5
 /// <summary>
 /// Creates a new WASAPI Output
 /// </summary>
 /// <param name="device">Device to use</param>
 /// <param name="shareMode"></param>
 /// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
 /// <param name="latency">Desired latency in milliseconds</param>
 public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency)
 {
     audioClient = device.AudioClient;
     mmDevice = device;
     this.shareMode = shareMode;
     isUsingEventSync = useEventSync;
     latencyMilliseconds = latency;
     syncContext = SynchronizationContext.Current;
     outputFormat = audioClient.MixFormat; // allow the user to query the default format for shared mode streams
 }
Example #6
 /// <summary>
 /// Creates a new WASAPI Output device
 /// </summary>
 /// <param name="device">Device to use</param>
 /// <param name="shareMode">Share mode to use</param>
 /// <param name="latency">Latency in milliseconds</param>
 public WasapiOutGuiThread(MMDevice device, AudioClientShareMode shareMode, int latency)
 {
     audioClient = device.AudioClient;
     outputFormat = audioClient.MixFormat;
     this.shareMode = shareMode;
     latencyMilliseconds = latency;
     timer = new Timer();
     timer.Tick += TimerOnTick;
     timer.Interval = latency/2;
 }
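Setting timer.Interval to latency/2 means the GUI-thread timer tops the buffer up twice per latency window. A sketch of the handler shape wired above; the fill step is left as a comment because it is not part of this snippet.

 // Signature required by Timer.Tick (EventHandler); the body is a placeholder.
 private void TimerOnTick(object sender, EventArgs e)
 {
     // Typical fill step: read audioClient.CurrentPadding and write up to
     // (bufferFrameCount - padding) frames into the render buffer.
 }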
Example #7
 /// <summary>
 /// Initialize the Audio Client
 /// </summary>
 /// <param name="shareMode">Share Mode</param>
 /// <param name="streamFlags">Stream Flags</param>
 /// <param name="bufferDuration">Buffer Duration</param>
 /// <param name="periodicity">Periodicity</param>
 /// <param name="waveFormat">Wave Format</param>
 /// <param name="audioSessionGuid">Audio Session GUID (can be null)</param>
 public void Initialize(AudioClientShareMode shareMode,
     AudioClientStreamFlags streamFlags,
     long bufferDuration,
     long periodicity,
     WaveFormat waveFormat,
     Guid audioSessionGuid)
 {
     audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, waveFormat, ref audioSessionGuid);
     // may have changed the mix format so reset it
     mixFormat = null;
 }
Example #8
 /// <summary>
 /// Initializes the Audio Client
 /// </summary>
 /// <param name="shareMode">Share Mode</param>
 /// <param name="streamFlags">Stream Flags</param>
 /// <param name="bufferDuration">Buffer Duration</param>
 /// <param name="periodicity">Periodicity</param>
 /// <param name="waveFormat">Wave Format</param>
 /// <param name="audioSessionGuid">Audio Session GUID (can be null)</param>
 public void Initialize(AudioClientShareMode shareMode,
     AudioClientStreamFlags streamFlags,
     long bufferDuration,
     long periodicity,
     WaveFormat waveFormat,
     Guid audioSessionGuid)
 {
     this.shareMode = shareMode;
     int hresult = audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, waveFormat, ref audioSessionGuid);
     Marshal.ThrowExceptionForHR(hresult);
     // may have changed the mix format so reset it
     mixFormat = null;
 }
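A sketch of how such a wrapper is typically called for a shared-mode stream, assuming NAudio's MMDevice/AudioClient types; the helper name and the 100 ms default are illustrative.

 using System;
 using NAudio.CoreAudioApi;
 using NAudio.Wave;

 static class AudioClientSetup
 {
     // Shared mode: buffer duration is in 100-ns units, periodicity must be 0,
     // and Guid.Empty selects the default audio session.
     public static AudioClient InitializeShared(MMDevice device, int latencyMs = 100)
     {
         AudioClient audioClient = device.AudioClient;
         WaveFormat mixFormat = audioClient.MixFormat;   // engine mix format for shared mode
         long bufferDuration = latencyMs * 10000L;       // 1 ms == 10,000 * 100 ns

         audioClient.Initialize(AudioClientShareMode.Shared,
                                AudioClientStreamFlags.None,
                                bufferDuration,
                                0,
                                mixFormat,
                                Guid.Empty);
         return audioClient;
     }
 }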
Example #9
        public void SetPlaybackSettings(int latency, bool eventMode, bool exclusiveMode, double[] filterValues, bool useAllAvailableChannels)
        {
            this.useAllAvailableChannels = useAllAvailableChannels;
            this.latency      = latency;
            this.eventSync    = eventMode;
            this.filterValues = filterValues;

            if (exclusiveMode)
            {
                this.audioClientShareMode = AudioClientShareMode.Exclusive;
            }
            else
            {
                this.audioClientShareMode = AudioClientShareMode.Shared;
            }
        }
Example #10
        private void PlaySound(string file)
        {
            AudioClientShareMode shareMode = AudioClientShareMode.Shared;

            wasapiOut = new WasapiOut(shareMode, useEventSync: true, 50);
            //wasapiOut.Volume = 1;
            try
            {
                audioFileReader = new AudioFileReader(file);
            }
            catch (Exception)
            {
                return; // bail out; audioFileReader would otherwise be null below
            }
            //audioFileReader.Volume = 1;
            wasapiOut.Init(audioFileReader);
            wasapiOut.Play();
        }
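The snippet above never releases the device or the reader; a hedged follow-up, assuming the same wasapiOut and audioFileReader fields, that disposes both when NAudio raises PlaybackStopped.

 wasapiOut.PlaybackStopped += (sender, args) =>
 {
     // Free the device and the file once playback ends or fails.
     wasapiOut.Dispose();
     audioFileReader.Dispose();
 };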
Example #11
        /// <summary>
        ///     Indicates whether the audio endpoint device
        ///     supports a particular stream format.
        /// </summary>
        /// <param name="shareMode">
        ///     The sharing mode for the stream format. Through this parameter, the client indicates whether it
        ///     wants to use the specified format in exclusive mode or shared mode.
        /// </param>
        /// <param name="waveFormat">The stream format to test whether it is supported by the <see cref="AudioClient" /> or not.</param>
        /// <param name="closestMatch">
        ///     Retrieves the supported format that is closest to the format that the client specified
        ///     through the <paramref name="waveFormat" /> parameter. If <paramref name="shareMode" /> is
        ///     <see cref="AudioClientShareMode.Exclusive" />, <paramref name="closestMatch" /> will always be null.
        /// </param>
        /// <returns>
        ///     <c>True</c> if the <paramref name="waveFormat" /> is supported. <c>False</c> if the
        ///     <paramref name="waveFormat" /> is not supported.
        /// </returns>
        /// <remarks>
        ///     For more information, see
        ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370876(v=vs.85).aspx" />.
        /// </remarks>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat waveFormat, out WaveFormat closestMatch)
        {
            int result = IsFormatSupportedNative(shareMode, waveFormat, out closestMatch);
            switch (result)
            {
                case 0x0:
                    return true;

                case 0x1:
                case unchecked((int) 0x88890008):
                    return false;

                default:
                    CoreAudioAPIException.Try(result, InterfaceName, "IsFormatSupported");
                    return false;
            }
        }
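A sketch of a typical caller, assuming an audioClient instance that exposes the wrapper above; the 44.1 kHz request and the fallback policy are illustrative.

 static WaveFormat PickRenderFormat(AudioClient audioClient)
 {
     var desired = new WaveFormat(44100, 16, 2);   // 44.1 kHz, 16-bit, stereo
     WaveFormat closestMatch;

     if (audioClient.IsFormatSupported(AudioClientShareMode.Shared, desired, out closestMatch))
         return desired;

     // The engine may suggest a close match; otherwise the mix format is
     // always accepted in shared mode.
     return closestMatch ?? audioClient.MixFormat;
 }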
Example #12
        /// <summary>
        ///     Initializes the audio stream.
        /// </summary>
        /// <param name="shareMode">
        ///     The sharing mode for the connection. Through this parameter, the client tells the audio engine
        ///     whether it wants to share the audio endpoint device with other clients.
        /// </param>
        /// <param name="streamFlags">Flags to control creation of the stream.</param>
        /// <param name="hnsBufferDuration">
        ///     The buffer capacity as a time value (expressed in 100-nanosecond units). This parameter
        ///     contains the buffer size that the caller requests for the buffer that the audio application will share with the
        ///     audio engine (in shared mode) or with the endpoint device (in exclusive mode). If the call succeeds, the method
        ///     allocates a buffer that is a least this large.
        /// </param>
        /// <param name="hnsPeriodicity">
        ///     The device period. This parameter can be nonzero only in exclusive mode. In shared mode,
        ///     always set this parameter to 0. In exclusive mode, this parameter specifies the requested scheduling period for
        ///     successive buffer accesses by the audio endpoint device. If the requested device period lies outside the range that
        ///     is set by the device's minimum period and the system's maximum period, then the method clamps the period to that
        ///     range. If this parameter is 0, the method sets the device period to its default value. To obtain the default device
        ///     period, call the <see cref="GetDevicePeriodNative" /> method. If the
        ///     <see cref="AudioClientStreamFlags.StreamFlagsEventCallback" /> stream flag is set and
        ///     <see cref="AudioClientShareMode.Exclusive" /> is set as the <paramref name="shareMode" />, then
        ///     <paramref name="hnsPeriodicity" /> must be nonzero and equal to <paramref name="hnsBufferDuration" />.
        /// </param>
        /// <param name="waveFormat">
        ///     The format descriptor. For more information, see
        ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
        /// </param>
        /// <param name="audioSessionGuid">
        ///     A value that identifies the audio session that the stream belongs to. If the
        ///     <see cref="Guid" /> identifies a session that has been previously opened, the method adds the stream to that
        ///     session. If the GUID does not identify an existing session, the method opens a new session and adds the stream to
        ///     that session. The stream remains a member of the same session for its lifetime. Use <see cref="Guid.Empty" /> to
        ///     use the default session.
        /// </param>
        /// <returns>HRESULT</returns>
        /// <remarks>
        ///     For more information, see
        ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
        /// </remarks>
        public unsafe int InitializeNative(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags,
                                           long hnsBufferDuration, long hnsPeriodicity,
                                           WaveFormat waveFormat, Guid audioSessionGuid)
        {
            GCHandle hWaveFormat = GCHandle.Alloc(waveFormat, GCHandleType.Pinned);

            try
            {
                return(InteropCalls.CallI(UnsafeBasePtr, shareMode, streamFlags, hnsBufferDuration, hnsPeriodicity,
                                          hWaveFormat.AddrOfPinnedObject().ToPointer(), audioSessionGuid,
                                          ((void **)(*(void **)UnsafeBasePtr))[3]));
            }
            finally
            {
                hWaveFormat.Free();
            }
        }
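Per the remarks above, combining the event-callback flag with exclusive mode requires hnsPeriodicity to equal hnsBufferDuration. A hedged sketch, assuming the InitializeNative wrapper above and a device period obtained elsewhere.

 static int InitializeExclusiveEventDriven(AudioClient audioClient, WaveFormat waveFormat, long devicePeriod100Ns)
 {
     // Event callback + exclusive mode: periodicity must equal the buffer duration.
     long hnsBufferDuration = devicePeriod100Ns;
     long hnsPeriodicity = devicePeriod100Ns;

     return audioClient.InitializeNative(
         AudioClientShareMode.Exclusive,
         AudioClientStreamFlags.StreamFlagsEventCallback,
         hnsBufferDuration,
         hnsPeriodicity,
         waveFormat,
         Guid.Empty);   // Guid.Empty = default audio session
 }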
Example #13
        /// <summary>
        /// Creates a new WasapiOut instance.
        /// </summary>
        /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior.</param>
        /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, only one single playback for the specified device is possible at once.</param>
        /// <param name="latency">Latency of the playback specified in milliseconds.</param>
        /// <param name="playbackThreadPriority">ThreadPriority of the playbackthread which runs in background and feeds the device with data.</param>
        /// <param name="eventSyncContext">The synchronizationcontext which is used to raise any events like the "Stopped"-event. If the passed value is not null, the events will be called async through the SynchronizationContext.Post() method.</param>
        public WasapiOut(bool eventSync, AudioClientShareMode shareMode, int latency, ThreadPriority playbackThreadPriority, SynchronizationContext eventSyncContext)
        {
            if (!IsSupportedOnCurrentPlatform)
            {
                throw new PlatformNotSupportedException("Wasapi is only supported on Windows Vista and above.");
            }

            if (latency <= 0)
            {
                throw new ArgumentOutOfRangeException("latency");
            }

            _latency   = latency;
            _shareMode = shareMode;
            _eventSync = eventSync;
            _playbackThreadPriority = playbackThreadPriority;
            _syncContext            = eventSyncContext;
        }
Example #14
        /// <summary>
        /// Initialize the Audio Client
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="streamFlags">Stream Flags</param>
        /// <param name="bufferDuration">Buffer Duration</param>
        /// <param name="periodicity">Periodicity</param>
        /// <param name="waveFormat">Wave Format</param>
        /// <param name="audioSessionGuid">Audio Session GUID (can be null)</param>
        public int Initialize(AudioClientShareMode shareMode,
                              AudioClientStreamFlags streamFlags,
                              long bufferDuration,
                              long periodicity,
                              ref WaveFormatExtensible waveFormat,
                              Guid audioSessionGuid)
        {
            int hresult = 0;

            hresult = audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, ref waveFormat, ref audioSessionGuid);

            if (hresult != 0)
            {
                this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01, "Error Code in AudioClient::Initialize: " + hresult);
            }
            // may have changed the mix format so reset it
            mixFormat = new WaveFormatExtensible();
            return(hresult);
        }
Example #15
        /// <summary>
        /// Determines if the specified output format is supported in shared mode
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="desiredFormat">Desired Format</param>
        /// <param name="closestMatchFormat">Output The closest match format.</param>
        /// <returns>True if the format is supported</returns>
        // Token: 0x06000039 RID: 57 RVA: 0x00004024 File Offset: 0x00002224
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat desiredFormat, out WaveFormatExtensible closestMatchFormat)
        {
            int num = this.audioClientInterface.IsFormatSupported(shareMode, desiredFormat, out closestMatchFormat);

            if (num == 0)
            {
                return(true);
            }
            if (num == 1)
            {
                return(false);
            }
            if (num == -2004287480) // AUDCLNT_E_UNSUPPORTED_FORMAT, unchecked((int)0x88890008)
            {
                return(false);
            }
            Marshal.ThrowExceptionForHR(num);
            throw new NotSupportedException("Unknown hresult " + num);
        }
Example #16
        /// <summary>
        /// Creates a new WasapiCapture instance.
        /// </summary>
        /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior. Don't use this in combination with exclusive mode.</param>
        /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
        /// <param name="latency">Latency of the capture specified in milliseconds.</param>
        /// <param name="captureThreadPriority">ThreadPriority of the capturethread which runs in background and provides the audiocapture itself.</param>
        /// <param name="defaultFormat">The default WaveFormat to use for the capture. If this parameter is set to null, the best available format will be chosen automatically.</param>
        public WasapiCapture(bool eventSync, AudioClientShareMode shareMode, int latency, WaveFormat defaultFormat, ThreadPriority captureThreadPriority)
        {
            if (!IsSupportedOnCurrentPlatform)
            {
                throw new PlatformNotSupportedException("Wasapi is only supported on Windows Vista and above.");
            }
            if (eventSync && shareMode == AudioClientShareMode.Exclusive)
            {
                throw new ArgumentException("Don't use eventSync in combination with exclusive mode.");
            }

            _eventSync  = eventSync;
            _shareMode  = shareMode;
            _waveFormat = defaultFormat;

            _latency = latency;

            _recordingState = SoundIn.RecordingState.Stopped;
        }
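A usage sketch for this constructor, assuming CSCore's ISoundIn surface (Initialize, DataAvailable, Start, Stop); ProcessSamples is a hypothetical handler.

 using (var capture = new WasapiCapture(false, AudioClientShareMode.Shared, 100, null, ThreadPriority.AboveNormal))
 {
     capture.Initialize();
     capture.DataAvailable += (s, e) =>
     {
         // e.Data holds e.ByteCount bytes of captured PCM.
         ProcessSamples(e.Data, e.ByteCount);   // hypothetical
     };
     capture.Start();
     Thread.Sleep(5000);    // record for five seconds
     capture.Stop();
 }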
Example #17
        /// <summary>
        /// Creates a new WASAPI Output
        /// </summary>
        /// <param name="device">Device to use</param>
        /// <param name="shareMode"></param>
        /// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
        /// <param name="latency"></param>
        public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency, AudioPCMConfig pcm)
        {
            this.audioClient = device.AudioClient;
            this.shareMode = shareMode;
            this.isUsingEventSync = useEventSync;
            this.latencyMilliseconds = latency;
            this.pcm = pcm;
            this.outputFormat = new NAudio.Wave.WaveFormatExtensible(pcm.SampleRate, pcm.BitsPerSample, pcm.ChannelCount);
            NAudio.Wave.WaveFormatExtensible closestSampleRateFormat;
            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
                throw new NotSupportedException("PCM format mismatch");
            Init();
            bufferFrameCount = audioClient.BufferSize;
            readBuffers = new AudioBuffer[2];
            readBuffers[0] = new AudioBuffer(pcm, bufferFrameCount);
            readBuffers[1] = new AudioBuffer(pcm, bufferFrameCount);
            //if (this.shareMode == AudioClientShareMode.Exclusive)
            //    this.latencyMilliseconds = (int)(this.audioClient.DefaultDevicePeriod / 10000);
        }
Example #18
File: AudioClient.cs  Project: tihilv/Vkm
        public bool IsFormatSupported(AudioClientShareMode shareMode, WAVEFORMATEXTENSIBLE format, ref WAVEFORMATEXTENSIBLE closestMatch)
        {
            int  hr  = _RealClient.IsFormatSupported(shareMode, format, out closestMatch);
            bool ret = false;

            if (hr == 0)
            {
                ret = true;
            }
            else if (hr == 1)
            {
                ret = false;
            }
            else
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            return(ret);
        }
Example #19
 /// <summary>
 /// Creates a new WASAPI Output
 /// </summary>
 /// <param name="device">Device to use</param>
 /// <param name="shareMode"></param>
 /// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
 /// <param name="latency"></param>
 public WasapiOut(MMDevice device, AudioClientShareMode shareMode, bool useEventSync, int latency, AudioPCMConfig pcm)
 {
     this.m_settings          = new AudioEncoderSettings(pcm);
     this.audioClient         = device.AudioClient;
     this.shareMode           = shareMode;
     this.isUsingEventSync    = useEventSync;
     this.latencyMilliseconds = latency;
     this.outputFormat        = new NAudio.Wave.WaveFormatExtensible(pcm.SampleRate, pcm.BitsPerSample, pcm.ChannelCount);
     NAudio.Wave.WaveFormatExtensible closestSampleRateFormat;
     if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
     {
         throw new NotSupportedException("PCM format mismatch");
     }
     Init();
     bufferFrameCount = audioClient.BufferSize;
     readBuffers      = new AudioBuffer[2];
     readBuffers[0]   = new AudioBuffer(pcm, bufferFrameCount);
     readBuffers[1]   = new AudioBuffer(pcm, bufferFrameCount);
     //if (this.shareMode == AudioClientShareMode.Exclusive)
     //    this.latencyMilliseconds = (int)(this.audioClient.DefaultDevicePeriod / 10000);
 }
Example #20
        /// <summary>
        /// Determines if the specified output format is supported in shared mode
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="desiredFormat">Desired Format</param>
        /// <param name="closestMatchFormat">Output The closest match format.</param>
        /// <returns>True if the format is supported</returns>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat desiredFormat, out WaveFormatExtensible closestMatchFormat)
        {
            int hresult = audioClientInterface.IsFormatSupported(shareMode, desiredFormat, out closestMatchFormat);

            // S_OK is 0, S_FALSE = 1
            if (hresult == 0)
            {
                // directly supported
                return(true);
            }
            if (hresult == 1)
            {
                return(false);
            }
            if (hresult == (int)AudioClientErrors.UnsupportedFormat)
            {
                return(false);
            }
            Marshal.ThrowExceptionForHR(hresult);
            // shouldn't get here
            throw new NotSupportedException("Unknown hresult " + hresult);
        }
Example #21
        /// <summary>
        /// Determines if the specified output format is supported in shared mode
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="desiredFormat">Desired Format</param>
        /// <param name="closestMatchFormat">Output The closest match format.</param>
        /// <returns>True if the format is supported</returns>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat desiredFormat, out WaveFormatExtensible closestMatchFormat)
        {
            IntPtr pointerToPtr = GetPointerToPointer(); // IntPtr.Zero; // Marshal.AllocHGlobal(Marshal.SizeOf<WaveFormatExtensible>());

            closestMatchFormat = null;
            //int hresult = audioClientInterface.IsFormatSupported(shareMode, desiredFormat, pointerToPtr);
            var pDesiredFormat = desiredFormat.AsInterop();
            int hresult        = audioClientInterface.IsFormatSupported(shareMode, ref pDesiredFormat, pointerToPtr);

            var closestMatchPtr = MarshalHelpers.PtrToStructure <IntPtr>(pointerToPtr);

            if (closestMatchPtr != IntPtr.Zero)
            {
                //closestMatchFormat = MarshalHelpers.PtrToStructure<WaveFormatExtensible>(closestMatchPtr);
                closestMatchFormat = MarshalHelpers.PtrToStructure <WaveFormatExtensibleInterop>(closestMatchPtr);
                Marshal.FreeCoTaskMem(closestMatchPtr);
            }
            Marshal.FreeHGlobal(pointerToPtr);
            // S_OK is 0, S_FALSE = 1
            if (hresult == 0)
            {
                // directly supported
                return(true);
            }
            if (hresult == 1)
            {
                return(false);
            }
            if (hresult == (int)AudioClientErrors.UnsupportedFormat)
            {
                // Succeeded but the specified format is not supported in exclusive mode.
                return(shareMode != AudioClientShareMode.Exclusive);
            }
            Marshal.ThrowExceptionForHR(hresult);
            // shouldn't get here
            throw new NotSupportedException("Unknown hresult " + hresult);
        }
Example #22
        /// <summary>
        /// Determines if the specified output format is supported in shared mode
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="desiredFormat">Desired Format</param>
        /// <param name="closestMatchFormat">Output The closest match format.</param>
        /// <returns>
        ///     <c>true</c> if the specified format is supported in the given share mode; otherwise, <c>false</c>.
        /// </returns>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormatExtensible desiredFormat, out WaveFormatExtensible closestMatchFormat)
        {
            int hresult = audioClientInterface.IsFormatSupported(shareMode, ref desiredFormat, out closestMatchFormat);

            // S_OK is 0, S_FALSE = 1
            if (hresult == 0)
            {
                // directly supported
                return(true);
            }
            if (hresult == 1)
            {
                return(false);
            }
            else if (hresult == unchecked ((int)0x88890008)) // UnsupportedFormat
            {
                return(false);
            }
            else
            {
                this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01, "Error Code in AudioClient::IsFormatSupported: " + hresult);
                return(false);
            }
        }
Example #23
File: AudioClient.cs  Project: tihilv/Vkm
        public void Initialize(
            AudioClientShareMode shareMode,
            AudioClientStreamFlags streamFlags,
            long bufferDuration, long periodicity,
            WAVEFORMATEXTENSIBLE format, Guid audioSessionGuid)
        {
            int hr = _RealClient.Initialize(shareMode, streamFlags, bufferDuration, periodicity, format, ref audioSessionGuid);

            Marshal.ThrowExceptionForHR(hr);

            if ((streamFlags & AudioClientStreamFlags.EventCallback) != 0)
            {
                _audioSampleReady = new AutoResetEvent(false);
                IntPtr eventHandle = CreateEventEx(IntPtr.Zero, "audioSampleReady", CreateEventFlags.None, AccessRight.Synchronize | AccessRight.EventModifyState);
                _audioSampleReady.SafeWaitHandle = new Microsoft.Win32.SafeHandles.SafeWaitHandle(eventHandle, true);

                hr = _RealClient.SetEventHandle(eventHandle);
                Marshal.ThrowExceptionForHR(hr);

                _audioSampleReadyRegistered = ThreadPool.RegisterWaitForSingleObject(
                    _audioSampleReady, new WaitOrTimerCallback(sampleReady), null, -1, false);
            }
            _isInitialized = true;
        }
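The shape of the sampleReady callback registered above (a WaitOrTimerCallback); the body is a placeholder, since the wrapper's render path is not shown in this snippet.

 private void sampleReady(object state, bool timedOut)
 {
     // Runs each time the endpoint signals the audioSampleReady event:
     // fetch the next buffer from the render/capture client and service it here.
 }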
Example #24
        /// <summary>
        /// Determines if the specified output format is supported in shared mode
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="desiredFormat">Desired Format</param>
        /// <param name="closestMatchFormat">Output The closest match format.</param>
        /// <returns>True if the format is supported</returns>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat desiredFormat, out WaveFormatExtensible closestMatchFormat)
        {
            IntPtr pointerToPtr = GetPointerToPointer(); // IntPtr.Zero; // Marshal.AllocHGlobal(Marshal.SizeOf<WaveFormatExtensible>());

            closestMatchFormat = null;
            int hresult = audioClientInterface.IsFormatSupported(shareMode, desiredFormat, pointerToPtr);

            var closestMatchPtr = MarshalHelpers.PtrToStructure <IntPtr>(pointerToPtr);

            if (closestMatchPtr != IntPtr.Zero)
            {
                closestMatchFormat = MarshalHelpers.PtrToStructure <WaveFormatExtensible>(closestMatchPtr);
                Marshal.FreeCoTaskMem(closestMatchPtr);
            }
            Marshal.FreeHGlobal(pointerToPtr);
            // S_OK is 0, S_FALSE = 1
            if (hresult == 0)
            {
                // directly supported
                return(true);
            }
            if (hresult == 1)
            {
                return(false);
            }
            if (hresult == (int)AudioClientErrors.UnsupportedFormat)
            {
                // documentation is confusing as to what this flag means
                // https://docs.microsoft.com/en-us/windows/desktop/api/audioclient/nf-audioclient-iaudioclient-isformatsupported
                // "Succeeded but the specified format is not supported in exclusive mode."
                return(false); // shareMode != AudioClientShareMode.Exclusive;
            }
            Marshal.ThrowExceptionForHR(hresult);
            // shouldn't get here
            throw new NotSupportedException("Unknown hresult " + hresult);
        }
Example #25
 /// <summary>
 /// Determines if the specified output format is supported in shared mode
 /// </summary>
 /// <param name="shareMode">Share Mode</param>
 /// <param name="desiredFormat">Desired Format</param>
 /// <param name="closestMatchFormat">Output The closest match format.</param>
 /// <returns>
 ///     <c>true</c> if the specified format is supported in the given share mode; otherwise, <c>false</c>.
 /// </returns>
 public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat desiredFormat, out WaveFormatExtensible closestMatchFormat)
 {
     int hresult = audioClientInterface.IsFormatSupported(shareMode, desiredFormat, out closestMatchFormat);
     // S_OK is 0, S_FALSE = 1
     if (hresult == 0)
     {
         // directly supported
         return true;
     }
     if (hresult == 1)
     {
         return false;
     }
     else if (hresult == (int)AudioClientErrors.UnsupportedFormat)
     {
         return false;
     }
     else
     {
         Marshal.ThrowExceptionForHR(hresult);
     }
     // shouldn't get here
     throw new NotSupportedException("Unknown hresult " + hresult.ToString());
 }
Example #26
 /// <summary>
 /// The Initialize method initializes the audio stream.
 /// </summary>
 public unsafe int InitializeNative(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags, long hnsBufferDuration, long hnsPeriodicity,
                       WaveFormat waveFormat, Guid audioSessionGuid)
 {
     var hWaveFormat = GCHandle.Alloc(waveFormat, GCHandleType.Pinned);
     int result = -1;
     result = InteropCalls.CallI(_basePtr, shareMode, streamFlags, hnsBufferDuration, hnsPeriodicity, hWaveFormat.AddrOfPinnedObject().ToPointer(), audioSessionGuid, ((void**)(*(void**)_basePtr))[3]);
     hWaveFormat.Free();
     return result;
 }
Example #27
 internal static unsafe int CallI(void* _basePtr, AudioClientShareMode shareMode,
     AudioClientStreamFlags streamFlags, long hnsBufferDuration, long hnsPeriodicity, void* p1,
     Guid audioSessionGuid, void* p2)
 {
     throw new NotImplementedException();
 }
Example #28
 /// <summary>
 /// Initializes a new instance of the <see cref="WasapiCapture"/> class.
 /// CaptureThreadPriority = AboveNormal.
 /// DefaultFormat = null.
 /// </summary>
 /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior. Don't use this in combination with exclusive mode.</param>
 /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
 /// <param name="latency">Latency of the capture specified in milliseconds.</param>
 public WasapiCapture(bool eventSync, AudioClientShareMode shareMode, int latency)
     : this(eventSync, shareMode, latency, null)
 {
 }
Example #29
 private AudioClient InitializeClient(AudioClientShareMode shareMode)
 {
     AudioClient audioClient = GetAudioClient();
     WaveFormat waveFormat = audioClient.MixFormat;
     long refTimesPerSecond = 10000000;
     audioClient.Initialize(shareMode,
         AudioClientStreamFlags.None,
         refTimesPerSecond,
         0,
         waveFormat,
         Guid.Empty);
     return audioClient;
 }
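What typically follows InitializeClient, sketched with NAudio's AudioClient/AudioRenderClient types; pre-filling with silence before Start is a common pattern, not something this snippet prescribes.

 static void StartWithSilence(AudioClient client)
 {
     int bufferFrameCount = client.BufferSize;
     AudioRenderClient renderClient = client.AudioRenderClient;

     // Claim the whole buffer and release it flagged as silence.
     IntPtr buffer = renderClient.GetBuffer(bufferFrameCount);
     renderClient.ReleaseBuffer(bufferFrameCount, AudioClientBufferFlags.Silent);

     client.Start();
 }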
Example #30
 public bool IsFormatSupported(AudioClientShareMode shareMode, WAVEFORMATEXTENSIBLE format, ref WAVEFORMATEXTENSIBLE closestMatch)
 {
     int hr = _RealClient.IsFormatSupported(shareMode, format, out closestMatch);
     bool ret = false;
     if (hr == 0) ret = true;
     else if (hr == 1) ret = false;
     else Marshal.ThrowExceptionForHR(hr);
     
     return ret;
 }
Example #31
 /// <summary>
 /// Initializes a new instance of the <see cref="WasapiCapture"/> class. SynchronizationContext = null.
 /// </summary>
 /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior. Don't use this in combination with exclusive mode.</param>
 /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
 /// <param name="latency">Latency of the capture specified in milliseconds.</param>
 /// <param name="captureThreadPriority">ThreadPriority of the capturethread which runs in background and provides the audiocapture itself.</param>
 /// <param name="defaultFormat">The default WaveFormat to use for the capture. If this parameter is set to null, the best available format will be chosen automatically.</param>
 public WasapiCapture(bool eventSync, AudioClientShareMode shareMode, int latency, WaveFormat defaultFormat, ThreadPriority captureThreadPriority)
     : this(eventSync, shareMode, latency, defaultFormat, captureThreadPriority, null)
 {
 }
Example #32
 internal static unsafe int Calli(void *_basePtr, AudioClientShareMode shareMode,
                                  AudioClientStreamFlags streamFlags, long hnsBufferDuration, long hnsPeriodicity, void *p1,
                                  Guid audioSessionGuid, void *p2)
 {
     throw new NotImplementedException();
 }
Example #33
 internal static unsafe int Calli(void *_basePtr, AudioClientShareMode shareMode, void *p1, void *p2, void *p3)
 {
     throw new NotImplementedException();
 }
Example #34
 /// <summary>
 /// WASAPI Out using default audio endpoint
 /// </summary>
 /// <param name="shareMode">ShareMode - shared or exclusive</param>
 /// <param name="latency">Desired latency in milliseconds</param>
 public WasapiOut(AudioClientShareMode shareMode, int latency) :
     this(GetDefaultAudioEndpoint(), shareMode, true, latency, AudioPCMConfig.RedBook)
 {
 }
Example #35
File: WasapiCapture.cs  Project: EQ4/cscore
 /// <summary>
 /// Initializes a new instance of the <see cref="WasapiCapture"/> class.
 /// CaptureThreadPriority = AboveNormal. 
 /// DefaultFormat = null.
 /// </summary>
 /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior. Don't use this in combination with exclusive mode.</param>
 /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
 /// <param name="latency">Latency of the capture specified in milliseconds.</param>
 public WasapiCapture(bool eventSync, AudioClientShareMode shareMode, int latency)
     : this(eventSync, shareMode, latency, null)
 {
 }
Example #36
 /// <summary>
 /// The Initialize method initializes the audio stream.
 /// </summary>
 public void Initialize(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags, long hnsBufferDuration, long hnsPeriodicity, WaveFormat waveFormat, Guid audioSessionGuid)
 {
     CoreAudioAPIException.Try(InitializeNative(shareMode, streamFlags, hnsBufferDuration, hnsPeriodicity, waveFormat, audioSessionGuid), c, "Initialize");
 }
Example #37
        /// <summary>
        /// Checks whether the audio endpoint device supports a particular stream format.
        /// </summary>
        /// <param name="shareMode"></param>
        /// <param name="waveFormat"></param>
        /// <returns></returns>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat waveFormat)
        {
            WaveFormatExtensible tmp;

            return(IsFormatSupported(shareMode, waveFormat, out tmp));
        }
Example #38
 internal static unsafe int CallI(void* _basePtr, AudioClientShareMode shareMode, void* p1, void* p2, void* p3)
 {
     throw new NotImplementedException();
 }
Example #39
        /// <summary>
        /// WASAPI Out using default audio endpoint
        /// </summary>
        /// <param name="shareMode">ShareMode - shared or exclusive</param>
        /// <param name="latency">Desired latency in milliseconds</param>
        public WasapiOutRT(AudioClientShareMode shareMode, int latency) :
            this(GetDefaultAudioEndpoint(), shareMode, latency)
        {

        }
Example #40
File: WasapiCapture.cs  Project: EQ4/cscore
 /// <summary>
 /// Initializes a new instance of the <see cref="WasapiCapture"/> class.
 /// CaptureThreadPriority = AboveNormal. 
 /// DefaultFormat = null.
 /// Latency = 100ms.
 /// </summary>
 /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior. Don't use this in combination with exclusive mode.</param>
 /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
 public WasapiCapture(bool eventSync, AudioClientShareMode shareMode)
     : this(eventSync, shareMode, 100)
 {
 }
Example #41
File: WasapiOut.cs  Project: opcon/cscore
 /// <summary>
 ///     Initializes a new instance of the <see cref="WasapiOut" /> class.
 ///     EventSyncContext = SynchronizationContext.Current.
 ///     PlaybackThreadPriority = AboveNormal.
 /// </summary>
 /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior.</param>
 /// <param name="shareMode">
 ///     Specifies how to open the audio device. Note that if exclusive mode is used, only one single
 ///     playback for the specified device is possible at once.
 /// </param>
 /// <param name="latency">Latency of the playback specified in milliseconds.</param>
 public WasapiOut(bool eventSync, AudioClientShareMode shareMode, int latency)
     : this(eventSync, shareMode, latency, ThreadPriority.AboveNormal)
 {
 }
Example #42
        /// <summary>
        /// Determines whether the specified output format is supported
        /// </summary>
        /// <param name="shareMode">The share mode.</param>
        /// <param name="desiredFormat">The desired format.</param>
        /// <returns>True if the format is supported</returns>
        // Token: 0x06000038 RID: 56 RVA: 0x0000400C File Offset: 0x0000220C
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat desiredFormat)
        {
            WaveFormatExtensible waveFormatExtensible;

            return(this.IsFormatSupported(shareMode, desiredFormat, out waveFormatExtensible));
        }
Example #43
 /// <summary>
 /// Checks whether the audio endpoint device supports a particular stream format.
 /// </summary>
 /// <param name="shareMode"></param>
 /// <param name="waveFormat"></param>
 /// <returns></returns>
 public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat waveFormat)
 {
     WaveFormatExtensible tmp;
     return IsFormatSupported(shareMode, waveFormat, out tmp);
 }
Example #44
File: WasapiCapture.cs  Project: EQ4/cscore
        /// <summary>
        /// Initializes a new instance of the <see cref="WasapiCapture"/> class.
        /// </summary>
        /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior. Don't use this in combination with exclusive mode.</param>
        /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
        /// <param name="latency">Latency of the capture specified in milliseconds.</param>
        /// <param name="captureThreadPriority">ThreadPriority of the capturethread which runs in background and provides the audiocapture itself.</param>
        /// <param name="defaultFormat">The default WaveFormat to use for the capture. If this parameter is set to null, the best available format will be chosen automatically.</param>
        /// <param name="synchronizationContext">The <see cref="SynchronizationContext"/> to use to fire events on.</param>
        /// <exception cref="PlatformNotSupportedException">The current platform does not support Wasapi. For more details see: <see cref="IsSupportedOnCurrentPlatform"/>.</exception>
        /// <exception cref="ArgumentException">The <paramref name="eventSync"/> parameter is set to true while the <paramref name="shareMode"/> is set to <see cref="AudioClientShareMode.Exclusive"/>.</exception>
        public WasapiCapture(bool eventSync, AudioClientShareMode shareMode, int latency, WaveFormat defaultFormat,
            ThreadPriority captureThreadPriority, SynchronizationContext synchronizationContext)
        {
            if (!IsSupportedOnCurrentPlatform)
                throw new PlatformNotSupportedException("Wasapi is only supported on Windows Vista and above.");
            if (eventSync && shareMode == AudioClientShareMode.Exclusive)
                throw new ArgumentException("Don't use eventSync in combination with exclusive mode.");

            _eventSync = eventSync;
            _shareMode = shareMode;
            _waveFormat = defaultFormat;

            _latency = latency;
            _captureThreadPriority = captureThreadPriority;
            _synchronizationContext = synchronizationContext;

            _recordingState = RecordingState.Stopped;
        }
Example #45
 /// <summary>
 /// Initializes a new instance of the <see cref="WasapiCapture"/> class.
 /// CaptureThreadPriority = AboveNormal.
 /// DefaultFormat = null.
 /// Latency = 100ms.
 /// </summary>
 /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior. Don't use this in combination with exclusive mode.</param>
 /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
 public WasapiCapture(bool eventSync, AudioClientShareMode shareMode)
     : this(eventSync, shareMode, 100)
 {
 }
Example #46
 /// <summary>
 ///     Indicates whether the audio endpoint device
 ///     supports a particular stream format.
 /// </summary>
 /// <param name="shareMode">
 ///     The sharing mode for the stream format. Through this parameter, the client indicates whether it
 ///     wants to use the specified format in exclusive mode or shared mode.
 /// </param>
 /// <param name="waveFormat">The stream format to test whether it is supported by the <see cref="AudioClient" /> or not.</param>
 /// <returns>
 ///     <c>True</c> if the <paramref name="waveFormat" /> is supported. <c>False</c> if the
 ///     <paramref name="waveFormat" /> is not supported.
 /// </returns>
 /// <remarks>
 ///     For more information, see
 ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370876(v=vs.85).aspx" />.
 /// </remarks>
 public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat waveFormat)
 {
     WaveFormat tmp;
     return IsFormatSupported(shareMode, waveFormat, out tmp);
 }
Example #47
        /// <summary>
        /// Checks whether the audio endpoint device supports a particular stream format.
        /// </summary>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat waveFormat, out WaveFormatExtensible closestMatch)
        {
            int result = IsFormatSupportedNative(shareMode, waveFormat, out closestMatch);
            switch (result)
            {
                case 0x0:
                    return true;

                case 0x1:
                case unchecked((int)0x88890008):
                    return false;

                default:
                    CoreAudioAPIException.Try(result, c, "IsFormatSupported");
                    return false;
            }
        }
Example #48
File: WasapiOut.cs  Project: skpaul/cscore
 /// <summary>
 ///     Initializes a new instance of the <see cref="WasapiOut" /> class.
 ///     EventSyncContext = SynchronizationContext.Current.
 /// </summary>
 /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior.</param>
 /// <param name="shareMode">
 ///     Specifies how to open the audio device. Note that if exclusive mode is used, only one single
 ///     playback for the specified device is possible at once.
 /// </param>
 /// <param name="latency">Latency of the playback specified in milliseconds.</param>
 /// <param name="playbackThreadPriority">
 ///     ThreadPriority of the playbackthread which runs in background and feeds the device
 ///     with data.
 /// </param>
 public WasapiOut(bool eventSync, AudioClientShareMode shareMode, int latency,
                  ThreadPriority playbackThreadPriority)
     : this(eventSync, shareMode, latency, playbackThreadPriority, SynchronizationContext.Current)
 {
 }
Example #49
 /// <summary>
 /// WASAPI Out using default audio endpoint
 /// </summary>
 /// <param name="shareMode">ShareMode - shared or exclusive</param>
 /// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
 /// <param name="latency">Desired latency in milliseconds</param>
 public WasapiOut(AudioClientShareMode shareMode, bool useEventSync, int latency)
     : this(GetDefaultAudioEndpoint(), shareMode, useEventSync, latency)
 {
 }
Example #50
 /// <summary>
 /// WASAPI Out using default audio endpoint
 /// </summary>
 /// <param name="shareMode">ShareMode - shared or exclusive</param>
 /// <param name="useEventSync">true if sync is done with event. false use sleep.</param>
 /// <param name="latency">Desired latency in milliseconds</param>
 public WasapiOut(AudioClientShareMode shareMode, bool useEventSync, int latency) :
     this(GetDefaultAudioEndpoint(), shareMode, useEventSync, latency)
 {
 }
Example #51
File: WasapiOut.cs  Project: skpaul/cscore
 /// <summary>
 ///     Initializes a new instance of the <see cref="WasapiOut" /> class.
 ///     EventSyncContext = SynchronizationContext.Current.
 ///     PlaybackThreadPriority = AboveNormal.
 /// </summary>
 /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior.</param>
 /// <param name="shareMode">
 ///     Specifies how to open the audio device. Note that if exclusive mode is used, only one single
 ///     playback for the specified device is possible at once.
 /// </param>
 /// <param name="latency">Latency of the playback specified in milliseconds.</param>
 public WasapiOut(bool eventSync, AudioClientShareMode shareMode, int latency)
     : this(eventSync, shareMode, latency, ThreadPriority.AboveNormal)
 {
 }
Example #52
File: WasapiOut.cs  Project: opcon/cscore
        /// <summary>
        ///     Initializes a new instance of the <see cref="WasapiOut" /> class.
        ///     EventSyncContext = SynchronizationContext.Current.
        /// </summary>
        /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior.</param>
        /// <param name="shareMode">
        ///     Specifies how to open the audio device. Note that if exclusive mode is used, only one single
        ///     playback for the specified device is possible at once.
        /// </param>
        /// <param name="latency">Latency of the playback specified in milliseconds.</param>
        /// <param name="playbackThreadPriority">
        ///     ThreadPriority of the playbackthread which runs in background and feeds the device
        ///     with data.
        /// </param>
        public WasapiOut(bool eventSync, AudioClientShareMode shareMode, int latency,
            ThreadPriority playbackThreadPriority)
            : this(eventSync, shareMode, latency, playbackThreadPriority, SynchronizationContext.Current)
        {
        }
Example #53
 /// <summary>
 /// WASAPI Out using default audio endpoint
 /// </summary>
 /// <param name="shareMode">ShareMode - shared or exclusive</param>
 /// <param name="latency">Desired latency in milliseconds</param>
 public WasapiOut(AudioClientShareMode shareMode, int latency) :
     this(GetDefaultAudioEndpoint(), shareMode, true, latency)
 {
 }
Example #54
        /// <summary>
        /// The IsFormatSupportedInternal method indicates whether the audio endpoint device
        /// supports a particular stream format.
        /// </summary>
        /// <returns>For exclusive mode, IsFormatSupportedInternal returns S_OK if the audio
        /// endpoint device supports the caller-specified format, or it returns
        /// AUDCLNT_E_UNSUPPORTED_FORMAT if the device does not support the format. The
        /// ppClosestMatch parameter can be NULL. If it is not NULL, the method writes NULL to
        /// *ppClosestMatch. For shared mode, if the audio engine supports the caller-specified
        /// format, IsFormatSupportedInternal sets *ppClosestMatch to NULL and returns S_OK. If the
        /// audio engine does not support the caller-specified format but does support a similar
        /// format, the method retrieves the similar format through the ppClosestMatch parameter and
        /// returns S_FALSE. If the audio engine does not support the caller-specified format or any
        /// similar format, the method sets *ppClosestMatch to NULL and returns
        /// AUDCLNT_E_UNSUPPORTED_FORMAT.</returns>
        public unsafe int IsFormatSupportedNative(AudioClientShareMode shareMode, WaveFormat waveFormat, out WaveFormatExtensible closestMatch)
        {
            closestMatch = null;
            var hClosestMatch = GCHandle.Alloc(closestMatch, GCHandleType.Pinned);
            var hWaveFormat = GCHandle.Alloc(waveFormat, GCHandleType.Pinned);

            IntPtr pclosestmatch = hClosestMatch.AddrOfPinnedObject();

            var result = InteropCalls.CallI(_basePtr, shareMode, hWaveFormat.AddrOfPinnedObject().ToPointer(),
                &pclosestmatch, ((void**)(*(void**)_basePtr))[7]);

            hWaveFormat.Free();
            hClosestMatch.Free();

            return result;
        }
Example #55
 /// <summary>
 /// Determines whether the specified output format is supported
 /// </summary>
 /// <param name="shareMode">The share mode.</param>
 /// <param name="desiredFormat">The desired format.</param>
 /// <returns>
 ///     <c>true</c> if the specified format is supported in the given share mode; otherwise, <c>false</c>.
 /// </returns>
 public bool IsFormatSupported(AudioClientShareMode shareMode,
     WaveFormat desiredFormat)
 {
     WaveFormatExtensible closestMatchFormat;
     return IsFormatSupported(shareMode, desiredFormat, out closestMatchFormat);
 }
Example #56
        /// <summary>
        /// Determines if the specified output format is supported in shared mode
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="desiredFormat">Desired Format</param>
        /// <param name="closestMatchFormat">Output The closest match format.</param>
        /// <returns>True if the format is supported</returns>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat desiredFormat, out WaveFormatExtensible closestMatchFormat)
        {
            IntPtr pointerToPtr = GetPointerToPointer(); // IntPtr.Zero; // Marshal.AllocHGlobal(Marshal.SizeOf<WaveFormatExtensible>());
            closestMatchFormat = null;
            var pDesiredFormat = desiredFormat.AsInterop();
            int hresult = audioClientInterface.IsFormatSupported(shareMode, ref pDesiredFormat, pointerToPtr);

            var closestMatchPtr = MarshalHelpers.PtrToStructure<IntPtr>(pointerToPtr);

            if (closestMatchPtr != IntPtr.Zero)
            {
                closestMatchFormat = MarshalHelpers.PtrToStructure<WaveFormatExtensibleInterop>(closestMatchPtr);
                Marshal.FreeCoTaskMem(closestMatchPtr);
            }
            Marshal.FreeHGlobal(pointerToPtr);
            // S_OK is 0, S_FALSE = 1
            if (hresult == 0)
            {

                // directly supported
                return true;
            }
            if (hresult == 1)
            {
                return false;
            }
            if (hresult == (int)AudioClientErrors.UnsupportedFormat)
            {
                // Succeeded but the specified format is not supported in exclusive mode.
                return shareMode != AudioClientShareMode.Exclusive;
            }
            Marshal.ThrowExceptionForHR(hresult);
            // shouldn't get here
            throw new NotSupportedException("Unknown hresult " + hresult);
        }
Example #57
 private bool IsFormatSupported(WaveFormat waveFormat, AudioClientShareMode sharemode, AudioClient audioClient)
 {
     return(audioClient.IsFormatSupported(sharemode, waveFormat));
 }
Example #58
        public void Initialize(
            AudioClientShareMode shareMode,
            AudioClientStreamFlags streamFlags,
            long bufferDuration, long periodicity,
            WAVEFORMATEXTENSIBLE format, Guid audioSessionGuid)
        {
            int hr = _RealClient.Initialize(shareMode, streamFlags, bufferDuration, periodicity, format, ref audioSessionGuid);
            Marshal.ThrowExceptionForHR(hr);

            if ((streamFlags & AudioClientStreamFlags.EventCallback) != 0)
            {
                _audioSampleReady = new AutoResetEvent(false);
                IntPtr eventHandle = CreateEventEx(IntPtr.Zero, "audioSampleReady", CreateEventFlags.None, AccessRight.Synchronize | AccessRight.EventModifyState);
                _audioSampleReady.SafeWaitHandle = new Microsoft.Win32.SafeHandles.SafeWaitHandle(eventHandle, true);

                hr = _RealClient.SetEventHandle(eventHandle);
                Marshal.ThrowExceptionForHR(hr);

                _audioSampleReadyRegistered = ThreadPool.RegisterWaitForSingleObject(
                    _audioSampleReady, new WaitOrTimerCallback(sampleReady), null, -1, false);
            }
            _isInitialized = true;
        }
Example #59
File: WasapiCapture.cs  Project: EQ4/cscore
 /// <summary>
 /// Initializes a new instance of the <see cref="WasapiCapture"/> class.
 /// CaptureThreadPriority = AboveNormal.
 /// </summary>
 /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior. Don't use this in combination with exclusive mode.</param>
 /// <param name="shareMode">Specifies how to open the audio device. Note that if exclusive mode is used, the device can only be used once on the whole system. Don't use exclusive mode in combination with eventSync.</param>
 /// <param name="latency">Latency of the capture specified in milliseconds.</param>
 /// <param name="defaultFormat">The default WaveFormat to use for the capture. If this parameter is set to null, the best available format will be chosen automatically.</param>
 public WasapiCapture(bool eventSync, AudioClientShareMode shareMode, int latency, WaveFormat defaultFormat)
     : this(eventSync, shareMode, latency, defaultFormat, ThreadPriority.AboveNormal)
 {
 }
Example #60
File: WasapiOut.cs  Project: opcon/cscore
        /// <summary>
        ///     Initializes a new instance of the <see cref="WasapiOut" /> class.
        /// </summary>
        /// <param name="eventSync">True, to use eventsynchronization instead of a simple loop and sleep behavior.</param>
        /// <param name="shareMode">
        ///     Specifies how to open the audio device. Note that if exclusive mode is used, only one single
        ///     playback for the specified device is possible at once.
        /// </param>
        /// <param name="latency">Latency of the playback specified in milliseconds.</param>
        /// <param name="playbackThreadPriority">
        ///     <see cref="ThreadPriority"/> of the playbackthread which runs in background and feeds the device
        ///     with data.
        /// </param>
        /// <param name="eventSyncContext">
        ///     The <see cref="SynchronizationContext"/> which is used to raise any events like the <see cref="Stopped"/>-event.
        ///     If the passed value is not null, the events will be called async through the <see cref="SynchronizationContext.Post"/> method.
        /// </param>
        public WasapiOut(bool eventSync, AudioClientShareMode shareMode, int latency,
            ThreadPriority playbackThreadPriority, SynchronizationContext eventSyncContext)
        {
            if (!IsSupportedOnCurrentPlatform)
                throw new PlatformNotSupportedException("Wasapi is only supported on Windows Vista and above.");

            if (latency <= 0)
                throw new ArgumentOutOfRangeException("latency");

            _latency = latency;
            _shareMode = shareMode;
            _eventSync = eventSync;
            _playbackThreadPriority = playbackThreadPriority;
            _syncContext = eventSyncContext;

            UseChannelMixingMatrices = false;
        }
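Finally, a playback sketch for the CSCore-style constructors above, assuming CodecFactory and the ISoundOut Initialize/Play/Stop surface; the file name is hypothetical.

 using (var source = CodecFactory.Instance.GetCodec("example.mp3"))
 using (var soundOut = new WasapiOut(false, AudioClientShareMode.Shared, 100))
 {
     soundOut.Initialize(source);
     soundOut.Play();
     Console.ReadKey();   // play until a key is pressed
     soundOut.Stop();
 }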