/// <summary>
/// The GetSessionEnumerator method gets a pointer to the audio session enumerator object.
/// </summary>
/// <returns>An <see cref="AudioSessionEnumerator"/> that can enumerate the audio sessions on the device.</returns>
public AudioSessionEnumerator GetSessionEnumerator()
{
    AudioSessionEnumerator sessionEnumerator;
    // Fixed: the interface-name argument was the undefined identifier "c";
    // use InterfaceName for consistency with the other wrappers in this file.
    CoreAudioAPIException.Try(GetSessionEnumeratorNative(out sessionEnumerator), InterfaceName, "GetSessionEnumerator");
    return sessionEnumerator;
}
/// <summary>
/// Retrieves the maximum latency for the current stream. May be called at any time
/// after the stream has been initialized.
/// </summary>
/// <remarks>
/// Rendering clients can use this value to compute the minimum amount of data they must
/// write during a single processing pass; writing less risks audible glitches. For more
/// information, see
/// <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370874(v=vs.85).aspx" />.
/// </remarks>
/// <returns>The stream latency, expressed in 100-nanosecond units.</returns>
public long GetStreamLatency()
{
    long streamLatency;
    CoreAudioAPIException.Try(GetStreamLatencyNative(out streamLatency), InterfaceName, "GetStreamLatency");
    return streamLatency;
}
/// <summary>
/// Returns the stream format that the audio engine uses internally for processing
/// shared-mode streams.
/// </summary>
/// <remarks>
/// For more information, see
/// <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370872(v=vs.85).aspx" />.
/// </remarks>
/// <returns>The engine's internal shared-mode mix format.</returns>
public WaveFormat GetMixFormat()
{
    WaveFormat mixFormat;
    CoreAudioAPIException.Try(GetMixFormatNative(out mixFormat), InterfaceName, "GetMixFormat");
    return mixFormat;
}
/// <summary>
/// Creates a COM object with the specified interface.
/// </summary>
/// <param name="iid">Identifier of the interface to activate. The caller uses this interface to communicate with the COM object.</param>
/// <param name="context">The execution context in which the code that manages the newly created object will run.</param>
/// <param name="activationParams">Pass <see cref="IntPtr.Zero"/> as the default value. See http://msdn.microsoft.com/en-us/library/windows/desktop/dd371405%28v=vs.85%29.aspx for more details.</param>
/// <returns>The address of the interface specified by <paramref name="iid"/>.</returns>
public IntPtr Activate(Guid iid, CLSCTX context, IntPtr activationParams)
{
    IntPtr interfacePtr;
    CoreAudioAPIException.Try(ActivateNative(iid, context, activationParams, out interfacePtr), InterfaceName, "Activate");
    return interfacePtr;
}
/// <summary>
/// Initializes the audio stream.
/// </summary>
/// <param name="shareMode">
/// The sharing mode for the connection; tells the audio engine whether the client wants to
/// share the audio endpoint device with other clients.
/// </param>
/// <param name="streamFlags">Flags to control creation of the stream.</param>
/// <param name="hnsBufferDuration">
/// The requested buffer capacity as a time value in 100-nanosecond units, for the buffer the
/// application shares with the audio engine (shared mode) or the endpoint device (exclusive
/// mode). On success the allocated buffer is at least this large.
/// </param>
/// <param name="hnsPeriodicity">
/// The device period. Must be 0 in shared mode. In exclusive mode it specifies the requested
/// scheduling period for successive buffer accesses by the endpoint device; values outside the
/// range given by the device's minimum period and the system's maximum period are clamped, and
/// 0 selects the default period (see <see cref="GetDevicePeriodNative" />). If
/// <see cref="AudioClientStreamFlags.StreamFlagsEventCallback" /> is set together with
/// <see cref="AudioClientShareMode.Exclusive" />, this value must be nonzero and equal to
/// <paramref name="hnsBufferDuration" />.
/// </param>
/// <param name="waveFormat">
/// The format descriptor. See
/// <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
/// </param>
/// <param name="audioSessionGuid">
/// Identifies the audio session the stream belongs to. A GUID of a previously opened session
/// adds the stream to that session; an unknown GUID opens a new session. The stream stays in
/// the same session for its lifetime. Use <see cref="Guid.Empty" /> for the default session.
/// </param>
/// <remarks>
/// For more information, see
/// <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370875(v=vs.85).aspx" />.
/// </remarks>
public void Initialize(AudioClientShareMode shareMode, AudioClientStreamFlags streamFlags,
    long hnsBufferDuration, long hnsPeriodicity, WaveFormat waveFormat, Guid audioSessionGuid)
{
    CoreAudioAPIException.Try(
        InitializeNative(shareMode, streamFlags, hnsBufferDuration, hnsPeriodicity, waveFormat, audioSessionGuid),
        InterfaceName,
        "Initialize");
}
/// <summary>
/// The GetBufferSize method retrieves the size (maximum capacity) of the endpoint buffer.
/// </summary>
/// <returns>The number of audio frames the buffer can hold.</returns>
/// <remarks>
/// The length is expressed as the number of audio frames the buffer can hold. The size in
/// bytes of an audio frame is calculated as the number of channels in the stream multiplied
/// by the sample size per channel. For example, the frame size is four bytes for a stereo
/// (2-channel) stream with 16-bit samples.
/// </remarks>
public int GetBufferSize()
{
    uint bufferSize;
    // Fixed: the original invoked GetBufferSize(out bufferSize) — calling itself instead of
    // the native vtable wrapper — and passed the undefined identifier "c" as interface name.
    CoreAudioAPIException.Try(GetBufferSizeNative(out bufferSize), InterfaceName, "GetBufferSize");
    return (int)bufferSize;
}
/// <summary>
/// The GetCurrentPadding method retrieves the number of frames of padding in the endpoint
/// buffer.
/// </summary>
/// <returns>The number of audio frames of padding in the buffer.</returns>
public int GetCurrentPadding()
{
    uint padding;
    // Fixed: the interface-name argument was the undefined identifier "c"; use InterfaceName.
    CoreAudioAPIException.Try(GetCurrentPaddingNative(out padding), InterfaceName, "GetCurrentPadding");
    return (int)padding;
}
/// <summary>
/// The GetChannelVolumeLevelScalar method gets the normalized, audio-tapered volume level
/// of the specified channel of the audio stream that enters or leaves the audio endpoint
/// device.
/// </summary>
/// <param name="channel">The zero-based index of the channel to query.</param>
/// <returns>Volume level of a specific channel. The level is expressed as a normalized
/// value in the range from 0.0 to 1.0.</returns>
public float GetChannelVolumeLevelScalar(uint channel)
{
    float result;
    // Fixed: the interface-name argument was the undefined identifier "c"; use InterfaceName.
    CoreAudioAPIException.Try(GetChannelVolumeLevelScalarNative(channel, out result), InterfaceName,
        "GetChannelVolumeLevelScalar");
    return result;
}
/// <summary>
/// The GetMute method gets the muting state of the audio stream that enters or leaves the
/// audio endpoint device.
/// </summary>
/// <returns>True = Stream is muted. False = Stream is not muted.</returns>
public bool GetMute()
{
    NativeBool result;
    // Fixed: the interface-name argument was the undefined identifier "c"; use InterfaceName.
    CoreAudioAPIException.Try(GetMuteNative(out result), InterfaceName, "GetMute");
    return result;
}
/// <summary>
/// The GetChannelCount method gets a count of the channels in the audio stream that enters
/// or leaves the audio endpoint device.
/// </summary>
/// <returns>The number of channels in the stream.</returns>
public uint GetChannelCount()
{
    uint result;
    // Fixed: the interface-name argument was the undefined identifier "c"; use InterfaceName.
    CoreAudioAPIException.Try(GetChannelCountNative(out result), InterfaceName, "GetChannelCount");
    return result;
}
/// <summary>
/// The GetMasterVolumeLevelScalar method gets the master volume level of the audio stream
/// that enters or leaves the audio endpoint device. The volume level is expressed as a
/// normalized, audio-tapered value in the range from 0.0 to 1.0.
/// </summary>
/// <returns>Volume level. The level is expressed as a normalized value in the range from
/// 0.0 to 1.0.</returns>
public float GetMasterVolumeLevelScalar()
{
    float result;
    // Fixed: the interface-name argument was the undefined identifier "c"; use InterfaceName.
    CoreAudioAPIException.Try(GetMasterVolumeLevelScalarNative(out result), InterfaceName,
        "GetMasterVolumeLevelScalar");
    return result;
}
/// <summary>
/// The GetPeakValue method gets the peak sample value for the channels in the audio stream.
/// </summary>
/// <returns>The peak sample value.</returns>
public float GetPeakValue()
{
    float peak;
    // Fixed: the interface-name argument was the undefined identifier "c"; use InterfaceName.
    CoreAudioAPIException.Try(GetPeakValueNative(out peak), InterfaceName, "GetPeakValue");
    return peak;
}
/// <summary>
/// The GetMeteringChannelCount method gets the number of channels in the audio stream that
/// are monitored by peak meters.
/// </summary>
/// <returns>The number of metered channels.</returns>
public int GetMeteringChannelCount()
{
    int channelCount;
    // Fixed: the interface-name argument was the undefined identifier "c"; use InterfaceName.
    CoreAudioAPIException.Try(GetMeteringChannelCountNative(out channelCount), InterfaceName,
        "GetMeteringChannelCount");
    return channelCount;
}
/// <summary>
/// Retrieves the number of frames in the next data packet in the capture endpoint buffer.
/// For more information, see <see href="http://msdn.microsoft.com/en-us/library/dd370860(v=vs.85).aspx"/>.
/// </summary>
/// <returns>The number of the audio frames in the next capture packet.</returns>
public int GetNextPacketSize()
{
    int framesInNextPacket;
    CoreAudioAPIException.Try(GetNextPacketSizeNative(out framesInNextPacket), InterfaceName, "GetNextPacketSize");
    return framesInNextPacket;
}
/// <summary>
/// Retrieves the number of devices in the device collection.
/// </summary>
/// <returns>The device count.</returns>
public int GetCount()
{
    // No pre-initialization needed: the out parameter is assigned by the native call.
    int deviceCount;
    CoreAudioAPIException.Try(GetCountNative(out deviceCount), "IMMDeviceCollection", "GetCount");
    return deviceCount;
}
/// <summary>
/// The QueryHardwareSupport method queries the audio endpoint device for its
/// hardware-supported functions.
/// </summary>
/// <returns>The hardware-supported functions of the endpoint device.</returns>
public EndpointHardwareSupport QueryHardwareSupport()
{
    EndpointHardwareSupport result;
    // Fixed: the interface-name argument was the undefined identifier "c", and the member name
    // was misspelled "QueryHardWareSupport" in the exception text.
    CoreAudioAPIException.Try(QueryHardwareSupportNative(out result), InterfaceName, "QueryHardwareSupport");
    return result;
}
/// <summary>
/// Retrieves the device at the specified index within the collection.
/// </summary>
/// <param name="deviceIndex">The index of the device to retrieve.</param>
/// <returns>The <see cref="MMDevice"/> at the specified index.</returns>
public MMDevice ItemAt(int deviceIndex)
{
    IntPtr devicePtr;
    CoreAudioAPIException.Try(ItemAtNative(deviceIndex, out devicePtr), "IMMDeviceCollection", "Item");
    return new MMDevice(devicePtr);
}
/// <summary>
/// The GetNextPacketSize method retrieves the number of frames in the next data packet in
/// the capture endpoint buffer.
/// For more information, see <see href="http://msdn.microsoft.com/en-us/library/dd370860(v=vs.85).aspx"/>.
/// </summary>
/// <returns>The number of audio frames in the next capture packet.</returns>
public uint GetNextPacketSize()
{
    uint packetSize;
    // Fixed: the interface-name argument was the undefined identifier "c"; use InterfaceName.
    CoreAudioAPIException.Try(GetNextPacketSizeNative(out packetSize), InterfaceName, "GetNextPacketSize");
    return packetSize;
}
/// <summary>
/// The GetStreamLatency method retrieves the maximum latency for the current stream and can
/// be called any time after the stream has been initialized.
/// </summary>
/// <remarks>
/// Rendering clients can use this latency value to compute the minimum amount of data that
/// they can write during any single processing pass. To write less than this minimum is to
/// risk introducing glitches into the audio stream. For more information, see
/// IAudioRenderClient::GetBuffer.
/// </remarks>
/// <returns>The latency, expressed in 100-nanosecond units.</returns>
public long GetStreamLatency()
{
    long latency;
    // Fixed: the original called GetStreamLatency(out latency) — itself, rather than the
    // native wrapper — and passed the undefined identifier "c" as the interface name.
    CoreAudioAPIException.Try(GetStreamLatencyNative(out latency), InterfaceName, "GetStreamLatency");
    return latency;
}
/// <summary>
/// The GetAudioSessionControl method retrieves an audio session control.
/// </summary>
/// <param name="audioSessionGuid">If the GUID does not identify a session that has been previously opened, the call opens a new but empty session. If the value is Guid.Empty, the method assigns the stream to the default session.</param>
/// <param name="streamFlags">Specifies the status of the flags for the audio stream.</param>
/// <returns>The <see cref="AudioSessionControl"/> for the requested session.</returns>
public AudioSessionControl GetAudioSessionControl(Guid audioSessionGuid, int streamFlags)
{
    AudioSessionControl control;
    CoreAudioAPIException.Try(GetAudioSessionControlNative(audioSessionGuid, streamFlags, out control),
        "IAudioSessionManager", "GetAudioSessionControl");
    return control;
}
/// <summary>
/// The GetMixFormat method retrieves the stream format that the audio engine uses for its
/// internal processing of shared-mode streams.
/// </summary>
/// <returns>The mix format used by the audio engine for shared-mode streams.</returns>
public WaveFormat GetMixFormat()
{
    WaveFormat waveFormat;
    // Fixed: the original called GetMixFormat(out waveFormat) — itself, rather than the
    // native wrapper — and passed the undefined identifier "c" as the interface name.
    CoreAudioAPIException.Try(GetMixFormatNative(out waveFormat), InterfaceName, "GetMixFormat");
    return waveFormat;
}
/// <summary>
/// The GetSimpleAudioVolume method retrieves a simple audio volume control.
/// </summary>
/// <param name="audioSessionGuid">If the GUID does not identify a session that has been previously opened, the call opens a new but empty session. If the value is Guid.Empty, the method assigns the stream to the default session.</param>
/// <param name="crossProcessSession">Specifies whether the request is for a cross-process session. Set to TRUE if the session is cross-process. Set to FALSE if the session is not cross-process.</param>
/// <returns>The <see cref="SimpleAudioVolume"/> control for the session.</returns>
public SimpleAudioVolume GetSimpleAudioVolume(Guid audioSessionGuid, bool crossProcessSession)
{
    SimpleAudioVolume volumeControl;
    CoreAudioAPIException.Try(
        GetSimpleAudioVolumeNative(audioSessionGuid, (NativeBool)crossProcessSession, out volumeControl),
        "IAudioSessionManager", "GetSimpleAudioVolume");
    return volumeControl;
}
/// <summary>
/// Retrieves a pointer to the next available space in the rendering endpoint buffer into
/// which the caller can write a data packet.
/// </summary>
/// <param name="numFramesRequested">
/// The number of audio frames in the data packet that the caller plans to write to the requested space in the buffer.
/// If the call succeeds, the size of the buffer area pointed to by return value matches the size specified in
/// <paramref name="numFramesRequested" />.
/// </param>
/// <returns>
/// The starting address of the buffer area into which the caller will write the data packet.
/// </returns>
public IntPtr GetBuffer(int numFramesRequested)
{
    IntPtr bufferPtr;
    CoreAudioAPIException.Try(GetBufferNative(numFramesRequested, out bufferPtr), InterfaceName, "GetBuffer");
    return bufferPtr;
}
/// <summary>
/// Retrieves the default audio endpoint device for the specified data-flow direction and role.
/// </summary>
/// <param name="dataFlow">The data-flow direction of the endpoint device.</param>
/// <param name="role">The role of the endpoint device.</param>
/// <returns>The default <see cref="MMDevice"/> for the given data flow and role.</returns>
public MMDevice GetDefaultAudioEndpoint(DataFlow dataFlow, Role role)
{
    IntPtr ptr;
    // Fixed: the interface-name argument was the undefined identifier "c"; use InterfaceName.
    CoreAudioAPIException.Try(GetDefaultAudioEndpointNative(dataFlow, role, out ptr), InterfaceName,
        "GetDefaultAudioEndpoint");
    return new MMDevice(ptr);
}
/// <summary>
/// Returns the size (maximum capacity) of the endpoint buffer.
/// </summary>
/// <remarks>
/// The size of one frame = <c>(number of bits per sample)/8 * (number of channels)</c>
/// </remarks>
/// <returns>The number of audio frames that the buffer can hold.</returns>
public int GetBufferSize()
{
    int frameCapacity;
    CoreAudioAPIException.Try(GetBufferSizeNative(out frameCapacity), InterfaceName, "GetBufferSize");
    return frameCapacity;
}
/// <summary>
/// Generates a collection of audio endpoint devices that meet the specified criteria.
/// </summary>
/// <param name="dataFlow">The data-flow direction for the endpoint devices in the collection.</param>
/// <param name="stateMask">The state or states of the endpoints that are to be included in the collection.</param>
/// <returns>An <see cref="MMDeviceCollection"/> containing the matching devices.</returns>
public MMDeviceCollection EnumAudioEndpoints(DataFlow dataFlow, DeviceState stateMask)
{
    IntPtr pcollection;
    // Fixed: the interface-name argument was the undefined identifier "c"; use InterfaceName.
    CoreAudioAPIException.Try(EnumAudioEndpointsNative(dataFlow, stateMask, out pcollection), InterfaceName,
        "EnumAudioEndpoints");
    return new MMDeviceCollection(pcollection);
}
/// <summary>
/// Retrieves the number of frames of padding in the endpoint buffer.
/// </summary>
/// <remarks>
/// The size of one frame = <c>(number of bits per sample)/8 * (number of channels)</c>
/// </remarks>
/// <returns>The frame count (the number of audio frames of padding in the buffer).</returns>
public int GetCurrentPadding()
{
    int paddingFrames;
    CoreAudioAPIException.Try(GetCurrentPaddingNative(out paddingFrames), InterfaceName, "GetCurrentPadding");
    return paddingFrames;
}
/// <summary>
/// Retrieves an audio endpoint device that is identified by an endpoint ID string.
/// </summary>
/// <param name="id">The endpoint ID string that identifies the device.</param>
/// <returns>The <see cref="MMDevice"/> with the specified id.</returns>
public MMDevice GetDevice(string id)
{
    IntPtr ptr;
    // Fixed: the interface-name argument was the undefined identifier "c"; use InterfaceName.
    CoreAudioAPIException.Try(GetDeviceNative(id, out ptr), InterfaceName, "GetDevice");
    return new MMDevice(ptr);
}
/// <summary>
/// Accesses additional services from the audio client object.
/// </summary>
/// <param name="riid">
/// The interface ID for the requested service. For a list of all available values, see
/// <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370873(v=vs.85).aspx" />.
/// </param>
/// <returns>
/// The address of an instance of the requested interface. The caller receives a counted
/// reference and is responsible for releasing the interface, when it is no longer needed,
/// by calling the interface's Release method.
/// </returns>
/// <remarks>
/// For more information, see
/// <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370873(v=vs.85).aspx" />.
/// </remarks>
public IntPtr GetService(Guid riid)
{
    IntPtr servicePtr;
    CoreAudioAPIException.Try(GetServiceNative(riid, out servicePtr), InterfaceName, "GetService");
    return servicePtr;
}
/// <summary>
/// Gets the audio session specified by an audio session number.
/// </summary>
/// <param name="index">The session number. If there are n sessions, the sessions are numbered from 0 to n - 1. To get the number of sessions, call the GetCount method.</param>
/// <returns>The <see cref="AudioSessionControl"/> of the specified session number.</returns>
public AudioSessionControl GetSession(int index)
{
    AudioSessionControl sessionControl;
    CoreAudioAPIException.Try(GetSessionNative(index, out sessionControl), InterfaceName, "GetSession");
    return sessionControl;
}