Example #1
        /// <summary>
        ///     Indicates whether the audio endpoint device
        ///     supports a particular stream format.
        /// </summary>
        /// <param name="shareMode">
        ///     The sharing mode for the stream format. Through this parameter, the client indicates whether it
        ///     wants to use the specified format in exclusive mode or shared mode.
        /// </param>
        /// <param name="waveFormat">The stream format to test whether it is supported by the <see cref="AudioClient" /> or not.</param>
        /// <returns>
        ///     <c>True</c> if the <paramref name="waveFormat" /> is supported. <c>False</c> if the
        ///     <paramref name="waveFormat" /> is not supported.
        /// </returns>
        /// <remarks>
        ///     For more information, see
        ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370876(v=vs.85).aspx" />.
        /// </remarks>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat waveFormat)
        {
            WaveFormat tmp;
            bool       result = IsFormatSupported(shareMode, waveFormat, out tmp);

            if (result)
            {
                return true;
            }

            WaveFormatExtensible waveFormatExtensible = waveFormat as WaveFormatExtensible;

            if (shareMode == AudioClientShareMode.Exclusive && waveFormatExtensible != null &&
                waveFormatExtensible.Channels <= 2)
            {
                /*see https://msdn.microsoft.com/en-us/library/windows/desktop/dd370811(v=vs.85).aspx
                 * To obtain reliable results from these drivers, exclusive-mode applications should call
                 * IsFormatSupported twice for each 1-channel or 2-channel PCM format—one call should use
                 * a stand-alone WAVEFORMATEX structure to specify the format, and the other call should
                 * use a WAVEFORMATEXTENSIBLE structure to specify the same format.
                 */
                var        encoding    = AudioSubTypes.EncodingFromSubType(waveFormatExtensible.SubFormat);
                WaveFormat waveFormat0 = new WaveFormat(
                    waveFormatExtensible.SampleRate,
                    waveFormatExtensible.BitsPerSample,
                    waveFormatExtensible.Channels,
                    encoding);

                result = IsFormatSupported(shareMode, waveFormat0, out tmp);
            }

            return result;
        }
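
The dual check above follows the MSDN guidance quoted in the comment: in exclusive mode, a 1- or 2-channel PCM format rejected as a WAVEFORMATEXTENSIBLE may still be accepted when described by a plain WAVEFORMATEX. A minimal calling sketch, assuming an already initialized AudioClient instance and that the four-argument WaveFormat constructor and an AudioEncoding.Pcm value are available:

        // Hypothetical usage; audioClient is assumed to be an AudioClient obtained elsewhere.
        var format = new WaveFormat(44100, 16, 2, AudioEncoding.Pcm);

        if (audioClient.IsFormatSupported(AudioClientShareMode.Exclusive, format))
        {
            // The device accepts this format in exclusive mode.
        }
        else
        {
            // Fall back to shared mode or probe a different sample rate / bit depth.
        }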
Example #2
        private SourceReader Initialize(SourceReader reader)
        {
            try
            {
                reader.SetStreamSelection(SourceReaderIndex.AllStreams, false);
                reader.SetStreamSelection(SourceReaderIndex.FirstAudioStream, true);

                using (var mediaType = new MediaType())
                {
                    mediaType.Set(MediaTypeAttributeKeys.MajorType, AudioSubTypes.MediaTypeAudio);
                    mediaType.Set(MediaTypeAttributeKeys.Subtype, AudioSubTypes.Pcm);

                    reader.SetCurrentMediaType(SourceReaderIndex.FirstAudioStream, mediaType);
                }

                using (var currentMediaType =
                    reader.GetCurrentMediaType(SourceReaderIndex.FirstAudioStream))
                {
                    if (currentMediaType.MajorType != AudioSubTypes.MediaTypeAudio)
                    {
                        throw new InvalidOperationException(String.Format(
                            "Invalid MajorType set on SourceReader: {0}.", currentMediaType.MajorType));
                    }

                    AudioEncoding encoding = AudioSubTypes.EncodingFromSubType(currentMediaType.SubType);

                    ChannelMask channelMask;
                    // check whether the channel mask attribute is available
                    if (currentMediaType.TryGet(MediaFoundationAttributes.MF_MT_AUDIO_CHANNEL_MASK, out channelMask))
                    {
                        _waveFormat = new WaveFormatExtensible(currentMediaType.SampleRate,
                                                               currentMediaType.BitsPerSample, currentMediaType.Channels, currentMediaType.SubType,
                                                               channelMask);
                    }
                    else
                    {
                        _waveFormat = new WaveFormat(currentMediaType.SampleRate, currentMediaType.BitsPerSample,
                                                     currentMediaType.Channels, encoding);
                    }
                }

                reader.SetStreamSelection(SourceReaderIndex.FirstAudioStream, true);

                if (_hasFixedLength)
                {
                    _length = GetLength(reader);
                }

                return reader;
            }
            catch (Exception)
            {
                DisposeInternal();
                throw;
            }
        }
Example #3
        private MFSourceReader Initialize(MFSourceReader reader)
        {
            try
            {
                reader.SetStreamSelection(NativeMethods.MF_SOURCE_READER_ALL_STREAMS, false);
                reader.SetStreamSelection(NativeMethods.MF_SOURCE_READER_FIRST_AUDIO_STREAM, true);

                using (MFMediaType mediaType = MFMediaType.CreateEmpty())
                {
                    mediaType.MajorType = AudioSubTypes.MediaTypeAudio;
                    mediaType.SubType = AudioSubTypes.Pcm; // request PCM output (the sub type could be made configurable)

                    reader.SetCurrentMediaType(NativeMethods.MF_SOURCE_READER_FIRST_AUDIO_STREAM, mediaType);
                }

                using (MFMediaType currentMediaType =
                    reader.GetCurrentMediaType(NativeMethods.MF_SOURCE_READER_FIRST_AUDIO_STREAM))
                {
                    if (currentMediaType.MajorType != AudioSubTypes.MediaTypeAudio)
                    {
                        throw new InvalidOperationException(String.Format(
                            "Invalid MajorType set on SourceReader: {0}.", currentMediaType.MajorType));
                    }

                    AudioEncoding encoding = AudioSubTypes.EncodingFromSubType(currentMediaType.SubType);

                    ChannelMask channelMask;
                    // check whether the channel mask attribute is available
                    if (currentMediaType.TryGet(MediaFoundationAttributes.MF_MT_AUDIO_CHANNEL_MASK, out channelMask))
                    {
                        _waveFormat = new WaveFormatExtensible(currentMediaType.SampleRate,
                                                               currentMediaType.BitsPerSample, currentMediaType.Channels, currentMediaType.SubType,
                                                               channelMask);
                    }
                    else
                    {
                        _waveFormat = new WaveFormat(currentMediaType.SampleRate, currentMediaType.BitsPerSample,
                                                     currentMediaType.Channels, encoding);
                    }
                }

                reader.SetStreamSelection(NativeMethods.MF_SOURCE_READER_FIRST_AUDIO_STREAM, true);

                if (_hasFixedLength)
                {
                    _length = GetLength(reader);
                }

                return reader;
            }
            catch (Exception)
            {
                DisposeInternal();
                throw;
            }
        }
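
Examples #2 and #3 run the same format-selection logic against two different source-reader wrappers: if the media type exposes MF_MT_AUDIO_CHANNEL_MASK, the channel layout is preserved in a WaveFormatExtensible; otherwise a plain WaveFormat with the mapped encoding is used. A distilled sketch of that shared decision, using only the constructor signatures visible in the examples above (the helper itself is hypothetical):

        // Hypothetical helper; constructor signatures are taken from examples #2 and #3.
        private static WaveFormat BuildWaveFormat(int sampleRate, int bitsPerSample, int channels,
                                                  Guid subType, ChannelMask? channelMask)
        {
            if (channelMask.HasValue)
            {
                // A channel mask is available -> keep the layout in a WAVEFORMATEXTENSIBLE.
                return new WaveFormatExtensible(sampleRate, bitsPerSample, channels, subType, channelMask.Value);
            }

            // No channel mask -> a plain WAVEFORMATEX with the mapped encoding is enough.
            return new WaveFormat(sampleRate, bitsPerSample, channels, AudioSubTypes.EncodingFromSubType(subType));
        }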
Example #4
        public static MediaType MediaTypeFromWaveFormat(WaveFormat waveFormat)
        {
            var mediaType = new MediaType();

            mediaType.SubType               = AudioSubTypes.SubTypeFromEncoding(waveFormat.WaveFormatTag);
            mediaType.SampleRate            = waveFormat.SampleRate;
            mediaType.Channels              = waveFormat.Channels;
            mediaType.AverageBytesPerSecond = waveFormat.BytesPerSecond;
            mediaType.BitsPerSample         = waveFormat.BitsPerSample;
            return mediaType;
        }
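
A short usage sketch for the conversion above, assuming it is called from the declaring type and that AudioEncoding.Pcm is a valid member of the AudioEncoding enumeration:

        // Hypothetical usage; MediaType is disposable, as shown in example #2.
        WaveFormat waveFormat = new WaveFormat(48000, 16, 2, AudioEncoding.Pcm);

        using (MediaType mediaType = MediaTypeFromWaveFormat(waveFormat))
        {
            // mediaType now carries the sub type, sample rate, channel count,
            // average bytes per second and bits per sample of the source format.
        }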
Example #5
        private void WriteFmtChunk()
        {
            AudioEncoding tag = _waveFormat.WaveFormatTag;

            if (tag == AudioEncoding.Extensible && _waveFormat is WaveFormatExtensible)
            {
                tag = AudioSubTypes.EncodingFromSubType((_waveFormat as WaveFormatExtensible).SubFormat);
            }

            _writer.Write(Encoding.UTF8.GetBytes("fmt "));   // chunk id
            _writer.Write((int)16);                          // chunk body size: the classic PCM fmt layout is 16 bytes
            _writer.Write((short)tag);                       // wFormatTag
            _writer.Write((short)_waveFormat.Channels);      // nChannels
            _writer.Write((int)_waveFormat.SampleRate);      // nSamplesPerSec
            _writer.Write((int)_waveFormat.BytesPerSecond);  // nAvgBytesPerSec
            _writer.Write((short)_waveFormat.BlockAlign);    // nBlockAlign
            _writer.Write((short)_waveFormat.BitsPerSample); // wBitsPerSample
        }
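
For a 44.1 kHz, 16-bit, stereo PCM format, the derived fields written above work out as follows (a worked check, not library code):

        int channels = 2, bitsPerSample = 16, sampleRate = 44100;
        int blockAlign = channels * bitsPerSample / 8;  // 2 * 16 / 8 = 4 bytes per frame
        int bytesPerSecond = sampleRate * blockAlign;   // 44100 * 4 = 176400 bytes per second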