Example #1
        /// <summary>
        /// Creates a MediaType based on a given WaveFormat. Don't forget to call Free() for the returned MediaType.
        /// </summary>
        /// <param name="waveFormat">WaveFormat to create a MediaType from.</param>
        /// <returns>Dmo MediaType</returns>
        public static MediaType FromWaveFormat(WaveFormat waveFormat)
        {
            if (waveFormat == null)
            {
                throw new ArgumentNullException("waveFormat");
            }

            MediaType mediaType = new MediaType();

            NativeMethods.MoInitMediaType(ref mediaType, Marshal.SizeOf(waveFormat));

            mediaType.MajorType        = MediaTypes.MediaTypeAudio;
            mediaType.SubType          = WaveFormatExtensible.SubTypeFromWaveFormat(waveFormat);
            mediaType.FixedSizeSamples = (mediaType.SubType == MediaTypes.MEDIATYPE_IeeeFloat || mediaType.SubType == MediaTypes.MEDIATYPE_Pcm) ? 1 : 0;
            mediaType.FormatType       = FORMAT_WaveFormatEx;

            IntPtr hWaveFormat = Marshal.AllocHGlobal(Marshal.SizeOf(waveFormat));

            if (hWaveFormat == IntPtr.Zero)
            {
                throw new InvalidOperationException("hWaveFormat == IntPtr.Zero");
            }
            if (mediaType.CbFormat < Marshal.SizeOf(waveFormat))
            {
                throw new InvalidOperationException("No memory for Format reserved");
            }

            Marshal.StructureToPtr(waveFormat, hWaveFormat, false);
            mediaType.PtrFormat = hWaveFormat;

            return(mediaType);
        }
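A minimal usage sketch for the method above; the WaveFormat constructor arguments are illustrative, and Free() is the cleanup the doc comment asks for.

        // Hedged usage sketch: the ctor arguments are illustrative.
        WaveFormat waveFormat = new WaveFormat(44100, 16, 2);
        MediaType mediaType = MediaType.FromWaveFormat(waveFormat);
        try
        {
            // ... hand mediaType to a DMO, e.g. when setting an input type ...
        }
        finally
        {
            mediaType.Free(); // required per the doc comment; releases the unmanaged format block
        }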
Example #2
        /// <summary>
        /// Determines whether the specified output format is supported.
        /// </summary>
        /// <param name="shareMode">The share mode.</param>
        /// <param name="desiredFormat">The desired format.</param>
        /// <returns>
        ///     <c>true</c> if the specified format is supported in the specified share mode; otherwise, <c>false</c>.
        /// </returns>
        public bool IsFormatSupported(AudioClientShareMode shareMode,
                                      WaveFormatExtensible desiredFormat)
        {
            WaveFormatExtensible closestMatchFormat;

            return(IsFormatSupported(shareMode, desiredFormat, out closestMatchFormat));
        }
Example #3
        /// <summary>
        /// Determines whether the specified output format is supported.
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="desiredFormat">Desired Format</param>
        /// <param name="closestMatchFormat">Output: the closest matching format.</param>
        /// <returns>
        ///     <c>true</c> if the specified format is supported in the specified share mode; otherwise, <c>false</c>.
        /// </returns>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat desiredFormat,
                                      out WaveFormatExtensible closestMatchFormat)
        {
            int hresult = audioClientInterface.IsFormatSupported(shareMode, desiredFormat, out closestMatchFormat);

            // S_OK is 0, S_FALSE = 1
            if (hresult == 0)
            {
                // directly supported
                return(true);
            }
            if (hresult == 1)
            {
                return(false);
            }
            else if (hresult == (int)AudioClientErrors.UnsupportedFormat)
            {
                return(false);
            }
            else
            {
                Marshal.ThrowExceptionForHR(hresult);
            }
            // shouldn't get here
            throw new NotSupportedException("Unknown hresult " + hresult.ToString());
        }
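In shared mode the S_FALSE path usually comes with a suggested format, so callers typically retry with the closest match. A minimal sketch, assuming an AudioClient instance named client and a desired WaveFormat named desired:

        WaveFormatExtensible closestMatch;
        if (!client.IsFormatSupported(AudioClientShareMode.Shared, desired, out closestMatch) &&
            closestMatch != null)
        {
            // S_FALSE case: the engine proposed a similar format; fall back to it
            desired = closestMatch;
        }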
Example #4
        /// <summary>
        /// Gets the waveformat of this stream
        /// </summary>
        /// <returns>A waveformat (or null if this is not an audio stream)</returns>
        public WaveFormat GetWaveFormat()
        {
            var    props  = (IWMMediaProps)streamConfig;
            int    size   = Math.Max(512, Marshal.SizeOf(typeof(WM_MEDIA_TYPE)) + Marshal.SizeOf(typeof(WaveFormat)));
            IntPtr buffer = Marshal.AllocCoTaskMem(size);

            try
            {
                props.GetMediaType(buffer, ref size);
                var mt = (WM_MEDIA_TYPE)Marshal.PtrToStructure(buffer, typeof(WM_MEDIA_TYPE));
                if ((mt.majortype == MediaTypes.WMMEDIATYPE_Audio) &&
                    // n.b. subtype may not be PCM, but some variation of WM Audio
                    (mt.formattype == MediaTypes.WMFORMAT_WaveFormatEx))
                {
                    // The constructor arguments are placeholders; PtrToStructure overwrites every field
                    var fmt = new WaveFormatExtensible(44100, 16, 2);
                    Marshal.PtrToStructure(mt.pbFormat, fmt);
                    return(fmt);
                }
                return(null);
            }
            finally
            {
                Marshal.FreeCoTaskMem(buffer);
            }
        }
Example #5
        public void CanRequestIfFormatIsSupportedExtensible44100ExclusiveMode()
        {
            WaveFormatExtensible desiredFormat = new WaveFormatExtensible(44100, 32, 2);

            Debug.WriteLine(desiredFormat);
            GetAudioClient().IsFormatSupported(AudioClientShareMode.Exclusive, desiredFormat);
        }
Example #6
        public void CanRequestIfFormatIsSupportedExtensible44100SharedMode()
        {
            WaveFormatExtensible desiredFormat = new WaveFormatExtensible(44100, 32, 2);

            Console.Write(desiredFormat);
            GetAudioClient().IsFormatSupported(AudioClientShareMode.Shared, desiredFormat);
        }
Example #7
        public static MediaType MediaTypeFromWaveFormat(WaveFormatExtensible waveFormat)
        {
            var mediaType = new MediaType();

            MediaFactory.MFInitMediaTypeFromWaveFormatEx(mediaType, waveFormat, Marshal.SizeOf(waveFormat));
            return(mediaType);
        }
Example #8
 /// <summary>
 /// Initializes the sound library for playback.
 /// </summary>
 /// <param name="root">The root directory of the sounds.</param>
 public static void initialize(String root)
 {
     setRootDirectory(root);
     SoundPath          = "s";
     NSoundPath         = SoundPath + "\\n";
     NumPath            = NSoundPath + "\\ns";
     mainSoundDevice    = new XAudio2();
     mainMasteringVoice = new MasteringVoice(mainSoundDevice);
     if (mainSoundDevice.Version == XAudio2Version.Version27)
     {
         WaveFormatExtensible deviceFormat = mainSoundDevice.GetDeviceDetails(0).OutputFormat;
         x3DAudio = new X3DAudio(deviceFormat.ChannelMask);
     }
     else
     {
         x3DAudio = new X3DAudio((Speakers)mainMasteringVoice.ChannelMask);
     }
     musicDevice              = new XAudio2();
     musicMasteringVoice      = new MasteringVoice(musicDevice);
     alwaysLoudDevice         = new XAudio2();
     alwaysLoudMasteringVoice = new MasteringVoice(alwaysLoudDevice);
     cutScenesDevice          = new XAudio2();
     cutScenesMasteringVoice  = new MasteringVoice(cutScenesDevice);
     //get the listener:
     setListener();
 }
Example #9
        public Sound LoadOgg(string fileName)
        {
            var                  stream = new OggVorbisFileStream(fileName);
            VorbisInfo           vInfo  = stream.Info;
            WaveFormatExtensible wfe    = new WaveFormatExtensible
            {
                BitsPerSample         = 16, // ogg vorbis always uses 16 bits
                Channels              = (short)vInfo.Channels,
                SamplesPerSecond      = vInfo.Rate,
                AverageBytesPerSecond = vInfo.Rate * vInfo.Channels * 2,
                BlockAlignment        = (short)(2 * vInfo.Channels),
                FormatTag             = WaveFormatTag.Pcm
            };

            AudioBuffer buffer = new AudioBuffer();

            buffer.AudioData  = stream;
            buffer.AudioBytes = (int)stream.Length;
            buffer.Flags      = BufferFlags.EndOfStream;

            return(new Sound {
                Buffer = buffer, Stream = stream, Format = wfe, Program = this
            });
        }
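The derived PCM fields above follow directly from 16 bits per sample; for a 44.1 kHz stereo file:

        // BlockAlignment        = 2 bytes/sample * 2 channels    = 4
        // AverageBytesPerSecond = 44100 frames/s * 4 bytes/frame = 176400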
Example #10
        /// <summary>
        ///     Indicates whether the audio endpoint device
        ///     supports a particular stream format.
        /// </summary>
        /// <param name="shareMode">
        ///     The sharing mode for the stream format. Through this parameter, the client indicates whether it
        ///     wants to use the specified format in exclusive mode or shared mode.
        /// </param>
        /// <param name="waveFormat">The stream format to test whether it is supported by the <see cref="AudioClient" /> or not.</param>
        /// <returns>
        ///     <c>True</c> if the <paramref name="waveFormat" /> is supported. <c>False</c> if the
        ///     <paramref name="waveFormat" /> is not supported.
        /// </returns>
        /// <remarks>
        ///     For more information, see
        ///     <see href="https://msdn.microsoft.com/en-us/library/windows/desktop/dd370876(v=vs.85).aspx" />.
        /// </remarks>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat waveFormat)
        {
            WaveFormat tmp;
            bool       result = IsFormatSupported(shareMode, waveFormat, out tmp);

            if (result)
            {
                return(true);
            }

            WaveFormatExtensible waveFormatExtensible = waveFormat as WaveFormatExtensible;

            if (shareMode == AudioClientShareMode.Exclusive && waveFormatExtensible != null &&
                waveFormatExtensible.Channels <= 2)
            {
                /*see https://msdn.microsoft.com/en-us/library/windows/desktop/dd370811(v=vs.85).aspx
                 * To obtain reliable results from these drivers, exclusive-mode applications should call
                 * IsFormatSupported twice for each 1-channel or 2-channel PCM format—one call should use
                 * a stand-alone WAVEFORMATEX structure to specify the format, and the other call should
                 * use a WAVEFORMATEXTENSIBLE structure to specify the same format.
                 */
                var        encoding    = AudioSubTypes.EncodingFromSubType(waveFormatExtensible.SubFormat);
                WaveFormat waveFormat0 = new WaveFormat(
                    waveFormatExtensible.SampleRate,
                    waveFormatExtensible.BitsPerSample,
                    waveFormatExtensible.Channels,
                    encoding);

                result = IsFormatSupported(shareMode, waveFormat0, out tmp);
            }

            return(result);
        }
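A hedged usage sketch of the exclusive-mode retry above, assuming an AudioClient instance named client:

        var format = new WaveFormatExtensible(48000, 16, 2);
        // For 1- or 2-channel PCM the method transparently retries with a plain
        // WAVEFORMATEX when the WAVEFORMATEXTENSIBLE probe is rejected by the driver.
        bool supported = client.IsFormatSupported(AudioClientShareMode.Exclusive, format);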
Example #11
        /// <summary>
        /// Initializes a new instance of the <see cref="DmoChannelResampler"/> class.
        /// </summary>
        /// <param name="source">Underlying source which has to get resampled.</param>
        /// <param name="channelMatrix"><see cref="ChannelMatrix" /> which defines how to map each channel.</param>
        /// <param name="outputFormat">Waveformat, which specifies the new format. Note, that by far not all formats are supported.</param>
        /// <exception cref="System.ArgumentNullException">
        /// source
        /// or
        /// channelMatrix
        /// or
        /// outputFormat
        /// </exception>
        /// <exception cref="System.ArgumentException">The number of channels of the source has to be equal to the number of input channels specified by the channelMatrix.</exception>
        public DmoChannelResampler(IWaveSource source, ChannelMatrix channelMatrix, WaveFormat outputFormat)
            : base(source, outputFormat)
        {
            if (source == null)
                throw new ArgumentNullException("source");
            if (channelMatrix == null)
                throw new ArgumentNullException("channelMatrix");
            if (outputFormat == null)
                throw new ArgumentNullException("outputFormat");

            if (source.WaveFormat.Channels != channelMatrix.InputChannelCount)
            {
                throw new ArgumentException(
                    "The number of channels of the source has to be equal to the number of input channels specified by the channelMatrix.");
            }

            var inputFormat = new WaveFormatExtensible(
                source.WaveFormat.SampleRate,
                source.WaveFormat.BitsPerSample,
                source.WaveFormat.Channels,
                WaveFormatExtensible.SubTypeFromWaveFormat(source.WaveFormat),
                channelMatrix.InputMask);

            Outputformat = new WaveFormatExtensible(
                outputFormat.SampleRate,
                outputFormat.BitsPerSample,
                outputFormat.Channels,
                WaveFormatExtensible.SubTypeFromWaveFormat(outputFormat),
                channelMatrix.OutputMask);

            Initialize(inputFormat, Outputformat);
            _channelMatrix = channelMatrix;
            CommitChannelMatrixChanges();
        }
Example #12
        public void CanRequestIfFormatIsSupportedExtensible48000_16bit()
        {
            WaveFormatExtensible desiredFormat = new WaveFormatExtensible(48000, 16, 2);

            Debug.WriteLine(desiredFormat);
            GetAudioClient().IsFormatSupported(AudioClientShareMode.Shared, desiredFormat);
        }
Example #13
        public DmoChannelResampler(IWaveSource source, ChannelMatrix channelMatrix, int destSampleRate)
            : base(source, destSampleRate)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }
            if (channelMatrix == null)
            {
                throw new ArgumentNullException("channelMatrix");
            }

            if (source.WaveFormat.Channels != channelMatrix.InputChannelCount)
            {
                throw new ArgumentException("source.WaveFormat.Channels != channelMatrix.InputChannelCount");
            }

            _channelMatrix = channelMatrix; // must be assigned before its masks are used below

            WaveFormatExtensible inputformat = new WaveFormatExtensible(source.WaveFormat.SampleRate, source.WaveFormat.BitsPerSample,
                                                                        source.WaveFormat.Channels, WaveFormatExtensible.SubTypeFromWaveFormat(source.WaveFormat), _channelMatrix.InputMask);

            _outputformat = new WaveFormat(destSampleRate, source.WaveFormat.BitsPerSample, 6, source.WaveFormat.WaveFormatTag, source.WaveFormat.ExtraSize);
            WaveFormatExtensible outputformat = new WaveFormatExtensible(_outputformat.SampleRate, _outputformat.BitsPerSample,
                                                                         _outputformat.Channels, WaveFormatExtensible.SubTypeFromWaveFormat(_outputformat), _channelMatrix.OutputMask);

            Init(inputformat, outputformat);
            _resampler.ResamplerProps.SetUserChannelMtx(_channelMatrix.GetMatrix());
        }
Example #14
        public static AudioSampleFormat GetSampleFormat(WaveFormatExtensible format)
        {
            if (format.nSamplesPerSec == 0)
            {
                return(AudioSampleFormat.Unknown);
            }

            switch (format.wFormatTag)
            {
            case WAVE_FORMAT_IEEE_FLOAT:
                switch (format.wBitsPerSample)
                {
                case 32: return(AudioSampleFormat.Float);

                case 64: return(AudioSampleFormat.Double);
                }
                break;

            case WAVE_FORMAT_PCM:
                switch (format.wBitsPerSample)
                {
                case 8: return(AudioSampleFormat.Pcm8);

                case 16: return(AudioSampleFormat.Pcm16);

                case 24: return(AudioSampleFormat.Pcm24);

                case 32: return(AudioSampleFormat.Pcm32);
                }
                break;

            case WAVE_FORMAT_EXTENSIBLE:
                if (format.SubFormat == MediaSubType.IEEE_FLOAT)
                {
                    switch (format.wBitsPerSample)
                    {
                    case 32: return(AudioSampleFormat.Float);

                    case 64: return(AudioSampleFormat.Double);
                    }
                }
                else if (format.SubFormat == MediaSubType.PCM)
                {
                    switch (format.wBitsPerSample)
                    {
                    case 8: return(AudioSampleFormat.Pcm8);

                    case 16: return(AudioSampleFormat.Pcm16);

                    case 24: return(AudioSampleFormat.Pcm24);

                    case 32: return(AudioSampleFormat.Pcm32);
                    }
                }
                break;
            }

            return(AudioSampleFormat.Unknown);
        }
Example #15

    public WaveOut(
        int deviceID,
        short channels,
        int samplesPerSecond,
        SPEAKER channelMask,
        Guid formatSubType,
        ILoggerFactory loggerFactory,
        ElapsedTimeCounter counter,
        ITargetBlock <PcmBuffer <T> > releaseQueue
        )
    {
        _loggerFactory = loggerFactory ?? throw new ArgumentNullException(nameof(loggerFactory));
        _counter       = counter ?? throw new ArgumentNullException(nameof(counter));

        ArgumentNullException.ThrowIfNull(releaseQueue);

        _logger         = _loggerFactory.CreateLogger <WaveOut <T> >();
        _headerPool     = new BufferPool <WaveHeaderBuffer>(1, () => { return(new WaveHeaderBuffer()); }, _loggerFactory);
        _driverCallBack = new PinnedDelegate <DriverCallBack.Proc>(new DriverCallBack.Proc(DriverCallBackProc));

        var format = new WaveFormatExtensible();

        format.wfe.formatType            = WaveFormatEx.FORMAT.EXTENSIBLE;
        format.wfe.channels              = (ushort)channels;
        format.wfe.samplesPerSecond      = (uint)samplesPerSecond;
        format.wfe.bitsPerSample         = (ushort)(SIZE_OF_T * 8);
        format.wfe.blockAlign            = (ushort)(format.wfe.channels * format.wfe.bitsPerSample / 8);
        format.wfe.averageBytesPerSecond = format.wfe.samplesPerSecond * format.wfe.blockAlign;
        format.wfe.size = (ushort)(Marshal.SizeOf <WaveFormatExtensiblePart>());

        format.exp.validBitsPerSample = format.wfe.bitsPerSample;
        format.exp.channelMask        = ToSPEAKER(channelMask);
        format.exp.subFormat          = formatSubType;

        // This occasionally fails, so pin the format buffer while the call runs
        using var formatPin = new PinnedBuffer <WaveFormatExtensible>(format);

        var mmResult =
            NativeMethods.waveOutOpen(
                out _handle,
                deviceID,
                ref format,
                _driverCallBack.FunctionPointer,
                IntPtr.Zero,
                (
                    DriverCallBack.TYPE.FUNCTION
                    | DriverCallBack.TYPE.WAVE_FORMAT_DIRECT
                )
                );

        if (mmResult != MMRESULT.NOERROR)
        {
            throw new WaveException(mmResult);
        }

        _releaseAction = new TransformBlock <IntPtr, PcmBuffer <T> >(headerPtr => Unprepare(headerPtr));
        _releaseAction.LinkTo(releaseQueue);
    }
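For reference, the format fields computed above work out as follows when T is a 4-byte sample type (SIZE_OF_T = 4) and the device is opened for 48 kHz stereo:

        // bitsPerSample         = 4 * 8      = 32
        // blockAlign            = 2 * 32 / 8 = 8 bytes per frame
        // averageBytesPerSecond = 48000 * 8  = 384000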
Example #16
        private WaveFormat SetupWaveFormat(WaveFormat waveFormat, AudioClient audioClient)
        {
            WaveFormatExtensible closestMatch;
            WaveFormat           finalFormat = waveFormat;

            if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
            {
                if (closestMatch == null)
                {
                    WaveFormat mixformat = audioClient.GetMixFormat();
                    if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
                    {
                        WaveFormatExtensible[] possibleFormats = new WaveFormatExtensible[]
                        {
                            new WaveFormatExtensible(waveFormat.SampleRate, 32, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate, 24, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 16, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 8, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm)
                        };

                        if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                        {
                            //no format found...
                            possibleFormats = new WaveFormatExtensible[]
                            {
                                new WaveFormatExtensible(waveFormat.SampleRate, 32, 2, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                                new WaveFormatExtensible(waveFormat.SampleRate, 24, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 16, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 8, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 32, 1, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                                new WaveFormatExtensible(waveFormat.SampleRate, 24, 1, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 16, 1, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 8, 1, DMO.MediaTypes.MEDIATYPE_Pcm)
                            };

                            if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                            {
                                throw new NotSupportedException("Could not find a supported format.");
                            }
                        }
                    }

                    finalFormat = mixformat;
                    //todo: implement channel matrix
                    DmoResampler resampler = new DmoResampler(_source, finalFormat);
                    resampler.Quality = 60;
                    _source           = resampler;
                    _createdResampler = true;
                }
                else
                {
                    finalFormat = closestMatch;
                }
            }

            return(finalFormat);
        }
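CheckForSupportedFormat is not shown in this snippet; a minimal sketch under an assumed signature (the real helper may differ) would probe each candidate in order:

        // Hedged sketch: name and signature are assumed from the call sites above.
        private bool CheckForSupportedFormat(AudioClient audioClient,
                                             WaveFormatExtensible[] possibleFormats,
                                             out WaveFormat supportedFormat)
        {
            foreach (WaveFormatExtensible format in possibleFormats)
            {
                if (audioClient.IsFormatSupported(_shareMode, format))
                {
                    supportedFormat = format;
                    return true;
                }
            }
            supportedFormat = null;
            return false;
        }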
Example #17
        /// <summary>
        /// Pans a sound.
        /// This method was initially written using the guide at https://docs.microsoft.com/en-us/windows/win32/xaudio2/how-to--pan-a-sound
        /// The code has finally been improved thanks to the MonoGame framework code: https://github.com/MonoGame/MonoGame
        /// </summary>
        /// <param name="sound">The sound to pan.</param>
        /// <param name="pan">The value by which to pan the sound. -1.0f is completely left, and 1.0f is completely right. 0.0f is center.</param>
        public static void setPan(ExtendedAudioBuffer sound, float pan)
        {
            SpeakerConfiguration mask;

            if (mainSoundDevice.Version == XAudio2Version.Version27)
            {
                WaveFormatExtensible deviceFormat = mainSoundDevice.GetDeviceDetails(0).OutputFormat;
                mask = (SpeakerConfiguration)deviceFormat.ChannelMask;
            }
            else
            {
                mask = (SpeakerConfiguration)mainMasteringVoice.ChannelMask;
            }
            VoiceDetails soundDetails     = sound.getVoiceDetails();
            VoiceDetails masteringDetails = mainMasteringVoice.VoiceDetails;
            int          srcChannelCount  = soundDetails.InputChannelCount;
            int          dstChannelCount  = masteringDetails.InputChannelCount;

            // Create an array to hold the output matrix. Warning: the minimum size of the output matrix is the number of channels in the source voice times the number of channels in the output voice.
            // Note that the outputMatrix indices are placed in the same order as the SharpDX.Multimedia.Speakers enum.
            // Don't forget there are twice as many cells in the matrix if the source sound is stereo.
            float[] outputMatrix = new float[srcChannelCount * dstChannelCount];
            Array.Clear(outputMatrix, 0, outputMatrix.Length);
            // From here on we assume the sound file is either mono or stereo; a WAV with more than 2 channels would be too difficult to handle.
            // Similarly, we also only output to the front-left and front-right speakers for simplicity, e.g. like the XNA framework does.
            if (srcChannelCount == 1)             // Mono source
            {
                // Left/Right output levels:
                //   Pan -1.0: L = 1.0, R = 0.0
                //   Pan  0.0: L = 1.0, R = 1.0
                //   Pan +1.0: L = 0.0, R = 1.0
                outputMatrix[0] = (pan > 0f) ? ((1f - pan)) : 1f; // Front-left output
                outputMatrix[1] = (pan < 0f) ? ((1f + pan)) : 1f; // Front-right output
            }
            else if (srcChannelCount == 2)                        // Stereo source
            {
                // Left/Right input (Li/Ri) mix for Left/Right outputs (Lo/Ro):
                //   Pan -1.0: Lo = 0.5Li + 0.5Ri, Ro = 0.0Li + 0.0Ri
                //   Pan  0.0: Lo = 1.0Li + 0.0Ri, Ro = 0.0Li + 1.0Ri
                //   Pan +1.0: Lo = 0.0Li + 0.0Ri, Ro = 0.5Li + 0.5Ri
                if (pan <= 0f)
                {
                    outputMatrix[0] = 1f + pan * 0.5f;        // Front-left output, Left input
                    outputMatrix[1] = -pan * 0.5f;            // Front-left output, Right input
                    outputMatrix[2] = 0f;                     // Front-right output, Left input
                    outputMatrix[3] = 1f + pan;               // Front-right output, Right input
                }
                else
                {
                    outputMatrix[0] = 1f - pan;               // Front-left output, Left input
                    outputMatrix[1] = 0f;                     // Front-left output, Right input
                    outputMatrix[2] = pan * 0.5f;             // Front-right output, Left input
                    outputMatrix[3] = 1f - pan * 0.5f;        // Front-right output, Right input
                }
            }
            sound.setOutputMatrix(soundDetails.InputChannelCount, masteringDetails.InputChannelCount, outputMatrix);
        }
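Two worked pan values for a mono source, using the formulas above:

        // pan = +0.5: outputMatrix[0] = 1 - 0.5 = 0.5 (front-left), outputMatrix[1] = 1.0 (front-right)
        // pan = -0.5: outputMatrix[0] = 1.0,           outputMatrix[1] = 1 - 0.5 = 0.5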
Example #18

 internal static extern MMRESULT waveOutOpen(
     [Out] out WaveOut phwo,
     [In] int uDeviceID,
     [In] ref WaveFormatExtensible pwfx,
     //[In][MarshalAs(UnmanagedType.FunctionPtr)]DriverCallBack.Delegate dwCallback,
     [In] IntPtr /*DriverCallBack.Delegate*/ dwCallback,
     [In] IntPtr dwInstance,
     [In] DriverCallBack.TYPE fdwOpen
     );
Example #19
        public void WaveFormatExtensible_Size_Is_Correct()
        {
            var ext = new WaveFormatExtensible(SampleRate.R44100, BitDepth.B16, SpeakerConfiguration.Stereo);

            Assert.Equal(22, ext.ExtraSize);
            Assert.Equal(44100, ext.SampleRate);
            Assert.Equal(16, ext.BitsPerSample);
            Assert.Equal(2, ext.Channels);
        }
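The expected ExtraSize of 22 is simply the WAVEFORMATEXTENSIBLE tail appended to WAVEFORMATEX:

        // wValidBitsPerSample (2 bytes) + dwChannelMask (4 bytes) + SubFormat GUID (16 bytes) = 22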
Example #20
        public X3DAudioEngine()
        {
            _xaudio2        = new XAudio2();
            _masteringVoice = new MasteringVoice(_xaudio2);

            _deviceFormat = _xaudio2.GetDeviceDetails(0).OutputFormat;
            _x3dAudio     = new X3DAudio(_deviceFormat.ChannelMask);

            Position = new Vector3D(0, 0, 0);
            Rotation = System.Windows.Media.Media3D.Quaternion.Identity;
        }
Example #21
        /// <summary>
        /// Callback which handles setting up an MSS once the first MpegFrame after Id3v2 data has been read.
        /// </summary>
        /// <param name="mpegLayer3Frame"> First MpegFrame</param>
        /// <param name="mediaStreamAttributes">Empty dictionary for MediaStreamAttributes</param>
        /// <param name="mediaStreamDescriptions">Empty dictionary for MediaStreamDescriptions</param>
        /// <param name="mediaSourceAttributes">Empty dictionary for MediaSourceAttributes</param>
        private void ReadPastId3v2TagsCallback(
            MpegFrame mpegLayer3Frame,
            Dictionary <MediaStreamAttributeKeys, string> mediaStreamAttributes,
            List <MediaStreamDescription> mediaStreamDescriptions,
            Dictionary <MediaSourceAttributesKeys, string> mediaSourceAttributes)
        {
            if (mpegLayer3Frame.FrameSize <= 0)
            {
                throw new InvalidOperationException("MpegFrame's FrameSize cannot be negative");
            }

            // Initialize the Mp3 data structures used by the Media pipeline with state from the first frame.
            WaveFormatExtensible wfx = new WaveFormatExtensible();

            this.MpegLayer3WaveFormat = new MpegLayer3WaveFormat();
            this.MpegLayer3WaveFormat.WaveFormatExtensible = wfx;

            this.MpegLayer3WaveFormat.WaveFormatExtensible.FormatTag             = 85;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.Channels              = (short)((mpegLayer3Frame.Channels == Channel.SingleChannel) ? 1 : 2);
            this.MpegLayer3WaveFormat.WaveFormatExtensible.SamplesPerSec         = mpegLayer3Frame.SamplingRate;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond = mpegLayer3Frame.Bitrate / 8;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.BlockAlign            = 1;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.BitsPerSample         = 0;
            this.MpegLayer3WaveFormat.WaveFormatExtensible.ExtraDataSize         = 12;

            this.MpegLayer3WaveFormat.Id = 1;
            this.MpegLayer3WaveFormat.BitratePaddingMode = 0;
            this.MpegLayer3WaveFormat.FramesPerBlock     = 1;
            this.MpegLayer3WaveFormat.BlockSize          = (short)mpegLayer3Frame.FrameSize;
            this.MpegLayer3WaveFormat.CodecDelay         = 0;

            mediaStreamAttributes[MediaStreamAttributeKeys.CodecPrivateData] = this.MpegLayer3WaveFormat.ToHexString();
            this.audioStreamDescription = new MediaStreamDescription(MediaStreamType.Audio, mediaStreamAttributes);

            mediaStreamDescriptions.Add(this.audioStreamDescription);

            this.trackDuration = new TimeSpan(0, 0, (int)(this.audioStreamLength / MpegLayer3WaveFormat.WaveFormatExtensible.AverageBytesPerSecond));
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] = this.trackDuration.Ticks.ToString(CultureInfo.InvariantCulture);
            if (this.audioStream.CanSeek)
            {
                mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "1";
            }
            else
            {
                mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = "0";
            }

            // Report that the Mp3MediaStreamSource has finished initializing its internal state and can now
            // pass in Mp3 Samples.
            this.ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);

            this.currentFrame = mpegLayer3Frame;
            this.currentFrameStartPosition = MpegFrame.FrameHeaderSize;
        }
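Because AverageBytesPerSecond is Bitrate / 8, the duration computed above is just stream length over byte rate; for example:

        // 128 kbps -> AverageBytesPerSecond = 128000 / 8 = 16000
        // a 4,800,000-byte audio stream    -> 4800000 / 16000 = 300 seconds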
Example #22
        public void SetupTests()
        {
            this.wfx                       = new WaveFormatExtensible();
            this.wfx.FormatTag             = 85;
            this.wfx.Channels              = 2;
            this.wfx.SamplesPerSec         = 8000;
            this.wfx.AverageBytesPerSecond = 500;
            this.wfx.BlockAlign            = 1;
            this.wfx.BitsPerSample         = 16;
            this.wfx.ExtraDataSize         = 12;

            this.mp3wfx = new MpegLayer3WaveFormat();
        }
Example #23
        public AudioOutput(WaveFormatExtensible format, IMediaSample mediaSample)
        {
            if (format == null)
            {
                throw new ArgumentNullException("format");
            }
            if (mediaSample == null)
            {
                throw new ArgumentNullException("mediaSample");
            }

            MediaSample = mediaSample;
            Format      = format;
        }
Example #24
        internal WaveFormat BuildOutputWaveFormat(IAudioSource audioSource)
        {
            if (audioSource == null)
            {
                throw new ArgumentNullException("source");
            }

            return(new WaveFormatExtensible(
                       audioSource.WaveFormat.SampleRate,
                       audioSource.WaveFormat.BitsPerSample,
                       OutputChannelCount,
                       WaveFormatExtensible.SubTypeFromWaveFormat(audioSource.WaveFormat),
                       OutputMask));
        }
Example #25
        internal unsafe void __MarshalFrom(ref DeviceDetails.__Native @ref)
        {
            fixed(char *ptr = & @ref.DeviceID)
            {
                this.DeviceID = Utilities.PtrToStringUni((IntPtr)((void *)ptr), 256);
            }

            fixed(char *ptr2 = & @ref.DisplayName)
            {
                this.DisplayName = Utilities.PtrToStringUni((IntPtr)((void *)ptr2), 256);
            }

            this.Role         = @ref.Role;
            this.OutputFormat = new WaveFormatExtensible();
            this.OutputFormat.__MarshalFrom(ref @ref.OutputFormat);
        }
Example #26
        private static WaveFormat GetOutputWaveFormat(IWaveSource source, int sampleRate, ChannelMatrix channelMatrix)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }
            if (channelMatrix == null)
            {
                throw new ArgumentNullException("channelMatrix");
            }

            return(new WaveFormatExtensible(
                       sampleRate,
                       source.WaveFormat.BitsPerSample,
                       channelMatrix.OutputChannelCount,
                       WaveFormatExtensible.SubTypeFromWaveFormat(source.WaveFormat),
                       channelMatrix.OutputMask));
        }
Example #27
 public override void Setup(WaveFormatEx format)
 {
     _format            = new WaveFormatExtensible(format.nSamplesPerSec, format.wBitsPerSample, format.nChannels);
     _voice             = new SourceVoice(_xaudio2, _format);
     _voice.StreamEnd  += _voice_StreamEnd;
     _voice.VoiceError += _voice_VoiceError;
     _emitter           = new Emitter
     {
         ChannelAzimuths     = GetAzimuths(_format.Channels),
         ChannelCount        = _format.Channels,
         ChannelRadius       = 10,
         CurveDistanceScaler = float.MinValue,
         OrientFront         = new Vector3(0, 0, 1),
         OrientTop           = new Vector3(0, 1, 0),
         Position            = new Vector3(0, 0, 0),
         Velocity            = new Vector3(0, 0, 0)
     };
 }
Example #28
        /// <summary>
        /// Initialize the Audio Client
        /// </summary>
        /// <param name="shareMode">Share Mode</param>
        /// <param name="streamFlags">Stream Flags</param>
        /// <param name="bufferDuration">Buffer Duration</param>
        /// <param name="periodicity">Periodicity</param>
        /// <param name="waveFormat">Wave Format</param>
        /// <param name="audioSessionGuid">Audio Session GUID (can be null)</param>
        public int Initialize(AudioClientShareMode shareMode,
                              AudioClientStreamFlags streamFlags,
                              long bufferDuration,
                              long periodicity,
                              ref WaveFormatExtensible waveFormat,
                              Guid audioSessionGuid)
        {
            int hresult = audioClientInterface.Initialize(shareMode, streamFlags, bufferDuration, periodicity, ref waveFormat, ref audioSessionGuid);

            if (hresult != 0)
            {
                this.EventWriterDLL.WriteLine(EventWriterDLL.SeverityTypes.Error, 0x01, "Error Code in AudioClient::Initialize: " + hresult);
            }
            // may have changed the mix format so reset it
            mixFormat = new WaveFormatExtensible();
            return(hresult);
        }
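The duration arguments are REFERENCE_TIME values in 100-nanosecond units, so 10,000,000 equals one second. A hedged usage sketch, assuming an AudioClient instance named client:

        WaveFormatExtensible format = new WaveFormatExtensible(48000, 16, 2);
        int hr = client.Initialize(
            AudioClientShareMode.Shared,
            AudioClientStreamFlags.None,
            10000000,   // one-second buffer (100 ns units)
            0,          // periodicity; 0 lets the engine choose in shared mode
            ref format,
            Guid.Empty);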
Example #29
File: DmoMediaType.cs (Project: h4ltYu/EOS)
        /// <summary>
        /// Sets this object up to point to a wave format
        /// </summary>
        /// <param name="waveFormat">Wave format structure</param>
        public void SetWaveFormat(WaveFormat waveFormat)
        {
            this.majortype = MediaTypes.MEDIATYPE_Audio;
            WaveFormatExtensible waveFormatExtensible = waveFormat as WaveFormatExtensible;

            if (waveFormatExtensible != null)
            {
                this.subtype = waveFormatExtensible.SubFormat;
            }
            else
            {
                switch (waveFormat.Encoding)
                {
                case WaveFormatEncoding.Pcm:
                    this.subtype = AudioMediaSubtypes.MEDIASUBTYPE_PCM;
                    break;

                case WaveFormatEncoding.IeeeFloat:
                    this.subtype = AudioMediaSubtypes.MEDIASUBTYPE_IEEE_FLOAT;
                    break;

                case WaveFormatEncoding.MpegLayer3:
                    this.subtype = AudioMediaSubtypes.WMMEDIASUBTYPE_MP3;
                    break;

                default:
                    // Adpcm and all other encodings are unsupported here
                    throw new ArgumentException(string.Format("Not a supported encoding {0}", waveFormat.Encoding));
                }
            }
            this.bFixedSizeSamples = (this.SubType == AudioMediaSubtypes.MEDIASUBTYPE_PCM || this.SubType == AudioMediaSubtypes.MEDIASUBTYPE_IEEE_FLOAT);
            this.formattype        = DmoMediaTypeGuids.FORMAT_WaveFormatEx;
            if (this.cbFormat < Marshal.SizeOf(waveFormat))
            {
                throw new InvalidOperationException("Not enough memory assigned for a WaveFormat structure");
            }
            Marshal.StructureToPtr(waveFormat, this.pbFormat, false);
        }
Example #30
        private static string FormatDeviceDetailsOutput(int deviceIndex, DeviceDetails details)
        {
            StringBuilder sb = new StringBuilder();

            sb.AppendLine("------------------------------------------------------------");
            sb.AppendLine(String.Format("#{0} - {1}", deviceIndex, details.DisplayName));
            sb.AppendLine(String.Format("Role: {0}", details.Role.ToString()));
            sb.AppendLine(String.Format("ID: {0}", details.DeviceId));
            sb.AppendLine("Output format: ");

            WaveFormatExtensible format = details.OutputFormat;
            string pad = "\t";

            sb.AppendLine(String.Format("{0}ChanMask: {1}\tChannels: {2}", pad, format.ChannelMask, format.Channels));
            sb.AppendLine(String.Format("{0}BlockAlign: {1}\t\tSamplesPerBlock: {2}", pad, format.BlockAlignment, format.SamplesPerBlock));
            sb.AppendLine(String.Format("{0}BitsPerSample: {1}\tSamplesPerSecond: {2}", pad, format.BitsPerSample, format.SamplesPerSecond));
            sb.AppendLine(String.Format("{0}ValidBitsPerSample: {1}\tAvgBytesPerSecond: {2}", pad, format.ValidBitsPerSample, format.AverageBytesPerSecond));
            sb.AppendLine(String.Format("{0}Tag: {1}", pad, format.FormatTag));

            return(sb.ToString());
        }
Example #33
        /// <summary>
        /// The IsFormatSupportedNative method indicates whether the audio endpoint device
        /// supports a particular stream format.
        /// </summary>
        /// <returns>For exclusive mode, IsFormatSupportedNative returns S_OK if the audio
        /// endpoint device supports the caller-specified format, or it returns
        /// AUDCLNT_E_UNSUPPORTED_FORMAT if the device does not support the format. The
        /// ppClosestMatch parameter can be NULL. If it is not NULL, the method writes NULL to
        /// *ppClosestMatch. For shared mode, if the audio engine supports the caller-specified
        /// format, IsFormatSupportedNative sets *ppClosestMatch to NULL and returns S_OK. If the
        /// audio engine does not support the caller-specified format but does support a similar
        /// format, the method retrieves the similar format through the ppClosestMatch parameter and
        /// returns S_FALSE. If the audio engine does not support the caller-specified format or any
        /// similar format, the method sets *ppClosestMatch to NULL and returns
        /// AUDCLNT_E_UNSUPPORTED_FORMAT.</returns>
        public unsafe int IsFormatSupportedNative(AudioClientShareMode shareMode, WaveFormat waveFormat, out WaveFormatExtensible closestMatch)
        {
            closestMatch = null;
            var hClosestMatch = GCHandle.Alloc(closestMatch, GCHandleType.Pinned);
            var hWaveFormat = GCHandle.Alloc(waveFormat, GCHandleType.Pinned);

            IntPtr pclosestmatch = hClosestMatch.AddrOfPinnedObject();

            var result = InteropCalls.CallI(_basePtr, shareMode, hWaveFormat.AddrOfPinnedObject().ToPointer(),
                &pclosestmatch, ((void**)(*(void**)_basePtr))[7]);

            hWaveFormat.Free();
            hClosestMatch.Free();

            return result;
        }
Example #34
        private WaveFormat SetupWaveFormat(WaveFormat waveFormat, AudioClient audioClient)
        {
            WaveFormatExtensible closestMatch;
            WaveFormat finalFormat = waveFormat;
            if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
            {
                if (closestMatch == null)
                {
                    WaveFormat mixformat = audioClient.GetMixFormat();
                    if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
                    {
                        WaveFormatExtensible[] possibleFormats = new WaveFormatExtensible[]
                        {
                            new WaveFormatExtensible(waveFormat.SampleRate, 32, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                            new WaveFormatExtensible(waveFormat.SampleRate, 24, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 16, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm),
                            new WaveFormatExtensible(waveFormat.SampleRate, 8,  waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm)
                        };

                        if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                        {
                            //no format found...
                            possibleFormats = new WaveFormatExtensible[]
                            {
                                new WaveFormatExtensible(waveFormat.SampleRate, 32, 2, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                                new WaveFormatExtensible(waveFormat.SampleRate, 24, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 16, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 8,  2, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 32, 1, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                                new WaveFormatExtensible(waveFormat.SampleRate, 24, 1, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 16, 1, DMO.MediaTypes.MEDIATYPE_Pcm),
                                new WaveFormatExtensible(waveFormat.SampleRate, 8,  1, DMO.MediaTypes.MEDIATYPE_Pcm)
                            };

                            if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                            {
                                throw new NotSupportedException("Could not find a supported format.");
                            }
                        }
                    }

                    finalFormat = mixformat;
                }
                else
                {
                    finalFormat = closestMatch;
                }
            }

            return finalFormat;
        }
Example #35
 public override void OnGetMediaType(WaveFormatExtensible format)
 {
     GuiThread.DoAsync(delegate
     {
         // This has to be done via GuiThread.DoAsync because when this method is called
         // Player.Filters has not been populated
         // Using GuiThread.DoAsync essentially queues this delegate until the media file
         // is actually opened and all Player fields have been populated
         if (Player.Filters.Any(f => f.ClsId == s_SanearSoundClsId))
         {
             m_Sanear = true;
         }
         else if (Player.Filters.Any(f => f.ClsId == s_DirectSoundClsId || f.ClsId == s_WaveOutClsId))
         {
             m_DirectSoundWaveOut = true;
         }
         else
         {
             Player.OsdText.Show("Warning: Audio renderer is incompatible with Reclock. Reclock disabled!");
         }
     });
 }
Example #36
        /// <summary>
        /// Checks whether the audio endpoint device supports a particular stream format.
        /// </summary>
        public bool IsFormatSupported(AudioClientShareMode shareMode, WaveFormat waveFormat, out WaveFormatExtensible closestMatch)
        {
            int result = IsFormatSupportedNative(shareMode, waveFormat, out closestMatch);
            switch (result)
            {
                case 0x0: // S_OK: the format is supported
                    return true;

                case 0x1: // S_FALSE: not supported as such; closestMatch may hold a suggestion
                case unchecked((int)0x88890008): // AUDCLNT_E_UNSUPPORTED_FORMAT
                    return false;

                default:
                    CoreAudioAPIException.Try(result, c, "IsFormatSupported");
                    return false;
            }
        }