Example #1
 /// <summary>
 /// Initializes a new instance of the <see cref="PrimarySoundBuffer"/> class.
 /// </summary>
 /// <param name="dSound">an instance of the <see cref="DirectSound"/></param>
 /// <param name="bufferDescription">The buffer description.</param>
 public PrimarySoundBuffer(DirectSound dSound, SoundBufferDescription bufferDescription)
     : base(IntPtr.Zero)
 {
     IntPtr temp;
     dSound.CreateSoundBuffer(bufferDescription, out temp, null);
     NativePointer = temp;
 }
Example #3
        static void Main(string[] args)
        {
            DirectSound directSound = new DirectSound();

            var form = new Form();
            form.Text = "SharpDX - DirectSound Demo";

            // Set Cooperative Level to PRIORITY (priority level can call the SetFormat and Compact methods)
            //
            directSound.SetCooperativeLevel(form.Handle, CooperativeLevel.Priority);

            // Create PrimarySoundBuffer
            var primaryBufferDesc = new SoundBufferDescription();
            primaryBufferDesc.Flags = BufferFlags.PrimaryBuffer;
            primaryBufferDesc.AlgorithmFor3D = Guid.Empty;

            var primarySoundBuffer = new PrimarySoundBuffer(directSound, primaryBufferDesc);

            // Play the PrimarySound Buffer
            primarySoundBuffer.Play(0, PlayFlags.Looping);

            // Default WaveFormat Stereo 44100 16 bit
            WaveFormat waveFormat = new WaveFormat();

            // Create SecondarySoundBuffer
            var secondaryBufferDesc = new SoundBufferDescription();
            secondaryBufferDesc.BufferBytes = waveFormat.ConvertLatencyToByteSize(60000);
            secondaryBufferDesc.Format = waveFormat;
            secondaryBufferDesc.Flags = BufferFlags.GetCurrentPosition2 | BufferFlags.ControlPositionNotify | BufferFlags.GlobalFocus |
                                        BufferFlags.ControlVolume | BufferFlags.StickyFocus;
            secondaryBufferDesc.AlgorithmFor3D = Guid.Empty;
            var secondarySoundBuffer = new SecondarySoundBuffer(directSound, secondaryBufferDesc);

            // Get Capabilities from the secondary sound buffer
            var capabilities = secondarySoundBuffer.Capabilities;

            // Lock the buffer
            DataStream dataPart2;
            var dataPart1 = secondarySoundBuffer.Lock(0, capabilities.BufferBytes, LockFlags.EntireBuffer, out dataPart2);

            // Fill the buffer with some sound
            int numberOfSamples = capabilities.BufferBytes / waveFormat.BlockAlign;
            for (int i = 0; i < numberOfSamples; i++)
            {
                double vibrato = Math.Cos(2 * Math.PI * 10.0 * i / waveFormat.SampleRate);
                short value = (short)(Math.Cos(2 * Math.PI * (220.0 + 4.0 * vibrato) * i / waveFormat.SampleRate) * 16384); // Not too loud
                dataPart1.Write(value);
                dataPart1.Write(value);
            }

            // Unlock the buffer
            secondarySoundBuffer.Unlock(dataPart1, dataPart2);

            // Play the song
            secondarySoundBuffer.Play(0, PlayFlags.Looping);
           
            Application.Run(form);
        }
Example #4
        /// <summary>
        /// Initializes a new instance of the <see cref="SecondarySoundBuffer"/> class.
        /// </summary>
        /// <param name="dSound">an instance of the <see cref="DirectSound"/></param>
        /// <param name="bufferDescription">The buffer description.</param>
        public SecondarySoundBuffer(DirectSound dSound, SoundBufferDescription bufferDescription)
            : base(IntPtr.Zero)
        {
            IntPtr temp;

            dSound.CreateSoundBuffer(bufferDescription, out temp, null);
            NativePointer = temp;
            QueryInterfaceFrom(this);
            Marshal.Release(temp);
        }

        /// <summary>Creates a sound.</summary>
        /// <param name="samples">A byte array containing the sample data (Mono 16-bit Signed samples).</param>
        /// <param name="frequency">The sample frequency in herz.</param>
        /// <param name="soundIndex">The index of the sound.</param>
        /// <param name="speakerSound">Whether the sound is a speaker sound otherwise it is a sampled sound.</param>
        /// <returns>The Sound.</returns>
        public override SoundSystem.Sound CreateSound(byte[] samples, int frequency, int soundIndex, bool speakerSound)
        {
            WaveFormat waveFormat = new WaveFormat(frequency, 16, 1);

            SoundBufferDescription soundBufferDescription = new SoundBufferDescription();
            soundBufferDescription.Format = waveFormat;
            soundBufferDescription.Flags = BufferFlags.ControlVolume;
            soundBufferDescription.BufferBytes = samples.Length;

            SecondarySoundBuffer secondarySoundBuffer = new SecondarySoundBuffer(_directSound, soundBufferDescription);
            secondarySoundBuffer.Write(samples, 0, LockFlags.EntireBuffer);

            return new SharpDXSound(secondarySoundBuffer);
        }
Example #6
        public DXWavePlayer(int device, int BufferByteSize, DataRequestDelegate fillProc)
        {
            if (BufferByteSize < 1000)
            {
                throw new ArgumentOutOfRangeException("BufferByteSize", "minimum buffer size is 1000 bytes");
            }

            _buffersize = BufferByteSize;
            _requestproc = fillProc;
            var devices = DirectSound.GetDevices();
            if (device <= 0 || device >= devices.Count)
            {
                device = 0;
            }

            _outputDevice = new DirectSound(devices[device].DriverGuid);

            System.Windows.Interop.WindowInteropHelper wh = new System.Windows.Interop.WindowInteropHelper(Application.Current.MainWindow);
            _outputDevice.SetCooperativeLevel(wh.Handle, CooperativeLevel.Priority);

            _buffDescription = new SoundBufferDescription();
            _buffDescription.Flags = BufferFlags.ControlPositionNotify | BufferFlags.ControlFrequency | BufferFlags.ControlEffects | BufferFlags.GlobalFocus | BufferFlags.GetCurrentPosition2;
            _buffDescription.BufferBytes = BufferByteSize * InternalBufferSizeMultiplier;

            WaveFormat format = new WaveFormat(16000, 16, 1);

            _buffDescription.Format = format;

            _soundBuffer = new SecondarySoundBuffer(_outputDevice, _buffDescription);
            _synchronizer = new AutoResetEvent(false);

            NotificationPosition[] nots = new NotificationPosition[InternalBufferSizeMultiplier];

            NotificationPosition not;
            int bytepos = 800;
            for (int i = 0; i < InternalBufferSizeMultiplier; i++)
            {
                not = new NotificationPosition();
                not.Offset = bytepos;
                not.WaitHandle = _synchronizer;
                nots[i] = not;
                bytepos += BufferByteSize;
            }

            _soundBuffer.SetNotificationPositions(nots);

            _waitThread = new Thread(new ThreadStart(DataRequestThread)) { Name = "MyWavePlayer.DataRequestThread" };
            _waitThread.Start();
        }
Example #7
        protected override bool LoadAudioFile(string audioFile, DirectSound directSound)
        {
            try
            {
                // Open the wave file in binary.
                var reader = new BinaryReader(File.OpenRead(SystemConfiguration.DataFilePath + audioFile));

                // Read in the wave file header.
                chunkId = new string(reader.ReadChars(4));
                chunkSize = reader.ReadInt32();
                format = new string(reader.ReadChars(4));
                subChunkId = new string(reader.ReadChars(4));
                subChunkSize = reader.ReadInt32();
                audioFormat = (WaveFormatEncoding)reader.ReadInt16();
                numChannels = reader.ReadInt16();
                sampleRate = reader.ReadInt32();
                bytesPerSecond = reader.ReadInt32();
                blockAlign = reader.ReadInt16();
                bitsPerSample = reader.ReadInt16();
                dataChunkId = new string(reader.ReadChars(4));
                dataSize = reader.ReadInt32();

                // Check that the chunk ID is the RIFF format
                // and the file format is the WAVE format
                // and sub chunk ID is the fmt format
                // and the audio format is PCM
                // and the wave file was recorded in stereo format
                // and at a sample rate of 44.1 KHz
                // and at 16 bit format
                // and there is the data chunk header.
                // Otherwise return false.
                if (chunkId != "RIFF" || format != "WAVE" || subChunkId.Trim() != "fmt" || audioFormat != WaveFormatEncoding.Pcm || numChannels != 2 || sampleRate != 44100 || bitsPerSample != 16 || dataChunkId != "data")
                    return false;

                // Set the buffer description of the secondary sound buffer that the wave file will be loaded onto and the wave format.
                var buffer = new SoundBufferDescription();
                buffer.Flags = BufferFlags.ControlVolume;
                buffer.BufferBytes = dataSize;
                buffer.Format = new WaveFormat(44100, 16, 2);
                buffer.AlgorithmFor3D = Guid.Empty;

                // Create a temporary sound buffer with the specific buffer settings.
                _SecondaryBuffer = new SecondarySoundBuffer(directSound, buffer);

                // Read in the wave file data into the temporary buffer.
                var waveData = reader.ReadBytes(dataSize);

                // Close the reader
                reader.Close();

                // Lock the secondary buffer to write wave data into it.
                DataStream waveBufferData2;
                var waveBufferData1 = _SecondaryBuffer.Lock(0, dataSize, LockFlags.None, out waveBufferData2);

                // Copy the wave data into the buffer.
                waveBufferData1.Write(waveData, 0, dataSize);

                // Unlock the secondary buffer after the data has been written to it.
                var result = _SecondaryBuffer.Unlock(waveBufferData1, waveBufferData2);

                if (result != Result.Ok)
                    return false;
            }
            catch
            {
                return false;
            }

            return true;
        }
Example #8
        bool InitializeDirectSound(IntPtr windowHandler)
        {
            try
            {
                // Initialize the direct sound interface pointer for the default sound device.
                _DirectSound = new DirectSound();

                // Set the cooperative level to priority so the format of the primary sound buffer can be modified.
                if (_DirectSound.SetCooperativeLevel(windowHandler, CooperativeLevel.Priority) != Result.Ok)
                    return false;

                // Setup the primary buffer description.
                var buffer = new SoundBufferDescription();
                buffer.Flags = BufferFlags.PrimaryBuffer | BufferFlags.ControlVolume;
                buffer.AlgorithmFor3D = Guid.Empty;

                // Get control of the primary sound buffer on the default sound device.
                _PrimaryBuffer = new PrimarySoundBuffer(_DirectSound, buffer);

                // Set up the format of the primary sound buffer.
                // In this case it is a 44.1 kHz, 16-bit, stereo format.
                _PrimaryBuffer.Format = new WaveFormat(44100, 16, 2);
            }
            catch (Exception)
            {
                return false;
            }

            return true;
        }
Example #9
        public void initialize(int samplesPerSecond, int bytesPerSample, int nrChannels,
            int bufferSizeBytes)
        {

            try
            {

                if (directSound == null)
                {

                    directSound = new DirectSound();
                    directSound.SetCooperativeLevel(owner.Handle, CooperativeLevel.Priority);
                }

                releaseResources();

                this.bufferSizeBytes = bufferSizeBytes;
                this.bytesPerSample = bytesPerSample;
                this.samplesPerSecond = samplesPerSecond;
                this.nrChannels = nrChannels;

                SoundBufferDescription desc = new SoundBufferDescription();
                desc.BufferBytes = bufferSizeBytes;
                desc.Flags = BufferFlags.Defer | BufferFlags.GlobalFocus |
                    BufferFlags.ControlVolume | BufferFlags.ControlFrequency |
                    BufferFlags.GetCurrentPosition2;

                //desc.AlgorithmFor3D = Guid.Empty;

                int blockAlign = nrChannels * bytesPerSample;
                int averageBytesPerSecond = samplesPerSecond * blockAlign;

                WaveFormat format = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm,
                    samplesPerSecond, nrChannels, averageBytesPerSecond, blockAlign, bytesPerSample * 8);

                desc.Format = format;
                //desc.Format.Encoding = WaveFormatEncoding.Pcm;
                /*
                            desc.Format.SampleRate = samplesPerSecond;
                            desc.Format.BitsPerSample = bytesPerSample * 8;
                            desc.Format.Channels = nrChannels;
                            desc.Format.FormatTag = WaveFormatTag.Pcm;
                            desc.Format.BlockAlign = (short)(format.Channels * (format.BitsPerSample / 8));
                            desc.Format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlign;
                */

                /*desc.DeferLocation = true;
                desc.GlobalFocus = true;
                desc.ControlVolume = true;
                desc.CanGetCurrentPosition = true;
                desc.ControlFrequency = true;*/

                silence = new byte[bufferSizeBytes];
                Array.Clear(silence, 0, silence.Length);

                audioBuffer = new SecondarySoundBuffer(directSound, desc);

                Volume = volume;
                offsetBytes = 0;
                prevPlayPos = 0;
                ptsPos = 0;
                prevPtsPos = 0;
                playLoops = 0;
                ptsLoops = 0;

                log.Info("Direct Sound Initialized");

            }
            catch (SharpDX.SharpDXException e)
            {
                log.Error("Error initializing Direct Sound", e);
                MessageBox.Show("Error initializing Direct Sound: " + e.Message, "Direct Sound Error");
            }
            catch (Exception e)
            {
                log.Error("Error initializing Direct Sound", e);
            }
        }
Example #10
 private void PlayFile(FileInfo FI)
 {
     lock (this)
     {
         if (this.DS == null)
         {
             this.DS = new DirectSound();
             this.DS.SetCooperativeLevel(this.Handle, CooperativeLevel.Normal);
         }
         this.StopPlayback();
         var bd = new SoundBufferDescription {
             Format      = new WaveFormat(FI.AudioFile.SampleRate, 16, FI.AudioFile.Channels),
             BufferBytes = this.AudioBufferSize,
             Flags       = BufferFlags.GlobalFocus | BufferFlags.StickyFocus | BufferFlags.ControlVolume | BufferFlags.GetCurrentPosition2 | BufferFlags.ControlPositionNotify
         };
         this.CurrentBuffer = new SecondarySoundBuffer(this.DS, bd);
         if (this.AudioUpdateTrigger == null)
             this.AudioUpdateTrigger = new AutoResetEvent(false);
         var chunkSize = this.AudioBufferSize / this.AudioBufferMarkers;
         var updatePositions = new NotificationPosition[this.AudioBufferMarkers];
         for (var i = 0; i < this.AudioBufferMarkers; ++i) {
             updatePositions[i] = new NotificationPosition() {
                 WaitHandle = this.AudioUpdateTrigger,
                 Offset = chunkSize * i
             };
         }
         this.CurrentBuffer.SetNotificationPositions(updatePositions);
         this.CurrentStream = FI.AudioFile.OpenStream();
         {
             var bytes = new byte[this.CurrentBuffer.Capabilities.BufferBytes];
             var readbytes = this.CurrentStream.Read(bytes, 0, this.CurrentBuffer.Capabilities.BufferBytes);
             if (readbytes < this.CurrentBuffer.Capabilities.BufferBytes)
                 Array.Clear(bytes, readbytes, this.CurrentBuffer.Capabilities.BufferBytes - readbytes);
             DataStream audiodata2;
             var audiodata1 = this.CurrentBuffer.Lock(0, this.CurrentBuffer.Capabilities.BufferBytes, LockFlags.EntireBuffer, out audiodata2);
             audiodata1.Write(bytes, 0, this.CurrentBuffer.Capabilities.BufferBytes);
             this.CurrentBuffer.Unlock(audiodata1, audiodata2);
         }
         if (this.CurrentStream.Position < this.CurrentStream.Length) {
             this.AudioUpdateTrigger.Reset();
             this.AudioUpdateThread = new Thread(this.AudioUpdate);
             this.AudioUpdateThread.Start();
             this.btnPause.Enabled = true;
             this.btnStop.Enabled = true;
             this.AudioIsLooping = true;
         }
         else {
             this.CurrentStream.Close();
             this.CurrentStream = null;
             this.AudioIsLooping = false;
         }
         this.CurrentBuffer.Play(0, (this.AudioIsLooping ? PlayFlags.Looping : PlayFlags.None));
     }
 }
Example #11
 /// <summary>
 /// Initializes a new instance of the <see cref="T:SharpDX.DirectSound.FullDuplex" /> class.
 /// </summary>
 /// <param name="captureDevice" />
 /// <param name="playbackDevice" />
 /// <param name="captureDescription" />
 /// <param name="bufferDescription" />
 /// <param name="windowHandle" />
 /// <param name="level" />
 /// <param name="captureBuffer" />
 /// <param name="secondaryBuffer" />
 public FullDuplex(Guid captureDevice, Guid playbackDevice, CaptureBufferDescription captureDescription, SoundBufferDescription bufferDescription, IntPtr windowHandle, CooperativeLevel level, out CaptureBuffer captureBuffer, out SecondarySoundBuffer secondaryBuffer)
 {
     DSound.FullDuplexCreate(captureDevice, playbackDevice, captureDescription, bufferDescription, windowHandle, (int)level, this,
                             out captureBuffer, out secondaryBuffer, null);
 }
Example #12
        /// <summary>
        ///   Constructs a new Audio Output Device.
        /// </summary>
        /// 
        /// <param name="device">Global identifier of the audio output device.</param>
        /// <param name="owner">The owner window handle.</param>
        /// <param name="samplingRate">The sampling rate of the device.</param>
        /// <param name="channels">The number of channels of the device.</param>
        /// 
        public AudioOutputDevice(Guid device, IntPtr owner, int samplingRate, int channels)
        {
            this.owner = owner;
            this.samplingRate = samplingRate;
            this.channels = channels;
            this.device = device;

            DirectSound ds = new DirectSound(device);
            ds.SetCooperativeLevel(owner, CooperativeLevel.Priority);


            // Set the output format
            WaveFormat waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(samplingRate, channels);
            bufferSize = 8 * waveFormat.AverageBytesPerSecond;


            // Setup the secondary buffer
            SoundBufferDescription desc2 = new SoundBufferDescription();
            desc2.Flags =
                BufferFlags.GlobalFocus |
                BufferFlags.ControlPositionNotify |
                BufferFlags.GetCurrentPosition2;
            desc2.BufferBytes = bufferSize;
            desc2.Format = waveFormat;

            buffer = new SecondarySoundBuffer(ds, desc2);


            var list = new List<NotificationPosition>();
            int numberOfPositions = 32;

            // Set notification for buffer percentiles
            for (int i = 0; i < numberOfPositions; i++)
            {
                list.Add(new NotificationPosition()
                {              
                    WaitHandle = new AutoResetEvent(false),
                    Offset = i * bufferSize / numberOfPositions + 1,
                });
            }

            // Set notification for end of buffer
            list.Add(new NotificationPosition()
            {
                Offset = bufferSize - 1,
                WaitHandle = new AutoResetEvent(false)
            });

            firstHalfBufferIndex = numberOfPositions / 2;
            secondHalfBufferIndex = numberOfPositions;

            notifications = list.ToArray();

            System.Diagnostics.Debug.Assert(notifications[firstHalfBufferIndex].Offset == bufferSize / 2 + 1);
            System.Diagnostics.Debug.Assert(notifications[secondHalfBufferIndex].Offset == bufferSize - 1);

            // Make a copy of the wait handles
            waitHandles = new WaitHandle[notifications.Length];
            for (int i = 0; i < notifications.Length; i++)
                waitHandles[i] = notifications[i].WaitHandle;

            // Store all notification positions
            buffer.SetNotificationPositions(notifications);
        }
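The constructor above only creates and stores the notification positions; the thread that waits on waitHandles and refills the buffer is not shown here. A minimal sketch of how those handles are typically consumed follows, reusing the buffer, bufferSize, firstHalfBufferIndex and waitHandles fields set up above; the nextHalf source and the half-buffer refill policy are assumptions rather than the original implementation.

        // Hypothetical refill loop, not part of the original AudioOutputDevice.
        // WaitAny returns the index of the notification that fired: indices below
        // firstHalfBufferIndex mean the play cursor is still in the first half,
        // so the second half is safe to rewrite, and vice versa.
        private void RefillLoop(Func<byte[]> nextHalf)
        {
            int halfSize = bufferSize / 2;
            while (true)                        // a real device would also honour a stop request
            {
                int signaled = WaitHandle.WaitAny(waitHandles);

                // Rewrite the half of the buffer that the play cursor has just left.
                int offset = signaled < firstHalfBufferIndex ? halfSize : 0;

                DataStream part2;
                DataStream part1 = buffer.Lock(offset, halfSize, LockFlags.None, out part2);
                part1.Write(nextHalf(), 0, halfSize);
                buffer.Unlock(part1, part2);
            }
        }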
Example #13
        public void initialize(int samplesPerSecond, int bytesPerSample, int nrChannels,
            int bufferSizeBytes)
        {

            try
            {
                if (directSound == null)
                {
                    directSound = new DirectSound();
                    directSound.SetCooperativeLevel(owner.Handle, CooperativeLevel.Priority);                
                }
             
                releaseResources();

                this.bufferSizeBytes = bufferSizeBytes;
                this.bytesPerSample = bytesPerSample;
                this.samplesPerSecond = samplesPerSecond;
                this.nrChannels = nrChannels;

                SoundBufferDescription desc = new SoundBufferDescription();
                desc.BufferBytes = bufferSizeBytes;
                desc.Flags = BufferFlags.Defer | BufferFlags.GlobalFocus |
                    BufferFlags.ControlVolume | BufferFlags.ControlFrequency |
                    BufferFlags.GetCurrentPosition2;
               
                //desc.AlgorithmFor3D = Guid.Empty;

                int blockAlign = nrChannels * bytesPerSample;
                int averageBytesPerSecond = samplesPerSecond * blockAlign;

                WaveFormat format = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm,
                    samplesPerSecond, nrChannels, averageBytesPerSecond, blockAlign, bytesPerSample * 8);

                desc.Format = format;
           
                silence = new char[bufferSizeBytes];
                Array.Clear(silence, 0, silence.Length);

                audioBuffer = new SecondarySoundBuffer(directSound, desc);

                Volume = volume;
                offsetBytes = 0;
                prevPlayPos = 0;
                ptsPos = 0;
                prevPtsPos = 0;
                playLoops = 0;
                ptsLoops = 0;

                //log.Info("Direct Sound Initialized");

            }       
            catch (Exception e)
            {
                throw new VideoPlayerException("Error initializing Direct Sound: " + e.Message, e);             
            }
        }
Example #15
        private void SetUpAudioDevice()
        {
            if (AudioDevice == null)
            {
                AudioDevice = new DirectSound();
                AudioDevice.SetCooperativeLevel(new WindowInteropHelper(Owner).Handle, CooperativeLevel.Normal);
            }

            if (AudioBuffer == null)
            {
                BufferDesc = new SoundBufferDescription();
                BufferDesc.Flags = BufferFlags.GlobalFocus | BufferFlags.GetCurrentPosition2 | BufferFlags.ControlVolume;
                BufferDesc.AlgorithmFor3D = Guid.Empty;

                // Audio output in AviSynth (via DirectShow\VFW)
                // v2.57   |   global OPT_AllowFloatAudio = True (by default FLOAT is converted to 16-bit, while 32-bit and 24-bit are output as-is)
                // v2.58   |   global OPT_UseWaveExtensible = True (by default WaveFormatExtensible is not used, even for multichannel and high-bit-depth audio)
                // v2.60   |   global OPT_dwChannelMask(int v) (overrides the default channel layout when WaveFormatExtensible is used)
                // FFCHANNEL_LAYOUT in FFMS2

                if (reader.GetVarBoolean("OPT_UseWaveExtensible", true)) // We use our own default here
                {
                    #region WaveFormatExtensible
                    WaveFormatExtensible format = new WaveFormatExtensible(reader.Samplerate, reader.BitsPerSample, reader.Channels);

                    // SharpDX assumes that all 32-bit audio is FLOAT
                    if (reader.Clip.SampleType == AudioSampleType.INT32)
                        format.GuidSubFormat = new Guid("00000001-0000-0010-8000-00aa00389b71"); //PCM

                    #region channels
                    // AviSynth (default)
                    //Chan. Mask MS channels
                    //----- ------ -----------------------
                    //1   0x0004 FC                      4
                    //2   0x0003 FL FR                   3
                    //3   0x0007 FL FR FC                7
                    //4   0x0033 FL FR BL BR             51
                    //5   0x0037 FL FR FC BL BR          55
                    //6   0x003F FL FR FC LF BL BR       63
                    //7   0x013F FL FR FC LF BL BR BC    319
                    //8   0x063F FL FR FC LF BL BR SL SR 1599

                    int mask = reader.GetVarInteger("OPT_dwChannelMask", -1);
                    if (mask != -1) format.ChannelMask = (Speakers)mask;
                    else if (reader.Channels == 1) format.ChannelMask = Speakers.Mono; //4
                    //else if (reader.Channels == 2) format.ChannelMask = Speakers.Stereo; //3
                    else if (reader.Channels == 3) format.ChannelMask = Speakers.Stereo | Speakers.FrontCenter; //7 //TwoPointOne; //11
                    else if (reader.Channels == 4) format.ChannelMask = Speakers.Quad; //51
                    else if (reader.Channels == 5) format.ChannelMask = Speakers.Quad | Speakers.FrontCenter; //55  //FourPointOne; //59
                    //else if (reader.Channels == 6) format.ChannelMask = Speakers.FivePointOne; //63
                    else if (reader.Channels == 7) format.ChannelMask = Speakers.FivePointOne | Speakers.BackCenter; //319
                    else if (reader.Channels == 8) format.ChannelMask = Speakers.SevenPointOneSurround; //1599  //SevenPointOne; //255
                    /*else // This approach was already used when calling the constructor; it is unclear how correct it is, or how correct everything above is..
                    {
                        //NAudio\SharpDX
                        int dwChannelMask = 0;
                        for (int n = 0; n < 1; n++) dwChannelMask |= (1 << n);
                        format.ChannelMask = (Speakers)dwChannelMask;

                        //ch mask (SlimDX) [SharpDX]
                        //1    1 (FrontLeft) [FrontLeft]
                        //2    3 (Stereo) [FrontLeft | FrontRight]
                        //3    7 (Mono) [FrontLeft | FrontRight | FrontCenter]
                        //4   15 (Mono) [FrontLeft | FrontRight | FrontCenter | LowFrequency]
                        //5   31 (TwoPointOne | Mono | BackLeft) [FrontLeft | FrontRight | FrontCenter | LowFrequency | BackLeft]
                        //6   63 (FivePointOne) [FrontLeft | FrontRight | FrontCenter | LowFrequency | BackLeft | BackRight]
                        //7  127 (FivePointOne | FrontLeftOfCenter) [FrontLeft | FrontRight | FrontCenter | LowFrequency | BackLeft | BackRight | FrontLeftOfCenter]
                        //8  255 (SevenPointOne) [FrontLeft | FrontRight | FrontCenter | LowFrequency | BackLeft | BackRight | FrontLeftOfCenter | FrontRightOfCenter]
                    }*/
                    #endregion

                    samplesPerHalfBuff = (int)((format.SampleRate / 2) * k); // Number of samples per half of the buffer
                    bytesPerSample = format.BlockAlign;

                    BufferDesc.BufferBytes = samplesPerHalfBuff * format.BlockAlign * 2; // Number of bytes for the full buffer
                    BufferDesc.Format = format;
                    #endregion
                }
                else
                {
                    #region WaveFormat
                    WaveFormatEncoding tag = (reader.Clip.SampleType == AudioSampleType.FLOAT) ? WaveFormatEncoding.IeeeFloat : WaveFormatEncoding.Pcm;
                    WaveFormat format = WaveFormat.CreateCustomFormat(tag, reader.Samplerate, reader.Channels, reader.Clip.AvgBytesPerSec, reader.Channels * reader.Clip.BytesPerSample, reader.BitsPerSample);

                    samplesPerHalfBuff = (int)((format.SampleRate / 2) * k); // Number of samples per half of the buffer
                    bytesPerSample = format.BlockAlign;

                    BufferDesc.BufferBytes = samplesPerHalfBuff * format.BlockAlign * 2; // Number of bytes for the full buffer
                    BufferDesc.Format = format;
                    #endregion
                }

                AudioBuffer = new SecondarySoundBuffer(AudioDevice, BufferDesc);

                if (ABuffer == null)
                    ABuffer = new byte[BufferDesc.BufferBytes / 2];

                if (!h.IsAllocated)
                    h = GCHandle.Alloc(ABuffer, GCHandleType.Pinned);
            }
        }