コード例 #1
0
        /// <summary>
        /// Creates the DirectSound device for the given window and configures the
        /// primary buffer for 44.1 kHz, 16-bit stereo PCM output.
        /// </summary>
        /// <param name="hwnd">Window handle used for the cooperative level.</param>
        /// <returns>true on success; false if any DirectSound call failed.</returns>
        bool InitializeDirectSound(IntPtr hwnd)
        {
            try {
                // Priority level is required so the primary buffer format can be changed.
                directSound = new SharpDX.DirectSound.DirectSound();
                directSound.SetCooperativeLevel(hwnd, CooperativeLevel.Priority);

                // The primary buffer holds no data of its own; its format is set below.
                var primaryDescription = new SoundBufferDescription {
                    Flags          = BufferFlags.PrimaryBuffer | BufferFlags.ControlVolume,
                    BufferBytes    = 0,
                    Format         = null,
                    AlgorithmFor3D = Guid.Empty
                };

                primaryBuffer = new PrimarySoundBuffer(directSound, primaryDescription);

                // Target mixing format: 44.1 kHz, 16-bit, stereo PCM.
                const int sampleRate    = 44100;
                const int bitsPerSample = 16;
                const int channelCount  = 2;
                const int frameBytes    = bitsPerSample / 8 * channelCount;

                primaryBuffer.Format = WaveFormat.CreateCustomFormat(
                    WaveFormatEncoding.Pcm,
                    sampleRate,
                    channelCount,
                    sampleRate * frameBytes,
                    frameBytes,
                    bitsPerSample);
            } catch {
                // Any failure leaves the player uninitialized.
                return false;
            }
            return true;
        }
コード例 #2
0
ファイル: PrimarySoundBuffer.cs プロジェクト: numo16/SharpDX
 /// <summary>
 /// Initializes a new instance of the <see cref="PrimarySoundBuffer"/> class.
 /// </summary>
 /// <param name="dSound">The <see cref="DirectSound"/> device that owns the buffer.</param>
 /// <param name="bufferDescription">The buffer description.</param>
 public PrimarySoundBuffer(DirectSound dSound, SoundBufferDescription bufferDescription)
     : base(IntPtr.Zero)
 {
     // Ask the device to create the buffer, then adopt the returned COM pointer.
     IntPtr nativePtr;
     dSound.CreateSoundBuffer(bufferDescription, out nativePtr, null);
     NativePointer = nativePtr;
 }
コード例 #3
0
ファイル: Sound.cs プロジェクト: nyx1220/sharpdx-examples
        /// <summary>
        /// Initializes the DirectSound device on the default sound card and takes
        /// control of the primary buffer, setting it to 44.1 kHz 16-bit stereo PCM.
        /// </summary>
        /// <param name="windowHandler">Window handle used for the cooperative level.</param>
        /// <returns>true on success; false if initialization failed.</returns>
        bool InitializeDirectSound(IntPtr windowHandler)
        {
            try
            {
                // Initialize the direct sound interface for the default sound device.
                _DirectSound = new DirectSound();

                // Priority level is needed so the primary buffer format can be modified.
                if (_DirectSound.SetCooperativeLevel(windowHandler, CooperativeLevel.Priority) != Result.Ok)
                    return false;

                // Describe and acquire the primary buffer on the default device.
                var primaryDescription = new SoundBufferDescription
                {
                    Flags = BufferFlags.PrimaryBuffer | BufferFlags.ControlVolume,
                    AlgorithmFor3D = Guid.Empty
                };
                _PrimaryBuffer = new PrimarySoundBuffer(_DirectSound, primaryDescription);

                // Primary buffer format: 44.1 kHz, 16-bit, stereo PCM.
                _PrimaryBuffer.Format = new WaveFormat(44100, 16, 2);
            }
            catch (Exception)
            {
                return false;
            }

            return true;
        }
コード例 #4
0
ファイル: Program.cs プロジェクト: numo16/SharpDX
        /// <summary>
        /// DirectSound demo: fills a secondary buffer with a 220 Hz tone
        /// (10 Hz vibrato) and loops it while the form is shown.
        /// </summary>
        static void Main(string[] args)
        {
            // Create the device on the default sound card.
            DirectSound directSound = new DirectSound();

            var form = new Form();
            form.Text = "SharpDX - DirectSound Demo";

            // Set Cooperative Level to PRIORITY (priority level can call the SetFormat and Compact methods)
            directSound.SetCooperativeLevel(form.Handle, CooperativeLevel.Priority);

            // Create PrimarySoundBuffer (holds no data of its own).
            var primaryBufferDesc = new SoundBufferDescription();
            primaryBufferDesc.Flags = BufferFlags.PrimaryBuffer;
            primaryBufferDesc.AlgorithmFor3D = Guid.Empty;

            var primarySoundBuffer = new PrimarySoundBuffer(directSound, primaryBufferDesc);

            // Keep the primary buffer playing so the mixer stays active.
            primarySoundBuffer.Play(0, PlayFlags.Looping);

            // Default WaveFormat Stereo 44100 16 bit
            WaveFormat waveFormat = new WaveFormat();

            // Create SecondarySoundBuffer sized for 60 s of audio.
            var secondaryBufferDesc = new SoundBufferDescription();
            secondaryBufferDesc.BufferBytes = waveFormat.ConvertLatencyToByteSize(60000);
            secondaryBufferDesc.Format = waveFormat;
            secondaryBufferDesc.Flags = BufferFlags.GetCurrentPosition2 | BufferFlags.ControlPositionNotify | BufferFlags.GlobalFocus |
                                        BufferFlags.ControlVolume | BufferFlags.StickyFocus;
            secondaryBufferDesc.AlgorithmFor3D = Guid.Empty;
            var secondarySoundBuffer = new SecondarySoundBuffer(directSound, secondaryBufferDesc);

            // Get capabilities (actual allocated size) from the secondary buffer.
            var capabilities = secondarySoundBuffer.Capabilities;

            // Lock the whole buffer for writing.
            DataStream dataPart2;
            var dataPart1 = secondarySoundBuffer.Lock(0, capabilities.BufferBytes, LockFlags.EntireBuffer, out dataPart2);

            // Fill the buffer with a 220 Hz cosine carrying a 10 Hz, +/-4 Hz vibrato.
            int numberOfSamples = capabilities.BufferBytes / waveFormat.BlockAlign;
            for (int i = 0; i < numberOfSamples; i++)
            {
                double vibrato = Math.Cos(2 * Math.PI * 10.0 * i / waveFormat.SampleRate);
                short value = (short)(Math.Cos(2 * Math.PI * (220.0 + 4.0 * vibrato) * i / waveFormat.SampleRate) * 16384); // Not too loud
                dataPart1.Write(value); // left channel
                dataPart1.Write(value); // right channel
            }

            // Unlock the buffer so DirectSound can play it.
            secondarySoundBuffer.Unlock(dataPart1, dataPart2);

            // Play the song
            secondarySoundBuffer.Play(0, PlayFlags.Looping);

            Application.Run(form);

            // FIX: release the COM resources on exit; the original leaked the
            // device and both buffers.
            secondarySoundBuffer.Dispose();
            primarySoundBuffer.Dispose();
            directSound.Dispose();
        }
コード例 #5
0
ファイル: DXWavePlayer.cs プロジェクト: Ttxman/NanoTrans
        /// <summary>
        /// Creates a streaming DirectSound player bound to the application's main window.
        /// </summary>
        /// <param name="device">Index into DirectSound.GetDevices(); values out of range fall back to 0.</param>
        /// <param name="BufferByteSize">Size of one refill chunk in bytes (minimum 1000); the DirectSound buffer holds InternalBufferSizeMultiplier chunks.</param>
        /// <param name="fillProc">Callback used by the request thread to fetch more audio data.</param>
        public DXWavePlayer(int device, int BufferByteSize, DataRequestDelegate fillProc)
        {
            if (BufferByteSize < 1000)
            {
                throw new ArgumentOutOfRangeException("BufferByteSize", "minimal size of buffer is 1000 bytes");
            }

            _buffersize = BufferByteSize;
            _requestproc = fillProc;
            // Fall back to the default device (index 0) when the index is out of range.
            var devices = DirectSound.GetDevices();
            if (device <= 0 || device >= devices.Count)
            {
                device = 0;
            }

            _outputDevice = new DirectSound(devices[device].DriverGuid);

            // Attach to the main WPF window handle with Priority cooperative level.
            System.Windows.Interop.WindowInteropHelper wh = new System.Windows.Interop.WindowInteropHelper(Application.Current.MainWindow);
            _outputDevice.SetCooperativeLevel(wh.Handle, CooperativeLevel.Priority);

            _buffDescription = new SoundBufferDescription();
            _buffDescription.Flags = BufferFlags.ControlPositionNotify | BufferFlags.ControlFrequency | BufferFlags.ControlEffects | BufferFlags.GlobalFocus | BufferFlags.GetCurrentPosition2;
            _buffDescription.BufferBytes = BufferByteSize * InternalBufferSizeMultiplier;

            // Fixed output format: 16 kHz, 16-bit, mono.
            WaveFormat format = new WaveFormat(16000, 16, 1);

            _buffDescription.Format = format;

            _soundBuffer = new SecondarySoundBuffer(_outputDevice, _buffDescription);
            _synchronizer = new AutoResetEvent(false);

            // One play-cursor notification per chunk; each signals _synchronizer.
            NotificationPosition[] nots = new NotificationPosition[InternalBufferSizeMultiplier];

            NotificationPosition not;
            // NOTE(review): offsets start at byte 800 rather than 0 — presumably to wake
            // the refill thread shortly after a chunk boundary is crossed; confirm intent.
            int bytepos = 800;
            for (int i = 0; i < InternalBufferSizeMultiplier; i++)
            {
                not = new NotificationPosition();
                not.Offset = bytepos;
                not.WaitHandle = _synchronizer;
                nots[i] = not;
                bytepos += BufferByteSize;
            }

            _soundBuffer.SetNotificationPositions(nots);

            // Background thread that waits on the notifications and requests data.
            _waitThread = new Thread(new ThreadStart(DataRequestThread)) { Name = "MyWavePlayer.DataRequestThread" };
            _waitThread.Start();
        }
コード例 #6
0
ファイル: AudioPlayer.cs プロジェクト: iejeecee/mediaviewer
        /// <summary>
        /// Creates an idle player: no device or buffer is allocated until
        /// initialize() is called.
        /// </summary>
        public AudioPlayer()
        {
            // Device and buffer are created lazily.
            directSound = null;
            audioBuffer = null;

            // Default to full, un-muted volume.
            volume = 1;
            isMuted = false;

            // Reset all playback-position bookkeeping.
            pts = 0;
            offsetBytes = 0;
            ptsPos = 0;
            prevPtsPos = 0;
            playLoops = 0;
            ptsLoops = 0;
        }
コード例 #7
0
        /// <summary>
        /// Creates an idle streaming buffer tied to the given owner control;
        /// the DirectSound device is created later during initialization.
        /// </summary>
        public StreamingAudioBuffer(Windows.Forms.Control owner)
        {
            this.owner = owner;

            // Device and buffer are created lazily.
            directSound = null;
            audioBuffer = null;

            // Default to full, un-muted volume.
            volume = 1;
            muted = false;

            // Reset all playback-position bookkeeping.
            pts = 0;
            offsetBytes = 0;
            ptsPos = 0;
            prevPtsPos = 0;
            playLoops = 0;
            ptsLoops = 0;
        }
コード例 #8
0
 /// <summary>
 /// Creates the sound factory: a shared DirectSound device (optional — audio is
 /// disabled when creation fails), the audio-provider chain, and the request worker.
 /// </summary>
 public Sounds()
 {
     try
     {
         // Attach the device to the main display window. FIX: use the named
         // CooperativeLevel.Priority (DSSCL_PRIORITY = 2) instead of the
         // decompiler's magic cast "(CooperativeLevel)2", and drop the
         // redundant base-class/interface casts.
         Sounds.m_Device = new SharpDX.DirectSound.DirectSound();
         Sounds.m_Device.SetCooperativeLevel(Engine.m_Display.Handle, CooperativeLevel.Priority);
     }
     catch (Exception ex)
     {
         // Sound is optional: log the failure and continue without a device.
         Debug.Trace("Error constructing sound factory");
         Debug.Error(ex);
         Sounds.m_Device = null;
     }

     // The provider chain is built even when m_Device is null.
     this._audioProvider = new ManagedAudioProvider(new PhysicalAudioProvider(Sounds.m_Device));
     this.queue          = new BlockingCollection<Sounds.SoundRequest>(new ConcurrentQueue<Sounds.SoundRequest>());
     this.worker         = this.SpawnWorker();
 }
コード例 #9
0
ファイル: AudioPlayer.cs プロジェクト: iejeecee/mediaviewer
        public AudioPlayer(System.Windows.Forms.Control owner)
        {
            directSound = null;
            this.owner = owner;

            audioBuffer = null;
            volume = 0;
            muted = false;

            pts = 0;
            offsetBytes = 0;
            ptsPos = 0;
            prevPtsPos = 0;
            playLoops = 0;
            ptsLoops = 0;

            
        }
コード例 #10
0
 /// <summary>
 /// Tries to duplicate one of the already-created buffers on the given device;
 /// returns the first successful duplicate, or null if none could be cloned.
 /// </summary>
 private SecondarySoundBuffer CloneExistingBuffer(SharpDX.DirectSound.DirectSound soundDevice)
 {
     for (int index = 0; index < this._buffers.Count; ++index)
     {
         try
         {
             var duplicate = (SecondarySoundBuffer)soundDevice.DuplicateSoundBuffer(this._buffers[index]);
             if (duplicate != null)
             {
                 return duplicate;
             }
         }
         catch
         {
             // Duplication is best-effort; fall through to the next buffer.
         }
     }
     return null;
 }
コード例 #11
0
 /// <summary>
 /// Shuts down the sound factory: drains the request queue, waits for the
 /// worker to exit, then releases the provider chain and the shared device.
 /// </summary>
 public void Dispose()
 {
     // Stop accepting requests and let the worker finish what is queued.
     if (this.queue != null)
     {
         this.queue.CompleteAdding();
         this.worker.Wait();
         this.queue.Dispose();
         this.queue  = null;
         this.worker = null;
     }

     if (this._audioProvider != null)
     {
         this._audioProvider.Dispose();
     }

     // Finally release the shared DirectSound device, if one was created.
     if (Sounds.m_Device != null)
     {
         Sounds.m_Device.Dispose();
         Sounds.m_Device = null;
     }
 }
コード例 #12
0
ファイル: Sound.cs プロジェクト: nyx1220/sharpdx-examples
 /// <summary>
 /// Hook for subclasses that load audio data into a buffer; this base
 /// implementation performs no work and reports success.
 /// </summary>
 /// <param name="audioFile">Name of the file to load.</param>
 /// <param name="directSound">Device the data would be loaded onto.</param>
 /// <returns>Always true at this level.</returns>
 protected virtual bool LoadAudioFile(string audioFile, DirectSound directSound)
 {
     // Nothing to load here; derived classes override this.
     return true;
 }
コード例 #13
0
ファイル: Sound.cs プロジェクト: nyx1220/sharpdx-examples
        /// <summary>
        /// Releases the DirectSound objects in reverse order of creation:
        /// the primary buffer first, then the device. Safe to call repeatedly.
        /// </summary>
        void ShutdownDirectSound()
        {
            // Release the primary sound buffer pointer.
            if (_PrimaryBuffer != null)
            {
                _PrimaryBuffer.Dispose();
                _PrimaryBuffer = null;
            }

            // Release the direct sound interface pointer.
            if (_DirectSound != null)
            {
                _DirectSound.Dispose();
                _DirectSound = null;
            }
        }
コード例 #14
0
ファイル: AviSynthPlayer.cs プロジェクト: MaksHDR/xvid4psp
        /// <summary>
        /// Stops playback and releases all video and audio resources under the lock.
        /// Sets IsError and rethrows if cleanup itself fails; always frees the pinned
        /// handle, resets the media timer, and unloads AviSynth.
        /// </summary>
        public void Close()
        {
            lock (locker)
            {
                try
                {
                    //Video
                    if (thread_v != null)
                    {
                        IsAborted = true;
                        playing_v.Set();      //Unpause PlayingLoop so its IsAborted check runs
                        processing.WaitOne(); //Wait for the current frame to finish if its read is still in progress
                        thread_v.Join();      //Wait for PlayingLoop to terminate
                        thread_v = null;
                    }
                    if (BitmapSource != null)
                    {
                        BitmapSource = null;
                    }
                    // Free the video buffer by the same mechanism that allocated it.
                    if (VBuffer != IntPtr.Zero)
                    {
                        if (IsInterop) UnmapViewOfFile(VBuffer);
                        else Marshal.FreeHGlobal(VBuffer);
                        VBuffer = IntPtr.Zero;
                    }
                    if (MemSection != IntPtr.Zero)
                    {
                        CloseHandle(MemSection);
                        MemSection = IntPtr.Zero;
                    }

                    //Audio
                    if (thread_a != null)
                    {
                        IsAborted = true;
                        playing_a.Set();
                        thread_a.Join();
                        thread_a = null;
                    }
                    if (AudioBuffer != null)
                    {
                        AudioBuffer.Dispose();
                        AudioBuffer = null;
                    }
                    if (AudioDevice != null)
                    {
                        AudioDevice.Dispose();
                        AudioDevice = null;
                    }
                }
                catch (Exception)
                {
                    IsError = true;
                    throw;
                }
                finally
                {
                    // Always release the pinned buffer handle even if cleanup threw.
                    if (h.IsAllocated)
                        h.Free();

                    AdjustMediaTimer(0);

                    UnloadAviSynth();
                }
            }
        }
コード例 #15
0
        /// <summary>
        /// (Re)creates the DirectSound secondary buffer for the given PCM layout.
        /// Any previously allocated buffer is released first; errors are logged
        /// (and shown for DirectSound-specific failures) rather than thrown.
        /// </summary>
        /// <param name="samplesPerSecond">Sample rate in Hz (e.g. 44100).</param>
        /// <param name="bytesPerSample">Bytes per sample per channel (e.g. 2 for 16-bit).</param>
        /// <param name="nrChannels">Number of audio channels.</param>
        /// <param name="bufferSizeBytes">Total size of the streaming buffer in bytes.</param>
        public void initialize(int samplesPerSecond, int bytesPerSample, int nrChannels,
            int bufferSizeBytes)
        {

            try
            {
                // Lazily create the device; Priority level allows format changes.
                if (directSound == null)
                {

                    directSound = new DirectSound();
                    directSound.SetCooperativeLevel(owner.Handle, CooperativeLevel.Priority);
                }

                // Drop any buffer left over from a previous initialization.
                releaseResources();

                this.bufferSizeBytes = bufferSizeBytes;
                this.bytesPerSample = bytesPerSample;
                this.samplesPerSecond = samplesPerSecond;
                this.nrChannels = nrChannels;

                SoundBufferDescription desc = new SoundBufferDescription();
                desc.BufferBytes = bufferSizeBytes;
                desc.Flags = BufferFlags.Defer | BufferFlags.GlobalFocus |
                    BufferFlags.ControlVolume | BufferFlags.ControlFrequency |
                    BufferFlags.GetCurrentPosition2;

                // Derive the PCM format fields from the requested layout.
                int blockAlign = nrChannels * bytesPerSample;
                int averageBytesPerSecond = samplesPerSecond * blockAlign;

                WaveFormat format = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm,
                    samplesPerSecond, nrChannels, averageBytesPerSecond, blockAlign, bytesPerSample * 8);

                desc.Format = format;

                // Pre-built silence block used to pad the buffer during underruns.
                // FIX: removed the redundant Array.Clear — a new byte[] is zero-filled,
                // and deleted the large blocks of commented-out legacy code.
                silence = new byte[bufferSizeBytes];

                audioBuffer = new SecondarySoundBuffer(directSound, desc);

                // Restore the persisted volume and reset playback bookkeeping.
                Volume = volume;
                offsetBytes = 0;
                prevPlayPos = 0;
                ptsPos = 0;
                prevPtsPos = 0;
                playLoops = 0;
                ptsLoops = 0;

                log.Info("Direct Sound Initialized");

            }
            catch (SharpDX.SharpDXException e)
            {
                // DirectSound-specific failures are surfaced to the user.
                log.Error("Error initializing Direct Sound", e);
                MessageBox.Show("Error initializing Direct Sound: " + e.Message, "Direct Sound Error");
            }
            catch (Exception e)
            {
                log.Error("Error initializing Direct Sound", e);
            }
        }
コード例 #16
0
ファイル: MainWindow.cs プロジェクト: Zastai/POLUtils
 /// <summary>
 /// Starts playback of the given file: (re)creates the device if needed, fills a
 /// secondary buffer with the first chunk, and spawns a refill thread when the
 /// stream is longer than the buffer.
 /// </summary>
 private void PlayFile(FileInfo FI)
 {
     // NOTE(review): lock(this) is an anti-pattern (external code can deadlock us);
     // a private lock object would be preferable but requires a new field.
     lock (this) {
     // Lazily create the device; Normal level leaves the primary format alone.
     if (this.DS == null) {
       this.DS = new DirectSound();
       this.DS.SetCooperativeLevel(this.Handle, CooperativeLevel.Normal);
     }
     this.StopPlayback();
     // Buffer format comes from the file; size is the fixed streaming buffer size.
     var bd = new SoundBufferDescription {
       Format      = new WaveFormat(FI.AudioFile.SampleRate, 16, FI.AudioFile.Channels),
       BufferBytes = this.AudioBufferSize,
       Flags       = BufferFlags.GlobalFocus | BufferFlags.StickyFocus | BufferFlags.ControlVolume | BufferFlags.GetCurrentPosition2 | BufferFlags.ControlPositionNotify
     };
     this.CurrentBuffer = new SecondarySoundBuffer(this.DS, bd);
     if (this.AudioUpdateTrigger == null)
       this.AudioUpdateTrigger = new AutoResetEvent(false);
     // Evenly spaced play-cursor notifications, one per chunk, all signalling
     // the same trigger that wakes the refill thread.
     var chunkSize = this.AudioBufferSize / this.AudioBufferMarkers;
     var updatePositions = new NotificationPosition[this.AudioBufferMarkers];
     for (var i = 0; i < this.AudioBufferMarkers; ++i) {
       updatePositions[i] = new NotificationPosition() {
     WaitHandle = this.AudioUpdateTrigger,
     Offset = chunkSize * i
       };
     }
     this.CurrentBuffer.SetNotificationPositions(updatePositions);
     this.CurrentStream = FI.AudioFile.OpenStream();
     // Pre-fill the whole buffer; pad with zeros when the file is shorter.
     {
       var bytes = new byte[this.CurrentBuffer.Capabilities.BufferBytes];
       var readbytes = this.CurrentStream.Read(bytes, 0, this.CurrentBuffer.Capabilities.BufferBytes);
       if (readbytes < this.CurrentBuffer.Capabilities.BufferBytes)
     Array.Clear(bytes, readbytes, this.CurrentBuffer.Capabilities.BufferBytes - readbytes);
       DataStream audiodata2;
       var audiodata1 = this.CurrentBuffer.Lock(0, this.CurrentBuffer.Capabilities.BufferBytes, LockFlags.EntireBuffer, out audiodata2);
       audiodata1.Write(bytes, 0, this.CurrentBuffer.Capabilities.BufferBytes);
       this.CurrentBuffer.Unlock(audiodata1, audiodata2);
     }
     // More data left: stream it on a background thread and loop the buffer.
     if (this.CurrentStream.Position < this.CurrentStream.Length) {
       this.AudioUpdateTrigger.Reset();
       this.AudioUpdateThread = new Thread(this.AudioUpdate);
       this.AudioUpdateThread.Start();
       this.btnPause.Enabled = true;
       this.btnStop.Enabled = true;
       this.AudioIsLooping = true;
     }
     // Whole file fit in the buffer: play it once, no refill thread needed.
     else {
       this.CurrentStream.Close();
       this.CurrentStream = null;
       this.AudioIsLooping = false;
     }
     this.CurrentBuffer.Play(0, (this.AudioIsLooping ? PlayFlags.Looping : PlayFlags.None));
       }
 }
コード例 #17
0
ファイル: AviSynthPlayer.cs プロジェクト: MaksHDR/xvid4psp
        /// <summary>
        /// Lazily creates the DirectSound device and the secondary audio buffer,
        /// choosing between WaveFormatExtensible and plain WaveFormat based on the
        /// AviSynth script variable OPT_UseWaveExtensible. Also allocates and pins
        /// the half-buffer staging array.
        /// </summary>
        private void SetUpAudioDevice()
        {
            if (AudioDevice == null)
            {
                AudioDevice = new DirectSound();
                AudioDevice.SetCooperativeLevel(new WindowInteropHelper(Owner).Handle, CooperativeLevel.Normal);
            }

            if (AudioBuffer == null)
            {
                BufferDesc = new SoundBufferDescription();
                BufferDesc.Flags = BufferFlags.GlobalFocus | BufferFlags.GetCurrentPosition2 | BufferFlags.ControlVolume;
                BufferDesc.AlgorithmFor3D = Guid.Empty;

                //Audio output in AviSynth (via DirectShow\VFW)
                //v2.57   |   global OPT_AllowFloatAudio = True (by default FLOAT is converted to 16-bit; 32-bit and 24-bit are output as-is)
                //v2.58   |   global OPT_UseWaveExtensible = True (by default WaveFormatExtensible is not used, even for multichannel/high-bit audio)
                //v2.60   |   global OPT_dwChannelMask(int v) (overrides the default channel configuration when WaveFormatExtensible is used)
                //FFCHANNEL_LAYOUT in FFMS2

                if (reader.GetVarBoolean("OPT_UseWaveExtensible", true)) //We use our own default here
                {
                    #region WaveFormatExtensible
                    WaveFormatExtensible format = new WaveFormatExtensible(reader.Samplerate, reader.BitsPerSample, reader.Channels);

                    //SharpDX assumes that all 32-bit audio is FLOAT
                    if (reader.Clip.SampleType == AudioSampleType.INT32)
                        format.GuidSubFormat = new Guid("00000001-0000-0010-8000-00aa00389b71"); //PCM

                    #region channels
                    //AviSynth (default)
                    //Chan. Mask MS channels
                    //----- ------ -----------------------
                    //1   0x0004 FC                      4
                    //2   0x0003 FL FR                   3
                    //3   0x0007 FL FR FC                7
                    //4   0x0033 FL FR BL BR             51
                    //5   0x0037 FL FR FC BL BR          55
                    //6   0x003F FL FR FC LF BL BR       63
                    //7   0x013F FL FR FC LF BL BR BC    319
                    //8   0x063F FL FR FC LF BL BR SL SR 1599

                    // Explicit mask from the script wins; otherwise map by channel count.
                    int mask = reader.GetVarInteger("OPT_dwChannelMask", -1);
                    if (mask != -1) format.ChannelMask = (Speakers)mask;
                    else if (reader.Channels == 1) format.ChannelMask = Speakers.Mono; //4
                    //else if (reader.Channels == 2) format.ChannelMask = Speakers.Stereo; //3
                    else if (reader.Channels == 3) format.ChannelMask = Speakers.Stereo | Speakers.FrontCenter; //7 //TwoPointOne; //11
                    else if (reader.Channels == 4) format.ChannelMask = Speakers.Quad; //51
                    else if (reader.Channels == 5) format.ChannelMask = Speakers.Quad | Speakers.FrontCenter; //55  //FourPointOne; //59
                    //else if (reader.Channels == 6) format.ChannelMask = Speakers.FivePointOne; //63
                    else if (reader.Channels == 7) format.ChannelMask = Speakers.FivePointOne | Speakers.BackCenter; //319
                    else if (reader.Channels == 8) format.ChannelMask = Speakers.SevenPointOneSurround; //1599  //SevenPointOne; //255
                    /*else //This approach was already used by the constructor call; unclear how correct it is, or how correct everything above is..
                    {
                        //NAudio\SharpDX
                        int dwChannelMask = 0;
                        for (int n = 0; n < 1; n++) dwChannelMask |= (1 << n);
                        format.ChannelMask = (Speakers)dwChannelMask;

                        //ch mask (SlimDX) [SharpDX]
                        //1    1 (FrontLeft) [FrontLeft]
                        //2    3 (Stereo) [FrontLeft | FrontRight]
                        //3    7 (Mono) [FrontLeft | FrontRight | FrontCenter]
                        //4   15 (Mono) [FrontLeft | FrontRight | FrontCenter | LowFrequency]
                        //5   31 (TwoPointOne | Mono | BackLeft) [FrontLeft | FrontRight | FrontCenter | LowFrequency | BackLeft]
                        //6   63 (FivePointOne) [FrontLeft | FrontRight | FrontCenter | LowFrequency | BackLeft | BackRight]
                        //7  127 (FivePointOne | FrontLeftOfCenter) [FrontLeft | FrontRight | FrontCenter | LowFrequency | BackLeft | BackRight | FrontLeftOfCenter]
                        //8  255 (SevenPointOne) [FrontLeft | FrontRight | FrontCenter | LowFrequency | BackLeft | BackRight | FrontLeftOfCenter | FrontRightOfCenter]
                    }*/
                    #endregion

                    samplesPerHalfBuff = (int)((format.SampleRate / 2) * k); //Number of samples per half buffer
                    // NOTE(review): despite its name, bytesPerSample stores the full
                    // frame size (BlockAlign = all channels) — confirm intended meaning.
                    bytesPerSample = format.BlockAlign;

                    BufferDesc.BufferBytes = samplesPerHalfBuff * format.BlockAlign * 2; //Number of bytes for the full buffer
                    BufferDesc.Format = format;
                    #endregion
                }
                else
                {
                    #region WaveFormat
                    // Plain WaveFormat path: FLOAT clips use IEEE float, everything else PCM.
                    WaveFormatEncoding tag = (reader.Clip.SampleType == AudioSampleType.FLOAT) ? WaveFormatEncoding.IeeeFloat : WaveFormatEncoding.Pcm;
                    WaveFormat format = WaveFormat.CreateCustomFormat(tag, reader.Samplerate, reader.Channels, reader.Clip.AvgBytesPerSec, reader.Channels * reader.Clip.BytesPerSample, reader.BitsPerSample);

                    samplesPerHalfBuff = (int)((format.SampleRate / 2) * k); //Number of samples per half buffer
                    bytesPerSample = format.BlockAlign;

                    BufferDesc.BufferBytes = samplesPerHalfBuff * format.BlockAlign * 2; //Number of bytes for the full buffer
                    BufferDesc.Format = format;
                    #endregion
                }

                AudioBuffer = new SecondarySoundBuffer(AudioDevice, BufferDesc);

                // Staging array covers half the buffer; pinned so native code can fill it.
                if (ABuffer == null)
                    ABuffer = new byte[BufferDesc.BufferBytes / 2];

                if (!h.IsAllocated)
                    h = GCHandle.Alloc(ABuffer, GCHandleType.Pinned);
            }
        }
コード例 #18
0
ファイル: AudioPlayer.cs プロジェクト: iejeecee/mediaviewer
        /// <summary>
        /// (Re)creates the DirectSound secondary buffer for the given PCM layout.
        /// Any previously allocated buffer is released first; failures are wrapped
        /// in a VideoPlayerException.
        /// </summary>
        /// <param name="samplesPerSecond">Sample rate in Hz (e.g. 44100).</param>
        /// <param name="bytesPerSample">Bytes per sample per channel (e.g. 2 for 16-bit).</param>
        /// <param name="nrChannels">Number of audio channels.</param>
        /// <param name="bufferSizeBytes">Total size of the streaming buffer in bytes.</param>
        public void initialize(int samplesPerSecond, int bytesPerSample, int nrChannels,
            int bufferSizeBytes)
        {

            try
            {
                // Lazily create the device; Priority level allows format changes.
                if (directSound == null)
                {
                    directSound = new DirectSound();
                    directSound.SetCooperativeLevel(owner.Handle, CooperativeLevel.Priority);                
                }
             
                // Drop any buffer left over from a previous initialization.
                releaseResources();

                this.bufferSizeBytes = bufferSizeBytes;
                this.bytesPerSample = bytesPerSample;
                this.samplesPerSecond = samplesPerSecond;
                this.nrChannels = nrChannels;

                SoundBufferDescription desc = new SoundBufferDescription();
                desc.BufferBytes = bufferSizeBytes;
                desc.Flags = BufferFlags.Defer | BufferFlags.GlobalFocus |
                    BufferFlags.ControlVolume | BufferFlags.ControlFrequency |
                    BufferFlags.GetCurrentPosition2;

                // Derive the PCM format fields from the requested layout.
                int blockAlign = nrChannels * bytesPerSample;
                int averageBytesPerSecond = samplesPerSecond * blockAlign;

                WaveFormat format = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm,
                    samplesPerSecond, nrChannels, averageBytesPerSecond, blockAlign, bytesPerSample * 8);

                desc.Format = format;

                // Pre-built silence block; a new array is already zero-filled, so the
                // original's Array.Clear was redundant and has been removed.
                // NOTE(review): this class uses char[] (2 bytes per element) while the
                // sibling implementation uses byte[] — confirm the intended element size.
                silence = new char[bufferSizeBytes];

                audioBuffer = new SecondarySoundBuffer(directSound, desc);

                // Restore the persisted volume and reset playback bookkeeping.
                Volume = volume;
                offsetBytes = 0;
                prevPlayPos = 0;
                ptsPos = 0;
                prevPtsPos = 0;
                playLoops = 0;
                ptsLoops = 0;

            }       
            catch (Exception e)
            {
                throw new VideoPlayerException("Error initializing Direct Sound: " + e.Message, e);             
            }
        }
コード例 #19
0
        /// <summary>Disposes resources.</summary>
        public override void Dispose()
        {
            // Drop the control reference so it can be collected.
            _control = null;

            var device = _directSound;
            if (device != null)
            {
                _directSound = null;
                // Disposing the COM wrapper can throw during shutdown; ignore it.
                try { device.Dispose(); }
                catch { }
            }
        }
コード例 #20
0
ファイル: WaveSound.cs プロジェクト: nyx1220/sharpdx-examples
        /// <summary>
        /// Loads a 44.1 kHz, 16-bit, stereo PCM wave file into a new secondary buffer.
        /// </summary>
        /// <param name="audioFile">File name, relative to <c>SystemConfiguration.DataFilePath</c>.</param>
        /// <param name="directSound">Device used to create the secondary buffer.</param>
        /// <returns>true when the file was parsed and copied into the buffer; false otherwise.</returns>
        protected override bool LoadAudioFile(string audioFile, DirectSound directSound)
        {
            try
            {
                // FIX: 'using' guarantees the file handle is released even when
                // validation fails or an exception is thrown; the original only
                // closed the reader on the success path.
                using (var reader = new BinaryReader(File.OpenRead(SystemConfiguration.DataFilePath + audioFile)))
                {
                    // Read in the wave file header.
                    chunkId = new string(reader.ReadChars(4));
                    chunkSize = reader.ReadInt32();
                    format = new string(reader.ReadChars(4));
                    subChunkId = new string(reader.ReadChars(4));
                    subChunkSize = reader.ReadInt32();
                    audioFormat = (WaveFormatEncoding)reader.ReadInt16();
                    numChannels = reader.ReadInt16();
                    sampleRate = reader.ReadInt32();
                    bytesPerSecond = reader.ReadInt32();
                    blockAlign = reader.ReadInt16();
                    bitsPerSample = reader.ReadInt16();
                    dataChunkId = new string(reader.ReadChars(4));
                    dataSize = reader.ReadInt32();

                    // Accept only RIFF/WAVE files with a "fmt " chunk, PCM encoding,
                    // stereo, 44.1 kHz, 16-bit samples, followed by a "data" chunk.
                    if (chunkId != "RIFF" || format != "WAVE" || subChunkId.Trim() != "fmt" || audioFormat != WaveFormatEncoding.Pcm || numChannels != 2 || sampleRate != 44100 || bitsPerSample != 16 || dataChunkId != "data")
                        return false;

                    // Describe the secondary buffer that will hold the wave data.
                    var buffer = new SoundBufferDescription();
                    buffer.Flags = BufferFlags.ControlVolume;
                    buffer.BufferBytes = dataSize;
                    buffer.Format = new WaveFormat(44100, 16, 2);
                    buffer.AlgorithmFor3D = Guid.Empty;

                    // Create a sound buffer with the specific buffer settings.
                    _SecondaryBuffer = new SecondarySoundBuffer(directSound, buffer);

                    // Read the wave payload while the reader is still open.
                    var waveData = reader.ReadBytes(dataSize);

                    // Lock the secondary buffer to write wave data into it.
                    DataStream waveBufferData2;
                    var waveBufferData1 = _SecondaryBuffer.Lock(0, dataSize, LockFlags.None, out waveBufferData2);

                    // Copy the wave data into the buffer.
                    waveBufferData1.Write(waveData, 0, dataSize);

                    // Unlock the secondary buffer after the data has been written to it.
                    var result = _SecondaryBuffer.Unlock(waveBufferData1, waveBufferData2);

                    if (result != Result.Ok)
                        return false;
                }
            }
            catch
            {
                return false;
            }

            return true;
        }
コード例 #21
0
ファイル: AudioPlayer.cs プロジェクト: iejeecee/mediaviewer
        /// <summary>
        /// Releases the managed DirectSound objects. Only acts when called from
        /// the disposing path (<paramref name="safe"/> is true); buffer first,
        /// then the device.
        /// </summary>
        protected virtual void Dispose(bool safe)
        {
            if (!safe)
            {
                return;
            }

            if (audioBuffer != null)
            {
                audioBuffer.Dispose();
                audioBuffer = null;
            }

            if (directSound != null)
            {
                directSound.Dispose();
                directSound = null;
            }
        }
コード例 #22
0
 /// <summary>Initialises the sound system.</summary>
 public override void Initialise()
 {
     // Create the device and bind it to the owning control; Priority level
     // allows the primary buffer format to be changed later.
     _directSound = new DirectSound();
     _directSound.SetCooperativeLevel(_control.Handle, CooperativeLevel.Priority);
 }
コード例 #23
0
        /// <summary>
        ///   Constructs a new Audio Output Device.
        /// </summary>
        /// 
        /// <param name="device">Global identifier of the audio output device.</param>
        /// <param name="owner">The owner window handle.</param>
        /// <param name="samplingRate">The sampling rate of the device.</param>
        /// <param name="channels">The number of channels of the device.</param>
        /// 
        public AudioOutputDevice(Guid device, IntPtr owner, int samplingRate, int channels)
        {
            this.owner = owner;
            this.samplingRate = samplingRate;
            this.channels = channels;
            this.device = device;

            // NOTE(review): 'ds' is a local — it is neither stored in a field nor
            // disposed, so the device object's lifetime rests on the buffer keeping
            // it alive; confirm this is intentional.
            DirectSound ds = new DirectSound(device);
            ds.SetCooperativeLevel(owner, CooperativeLevel.Priority);


            // Set the output format: IEEE float PCM, 8 seconds of audio.
            WaveFormat waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(samplingRate, channels);
            bufferSize = 8 * waveFormat.AverageBytesPerSecond;


            // Setup the secondary buffer
            SoundBufferDescription desc2 = new SoundBufferDescription();
            desc2.Flags =
                BufferFlags.GlobalFocus |
                BufferFlags.ControlPositionNotify |
                BufferFlags.GetCurrentPosition2;
            desc2.BufferBytes = bufferSize;
            desc2.Format = waveFormat;

            buffer = new SecondarySoundBuffer(ds, desc2);


            var list = new List<NotificationPosition>();
            int numberOfPositions = 32;

            // Set notification for buffer percentiles; each position gets its own
            // event, offset one byte past the exact fraction boundary.
            for (int i = 0; i < numberOfPositions; i++)
            {
                list.Add(new NotificationPosition()
                {              
                    WaitHandle = new AutoResetEvent(false),
                    Offset = i * bufferSize / numberOfPositions + 1,
                });
            }

            // Set notification for end of buffer
            list.Add(new NotificationPosition()
            {
                Offset = bufferSize - 1,
                WaitHandle = new AutoResetEvent(false)
            });

            // Indices of the positions marking each half of the buffer.
            firstHalfBufferIndex = numberOfPositions / 2;
            secondHalfBufferIndex = numberOfPositions;

            notifications = list.ToArray();

            // Sanity-check the half-buffer and end-of-buffer offsets.
            System.Diagnostics.Debug.Assert(notifications[firstHalfBufferIndex].Offset == bufferSize / 2 + 1);
            System.Diagnostics.Debug.Assert(notifications[secondHalfBufferIndex].Offset == bufferSize - 1);

            // Make a copy of the wait handles
            waitHandles = new WaitHandle[notifications.Length];
            for (int i = 0; i < notifications.Length; i++)
                waitHandles[i] = notifications[i].WaitHandle;

            // Store all notification positions
            buffer.SetNotificationPositions(notifications);
        }