Example #1
        private void DirectSound_Start(object sender, EventArgs e)
        {
            soundStream = new SoundStream(File.OpenRead(loadFilePath));
            WaveFormat format = soundStream.Format;

            AudioBuffer buffer = new AudioBuffer
            {
                Stream     = soundStream.ToDataStream(),
                AudioBytes = (int)soundStream.Length,
                Flags      = BufferFlags.EndOfStream
            };

            soundStream.Close();

            sourceVoice = new SourceVoice(xAudio2, format, true);
            sourceVoice.SubmitSourceBuffer(buffer, soundStream.DecodedPacketsInfo);
            sourceVoice.Start();

            // Apply the selected XAPO effect to the source voice (0 = echo, 1 = reverb).
            if (directSoundEffect == 0)
            {
                SharpDX.XAPO.Fx.Echo effectEcho       = new SharpDX.XAPO.Fx.Echo(xAudio2);
                EffectDescriptor     effectDescriptor = new EffectDescriptor(effectEcho);
                sourceVoice.SetEffectChain(effectDescriptor);
                sourceVoice.EnableEffect(0);
            }
            else if (directSoundEffect == 1)
            {
                SharpDX.XAPO.Fx.Reverb effectReverb     = new SharpDX.XAPO.Fx.Reverb(xAudio2);
                EffectDescriptor       effectDescriptor = new EffectDescriptor(effectReverb);
                sourceVoice.SetEffectChain(effectDescriptor);
                sourceVoice.EnableEffect(0);
            }
        }
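Every example on this page follows the same SharpDX XAudio2 pattern: open the file as a SoundStream, copy its contents into an AudioBuffer, close the stream (Format and DecodedPacketsInfo stay readable afterwards), then submit the buffer to a SourceVoice on a device that owns a MasteringVoice. Below is a minimal, self-contained sketch of that pattern, assuming only SharpDX.XAudio2 and SharpDX.Multimedia; the PlayFileOnce helper and the path are placeholders, not code from any of the projects shown here.

using System.IO;
using System.Threading;
using SharpDX.Multimedia;
using SharpDX.XAudio2;

static void PlayFileOnce(string path)
{
    var xaudio2        = new XAudio2();
    var masteringVoice = new MasteringVoice(xaudio2);       // must exist for anything to be audible

    var soundStream = new SoundStream(File.OpenRead(path));
    var format      = soundStream.Format;                   // cache the format before closing the stream
    var buffer      = new AudioBuffer
    {
        Stream     = soundStream.ToDataStream(),
        AudioBytes = (int)soundStream.Length,
        Flags      = BufferFlags.EndOfStream
    };
    soundStream.Close();

    var sourceVoice = new SourceVoice(xaudio2, format, true);
    sourceVoice.SubmitSourceBuffer(buffer, soundStream.DecodedPacketsInfo);
    sourceVoice.Start();

    // Crude blocking wait until the submitted buffer has finished playing.
    while (sourceVoice.State.BuffersQueued > 0)
    {
        Thread.Sleep(10);
    }

    sourceVoice.DestroyVoice();
    sourceVoice.Dispose();
    buffer.Stream.Dispose();
    masteringVoice.Dispose();
    xaudio2.Dispose();
}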
Example #2
        private MyWave LoadSound(string name)
        {
            if (name.IndexOf(".wav", StringComparison.Ordinal) == -1)
            {
                name = Path.Combine(soundsDir, $"{name}.wav");
            }

            var fileInfo = new FileInfo(name);

            if (!fileInfo.Exists)
            {
                return(null);
            }
            var soundStream = new SoundStream(File.OpenRead(name));
            var waveFormat  = soundStream.Format;

            var buffer = new AudioBuffer
            {
                Stream = soundStream.ToDataStream(), AudioBytes = (int)soundStream.Length, Flags = BufferFlags.EndOfStream
            };

            soundStream.Close();
            var wave = new MyWave {
                Buffer = buffer, WaveFormat = waveFormat, DecodedPacketsInfo = soundStream.DecodedPacketsInfo
            };

            Sounds[fileInfo.Name.Split('.').First()] = wave;
            Sounds[fileInfo.Name] = wave;
            return(wave);
        }
Example #3
        private void Load()
        {
            m_SoundStream = new SoundStream(Sound.Stream);
            var waveFormat = m_SoundStream.Format;

            m_AudioBuffer = new AudioBuffer
            {
                Stream     = m_SoundStream.ToDataStream(),
                AudioBytes = (int)m_SoundStream.Length,
                Flags      = BufferFlags.EndOfStream
            };
            m_SoundStream.Close();

            m_Audio            = new SourceVoice(m_Device, waveFormat, true);
            m_Audio.BufferEnd += (context) =>
            {
                if (Background)
                {
                    if (IsPlaying)
                    {
                        m_Audio.SubmitSourceBuffer(m_AudioBuffer, m_SoundStream.DecodedPacketsInfo);
                        m_Audio.Start();
                    }
                }
                else
                {
                    m_PlaySync.Signal();
                    IsPlaying = false;
                }
            };
        }
        public EffectSound(string filename)
        {
            lock (loadedSounds)
            {
                EffectSound existingSound;
                if (loadedSounds.TryGetValue(filename, out existingSound))
                {
                    Stream = existingSound.Stream;
                    Buffer = existingSound.Buffer;
                    return;
                }
            }

            using (var fileStream = File.OpenRead(filename))
            {
                Stream = new SoundStream(fileStream);
                Buffer = new AudioBuffer
                {
                    Stream     = Stream.ToDataStream(),
                    AudioBytes = (int)Stream.Length,
                    Flags      = BufferFlags.EndOfStream
                };
                Stream.Close();
            }

            lock (loadedSounds)
            {
                loadedSounds[filename] = this;
            }
        }
Example #5
        public MyInMemoryWave(MySoundData cue, string path, MyWaveBank owner, bool streamed = false)
        {
            using (var stream = MyFileSystem.OpenRead(path))
            {
                m_owner      = owner;
                m_path       = path;
                m_stream     = new SoundStream(stream);
                m_waveFormat = m_stream.Format;
                m_buffer     = new AudioBuffer
                {
                    Stream     = m_stream.ToDataStream(),
                    AudioBytes = (int)m_stream.Length,
                    Flags      = BufferFlags.None
                };

                if (cue.Loopable)
                {
                    m_buffer.LoopCount = AudioBuffer.LoopInfinite;
                }

                m_stream.Close();

                Streamed = streamed;
            }
        }
Example #6
        /// <summary>
        ///   Worker thread.
        /// </summary>
        ///
        private void WorkerThread()
        {
            SoundStream waveStream = null;

            try
            {
                waveStream = (stream != null)
                    ? new SoundStream(stream)
                    : new SoundStream(File.OpenRead(fileName));

                // Open the Wave stream
                decoder.Open(waveStream);

                // loop until the stop event is signalled
                while (!stopEvent.WaitOne(0, false))
                {
                    // get next frame
                    Signal s = decoder.Decode(frameSize);
                    framesReceived += s.Length;
                    bytesReceived  += decoder.Bytes;

                    if (NewFrame != null)
                    {
                        NewFrame(this, new NewFrameEventArgs(s));
                    }

                    // check current position
                    if (waveStream.Position >= waveStream.Length)
                    {
                        break;
                    }

                    // sleeping ...
                    Thread.Sleep(100);
                }
            }
            catch (Exception exception)
            {
                // provide information to clients
                if (AudioSourceError != null)
                {
                    AudioSourceError(this, new AudioSourceErrorEventArgs(exception.Message));
                }
                else
                {
                    throw;
                }
            }

            if (waveStream != null)
            {
                waveStream.Close();
                waveStream.Dispose();
                waveStream = null;
            }
        }
Example #7
 public Audio(String fileName)
 {
     device         = new XAudio2();
     masteringVoice = new MasteringVoice(device);
     stream         = new SoundStream(File.OpenRead("Content/" + fileName));
     buffer         = new AudioBuffer {
         Stream     = stream.ToDataStream(),
         AudioBytes = (int)stream.Length, Flags = BufferFlags.EndOfStream
     };
     stream.Close();
 }
Example #8
 public CachedSound(string FileName)
 {
     SoundStream = new SoundStream(File.OpenRead("Ressources\\Sound\\" + FileName + ".wav"));
     Buffer      = new AudioBuffer
     {
         Stream     = SoundStream.ToDataStream(),
         AudioBytes = (int)SoundStream.Length,
         Flags      = BufferFlags.EndOfStream
     };
     SoundStream.Close();
 }
        /// <summary>
        ///   Worker thread.
        /// </summary>
        ///
        private void WorkerThread()
        {
            this.shouldStop = false;

            SoundStream waveStream = null;

            try
            {
                waveStream = (stream != null) ?
                             new SoundStream(stream) : new SoundStream(File.OpenRead(fileName));

                // Open the Wave stream
                decoder.Open(waveStream);

                Signal signal = null;
                while (!this.shouldStop)
                {
                    // get next frame, overwriting the previous
                    signal          = decoder.Decode(frameSize, signal);
                    framesReceived += signal.NumberOfFrames;
                    bytesReceived  += decoder.NumberOfBytesRead;

                    if (NewFrame != null)
                    {
                        NewFrame(this, new NewFrameEventArgs(signal));
                    }

                    // check current position
                    if (waveStream.Position >= waveStream.Length)
                    {
                        this.shouldStop = true;
                    }
                }
            }
            catch (Exception exception)
            {
                // provide information to clients
                if (AudioSourceError == null)
                {
                    throw;
                }

                AudioSourceError(this, new AudioSourceErrorEventArgs(exception));
            }

            if (waveStream != null)
            {
                waveStream.Close();
                waveStream.Dispose();
                waveStream = null;
            }
        }
Example #10
        static private Tuple <WaveFormat, AudioBuffer, uint[], XAudio2> loadFile(string path)
        {
            var XAudio         = new XAudio2();
            var MasteringVoice = new MasteringVoice(XAudio);
            var stream         = new SoundStream(File.OpenRead(path));
            var waveFormat     = stream.Format;
            var buffer         = new AudioBuffer
            {
                Stream     = stream.ToDataStream(),
                AudioBytes = (int)stream.Length,
                Flags      = BufferFlags.EndOfStream
            };

            stream.Close();
            return(new Tuple <WaveFormat, AudioBuffer, uint[], XAudio2>(waveFormat, buffer, stream.DecodedPacketsInfo, XAudio));
        }
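        A brief usage sketch for the tuple returned by loadFile (an assumption only; the file name is a placeholder and the calling code is not from the source project):

            // Usage sketch (not from the source project): play the loaded buffer once on a new source voice.
            var loaded = loadFile("sound.wav");                        // placeholder path
            var voice  = new SourceVoice(loaded.Item4, loaded.Item1, true);
            voice.SubmitSourceBuffer(loaded.Item2, loaded.Item3);      // AudioBuffer + DecodedPacketsInfo
            voice.Start();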
Example #11
        private void InitializeAudio(AwcAudio audio, float playBegin = 0)
        {
            currentAudio = audio;
            trackLength  = audio.Length;

            if (xAudio2 == null)
            {
                xAudio2        = new XAudio2();
                masteringVoice = new MasteringVoice(xAudio2);
            }

            Stream      wavStream   = audio.GetWavStream();
            SoundStream soundStream = new SoundStream(wavStream);

            audioBuffer = new AudioBuffer
            {
                Stream     = soundStream.ToDataStream(),
                AudioBytes = (int)soundStream.Length,
                Flags      = BufferFlags.EndOfStream
            };
            if (playBegin > 0)
            {
                // Convert the start time to a sample offset, rounded down to a 128-sample boundary.
                audioBuffer.PlayBegin = (int)(soundStream.Format.SampleRate * playBegin) / 128 * 128;
                if (playtime.IsRunning)
                {
                    playtime.Restart();
                }
                else
                {
                    playtime.Reset();
                }
                playBeginMs = (int)(playBegin * 1000);
            }
            else
            {
                playBeginMs = 0;
            }
            soundStream.Close();
            wavStream.Close();

            trackFinished = false;
            sourceVoice   = new SourceVoice(xAudio2, soundStream.Format, true);
            sourceVoice.SubmitSourceBuffer(audioBuffer, soundStream.DecodedPacketsInfo);
            sourceVoice.BufferEnd += (context) => trackFinished = true;
            sourceVoice.SetVolume((float)VolumeTrackBar.Value / 100);
        }
Example #12
        public void PlayFX(System.IO.Stream resource)
        {
            var stream     = new SoundStream(resource);
            var waveFormat = stream.Format;
            var buffer     = new AudioBuffer
            {
                Stream     = stream.ToDataStream(),
                AudioBytes = (int)stream.Length,
                Flags      = BufferFlags.EndOfStream
            };

            stream.Close();

            var sourceVoice = new SourceVoice(xaudio2, waveFormat, true);

            sourceVoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
            sourceVoice.Start();
        }
Example #13
        /// <summary>
        /// Loads a wave file into a SourceVoice.
        /// </summary>
        /// <param name="FileName">The path of the file to load.</param>
        /// <param name="device">The XAudio2 device to load the sound on.</param>
        /// <param name="notificationsSupport">True to enable receiving notifications on this buffer, false otherwise. A notification might include an event when this buffer starts processing data, or when the buffer has finished playing. Set this parameter to true if you wish to receive a notification when the buffer is done playing by means of the function passed to setOnEnd.</param>
        /// <returns>A populated ExtendedAudioBuffer.</returns>
        public static ExtendedAudioBuffer LoadSound(string FileName, XAudio2 device, bool notificationsSupport)
        {
            if (!File.Exists(FileName))
            {
                throw (new ArgumentException("The sound " + FileName + " could not be found."));
            }
            SoundStream stream = new SoundStream(File.OpenRead(FileName));
            WaveFormat  format = stream.Format;            // So we don't lose reference to it when we close the stream.
            AudioBuffer buffer = new AudioBuffer {
                Stream = stream.ToDataStream(), AudioBytes = (int)stream.Length, Flags = SharpDX.XAudio2.BufferFlags.EndOfStream
            };

            // We can now safely close the stream.
            stream.Close();
            SourceVoice sv = new SourceVoice(device, format, VoiceFlags.None, 5.0f, notificationsSupport);

            return(new ExtendedAudioBuffer(buffer, sv));
        }
        public MyInMemoryWave(MyObjectBuilder_CueDefinition cue, string filename)
        {
            m_stream     = new SoundStream(File.OpenRead(filename));
            m_waveFormat = m_stream.Format;
            m_buffer     = new AudioBuffer
            {
                Stream     = m_stream.ToDataStream(),
                AudioBytes = (int)m_stream.Length,
                Flags      = BufferFlags.None
            };

            if (cue.Loopable)
            {
                m_buffer.LoopCount = AudioBuffer.LoopInfinite;
            }

            m_stream.Close();
        }
Example #15
        public SoundEffectVoice(XAudio2 device, string soundFileName, float volume)
        {
            _audioDevice = device;
            _baseVolume  = volume;

            // TODO: This could be optimised: cache & copy the audio file bytes?
            _stream = new SoundStream(File.OpenRead(soundFileName));
            var format = _stream.Format;

            _audioBuffer = new AudioBuffer
            {
                Stream     = _stream.ToDataStream(),
                AudioBytes = (int)_stream.Length,
                Flags      = BufferFlags.EndOfStream,
            };
            _stream.Close();

            _voice            = new SourceVoice(_audioDevice, format, true);
            _voice.BufferEnd += VoiceBufferEnd;
        }
Example #16
        public void LoadAudio(params AwcStream[] audios)
        {
            if (xAudio2 == null)
            {
                xAudio2        = new XAudio2();
                masteringVoice = new MasteringVoice(xAudio2);
            }

            if ((voices == null) || (voices.Length != audios.Length))
            {
                voices = new AudioVoice[audios.Length];
                for (int i = 0; i < audios.Length; i++)
                {
                    voices[i] = new AudioVoice();
                }
            }

            trackLength = 0;
            for (int i = 0; i < audios.Length; i++)
            {
                var voice = voices[i];
                var audio = audios[i];
                if (audio != voice.audio)
                {
                    voice.audio       = audio;
                    voice.trackLength = audio.Length;
                    trackLength       = Math.Max(trackLength, voice.trackLength);
                    var wavStream   = audio.GetWavStream();
                    var soundStream = new SoundStream(wavStream);
                    voice.soundStream = soundStream;
                    voice.audioBuffer = new AudioBuffer
                    {
                        Stream     = soundStream.ToDataStream(),
                        AudioBytes = (int)soundStream.Length,
                        Flags      = BufferFlags.EndOfStream
                    };
                    soundStream.Close();
                    wavStream.Close();
                }
            }
        }
        public static void playSoundFile(string filename)
        {
            var ss         = new SoundStream(File.OpenRead(filename));
            var waveFormat = ss.Format;
            var ab         = new AudioBuffer
            {
                Stream     = ss.ToDataStream(),
                AudioBytes = (int)ss.Length,
                Flags      = BufferFlags.EndOfStream
            };

            ss.Close();

            var sv = new SourceVoice(xa2, waveFormat, true);

            //sv.BufferEnd += (context) => Console.WriteLine(" => event received: end of buffer");
            //sv.StreamEnd += () => finishPlaying(sv, ab);
            sv.SubmitSourceBuffer(ab, ss.DecodedPacketsInfo);
            sv.Start();
            sources.Add(sv, ab);
        }
Example #18
        /// <summary>
        /// Play a sound file. Supported format are Wav(pcm+adpcm) and XWMA
        /// </summary>
        /// <param name="device">The device.</param>
        /// <param name="text">Text to display</param>
        /// <param name="fileName">Name of the file.</param>
        static void PLaySoundFile(XAudio2 device, string text, string fileName)
        {
            Console.WriteLine("{0} => {1} (Press esc to skip)", text, fileName);
            var stream     = new SoundStream(File.OpenRead(fileName));
            var waveFormat = stream.Format;
            var buffer     = new AudioBuffer
            {
                Stream     = stream.ToDataStream(),
                AudioBytes = (int)stream.Length,
                Flags      = BufferFlags.EndOfStream
            };

            stream.Close();

            var sourceVoice = new SourceVoice(device, waveFormat, true);

            // Adds a sample callback to check that they are working on source voices
            sourceVoice.BufferEnd += (context) => Console.WriteLine(" => event received: end of buffer");
            sourceVoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
            sourceVoice.Start();

            int count = 0;

            while (sourceVoice.State.BuffersQueued > 0 && !IsKeyPressed(ConsoleKey.Escape))
            {
                if (count == 50)
                {
                    Console.Write(".");
                    Console.Out.Flush();
                    count = 0;
                }
                Thread.Sleep(10);
                count++;
            }
            Console.WriteLine();

            sourceVoice.DestroyVoice();
            sourceVoice.Dispose();
            buffer.Stream.Dispose();
        }
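        For context, a caller creates the device and a mastering voice once and then invokes the method per file; a short usage sketch (file name and caption are placeholders):

            // Usage sketch (assumed file name); the mastering voice must exist before any source voice plays.
            var device         = new XAudio2();
            var masteringVoice = new MasteringVoice(device);
            PLaySoundFile(device, "Playing a PCM wave file", "sound.wav");
            masteringVoice.Dispose();
            device.Dispose();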
Example #19
 /// <inheritdoc />
 public int LoadSound(Stream stream)
 {
     using (SoundStream soundStream = new SoundStream(stream))
     {
         AudioBuffer audioBuffer = new AudioBuffer
         {
             Stream     = soundStream.ToDataStream(),
             AudioBytes = (int)soundStream.Length,
             Flags      = BufferFlags.EndOfStream
         };
         soundStream.Close();
         _soundBuffer.Add(
             _soundBufferIndex,
             new SoundBuffer
              {
                  AudioBuffer        = audioBuffer,
                  Format             = soundStream.Format,
                  DecodedPacketsInfo = soundStream.DecodedPacketsInfo
              });
     }
     return(_soundBufferIndex++);
 }
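 The cached entries are presumably consumed by a companion playback method elsewhere in that project; a possible sketch, where the PlaySound name and the _device field are assumptions, _soundBuffer is assumed to be a Dictionary<int, SoundBuffer>, and only the SoundBuffer members shown above are used:

 // Hypothetical companion to LoadSound: plays a previously cached buffer by its index.
 public void PlaySound(int soundId)
 {
     SoundBuffer sound = _soundBuffer[soundId];
     var voice = new SourceVoice(_device, sound.Format, true);     // _device: assumed XAudio2 field
     voice.SubmitSourceBuffer(sound.AudioBuffer, sound.DecodedPacketsInfo);
     voice.Start();
 }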
Example #20
        public static void PlayXAudioSound(object soundFile)
        {
            try
            {
                xaudio2        = new XAudio2();
                masteringVoice = new MasteringVoice(xaudio2);

                var stream     = new SoundStream(File.OpenRead(soundFile as string));
                var waveFormat = stream.Format;
                buffer = new AudioBuffer
                {
                    Stream     = stream.ToDataStream(),
                    AudioBytes = (int)stream.Length,
                    Flags      = BufferFlags.EndOfStream
                };
                stream.Close();

                sourceVoice = new SourceVoice(xaudio2, waveFormat, true);
                sourceVoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
                sourceVoice.Start();

                while (sourceVoice.State.BuffersQueued > 0)
                {
                    Thread.Sleep(1);
                }

                sourceVoice.DestroyVoice();
                sourceVoice.Dispose();
                sourceVoice = null;
                buffer.Stream.Dispose();

                xaudio2.Dispose();
                masteringVoice.Dispose();
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
Example #21
        public WavePlayer(string path)
        {
            WavePath = path;
            DicPath  = path;
            FileInfo fi = new FileInfo(path);

            if (!fi.Exists)
            {
                WavePath = Path.Combine(Domain.DefaultPath, "default.wav");
            }
            _stream = new SoundStream(File.OpenRead(WavePath));
            WaveFormat waveFormat = _stream.Format;

            _buffer = new AudioBuffer
            {
                Stream     = _stream.ToDataStream(),
                AudioBytes = (int)_stream.Length,
                Flags      = BufferFlags.EndOfStream
            };
            _stream.Close();
            _sourceVoice            = new SourceVoice(Device, waveFormat, true);
            _sourceVoice.BufferEnd += OnSourceVoiceOnBufferEnd;
        }
        /// <summary>
        ///   Closes the underlying stream.
        /// </summary>
        ///
        public void Close()
        {
#if !NETSTANDARD1_4
            waveStream.Close();
#endif
        }
        public SoundBufferedDataSource(FileInfo FileName)
        {
            //Extract the data from the sound file and create a buffer from it

            //Create the source; it did not exist before
            Volume = 1.0f;

            SoundStream soundstream;

            switch (FileName.Extension)
            {
            case ".wav":
                //Load the sound and bufferize it
                soundstream = new SoundStream(File.OpenRead(FileName.FullName));
                WaveFormat  = soundstream.Format;
                AudioBuffer = new AudioBuffer()
                {
                    Stream     = soundstream.ToDataStream(),
                    AudioBytes = (int)soundstream.Length,
                    Flags      = BufferFlags.EndOfStream
                };

                soundstream.Close();
                soundstream.Dispose();
                break;

            case ".wma":     // NOT a good idea: this can be a HUGE buffer; better to stream a WMA file!
                //New data stream
                using (FileStream fileStream = new FileStream(FileName.FullName, FileMode.Open, FileAccess.Read))
                {
                    var audioDecoder    = new AudioDecoder(fileStream);
                    var outputWavStream = new MemoryStream();

                    var wavWriter = new WavWriter(outputWavStream);

                    // Write the WAV file
                    wavWriter.Begin(audioDecoder.WaveFormat);
                    // Decode the samples from the input file and output PCM raw data to the WAV stream.
                    wavWriter.AppendData(audioDecoder.GetSamples());
                    // Close the wav writer.
                    wavWriter.End();

                    outputWavStream.Position = 0;
                    soundstream = new SoundStream(outputWavStream);

                    WaveFormat  = soundstream.Format;
                    AudioBuffer = new AudioBuffer()
                    {
                        Stream     = soundstream.ToDataStream(),
                        AudioBytes = (int)soundstream.Length,
                        Flags      = BufferFlags.EndOfStream
                    };

                    soundstream.Close();
                    soundstream.Dispose();
                    outputWavStream.Dispose();
                    audioDecoder.Dispose();
                }

                break;

            default:
                break;
            }
        }
Example #24
 /// <summary>
 ///   Closes the underlying stream.
 /// </summary>
 ///
 public void Close()
 {
     waveStream.Close();
 }