Example #1
        /// <summary>
        /// Returns the audio buffer for a file.
        /// </summary>
        /// <param name="source">Path to the file.</param>
        private async Task <AudioBufferAndMetaData> GetBuffer(string source)
        {
            if (cachedBuffers.ContainsKey(source))
            {
                return(cachedBuffers[source]);
            }

            var stream = (await(await StorageFile.GetFileFromApplicationUriAsync(new Uri(source)))
                          .OpenReadAsync()).AsStreamForRead();

            lock (lockObject)
            {
                var soundstream = new SoundStream(stream);
                var buffer      = new AudioBufferAndMetaData
                {
                    Stream             = soundstream.ToDataStream(),
                    AudioBytes         = (int)soundstream.Length,
                    Flags              = BufferFlags.EndOfStream,
                    WaveFormat         = soundstream.Format,
                    DecodedPacketsInfo = soundstream.DecodedPacketsInfo
                };

                cachedBuffers[source] = buffer;
                return(buffer);
            }
        }
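The AudioBufferAndMetaData type used above is not part of SharpDX; Examples #1, #8 and #23 assume a small project-specific helper that extends AudioBuffer with the wave format and decoded-packet info needed later for SubmitSourceBuffer. A minimal sketch of such a class, inferred from the usage in these examples (the inheritance and auto-properties are assumptions; the member names come from the code above):

        // Hypothetical helper assumed by Examples #1, #8 and #23: an AudioBuffer
        // that also carries the metadata needed to create and feed a SourceVoice.
        using SharpDX.Multimedia;
        using SharpDX.XAudio2;

        public class AudioBufferAndMetaData : AudioBuffer
        {
            public WaveFormat WaveFormat { get; set; }
            public uint[] DecodedPacketsInfo { get; set; }
        }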
Example #2
        private MyWave LoadSound(string name)
        {
            if (name.IndexOf(".wav", StringComparison.Ordinal) == -1)
            {
                name = Path.Combine(soundsDir, $"{name}.wav");
            }

            var fileInfo = new FileInfo(name);

            if (!fileInfo.Exists)
            {
                return(null);
            }
            var soundStream = new SoundStream(File.OpenRead(name));
            var waveFormat  = soundStream.Format;

            var buffer = new AudioBuffer
            {
                Stream = soundStream.ToDataStream(), AudioBytes = (int)soundStream.Length, Flags = BufferFlags.EndOfStream
            };

            soundStream.Close();
            var wave = new MyWave {
                Buffer = buffer, WaveFormat = waveFormat, DecodedPacketsInfo = soundStream.DecodedPacketsInfo
            };

            Sounds[fileInfo.Name.Split('.').First()] = wave;
            Sounds[fileInfo.Name] = wave;
            return(wave);
        }
Example #3
        private void PlayWaveHelper(string soundFile, string soundText)
        {
            var filepath = GetFilePath(soundFile, soundText).Result;

            var nativefilestream = new NativeFileStream(filepath, NativeFileMode.Open, NativeFileAccess.Read);

            using (var soundstream = new SoundStream(nativefilestream))
            {
                var waveFormat = soundstream.Format;
                var buffer     = new AudioBuffer
                {
                    Stream     = soundstream.ToDataStream(),
                    AudioBytes = (int)soundstream.Length,
                    Flags      = BufferFlags.EndOfStream
                };

                if (_sourceVoice != null)
                {
                    _sourceVoice.DestroyVoice();
                    _sourceVoice.Dispose();
                }

                _sourceVoice = new SourceVoice(_xAudio, waveFormat);

                _sourceVoice.SubmitSourceBuffer(buffer, soundstream.DecodedPacketsInfo);
                _sourceVoice.BufferEnd += obj =>
                {
                    _lock.Set();
                };

                _sourceVoice.Start();
            }
        }
Example #4
        private void DirectSound_Start(object sender, EventArgs e)
        {
            soundStream = new SoundStream(File.OpenRead(loadFilePath));
            WaveFormat format = soundStream.Format;

            AudioBuffer buffer = new AudioBuffer
            {
                Stream     = soundStream.ToDataStream(),
                AudioBytes = (int)soundStream.Length,
                Flags      = BufferFlags.EndOfStream
            };

            soundStream.Close();

            sourceVoice = new SourceVoice(xAudio2, format, true);
            sourceVoice.SubmitSourceBuffer(buffer, soundStream.DecodedPacketsInfo);
            sourceVoice.Start();

            if (directSoundEffect == 0)
            {
                SharpDX.XAPO.Fx.Echo effectEcho       = new SharpDX.XAPO.Fx.Echo(xAudio2);
                EffectDescriptor     effectDescriptor = new EffectDescriptor(effectEcho);
                sourceVoice.SetEffectChain(effectDescriptor);
                sourceVoice.EnableEffect(0);
            }
            else if (directSoundEffect == 1)
            {
                SharpDX.XAPO.Fx.Reverb effectReverb     = new SharpDX.XAPO.Fx.Reverb(xAudio2);
                EffectDescriptor       effectDescriptor = new EffectDescriptor(effectReverb);
                sourceVoice.SetEffectChain(effectDescriptor);
                sourceVoice.EnableEffect(0);
            }
        }
Example #5
        private void PlatformLoadAudioStream(Stream s)
        {
            SoundStream soundStream = new SoundStream(s);

            _format     = soundStream.Format;
            _dataStream = soundStream.ToDataStream();

            _buffer = new AudioBuffer()
            {
                Stream     = _dataStream,
                AudioBytes = (int)_dataStream.Length,
                Flags      = BufferFlags.EndOfStream,
                PlayBegin  = 0,
                PlayLength = (int)_dataStream.Length / (2 * soundStream.Format.Channels),
                Context    = new IntPtr(42),
            };

            _loopedBuffer = new AudioBuffer()
            {
                Stream     = _dataStream,
                AudioBytes = (int)_dataStream.Length,
                Flags      = BufferFlags.EndOfStream,
                LoopBegin  = 0,
                LoopLength = (int)_dataStream.Length / (2 * soundStream.Format.Channels),
                LoopCount  = AudioBuffer.LoopInfinite,
                Context    = new IntPtr(42),
            };
        }
Example #6
        public override async Task <ISoundPlayerBuilder <IStorageFileEx> > BuildAsync()
        {
            if (Input == null)
            {
                return(this);
            }

            DisposeInternally();

            IRandomAccessStreamEx streamOpenFile = await Input.OpenReadAsync();

            using (Stream nativeStream = streamOpenFile.AsStreamForRead())
            {
                using (var soundStream = new SoundStream(nativeStream))
                {
                    Description = Input.Name;

                    WaveFormat = soundStream.Format;

                    _dataStream = soundStream.ToDataStream();

                    SourceVoice = VoicePool.GetVoice(WaveFormat);

                    SourceVoice.PlayWith(_dataStream);
                }
            }

            return(this);
        }
Example #7
        public EffectSound(string filename)
        {
            lock (loadedSounds)
            {
                EffectSound existingSound;
                if (loadedSounds.TryGetValue(filename, out existingSound))
                {
                    Stream = existingSound.Stream;
                    Buffer = existingSound.Buffer;
                    return;
                }
            }

            using (var fileStream = File.OpenRead(filename))
            {
                Stream = new SoundStream(fileStream);
                Buffer = new AudioBuffer
                {
                    Stream     = Stream.ToDataStream(),
                    AudioBytes = (int)Stream.Length,
                    Flags      = BufferFlags.EndOfStream
                };
                Stream.Close();
            }

            lock (loadedSounds)
            {
                loadedSounds[filename] = this;
            }
        }
Example #8
        public static Task PlaySound(Stream stream)
        {
            var soundstream = new SoundStream(stream);
            var buffer      = new AudioBufferAndMetaData()
            {
                Stream             = soundstream.ToDataStream(),
                AudioBytes         = (int)soundstream.Length,
                Flags              = BufferFlags.EndOfStream,
                WaveFormat         = soundstream.Format,
                DecodedPacketsInfo = soundstream.DecodedPacketsInfo
            };

            var sourceVoice = new SourceVoice(XAudio, buffer.WaveFormat, true);

            sourceVoice.SetVolume(Volume, SharpDX.XAudio2.XAudio2.CommitNow);
            sourceVoice.SubmitSourceBuffer(buffer, buffer.DecodedPacketsInfo);


            //var effect = new SharpDX.XAPO.Fx.Echo(XAudio);
            //EffectDescriptor effectDescriptor = new EffectDescriptor(effect);
            //sourceVoice.SetEffectChain(effectDescriptor);
            //sourceVoice.EnableEffect(0);

            sourceVoice.Start();

            TaskCompletionSource <object> mediaDone = new TaskCompletionSource <object>();

            sourceVoice.StreamEnd += () => {
                mediaDone.SetResult(null);
            };

            return(mediaDone.Task);
        }
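Example #8 hands the caller a Task that completes when XAudio2 raises StreamEnd, so playback can be awaited. A usage sketch, assuming the method lives in a static class called AudioPlayer and that "chime.wav" is a PCM wave file (both names are hypothetical), inside an async method:

            using (var file = File.OpenRead("chime.wav"))
            {
                // Resumes only after the StreamEnd callback sets the task result.
                await AudioPlayer.PlaySound(file);
            }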
Example #9
        public SoundEffect(string soundFxPath, bool infiniteLoop)
        {
            _xaudio = new XAudio2();
            var masteringsound = new MasteringVoice(_xaudio);

            var nativefilestream = new NativeFileStream(
                soundFxPath,
                NativeFileMode.Open,
                NativeFileAccess.Read,
                NativeFileShare.Read);

            _soundstream = new SoundStream(nativefilestream);
            _waveFormat  = _soundstream.Format;
            _buffer      = new AudioBuffer
            {
                Stream     = _soundstream.ToDataStream(),
                AudioBytes = (int)_soundstream.Length,
                Flags      = BufferFlags.EndOfStream
            };
            if (infiniteLoop)
            {
                _buffer.LoopCount = AudioBuffer.LoopInfinite;
            }
            isStarted   = false;
            sourceVoice = new SourceVoice(_xaudio, _waveFormat, true);
        }
Example #10
        public MyInMemoryWave(MySoundData cue, string path, MyWaveBank owner, bool streamed = false)
        {
            using (var stream = MyFileSystem.OpenRead(path))
            {
                m_owner      = owner;
                m_path       = path;
                m_stream     = new SoundStream(stream);
                m_waveFormat = m_stream.Format;
                m_buffer     = new AudioBuffer
                {
                    Stream     = m_stream.ToDataStream(),
                    AudioBytes = (int)m_stream.Length,
                    Flags      = BufferFlags.None
                };

                if (cue.Loopable)
                {
                    m_buffer.LoopCount = AudioBuffer.LoopInfinite;
                }

                m_stream.Close();

                Streamed = streamed;
            }
        }
Example #11
        private void Load()
        {
            m_SoundStream = new SoundStream(Sound.Stream);
            var waveFormat = m_SoundStream.Format;

            m_AudioBuffer = new AudioBuffer
            {
                Stream     = m_SoundStream.ToDataStream(),
                AudioBytes = (int)m_SoundStream.Length,
                Flags      = BufferFlags.EndOfStream
            };
            m_SoundStream.Close();

            m_Audio            = new SourceVoice(m_Device, waveFormat, true);
            m_Audio.BufferEnd += (context) =>
            {
                if (Background)
                {
                    if (IsPlaying)
                    {
                        m_Audio.SubmitSourceBuffer(m_AudioBuffer, m_SoundStream.DecodedPacketsInfo);
                        m_Audio.Start();
                    }
                }
                else
                {
                    m_PlaySync.Signal();
                    IsPlaying = false;
                }
            };
        }
Example #12
        /// <summary>
        /// Opens the file and loads it into the stream.
        /// </summary>
        /// <param name="filename">Path to the sound file.</param>
        public EngineSound(string filename)
        {
            Stream fileStream = new NativeFileStream(filename, NativeFileMode.Open, NativeFileAccess.Read, NativeFileShare.Read);

            SoundStream = new SoundStream(fileStream);
            AudioBuffer = new AudioBuffer(SoundStream.ToDataStream());
            fileStream.Dispose();
        }
Example #13
        public void PlaySound(string file, double volume)
        {
            if (!FileNameLookup.ContainsKey(file))
            {
                file = FileNameLookup[file] = new System.IO.FileInfo(file).FullName;
            }
            else
            {
                file = FileNameLookup[file];
            }
            if (!CurrentVoices.ContainsKey(file))
            {
                CurrentVoices.Add(file, new Queue <SourceVoice>());
            }
            //if (CurrentVoices[file].Count > 0)
            //{
            //    var voice = CurrentVoices[file].Dequeue();
            //    voice.SetVolume((float)volume);
            //    voice.Start();
            //    return;
            //}
            SoundData data;

            if (!Voices.ContainsKey(file))
            {
                using (var nativeFilestream = new NativeFileStream(file, NativeFileMode.Open, NativeFileAccess.Read))
                    using (var soundstream = new SoundStream(nativeFilestream))
                    {
                        var waveformat = soundstream.Format;
                        var buffer     = new AudioBuffer()
                        {
                            Stream     = soundstream.ToDataStream(),
                            AudioBytes = (int)soundstream.Length,
                            Flags      = BufferFlags.EndOfStream
                        };
                        data = new SoundData()
                        {
                            waveformat         = waveformat,
                            buffer             = buffer,
                            decodedPacketsInfo = soundstream.DecodedPacketsInfo
                        };
                        Voices.Add(file, data);
                    }
            }
            else
            {
                data = Voices[file];
            }
            var sourceVoice = new SourceVoice(xaudio, data.waveformat, true);

            sourceVoice.SubmitSourceBuffer(data.buffer, data.decodedPacketsInfo);
            sourceVoice.StreamEnd += () =>
            {
                CurrentVoices[file].Enqueue(sourceVoice);
            };
            sourceVoice.SetVolume((float)volume);
            sourceVoice.Start();
        }
Example #14
        private void PlatformLoadAudioStream(Stream s)
        {
            var soundStream = new SoundStream(s);
            var dataStream  = soundStream.ToDataStream();

            CreateBuffers(soundStream.Format,
                          dataStream,
                          0);
        }
Example #15
        private void TryLoadData(Stream fileData)
        {
            var soundStream = new SoundStream(fileData);

            format      = soundStream.Format;
            length      = CalculateLengthInSeconds(format, (int)soundStream.Length);
            buffer      = CreateAudioBuffer(soundStream.ToDataStream());
            decodedInfo = soundStream.DecodedPacketsInfo;
        }
Example #16
        public void Load()
        {
            NativeFileStream nativeFileStream = new NativeFileStream(fileName, NativeFileMode.Open, NativeFileAccess.Read);
            SoundStream      soundStream      = new SoundStream(nativeFileStream);

            stream     = soundStream.ToDataStream();
            waveFormat = soundStream.Format;
            LoadNextVoice(defaultRepeat);
            isLoaded = true;
        }
Example #17
        private void frmRomanSplashScreen_Load(object sender, EventArgs e)
        {
            this.Show();

            XAudio2  xaudio;
            Assembly assembly;

            AudioBuffer logo_buffer;
            SoundStream logo_soundstream;
            SourceVoice logo_voice;
            WaveFormat  logo_waveFormat;

            assembly = Assembly.GetExecutingAssembly();
            xaudio   = new XAudio2();
            var masteringsound = new MasteringVoice(xaudio);

            logo_soundstream = new SoundStream(assembly.GetManifestResourceStream("Arriba_Ultimate_Study_Guide.Audio.logosong.wav"));

            logo_waveFormat = logo_soundstream.Format;

            logo_buffer = new AudioBuffer
            {
                Stream     = logo_soundstream.ToDataStream(),
                AudioBytes = (int)logo_soundstream.Length,
                Flags      = BufferFlags.EndOfStream
            };

            logo_voice = new SourceVoice(xaudio, logo_waveFormat, true);
            logo_voice.SubmitSourceBuffer(logo_buffer, logo_soundstream.DecodedPacketsInfo);
            logo_voice.Start();

            //if (installOnce == false)
            //{
            //    try
            //    {
            //        RegisterFont("Arriba_Ultimate_Study_Guide.Fonts.OpenSans-Bold.ttf");
            //        //RegisterFont("Arriba_Ultimate_Study_Guide.Fonts.OpenSans-BoldItalic.ttf");
            //        //RegisterFont("Arriba_Ultimate_Study_Guide.Fonts.OpenSans-ExtraBold.ttf");
            //        //RegisterFont("Arriba_Ultimate_Study_Guide.Fonts.OpenSans-ExtraBoldItalic.ttf");
            //        //RegisterFont("Arriba_Ultimate_Study_Guide.Fonts.OpenSans-Italic.ttf");
            //        //RegisterFont("Arriba_Ultimate_Study_Guide.Fonts.OpenSans-Light.ttf");
            //        //RegisterFont("Arriba_Ultimate_Study_Guide.Fonts.OpenSans-LightItalic.ttf");
            //        //RegisterFont("Arriba_Ultimate_Study_Guide.Fonts.OpenSans-Regular.ttf");
            //        //RegisterFont("Arriba_Ultimate_Study_Guide.Fonts.OpenSans-Semibold.ttf");
            //        //RegisterFont("Arriba_Ultimate_Study_Guide.Fonts.OpenSans-SemiboldItalic.ttf");
            //    }
            //    catch (IOException error)
            //    {
            //        if (error.Source != null)
            //            MessageBox.Show("Cannot install OpenSans fonts from resource. IOException source: {0}, " + error.Source, "Arriba Ultimate Study Guide", MessageBoxButtons.OK, MessageBoxIcon.Error);
            //        throw;
            //    }
            //    installOnce = true;
            //}
        }
Example #18
 public Audio(String fileName)
 {
     device         = new XAudio2();
     masteringVoice = new MasteringVoice(device);
     stream         = new SoundStream(File.OpenRead("Content/" + fileName));
     buffer         = new AudioBuffer {
         Stream     = stream.ToDataStream(),
         AudioBytes = (int)stream.Length, Flags = BufferFlags.EndOfStream
     };
     stream.Close();
 }
Example #19
        private void PlatformLoadAudioStream(Stream s)
        {
            var soundStream  = new SoundStream(s);
            var dataStream   = soundStream.ToDataStream();
            var sampleLength = (int)(dataStream.Length / ((soundStream.Format.Channels * soundStream.Format.BitsPerSample) / 8));

            CreateBuffers(soundStream.Format,
                          dataStream,
                          0,
                          sampleLength);
        }
Example #20
 public CachedSound(string FileName)
 {
     SoundStream = new SoundStream(File.OpenRead("Ressources\\Sound\\" + FileName + ".wav"));
     Buffer      = new AudioBuffer
     {
         Stream     = SoundStream.ToDataStream(),
         AudioBytes = (int)SoundStream.Length,
         Flags      = BufferFlags.EndOfStream
     };
     SoundStream.Close();
 }
Example #21
        public void SetConvolutionStreams(Stream left, Stream right)
        {
            _leftConvolutionStream  = new SoundStream(left);
            _rightConvolutionStream = new SoundStream(right);

            var leftLength = (int)(_leftConvolutionStream.Length / sizeof(short));

            _leftConvolutionArray = Utils.MakeFloatFromShortSoundArray(_leftConvolutionStream.ToDataStream().ReadRange <short>(leftLength));

            var rightLength = (int)(_rightConvolutionStream.Length / sizeof(short));

            _rightConvolutionArray = Utils.MakeFloatFromShortSoundArray(_rightConvolutionStream.ToDataStream().ReadRange <short>(rightLength));
        }
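Utils.MakeFloatFromShortSoundArray in Example #21 is project-specific and not shown here. A plausible minimal equivalent, assuming it simply rescales 16-bit PCM samples into the [-1, 1] float range used by convolution code:

        // Hypothetical stand-in for Utils.MakeFloatFromShortSoundArray:
        // normalise 16-bit samples to floats in [-1, 1].
        private static float[] MakeFloatFromShortSoundArray(short[] samples)
        {
            var result = new float[samples.Length];
            for (int i = 0; i < samples.Length; i++)
            {
                result[i] = samples[i] / 32768f;
            }
            return result;
        }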
Example #22
        public static void PlayDirect(string file)
        {
            XAudio2        device = new XAudio2();
            MasteringVoice master = new MasteringVoice(device);
            SoundStream    stream = new SoundStream(new NativeFileStream(file, NativeFileMode.Open, NativeFileAccess.Read));
            SourceVoice    source = new SourceVoice(device, stream.Format, true);

            source.SubmitSourceBuffer(new AudioBuffer
            {
                Stream     = stream.ToDataStream(),
                AudioBytes = (int)stream.Length,
                Flags      = BufferFlags.EndOfStream
            }, stream.DecodedPacketsInfo);
            source.Start();
        }
Example #23
        private AudioBufferAndMetaData GetBuffer(string soundfile)
        {
            var nativefilestream = new NativeFileStream(soundfile, NativeFileMode.Open, NativeFileAccess.Read, NativeFileShare.Read);
            var soundstream      = new SoundStream(nativefilestream);
            var buffer           = new AudioBufferAndMetaData
            {
                Stream             = soundstream.ToDataStream(),
                AudioBytes         = (int)soundstream.Length,
                Flags              = BufferFlags.EndOfStream,
                WaveFormat         = soundstream.Format,
                DecodedPacketsInfo = soundstream.DecodedPacketsInfo
            };

            return(buffer);
        }
Example #24
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="device">Device</param>
        /// <param name="filename">Filename</param>
        public SharpAudioVoice(SharpAudioDevice device, string filename)
        {
            _stream = new SoundStream(File.OpenRead(filename));

            var waveFormat = _stream.Format;

            _voice = new SourceVoice(device.Device, waveFormat);

            _buffer = new AudioBuffer
            {
                Stream     = _stream.ToDataStream(),
                AudioBytes = (int)_stream.Length,
                Flags      = BufferFlags.EndOfStream
            };
        }
Example #25
        static private Tuple <WaveFormat, AudioBuffer, uint[], XAudio2> loadFile(string path)
        {
            var XAudio         = new XAudio2();
            var MasteringVoice = new MasteringVoice(XAudio);
            var stream         = new SoundStream(File.OpenRead(path));
            var waveFormat     = stream.Format;
            var buffer         = new AudioBuffer
            {
                Stream     = stream.ToDataStream(),
                AudioBytes = (int)stream.Length,
                Flags      = BufferFlags.EndOfStream
            };

            stream.Close();
            return(new Tuple <WaveFormat, AudioBuffer, uint[], XAudio2>(waveFormat, buffer, stream.DecodedPacketsInfo, XAudio));
        }
Example #26
        private void PlatformLoadAudioStream(Stream s, out TimeSpan duration)
        {
            var soundStream = new SoundStream(s);

            if (soundStream.Format.Encoding != WaveFormatEncoding.Pcm)
            {
                throw new ArgumentException("Ensure that the specified stream contains valid PCM mono or stereo wave data.");
            }

            var dataStream  = soundStream.ToDataStream();
            var sampleCount = (int)(dataStream.Length / ((soundStream.Format.Channels * soundStream.Format.BitsPerSample) / 8));

            CreateBuffers(soundStream.Format, dataStream, 0, sampleCount);

            duration = TimeSpan.FromSeconds((float)sampleCount / soundStream.Format.SampleRate);
        }
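The divisor in Example #26 is the block align, i.e. the number of bytes per sample frame: Channels * BitsPerSample / 8. For 16-bit stereo PCM at 44,100 Hz that is 2 * 16 / 8 = 4 bytes per frame, so a 176,400-byte data stream corresponds to 44,100 frames and a duration of exactly one second.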
Example #27
        private void InitializeAudio(AwcAudio audio, float playBegin = 0)
        {
            currentAudio = audio;
            trackLength  = audio.Length;

            if (xAudio2 == null)
            {
                xAudio2        = new XAudio2();
                masteringVoice = new MasteringVoice(xAudio2);
            }

            Stream      wavStream   = audio.GetWavStream();
            SoundStream soundStream = new SoundStream(wavStream);

            audioBuffer = new AudioBuffer
            {
                Stream     = soundStream.ToDataStream(),
                AudioBytes = (int)soundStream.Length,
                Flags      = BufferFlags.EndOfStream
            };
            if (playBegin > 0)
            {
                audioBuffer.PlayBegin = (int)(soundStream.Format.SampleRate * playBegin) / 128 * 128;
                if (playtime.IsRunning)
                {
                    playtime.Restart();
                }
                else
                {
                    playtime.Reset();
                }
                playBeginMs = (int)(playBegin * 1000);
            }
            else
            {
                playBeginMs = 0;
            }
            soundStream.Close();
            wavStream.Close();

            trackFinished = false;
            sourceVoice   = new SourceVoice(xAudio2, soundStream.Format, true);
            sourceVoice.SubmitSourceBuffer(audioBuffer, soundStream.DecodedPacketsInfo);
            sourceVoice.BufferEnd += (context) => trackFinished = true;
            sourceVoice.SetVolume((float)VolumeTrackBar.Value / 100);
        }
Example #28
        /// <summary>
        /// Loads a wave file into a SourceVoice.
        /// </summary>
        /// <param name="FileName">The path of the file to load.</param>
        /// <param name="device">The XAudio2 device to load the sound on.</param>
        /// <param name="notificationsSupport">True to enable receiving notifications on this buffer, false otherwise. A notification might include an event when this buffer starts processing data, or when the buffer has finished playing. Set this parameter to true if you wish to receive a notification when the buffer is done playing by means of the function passed to setOnEnd.</param>
        /// <returns>A populated ExtendedAudioBuffer.</returns>
        public static ExtendedAudioBuffer LoadSound(string FileName, XAudio2 device, bool notificationsSupport)
        {
            if (!File.Exists(FileName))
            {
                throw (new ArgumentException("The sound " + FileName + " could not be found."));
            }
            SoundStream stream = new SoundStream(File.OpenRead(FileName));
            WaveFormat  format = stream.Format;            // So we don't lose reference to it when we close the stream.
            AudioBuffer buffer = new AudioBuffer {
                Stream = stream.ToDataStream(), AudioBytes = (int)stream.Length, Flags = SharpDX.XAudio2.BufferFlags.EndOfStream
            };

            // We can now safely close the stream.
            stream.Close();
            SourceVoice sv = new SourceVoice(device, format, VoiceFlags.None, 5.0f, notificationsSupport);

            return(new ExtendedAudioBuffer(buffer, sv));
        }
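ExtendedAudioBuffer is likewise not a SharpDX type; from the constructor call in Example #28 it appears to be a small wrapper that keeps the buffer and its voice together until playback is started. A minimal sketch under that assumption (member names are hypothetical):

        // Hypothetical wrapper assumed by Example #28: pairs the audio data with
        // the SourceVoice so the caller can submit the buffer and start it later.
        public class ExtendedAudioBuffer
        {
            public AudioBuffer Buffer { get; }
            public SourceVoice Voice { get; }

            public ExtendedAudioBuffer(AudioBuffer buffer, SourceVoice voice)
            {
                Buffer = buffer;
                Voice = voice;
            }
        }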
Example #29
        protected SourceVoice CreateVoice(SharpDX.XAudio2.XAudio2 device, string fileName)
        {
            using (var stream = new SoundStream(File.OpenRead(fileName)))
            {
                _format = stream.Format;
                _buffer = new AudioBuffer
                {
                    Stream     = stream.ToDataStream(),
                    AudioBytes = (int)stream.Length,
                    Flags      = BufferFlags.EndOfStream
                };
                _packetsInfo = stream.DecodedPacketsInfo;
            }

            var sourceVoice = new SourceVoice(device, _format, true);

            return(sourceVoice);
        }
Example #30
        public void PlayFX(System.IO.Stream resource)
        {
            var stream     = new SoundStream(resource);
            var waveFormat = stream.Format;
            var buffer     = new AudioBuffer
            {
                Stream     = stream.ToDataStream(),
                AudioBytes = (int)stream.Length,
                Flags      = BufferFlags.EndOfStream
            };

            stream.Close();

            var sourceVoice = new SourceVoice(xaudio2, waveFormat, true);

            sourceVoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
            sourceVoice.Start();
        }
Example #31
        /// <summary>
        /// Creates a new instance of the <see cref="SoundEffect"/> class from the specified data stream.
        /// </summary>
        /// <param name="audioManager">The audio manager associated to the created instance.</param>
        /// <param name="stream">The stream containing the data from which to create the effect.</param>
        /// <param name="name">The name of the effect (optional).</param>
        /// <returns>The created effect.</returns>
        public static SoundEffect FromStream(AudioManager audioManager, Stream stream, string name = null)
        {
            if (audioManager == null)
                throw new ArgumentNullException("audioManager");

            if (stream == null)
                throw new ArgumentNullException("stream");

            var sound = new SoundStream(stream);
            var format = sound.Format;
            var decodedPacketsInfo = sound.DecodedPacketsInfo;
            var buffer = sound.ToDataStream();

            sound.Dispose();

            return audioManager.ToDisposeAudioAsset(new SoundEffect(audioManager, name, format, buffer, decodedPacketsInfo));
        }