/// <summary>
/// Loads an MP3 music track from the Content folder and creates a matching
/// 16-bit PCM source voice on the device's XAudio2 engine.
/// </summary>
/// <param name="filename">Music file name without extension or path.</param>
/// <param name="device">Audio device providing the XAudio2 engine.</param>
public XAudioMusic(string filename, XAudioDevice device) : base(filename, device)
{
    // NOTE(review): assumes Mp3Stream takes ownership of the FileStream and
    // disposes it — confirm, otherwise the handle leaks.
    musicStream = new Mp3Stream(File.OpenRead("Content/" + filename + ".mp3"));
    // Voice format mirrors the decoded stream: its sample rate and channel
    // count, fixed at 16 bits per sample.
    source = new SourceVoice(device.XAudio2, new WaveFormat(musicStream.Samplerate, 16, musicStream.Channels), false);
    CreateBuffers();
}
/// <summary>
/// Plays the given sound.
/// </summary>
/// <param name="soundFile">The sound file to be played.</param>
/// <param name="volume">The volume of the sound to be played.</param>
public async Task PlaySoundAsync(CachedSoundFile soundFile, float volume = 1f)
{
    soundFile.EnsureNotNullOrDisposed(nameof(soundFile));
    volume.EnsurePositive(nameof(volume));

    if (!m_xaudioDevice.IsLoaded)
    {
        return;
    }

    // Play the sound on the device
    using (var sourceVoice = new XA.SourceVoice(m_xaudioDevice.Device, soundFile.Format, true))
    {
        // Register the created voice
        m_playingVoices.Add(sourceVoice);
        try
        {
            // RunContinuationsAsynchronously keeps the await continuation off
            // the XAudio2 callback thread that raises BufferEnd.
            TaskCompletionSource<object> complSource =
                new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
            sourceVoice.SubmitSourceBuffer(soundFile.AudioBuffer, soundFile.DecodedPacketsInfo);
            sourceVoice.SetVolume(volume);
            sourceVoice.BufferEnd += (pointer) =>
            {
                complSource.TrySetResult(null);
            };
            sourceVoice.Start();

            // Await finished playing
            await complSource.Task;

            // Destroy the voice object
            // A NullReference is raised later, if we forget this call
            sourceVoice.DestroyVoice();
        }
        finally
        {
            // FIX: previously the voice was only removed on the success path,
            // so any exception during submit/playback left it registered in
            // m_playingVoices forever.
            m_playingVoices.Remove(sourceVoice);
        }
    }
}
/// <summary>
/// Releases the source voice, the buffered audio stream, and finally the
/// XAudio2 engine itself.
/// </summary>
/// <param name="disposing">True when called from a Dispose call (release managed state).</param>
public override void Dispose(bool disposing)
{
    if (!disposing)
        return;

    if (sourceVoice != null)
    {
        // Drain and stop before releasing the native voice.
        sourceVoice.FlushSourceBuffers();
        sourceVoice.Stop();
        sourceVoice.Dispose();
        sourceVoice = null;
    }

    if (xAudioBuffer != null)
    {
        xAudioBuffer.Stream.Dispose();
        xAudioBuffer.Stream = null;
        xAudioBuffer = null;
    }

    if (xAudio != null)
    {
        // Stop the engine before disposing it.
        xAudio.StopEngine();
        xAudio.Dispose();
        xAudio = null;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AudioPlayer" /> class.
/// Wires up the decoder, source voice, synchronization events, a ring of
/// three reusable buffers, and starts the long-running playback task.
/// </summary>
/// <param name="xaudio2">The xaudio2 engine.</param>
/// <param name="audioStream">The input audio stream.</param>
public AudioPlayer(XAudio2 xaudio2, Stream audioStream)
{
    this.xaudio2 = xaudio2;
    audioDecoder = new AudioDecoder(audioStream);
    // NOTE(review): presumably 0 = voice flags (none) and 1.0f = maximum
    // frequency ratio (no pitch shifting) — confirm against the SourceVoice
    // overload used here.
    sourceVoice = new SourceVoice(xaudio2, audioDecoder.WaveFormat,0,1.0f);
    localVolume = 1.0f;

    sourceVoice.BufferEnd += sourceVoice_BufferEnd;
    sourceVoice.Start();

    // Events coordinating the playback thread with buffer callbacks.
    bufferEndEvent = new AutoResetEvent(false);
    playEvent = new ManualResetEvent(false);
    waitForPlayToOutput = new ManualResetEvent(false);
    clock = new Stopwatch();

    // Pre-allocate buffers: a ring of 3 audio buffers backed by unmanaged
    // memory blocks so decoded packets can be submitted without re-allocating.
    audioBuffersRing = new AudioBuffer[3];
    memBuffers = new DataPointer[audioBuffersRing.Length];
    for (int i = 0; i < audioBuffersRing.Length; i++)
    {
        audioBuffersRing[i] = new AudioBuffer();
        memBuffers[i].Size = 32 * 1024; // default size 32Kb
        memBuffers[i].Pointer = Utilities.AllocateMemory(memBuffers[i].Size);
    }

    // Initialize to stopped
    State = AudioPlayerState.Stopped;

    // Starts the playing thread (LongRunning hints a dedicated thread rather
    // than a pool thread, since PlayAsync loops for the player's lifetime).
    playingTask = Task.Factory.StartNew(PlayAsync, TaskCreationOptions.LongRunning);
}
/// <summary>
/// Creates a fresh source voice for the stored buffer and begins playback.
/// </summary>
/// <param name="loop">Value written to the buffer's LoopCount before playing (default -1).</param>
public void startSound(int loop = -1)
{
    buffer.LoopCount = loop;
    sourceVoice = new SourceVoice(device, stream.Format, true);
    sourceVoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
    sourceVoice.Start();
}
/// <summary>
/// Starts playback of the loaded sound on a new voice and flags this
/// instance as playing.
/// </summary>
public void Play()
{
    _sourceVoice = new SourceVoice(_xaudio, _waveFormat, true);
    _sourceVoice.SubmitSourceBuffer(_buffer, _soundstream.DecodedPacketsInfo);
    _sourceVoice.Start();

    IsPlaying = true;
}
/// <summary>
/// Creates an instance bound to an existing effect and native voice.
/// </summary>
/// <param name="device">Audio device that owns this instance.</param>
/// <param name="effect">The sound effect this instance plays.</param>
/// <param name="voice">The native source voice backing playback.</param>
internal SoundEffectInstance(AudioDevice device, SoundEffect effect, SourceVoice voice)
{
    this.device = device;
    _effect = effect;
    _voice = voice;
}
/// <summary>
/// Creates an instance bound to an existing effect and native voice.
/// </summary>
/// <param name="device">Sound system that owns this instance.</param>
/// <param name="effect">The sound effect this instance plays.</param>
/// <param name="voice">The native source voice backing playback.</param>
internal SoundEffectInstance(SoundSystem device, SoundEffect effect, SourceVoice voice)
{
    this.device = device;
    _effect = effect;
    _voice = voice;
}
/// <summary>
/// Restarts the current song from the beginning at the default sample rate.
/// </summary>
public void RestartSong()
{
    voice.Stop();
    // FIX: release the old native voice before replacing the reference.
    // Previously it was only stopped, so every restart leaked a SourceVoice
    // on the XAudio2 engine.
    voice.FlushSourceBuffers();
    voice.DestroyVoice();
    voice.Dispose();

    voice = new SourceVoice(xAudio, firstWave.Buffer.WaveFormat, true);
    voice.SourceSampleRate = defaultSampleRate;
    voice.SubmitSourceBuffer(firstWave.Buffer, firstWave.Buffer.DecodedPacketsInfo);
    voice.Start();
}
/// <summary>
/// Wraps an already-created native voice owned by the given pool.
/// All playback state flags start out cleared.
/// </summary>
/// <param name="owner">Pool that manages this voice's lifetime.</param>
/// <param name="voice">Native XAudio2 voice to wrap.</param>
public MySourceVoice(MySourceVoicePool owner, SourceVoice voice)
{
    m_owner = owner;
    m_voice = voice;

    m_isPlaying = false;
    m_isPaused = false;
    m_isLoopable = false;
}
/// <summary>
/// Creates a standalone voice for the given source format, hooks the
/// buffer-end callback, and primes it via <c>Flush</c>.
/// </summary>
/// <param name="device">XAudio2 engine to create the voice on.</param>
/// <param name="sourceFormat">Wave format of the audio this voice will play.</param>
public MySourceVoice(XAudio2 device, WaveFormat sourceFormat)
{
    m_voice = new SourceVoice(device, sourceFormat, true);
    m_voice.BufferEnd += OnStopPlaying;
    m_valid = true;

    Flush();
}
/// <summary>
/// Loads the given unmanaged sound data into slot <paramref name="index"/>:
/// parses it, builds an end-of-stream audio buffer, and creates its voice.
/// </summary>
/// <param name="index">Slot to store the stream, buffer, and voice in.</param>
/// <param name="sourceStream">Raw sound data to load.</param>
public void Add(int index, UnmanagedMemoryStream sourceStream)
{
    var parsed = new SoundStream(sourceStream);
    streams[index] = parsed;

    buffers[index] = new AudioBuffer
    {
        Stream = parsed.ToDataStream(),
        AudioBytes = (int)parsed.Length,
        Flags = BufferFlags.EndOfStream
    };

    voices[index] = new SourceVoice(audio, parsed.Format);
}
/// <summary>
/// Drops all buffer references, then flushes, destroys, and disposes the
/// native source voice.
/// </summary>
private void DisposeSource()
{
    for (var index = 0; index < NumberOfBuffers; index++)
    {
        buffers[index] = null;
    }

    source.FlushSourceBuffers();
    source.DestroyVoice();
    source.Dispose();
    source = null;
}
/// <summary>
/// Loads the sound we want to play into slot <paramref name="index"/>:
/// parses the raw data, builds an end-of-stream buffer, and creates a voice.
/// </summary>
/// <param name="index">Slot to store the stream, buffer, and voice in.</param>
/// <param name="stream">Raw sound data to load.</param>
public void Add(int index, UnmanagedMemoryStream stream)
{
    var soundStream = new SoundStream(stream);
    _soundStreams[index] = soundStream;

    _audioBuffers[index] = new AudioBuffer
    {
        Stream = soundStream.ToDataStream(),
        AudioBytes = (int)soundStream.Length,
        Flags = BufferFlags.EndOfStream
    };

    _sourceVoices[index] = new SourceVoice(_audio, soundStream.Format);
}
/// <summary>
/// Sets up the XAudio2 engine with a mastering voice at half volume and
/// reserves storage for the requested number of sound slots.
/// </summary>
/// <param name="sounds">Number of sound slots to allocate.</param>
public SoundManager(int sounds)
{
    _audio = new XAudio2();
    _masteringVoice = new MasteringVoice(_audio);
    _masteringVoice.SetVolume(0.5f);

    _soundStreams = new SoundStream[sounds];
    _audioBuffers = new AudioBuffer[sounds];
    _sourceVoices = new SourceVoice[sounds];
}
/// <summary>
/// Copies the incoming data into a seekable stream, decodes it as music, and
/// creates a 16-bit PCM source voice. Does nothing when no XAudio2 engine is
/// available.
/// </summary>
/// <param name="fileData">Raw music file data; read fully, not kept.</param>
private void TryLoadData(Stream fileData)
{
    // FIX: the original `(device as XAudioDevice).XAudio` dereferenced the
    // `as` result without a null check, throwing NullReferenceException when
    // device is not an XAudioDevice. Cache the cast once and bail out safely.
    var xAudioDevice = device as XAudioDevice;
    if (xAudioDevice == null || xAudioDevice.XAudio == null)
        return;

    // Buffer into a seekable MemoryStream so the factory can parse freely.
    var stream = new MemoryStream();
    fileData.CopyTo(stream);
    stream.Position = 0;

    musicStream = new MusicStreamFactory().Load(stream);
    source = new SourceVoice(xAudioDevice.XAudio,
        new WaveFormat(musicStream.Samplerate, 16, musicStream.Channels), false);
}
/// <summary>
/// Restarts the current wave on a new voice whose sample rate is scaled by
/// <paramref name="modifier"/>, changing playback speed (and pitch).
/// </summary>
/// <param name="modifier">Multiplier applied to the default sample rate (e.g. 2.0 doubles speed).</param>
public void ChangeSpeed(double modifier)
{
    AudioBufferAndMetaData newBuffer = firstWave.Buffer;
    voice.Stop();
    // NOTE(review): this reads 3 bytes at offset 100000 into the buffer's
    // current position pointer — intent unclear; looks like a leftover
    // experiment. Confirm it is actually required before relying on it.
    newBuffer.Stream.Read(firstWave.Buffer.Stream.PositionPointer, 100000, 3);
    // NOTE(review): the old voice is replaced without DestroyVoice/Dispose —
    // likely a native voice leak; verify against the pool/owner lifecycle.
    voice = new SourceVoice(xAudio, newBuffer.WaveFormat, true);
    voice.SourceSampleRate = (int)(defaultSampleRate * modifier);
    voice.SubmitSourceBuffer(newBuffer, newBuffer.DecodedPacketsInfo);
    voice.Start();
}
/// <summary>
/// Plays the sound contained in the given stream once at the requested
/// volume; cleanup is delegated to the buffer-end handler.
/// </summary>
/// <param name="soundStream">Stream containing the sound data.</param>
/// <param name="volume">Playback volume (1 = full).</param>
public static void PlaySound(Stream soundStream, float volume = 1)
{
    var buffer = GetBuffer(soundStream);

    var sourceVoice = new SourceVoice(XAudio, buffer.WaveFormat, true);
    sourceVoice.SetVolume(volume, SharpDX.XAudio2.XAudio2.CommitNow);
    sourceVoice.SubmitSourceBuffer(buffer, buffer.DecodedPacketsInfo);
    sourceVoice.BufferEnd += SourceVoiceOnBufferEnd;
    sourceVoice.Start();
}
/// <summary>
/// Looks up a cached wave by key, remembers it as the current wave, and
/// starts playing it at the default sample rate. Does nothing for unknown keys.
/// </summary>
/// <param name="key">Identifier of the wave to play.</param>
public void PlayWave(string key)
{
    firstWave = waves.FirstOrDefault(x => x.Key == key);
    if (firstWave == null)
        return;

    voice = new SourceVoice(xAudio, firstWave.Buffer.WaveFormat, true);
    voice.SourceSampleRate = defaultSampleRate;
    voice.SubmitSourceBuffer(firstWave.Buffer, firstWave.Buffer.DecodedPacketsInfo);
    voice.Start();
}
/// <summary>
/// Releases the native voice (when disposing managed state) and clears the
/// voice and effect references unconditionally.
/// </summary>
/// <param name="disposing">True when invoked from Dispose rather than a finalizer.</param>
private void PlatformDispose(bool disposing)
{
    if (disposing && _voice != null)
    {
        _voice.DestroyVoice();
        _voice.Dispose();
    }

    _voice = null;
    _effect = null;
}
/// <summary>
/// Loads the video stream and creates a 16-bit PCM source voice for its
/// audio track. Load failures only surface while a debugger is attached.
/// </summary>
/// <param name="fileData">Raw video file data.</param>
protected override void LoadData(Stream fileData)
{
    try
    {
        video = new VideoStreamFactory().Load(fileData, "Content/" + Name);
        // NOTE(review): `(device as XAudioDevice).XAudio` throws
        // NullReferenceException if device has another type — confirm callers
        // always supply an XAudioDevice (the catch below would swallow it).
        source = new SourceVoice((device as XAudioDevice).XAudio,
            new WaveFormat(video.Samplerate, 16, video.Channels), false);
    }
    catch (Exception ex)
    {
        // Deliberate best-effort: without a debugger the video is silently
        // left unloaded; with one attached the failure is wrapped and thrown.
        if (Debugger.IsAttached)
            throw new VideoNotFoundOrAccessible(Name, ex);
    }
}
/// <summary>
/// Constructor: loads the sound file, creates a voice matching its format,
/// and builds an end-of-stream audio buffer from the decoded data.
/// </summary>
/// <param name="device">Device</param>
/// <param name="filename">Filename</param>
public SharpAudioVoice(SharpAudioDevice device, string filename)
{
    _stream = new SoundStream(File.OpenRead(filename));

    _voice = new SourceVoice(device.Device, _stream.Format);

    _buffer = new AudioBuffer
    {
        Stream = _stream.ToDataStream(),
        AudioBytes = (int)_stream.Length,
        Flags = BufferFlags.EndOfStream
    };
}
/// <summary>
/// Tears down the current source voice and its pinned audio buffer so a new
/// buffer can be assigned. No-op when no buffer/stream is active.
/// </summary>
protected override void BeginBufferChange()
{
    if (xAudioBuffer != null && xAudioBuffer.Stream != null)
    {
        // Stop and release the voice still referencing the old buffer.
        sourceVoice.FlushSourceBuffers();
        sourceVoice.Stop();
        sourceVoice.Dispose();
        sourceVoice = null;
        // Release the managed stream, then unpin the buffer memory the
        // GCHandle was keeping alive for native playback.
        xAudioBuffer.Stream.Dispose();
        xAudioBuffer.Stream = null;
        audioBufferHandle.Free();
        audioBufferHandle = default(GCHandle);
    }
}
/// <summary>
/// Returns a voice ready for playback, growing the pool by one when no idle
/// voice is available. The returned voice is moved to the playing list.
/// </summary>
public MySourceVoice NextAvailable()
{
    if (m_availableVoices.Count == 0)
    {
        // Pool exhausted: create one more native voice and wrap it.
        var nativeVoice = new SourceVoice(m_audioEngine, m_waveFormat, VoiceFlags.None, XAudio2.MaximumFrequencyRatio, true);
        var pooledVoice = new MySourceVoice(this, nativeVoice);
        nativeVoice.BufferEnd += pooledVoice.OnStopPlaying;
        m_availableVoices.Enqueue(pooledVoice);
        ++m_maxCount;
    }

    var result = m_availableVoices.Dequeue();
    m_playingVoices.Add(result);
    return result;
}
/// <summary>
/// Function to play an mp3. Only one playback runs at a time: if one is
/// already in progress, this call simply awaits it and returns.
/// </summary>
/// <param name="path">The path to the mp3 file.</param>
public async Task PlayMp3Async(string path)
{
    if (_currentPlayback != null)
    {
        await _currentPlayback;
        return;
    }

    _tokenSource = new CancellationTokenSource();

    _currentPlayback = Task.Run(() =>
    {
        // Exclusive read lock on the file while decoding.
        var stream = new Mm.SoundStream(File.Open(path, FileMode.Open, FileAccess.Read, FileShare.None));
        Mm.WaveFormat format = stream.Format;
        var buffer = new Xa.AudioBuffer
        {
            Stream = stream.ToDataStream(),
            AudioBytes = (int)stream.Length,
            Flags = Xa.BufferFlags.EndOfStream
        };
        // Data has been copied into the DataStream above; the file handle can
        // be released now (DecodedPacketsInfo is still read below — presumably
        // it stays valid after Close; TODO confirm).
        stream.Close();

        var source = new Xa.SourceVoice(_audio, format);
        source.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
        source.Start();
        try
        {
            // Poll until the queued buffer drains, the voice is disposed, or
            // cancellation is requested.
            while ((!_tokenSource.Token.IsCancellationRequested)
                && (!source.IsDisposed)
                && (source.State.BuffersQueued > 0))
            {
                Thread.Sleep(10);
            }
            source.Stop();
        }
        finally
        {
            buffer.Stream?.Dispose();
            source.Dispose();
            stream?.Dispose();
        }
    }, _tokenSource.Token);

    await _currentPlayback;
    // NOTE(review): clearing this after the await is not safe under
    // concurrent callers — confirm this method is only invoked from one
    // thread at a time.
    _currentPlayback = null;
}
/// <summary>
/// Applies 3D positioning to the voice: computes the channel output matrix
/// and doppler factor, optionally attenuating by distance.
/// </summary>
/// <param name="voice">Voice to position.</param>
/// <param name="listener">The listener's position/orientation.</param>
/// <param name="emitter">The sound emitter.</param>
/// <param name="maxDistance">Distance at which the sound fades to silence.</param>
/// <param name="frequencyRatio">Base frequency ratio, multiplied by the doppler factor.</param>
public void Apply3D(SourceVoice voice, Listener listener, Emitter emitter, float maxDistance, float frequencyRatio)
{
    m_x3dAudio.Calculate(listener, emitter, CalculateFlags.Matrix | CalculateFlags.Doppler, m_dsp);

    if (emitter.InnerRadius == 0f)
    {
        // Approximated decay by distance: fade linearly to zero at maxDistance.
        float decay = MathHelper.Clamp(1f - m_dsp.EmitterToListenerDistance / maxDistance, 0f, 1f);
        for (int index = 0; index < m_dsp.MatrixCoefficients.Length; ++index)
        {
            m_dsp.MatrixCoefficients[index] *= decay;
        }
    }

    voice.SetOutputMatrix(m_dsp.SourceChannelCount, m_dsp.DestinationChannelCount, m_dsp.MatrixCoefficients);
    voice.SetFrequencyRatio(frequencyRatio * m_dsp.DopplerFactor);
}
/// <summary>
/// Builds the form, initializes XAudio2, generates a 60-second looping
/// 220 Hz tone, and attaches the modulator effect to the source voice.
/// </summary>
public PlayForm()
{
    InitializeComponent();

    // Initialize XAudio2
    xaudio2 = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.DefaultProcessor);
    masteringVoice = new MasteringVoice(xaudio2);
    // 44.1 kHz, 32-bit samples, stereo.
    var waveFormat = new WaveFormat(44100, 32, 2);
    sourceVoice = new SourceVoice(xaudio2, waveFormat);

    // 60-second buffer.
    int bufferSize = waveFormat.ConvertLatencyToByteSize(60000);
    DataStream dataStream = new DataStream(bufferSize, true, true);

    // Prepare the initial sound to modulate: a 220 Hz cosine at half
    // amplitude, written to both stereo channels.
    int numberOfSamples = bufferSize / waveFormat.BlockAlign;
    for (int i = 0; i < numberOfSamples; i++)
    {
        float value = (float)(Math.Cos(2 * Math.PI * 220.0 * i / waveFormat.SampleRate) * 0.5);
        dataStream.Write(value);
        dataStream.Write(value);
    }
    dataStream.Position = 0;

    // Loop the whole buffer forever.
    audioBuffer = new AudioBuffer
    {
        Stream = dataStream,
        Flags = BufferFlags.EndOfStream,
        AudioBytes = bufferSize,
        LoopBegin = 0,
        LoopLength = numberOfSamples,
        LoopCount = AudioBuffer.LoopInfinite
    };

    // Set the effect on the source. The reverb descriptor is built but not
    // added to the chain (see the commented alternative below).
    ModulatorEffect = new ModulatorEffect();
    modulatorDescriptor = new EffectDescriptor(ModulatorEffect);
    reverb = new Reverb(xaudio2);
    effectDescriptor = new EffectDescriptor(reverb);
    //sourceVoice.SetEffectChain(modulatorDescriptor, effectDescriptor);
    sourceVoice.SetEffectChain(modulatorDescriptor);
    //sourceVoice.EnableEffect(0);

    this.Closed += new EventHandler(PlayForm_Closed);
}
/// <summary>
/// Creates a source voice for the given wave format, hooks stream-end and
/// error callbacks, and configures the 3D emitter for this stream.
/// </summary>
/// <param name="format">Incoming audio format (sample rate, bit depth, channels).</param>
public override void Setup(WaveFormatEx format)
{
    _format = new WaveFormatExtensible(format.nSamplesPerSec, format.wBitsPerSample, format.nChannels);
    _voice = new SourceVoice(_xaudio2, _format);
    _voice.StreamEnd += _voice_StreamEnd;
    _voice.VoiceError += _voice_VoiceError;

    _emitter = new Emitter
    {
        ChannelAzimuths = GetAzimuths(_format.Channels),
        ChannelCount = _format.Channels,
        ChannelRadius = 10,
        // FIX: native X3DAudio samples use FLT_MIN here (the smallest
        // POSITIVE normal float). float.MinValue is the largest NEGATIVE
        // float — a mistranslation that corrupts the distance curve.
        // float.Epsilon is the closest managed non-zero positive value.
        CurveDistanceScaler = float.Epsilon,
        OrientFront = new Vector3(0, 0, 1),
        OrientTop = new Vector3(0, 1, 0),
        Position = new Vector3(0, 0, 0),
        Velocity = new Vector3(0, 0, 0)
    };
}
/// <summary>
/// Creates a pooled, filter-enabled voice for the given source format and
/// subscribes to the owning pool's engine-change notifications.
/// </summary>
/// <param name="owner">Pool that owns this voice.</param>
/// <param name="device">XAudio2 engine to create the voice on.</param>
/// <param name="sourceFormat">Wave format of the audio this voice will play.</param>
public MySourceVoice(MySourceVoicePool owner, XAudio2 device, WaveFormat sourceFormat)
{
    // Caps how far the voice may be pitched up; XAudio2 pre-allocates native
    // memory proportional to this ratio (higher pitch needs more source data
    // per output sample). Ratio 2 equals 11-12 semitones (one octave).
    // Values around 32 are still pretty safe, around 128 need a large amount
    // of memory, and anything above 128 is a memory killer.
    const float MaxFrequencyRatio = 2;

    m_voice = new SourceVoice(device, sourceFormat, VoiceFlags.UseFilter, MaxFrequencyRatio, true);
    m_voice.BufferEnd += OnStopPlaying;
    m_valid = true;

    m_owner = owner;
    m_owner.OnAudioEngineChanged += m_owner_OnAudioEngineChanged;

    Flush();
}
/// <summary>
/// Plays the given sound file at the requested volume when it is not in the
/// loaded-sounds cache; otherwise stops the cached voice.
/// NOTE(review): the voice created here is never added to LoadedSounds, so
/// the else-branch only fires if another code path populates the dictionary —
/// confirm this play/stop toggle behavior is intentional.
/// </summary>
/// <param name="soundfile">Path/key of the sound to play.</param>
/// <param name="volume">Playback volume (1 = full).</param>
public static void PlaySound(string soundfile, float volume = 1)
{
    SourceVoice sourceVoice;
    if (!LoadedSounds.ContainsKey(soundfile))
    {
        var buffer = GetBuffer(soundfile);
        sourceVoice = new SourceVoice(XAudio, buffer.WaveFormat, true);
        sourceVoice.SetVolume(volume, SharpDX.XAudio2.XAudio2.CommitNow);
        sourceVoice.SubmitSourceBuffer(buffer, buffer.DecodedPacketsInfo);
        sourceVoice.Start();
    }
    else
    {
        // Already-known sound: stop it rather than start a second instance.
        sourceVoice = LoadedSounds[soundfile];
        if (sourceVoice != null)
            sourceVoice.Stop();
    }
}
/// <summary>
/// SharpDX XAudio2 sample. Plays a generated sound with some reverb.
/// </summary>
static void Main(string[] args)
{
    var xaudio2 = new XAudio2();
    var masteringVoice = new MasteringVoice(xaudio2);

    // 44.1 kHz, 32-bit samples, stereo.
    var waveFormat = new WaveFormat(44100, 32, 2);
    var sourceVoice = new SourceVoice(xaudio2, waveFormat);

    // 60-second buffer.
    int bufferSize = waveFormat.ConvertLatencyToByteSize(60000);
    var dataStream = new DataStream(bufferSize, true, true);

    // Generate a 220 Hz tone with a 10 Hz vibrato (+/- 4 Hz), half
    // amplitude, written to both stereo channels.
    int numberOfSamples = bufferSize/waveFormat.BlockAlign;
    for (int i = 0; i < numberOfSamples; i++)
    {
        double vibrato = Math.Cos(2 * Math.PI * 10.0 * i / waveFormat.SampleRate);
        float value = (float) (Math.Cos(2*Math.PI*(220.0 + 4.0*vibrato)*i/waveFormat.SampleRate)*0.5);
        dataStream.Write(value);
        dataStream.Write(value);
    }
    dataStream.Position = 0;

    var audioBuffer = new AudioBuffer {Stream = dataStream, Flags = BufferFlags.EndOfStream, AudioBytes = bufferSize};

    // Attach a reverb effect to the source voice and enable it.
    var reverb = new Reverb();
    var effectDescriptor = new EffectDescriptor(reverb);
    sourceVoice.SetEffectChain(effectDescriptor);
    sourceVoice.EnableEffect(0);

    sourceVoice.SubmitSourceBuffer(audioBuffer, null);
    sourceVoice.Start();

    // Let playback run for 60 seconds, printing one dot per second.
    Console.WriteLine("Play sound");
    for(int i = 0; i < 60; i++)
    {
        Console.Write(".");
        Console.Out.Flush();
        Thread.Sleep(1000);
    }
}
/// <summary>
/// Play a sound file. Supported format are Wav(pcm+adpcm) and XWMA
/// </summary>
/// <param name="device">The device.</param>
/// <param name="text">Text to display</param>
/// <param name="fileName">Name of the file.</param>
static void PLaySoundFile(XAudio2 device, string text, string fileName)
{
    Console.WriteLine("{0} => {1} (Press esc to skip)", text, fileName);

    // Decode the file into an end-of-stream audio buffer, then release the
    // file handle — the data now lives in the DataStream.
    var soundStream = new SoundStream(File.OpenRead(fileName));
    var waveFormat = soundStream.Format;
    var audioBuffer = new AudioBuffer
    {
        Stream = soundStream.ToDataStream(),
        AudioBytes = (int) soundStream.Length,
        Flags = BufferFlags.EndOfStream
    };
    soundStream.Close();

    var sourceVoice = new SourceVoice(device, waveFormat, true);

    // Adds a sample callback to check that they are working on source voices
    sourceVoice.BufferEnd += (context) => Console.WriteLine(" => event received: end of buffer");
    sourceVoice.SubmitSourceBuffer(audioBuffer, soundStream.DecodedPacketsInfo);
    sourceVoice.Start();

    // Poll until the buffer drains or the user presses Escape, printing a
    // dot every 500 ms.
    var tickCount = 0;
    while (sourceVoice.State.BuffersQueued > 0 && !IsKeyPressed(ConsoleKey.Escape))
    {
        if (tickCount == 50)
        {
            Console.Write(".");
            Console.Out.Flush();
            tickCount = 0;
        }
        Thread.Sleep(10);
        tickCount++;
    }
    Console.WriteLine();

    sourceVoice.DestroyVoice();
    sourceVoice.Dispose();
    audioBuffer.Stream.Dispose();
}
/// <summary>
/// Stores the voice this callback implementation reports events for.
/// </summary>
/// <param name="voice">The source voice being observed.</param>
public VoiceCallbackImpl(SourceVoice voice)
{
    Voice = voice;
}