/// <summary>
/// Queues an audio buffer on the source voice and records cue metadata.
/// </summary>
/// <param name="cueEnum">Identifier of the cue being played.</param>
/// <param name="buffer">The audio data to submit.</param>
/// <param name="decodedXMWAPacketInfo">Decoded packet info for xWMA data (null for plain PCM).</param>
/// <param name="sampleRate">Sample rate to apply to the source voice.</param>
public void SubmitSourceBuffer(MySoundCuesEnum cueEnum, AudioBuffer buffer, uint[] decodedXMWAPacketInfo, int sampleRate)
{
    m_cueEnum = cueEnum;

    // A buffer with a non-zero loop count is treated as loopable.
    m_isLoopable = buffer.LoopCount > 0;

    m_voice.SourceSampleRate = sampleRate;
    m_voice.SubmitSourceBuffer(buffer, decodedXMWAPacketInfo);
}
/// <summary>Releases the base resources and the audio buffer's data stream.</summary>
public override void Dispose()
{
    base.Dispose();

    // Free the native data stream backing the buffer, then drop the reference.
    if (buffer != null)
    {
        buffer.Stream.Dispose();
        buffer = null;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AudioPlayer" /> class.
/// </summary>
/// <param name="xaudio2">The xaudio2 engine.</param>
/// <param name="audioStream">The input audio stream.</param>
public AudioPlayer(XAudio2 xaudio2, Stream audioStream)
{
    this.xaudio2 = xaudio2;
    audioDecoder = new AudioDecoder(audioStream);
    sourceVoice = new SourceVoice(xaudio2, audioDecoder.WaveFormat, 0, 1.0f);
    localVolume = 1.0f;

    sourceVoice.BufferEnd += sourceVoice_BufferEnd;
    sourceVoice.Start();

    bufferEndEvent = new AutoResetEvent(false);
    playEvent = new ManualResetEvent(false);
    waitForPlayToOutput = new ManualResetEvent(false);

    clock = new Stopwatch();

    // Pre-allocate a small ring of native buffers so the streaming loop
    // never has to allocate while audio is playing.
    audioBuffersRing = new AudioBuffer[3];
    memBuffers = new DataPointer[audioBuffersRing.Length];
    for (int i = 0; i < audioBuffersRing.Length; i++)
    {
        audioBuffersRing[i] = new AudioBuffer();
        memBuffers[i].Size = 32 * 1024; // default size 32Kb
        memBuffers[i].Pointer = Utilities.AllocateMemory(memBuffers[i].Size);
    }

    // Initialize to stopped
    State = AudioPlayerState.Stopped;

    // Starts the playing thread. LongRunning hints the scheduler to use a
    // dedicated thread, since PlayAsync blocks for the life of the player.
    playingTask = Task.Factory.StartNew(PlayAsync, TaskCreationOptions.LongRunning);
}
/// <summary>
/// Loads an effect sound, reusing the stream and buffer of a previously
/// loaded instance for the same file when one is cached.
/// </summary>
/// <param name="filename">Path of the sound file to load.</param>
public EffectSound(string filename)
{
    // Fast path: share the cached stream/buffer for this file.
    lock (loadedSounds)
    {
        EffectSound cached;
        if (loadedSounds.TryGetValue(filename, out cached))
        {
            Stream = cached.Stream;
            Buffer = cached.Buffer;
            return;
        }
    }

    // Slow path: decode the file into a native end-of-stream buffer.
    using (var fileStream = File.OpenRead(filename))
    {
        Stream = new SoundStream(fileStream);
        Buffer = new AudioBuffer
        {
            Stream = Stream.ToDataStream(),
            AudioBytes = (int)Stream.Length,
            Flags = BufferFlags.EndOfStream
        };
        Stream.Close();
    }

    // Publish this instance so later loads of the same file reuse it.
    lock (loadedSounds)
    {
        loadedSounds[filename] = this;
    }
}
/// <summary>
/// Releases the source voice, the audio buffer's stream and the XAudio2 engine.
/// </summary>
/// <param name="disposing">True when called for managed cleanup.</param>
public override void Dispose(bool disposing)
{
    if (!disposing)
        return;

    if (sourceVoice != null)
    {
        // Drop any queued audio before stopping so shutdown is prompt.
        sourceVoice.FlushSourceBuffers();
        sourceVoice.Stop();
        sourceVoice.Dispose();
        sourceVoice = null;
    }

    if (xAudioBuffer != null)
    {
        xAudioBuffer.Stream.Dispose();
        xAudioBuffer.Stream = null;
        xAudioBuffer = null;
    }

    if (xAudio != null)
    {
        xAudio.StopEngine();
        xAudio.Dispose();
        xAudio = null;
    }
}
/// <summary>Disposes the buffer's data stream in addition to the base data.</summary>
protected override void DisposeData()
{
    base.DisposeData();

    if (buffer != null)
    {
        buffer.Stream.Dispose();
        buffer = null;
    }
}
/// <summary>
/// Loads a sound file from the given resource.
/// </summary>
/// <param name="resource">The resource to load.</param>
/// <returns>A cached sound file holding the decoded buffer, format and packet info.</returns>
public static async Task<CachedSoundFile> FromResourceAsync(ResourceLink resource)
{
    resource.EnsureNotNull(nameof(resource));

    CachedSoundFile result = new CachedSoundFile();
    using (Stream inStream = await resource.OpenInputStreamAsync())
    using (SDXM.SoundStream stream = new SDXM.SoundStream(inStream))
    {
        // Decode on a pool thread; Task.Run is the idiomatic replacement for
        // Task.Factory.StartNew when simply offloading synchronous work.
        await Task.Run(() =>
        {
            // Read all data into the audio buffer
            SDXM.WaveFormat waveFormat = stream.Format;
            XA.AudioBuffer buffer = new XA.AudioBuffer
            {
                Stream = stream.ToDataStream(),
                AudioBytes = (int)stream.Length,
                Flags = XA.BufferFlags.EndOfStream
            };

            // Store members
            result.m_decodedPacketsInfo = stream.DecodedPacketsInfo;
            result.m_format = waveFormat;
            result.m_audioBuffer = buffer;
        });
    }

    return result;
}
/// <summary>
/// Creates the XAudio2 engine and loads the given content file into a buffer.
/// </summary>
/// <param name="fileName">File name, resolved relative to the Content folder.</param>
public Audio(String fileName)
{
    device = new XAudio2();
    masteringVoice = new MasteringVoice(device);

    stream = new SoundStream(File.OpenRead("Content/" + fileName));
    buffer = new AudioBuffer
    {
        Stream = stream.ToDataStream(),
        AudioBytes = (int)stream.Length,
        Flags = BufferFlags.EndOfStream
    };
    stream.Close();
}
/// <summary>
/// Registers a sound at the given slot, ready to be played.
/// </summary>
/// <param name="index">Slot to store the sound in.</param>
/// <param name="stream">Raw audio data (e.g. an embedded resource stream).</param>
public void Add(int index, UnmanagedMemoryStream stream)
{
    var soundStream = new SoundStream(stream);
    _soundStreams[index] = soundStream;

    _audioBuffers[index] = new AudioBuffer
    {
        Stream = soundStream.ToDataStream(),
        AudioBytes = (int)soundStream.Length,
        Flags = BufferFlags.EndOfStream
    };

    _sourceVoices[index] = new SourceVoice(_audio, soundStream.Format);
}
/// <summary>
/// Creates the XAudio2 engine and pre-sizes the sound slot arrays.
/// </summary>
/// <param name="sounds">Number of sound slots to allocate.</param>
public SoundManager(int sounds)
{
    _audio = new XAudio2();
    _masteringVoice = new MasteringVoice(_audio);

    // Default the master output to half volume.
    _masteringVoice.SetVolume(0.5f);

    _soundStreams = new SoundStream[sounds];
    _audioBuffers = new AudioBuffer[sounds];
    _sourceVoices = new SourceVoice[sounds];
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
public void Dispose()
{
    if (m_audioBuffer == null)
        return;

    // Release the native stream first, then drop the managed references.
    GraphicsHelper.DisposeObject(m_audioBuffer.Stream);
    m_format = null;
    m_audioBuffer = null;
}
/// <summary>
/// Loads a sound into the given slot and prepares a voice for it.
/// </summary>
/// <param name="index">Slot index.</param>
/// <param name="sourceStream">Raw audio data stream.</param>
public void Add(int index, UnmanagedMemoryStream sourceStream)
{
    var sound = new SoundStream(sourceStream);
    streams[index] = sound;

    buffers[index] = new AudioBuffer
    {
        Stream = sound.ToDataStream(),
        AudioBytes = (int)sound.Length,
        Flags = BufferFlags.EndOfStream
    };

    voices[index] = new SourceVoice(audio, sound.Format);
}
/// <summary>
/// Allocates a byte-backed sample array of the given size and wraps it in an
/// XAudio2 <see cref="AudioBuffer"/>.
/// </summary>
/// <param name="size">Buffer size in bytes.</param>
public XAudioBuffer(int size)
{
    _samples = new byte[size];

    // Readable and writable data stream over the managed sample array.
    _dataStream = DataStream.Create(_samples, true, true);

    Buffer = new AudioBuffer
    {
        Stream = _dataStream,
        AudioBytes = (int)_dataStream.Length,
        Flags = BufferFlags.EndOfStream
    };
}
/// <summary>
/// Loads a wav file from the content folder into an XAudio2 buffer.
/// </summary>
/// <param name="filename">Sound name without extension.</param>
/// <param name="device">Owning audio device.</param>
public XAudioSound(string filename, XAudioDevice device)
    : base(filename, device)
{
    xAudio = device.XAudio2;

    using (var stream = LoadStream("Content/" + filename + ".wav"))
    {
        format = stream.Format;
        length = CalculateLengthInSeconds(format, (int)stream.Length);
        buffer = CreateAudioBuffer(stream.ToDataStream());
        decodedInfo = stream.DecodedPacketsInfo;
    }
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="device">Device</param>
/// <param name="filename">Filename</param>
public SharpAudioVoice(SharpAudioDevice device, string filename)
{
    // Decode the whole file into a single end-of-stream buffer.
    _stream = new SoundStream(File.OpenRead(filename));
    _buffer = new AudioBuffer
    {
        Stream = _stream.ToDataStream(),
        AudioBytes = (int)_stream.Length,
        Flags = BufferFlags.EndOfStream
    };

    // Create a voice matching the decoded format.
    var format = _stream.Format;
    _voice = new SourceVoice(device.Device, format);
}
/// <summary>
/// Copies the given sample data into a new buffer and queues it on the voice.
/// </summary>
/// <param name="buf">Sample bytes; copied because the caller may reuse the array.</param>
public void EnqueueAudioSample(byte[] buf)
{
    // Allocate a new buffer because ours will go away after this call
    var audioBuffer = new AudioBuffer(DataStream.Create<byte>(buf, true, false));

    try
    {
        sourceVoice.SubmitSourceBuffer(audioBuffer, null);
    }
    catch (Exception ex)
    {
        // Best-effort: a failed submit drops this sample but keeps playback alive.
        Debug.WriteLine("Audio exception");
        Debug.WriteLine(ex.Message);
    }
}
// Creates a mono source voice for this audio source and a default-facing
// 3D emitter, and subscribes to listener updates from the engine.
public XAudio2AudioSource(XAudio2Engine engine)
{
    _engine = engine;

    // NOTE(review): 44000 Hz, 16-bit, mono — 44000 looks like a typo for the
    // standard 44100 Hz rate. Changing it would alter playback pitch if the
    // submitted PCM really is 44 kHz, so confirm against the data producer
    // before "fixing".
    WaveFormat waveFormat = new WaveFormat(44000, 16, 1);

    // maxFrequencyRatio 2.0 permits pitch adjustment up to one octave up.
    _sourceVoice = new SourceVoice(_engine.XAudio2, waveFormat, VoiceFlags.None, maxFrequencyRatio: 2.0f);
    _audioBuffer = new SharpDX.XAudio2.AudioBuffer();

    // Emitter facing +Z with +Y up; CurveDistanceScaler = 1 keeps the
    // default distance-attenuation scale.
    _emitter = new Emitter()
    {
        OrientFront = new RawVector3(0, 0, 1),
        OrientTop = new RawVector3(0, 1, 0),
        CurveDistanceScaler = 1
    };

    engine.ListenerChanged += OnListenerChanged;
}
/// <summary>
/// Loads a wave file fully into memory; loopable cues get an infinite loop count.
/// </summary>
/// <param name="cue">Cue definition (controls loopability).</param>
/// <param name="filename">Path of the wave file.</param>
public MyInMemoryWave(MyObjectBuilder_CueDefinition cue, string filename)
{
    m_stream = new SoundStream(File.OpenRead(filename));
    m_waveFormat = m_stream.Format;
    m_buffer = new AudioBuffer
    {
        Stream = m_stream.ToDataStream(),
        AudioBytes = (int)m_stream.Length,
        Flags = BufferFlags.None
    };

    // Loopable cues repeat until explicitly stopped.
    if (cue.Loopable)
        m_buffer.LoopCount = AudioBuffer.LoopInfinite;

    m_stream.Close();
}
/// <summary>
/// Pins the managed sample array and queues a slice of it on the source voice.
/// </summary>
/// <param name="buffer">Managed array containing the samples.</param>
/// <param name="offset">Byte offset of the slice to submit.</param>
/// <param name="byteCount">Length of the slice in bytes.</param>
private void SubmitBufferImpl(byte[] buffer, int offset, int byteCount)
{
    // Pin the array so native code can read it in place; the handle is
    // tracked so it can be released after the buffer finishes playing.
    var handle = GCHandle.Alloc(buffer, GCHandleType.Pinned);
    submittedBufferHandles.Enqueue(new SubBufferDataHandles(handle));

    var audioBuffer = new AudioBuffer
    {
        AudioDataPointer = handle.AddrOfPinnedObject() + offset,
        AudioBytes = byteCount,
    };
    SourceVoice.SubmitSourceBuffer(audioBuffer, null);

    Interlocked.Increment(ref pendingBufferCount);
    Interlocked.Increment(ref internalPendingBufferCount);
}
/// <summary>
/// Function to play an mp3.
/// </summary>
/// <param name="path">The path to the mp3 file.</param>
public async Task PlayMp3Async(string path)
{
    // If something is already playing, just wait for it to finish.
    if (_currentPlayback != null)
    {
        await _currentPlayback;
        return;
    }

    _tokenSource = new CancellationTokenSource();

    _currentPlayback = Task.Run(() =>
    {
        var stream = new Mm.SoundStream(File.Open(path, FileMode.Open, FileAccess.Read, FileShare.None));
        Mm.WaveFormat format = stream.Format;
        var buffer = new Xa.AudioBuffer
        {
            Stream = stream.ToDataStream(),
            AudioBytes = (int)stream.Length,
            Flags = Xa.BufferFlags.EndOfStream
        };
        stream.Close();

        var source = new Xa.SourceVoice(_audio, format);
        source.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
        source.Start();

        try
        {
            // Poll until the buffer drains, cancellation is requested,
            // or the voice is disposed out from under us.
            while ((!_tokenSource.Token.IsCancellationRequested)
                   && (!source.IsDisposed)
                   && (source.State.BuffersQueued > 0))
            {
                Thread.Sleep(10);
            }
            source.Stop();
        }
        finally
        {
            buffer.Stream?.Dispose();
            source.Dispose();
            stream?.Dispose();
        }
    }, _tokenSource.Token);

    try
    {
        await _currentPlayback;
    }
    finally
    {
        // BUG FIX: clear the handle even when the playback task faults or is
        // cancelled. Previously a faulted task left _currentPlayback set, so
        // every later call re-awaited the dead task and playback could never
        // be restarted.
        _currentPlayback = null;
    }
}
// Builds the form, the XAudio2 graph (mastering + source voice), a 60-second
// looping cosine test tone, and wires the modulator effect onto the voice.
public PlayForm()
{
    InitializeComponent();

    // Initalize XAudio2
    xaudio2 = new XAudio2(XAudio2Flags.None, ProcessorSpecifier.DefaultProcessor);
    masteringVoice = new MasteringVoice(xaudio2);

    // 44.1 kHz, 32-bit, stereo source format.
    var waveFormat = new WaveFormat(44100, 32, 2);
    sourceVoice = new SourceVoice(xaudio2, waveFormat);

    // 60 000 ms worth of audio, generated up front into one in-memory stream.
    int bufferSize = waveFormat.ConvertLatencyToByteSize(60000);
    DataStream dataStream = new DataStream(bufferSize, true, true);

    // Prepare the initial sound to modulate: a 220 Hz cosine at half
    // amplitude, written once per channel (stereo interleaved).
    int numberOfSamples = bufferSize / waveFormat.BlockAlign;
    for (int i = 0; i < numberOfSamples; i++)
    {
        float value = (float)(Math.Cos(2 * Math.PI * 220.0 * i / waveFormat.SampleRate) * 0.5);
        dataStream.Write(value);
        dataStream.Write(value);
    }
    dataStream.Position = 0;

    // Loop the whole generated tone indefinitely.
    audioBuffer = new AudioBuffer
    {
        Stream = dataStream,
        Flags = BufferFlags.EndOfStream,
        AudioBytes = bufferSize,
        LoopBegin = 0,
        LoopLength = numberOfSamples,
        LoopCount = AudioBuffer.LoopInfinite
    };

    // Set the effect on the source. The reverb is constructed but (per the
    // commented lines) deliberately left out of the active effect chain.
    ModulatorEffect = new ModulatorEffect();
    modulatorDescriptor = new EffectDescriptor(ModulatorEffect);
    reverb = new Reverb(xaudio2);
    effectDescriptor = new EffectDescriptor(reverb);
    //sourceVoice.SetEffectChain(modulatorDescriptor, effectDescriptor);
    sourceVoice.SetEffectChain(modulatorDescriptor);
    //sourceVoice.EnableEffect(0);

    this.Closed += new EventHandler(PlayForm_Closed);
}
/// <summary>
/// Reads a wave file through the game file system into an in-memory buffer.
/// </summary>
/// <param name="cue">Sound data; loopable cues loop forever.</param>
/// <param name="path">Path of the wave file.</param>
public MyInMemoryWave(MySoundData cue, string path)
{
    using (var stream = MyFileSystem.OpenRead(path))
    {
        m_stream = new SoundStream(stream);
        m_waveFormat = m_stream.Format;
        m_buffer = new AudioBuffer
        {
            Stream = m_stream.ToDataStream(),
            AudioBytes = (int)m_stream.Length,
            Flags = BufferFlags.None
        };

        // Loopable cues repeat until explicitly stopped.
        if (cue.Loopable)
            m_buffer.LoopCount = AudioBuffer.LoopInfinite;

        m_stream.Close();
    }
}
// Rebuilds the XAudio2 buffer and voice after the managed audio buffer
// changed, pinning the raw sample array so native code streams it in place.
protected override void EndBufferChange()
{
    if (AudioBuffer != null)
    {
        if (xAudioBuffer == null)
        {
            xAudioBuffer = new XAudioBuffer();
        }

        // Pin the managed samples; the DataStream points straight at them.
        // NOTE(review): audioBufferHandle is overwritten here without a visible
        // Free(), and sourceVoice is likewise replaced without a visible
        // Dispose — assumes the matching Begin/teardown path (not in this
        // chunk) releases the old handle and voice. Confirm to rule out a
        // pin leak and voice leak on repeated buffer changes.
        audioBufferHandle = GCHandle.Alloc(AudioBuffer.RawBuffer, GCHandleType.Pinned);
        xAudioBuffer.Stream = new DataStream(audioBufferHandle.AddrOfPinnedObject(), AudioBuffer.SizeInBytes, true, true);
        xAudioBuffer.AudioBytes = (int)xAudioBuffer.Stream.Length;

        // Loop the whole buffer (length expressed in 16-bit samples).
        xAudioBuffer.LoopLength = AudioBuffer.RawBuffer.Length / 2;
        xAudioBuffer.LoopCount = XAudio2.MaximumLoopCount;

        sourceVoice = new SourceVoice(xAudio, waveFormat);
        sourceVoice.SubmitSourceBuffer(xAudioBuffer, null);
        sourceVoice.Start();
    }
}
/// <summary>
/// Load the sound data from the sound-file.
/// </summary>
public override void Load()
{
    var nativeFileStream = new NativeFileStream(
        File.Path,
        NativeFileMode.Open,
        NativeFileAccess.Read,
        NativeFileShare.Read);

    m_soundStream = new SoundStream(nativeFileStream);
    m_waveFormat = m_soundStream.Format;

    // Wrap the decoded samples in a single end-of-stream buffer.
    m_buffer = new AudioBuffer
    {
        Stream = m_soundStream.ToDataStream(),
        AudioBytes = (int)m_soundStream.Length,
        Flags = BufferFlags.EndOfStream
    };

    // Notify listeners that the sound data is ready.
    OnLoaded();
}
/// <summary>
/// Creates an XAudio2 engine and loads the given sound effect file into memory.
/// </summary>
/// <param name="soundFxPath">Path of the sound file.</param>
public SoundEffect(string soundFxPath)
{
    _xaudio = new XAudio2();

    // The mastering voice registers itself with the engine on construction.
    var masteringVoice = new MasteringVoice(_xaudio);

    var fileStream = new NativeFileStream(
        soundFxPath,
        NativeFileMode.Open,
        NativeFileAccess.Read,
        NativeFileShare.Read);

    _soundstream = new SoundStream(fileStream);
    _waveFormat = _soundstream.Format;
    _buffer = new AudioBuffer
    {
        Stream = _soundstream.ToDataStream(),
        AudioBytes = (int)_soundstream.Length,
        Flags = BufferFlags.EndOfStream
    };
}
/// <summary>
/// Copies a PCM byte buffer into a new data stream and queues it for playback.
/// </summary>
/// <param name="buffer">Raw sample bytes to play.</param>
public override void PlayBuffer(byte[] buffer)
{
    // A disposed voice can no longer accept data.
    if (_voice.IsDisposed)
        return;

    // Copy the caller's bytes into a stream the voice can own.
    var dataStream = new DataStream(buffer.Length, true, true);
    dataStream.Write(buffer, 0, buffer.Length);
    dataStream.Position = 0;

    var audioBuffer = new AudioBuffer
    {
        Stream = dataStream,
        AudioBytes = buffer.Length,
        Flags = BufferFlags.EndOfStream
    };
    _voice.SubmitSourceBuffer(audioBuffer, null);

    Update();

    if (!_isPlaying)
        Play();
}
/// <summary>
/// SharpDX XAudio2 sample. Plays a generated sound with some reverb.
/// </summary>
static void Main(string[] args)
{
    var xaudio2 = new XAudio2();
    var masteringVoice = new MasteringVoice(xaudio2);

    // 44.1 kHz, 32-bit, stereo source.
    var waveFormat = new WaveFormat(44100, 32, 2);
    var sourceVoice = new SourceVoice(xaudio2, waveFormat);

    // Generate one minute of audio up front.
    int bufferSize = waveFormat.ConvertLatencyToByteSize(60000);
    var dataStream = new DataStream(bufferSize, true, true);

    // A 220 Hz carrier with a 10 Hz, +/-4 Hz vibrato, at half amplitude,
    // written once per channel (stereo interleaved).
    int sampleCount = bufferSize / waveFormat.BlockAlign;
    for (int n = 0; n < sampleCount; n++)
    {
        double vibrato = Math.Cos(2 * Math.PI * 10.0 * n / waveFormat.SampleRate);
        float sample = (float)(Math.Cos(2 * Math.PI * (220.0 + 4.0 * vibrato) * n / waveFormat.SampleRate) * 0.5);
        dataStream.Write(sample);
        dataStream.Write(sample);
    }
    dataStream.Position = 0;

    var audioBuffer = new AudioBuffer
    {
        Stream = dataStream,
        Flags = BufferFlags.EndOfStream,
        AudioBytes = bufferSize
    };

    // Attach a reverb effect to the source voice and enable it.
    var reverb = new Reverb();
    var effectDescriptor = new EffectDescriptor(reverb);
    sourceVoice.SetEffectChain(effectDescriptor);
    sourceVoice.EnableEffect(0);

    sourceVoice.SubmitSourceBuffer(audioBuffer, null);
    sourceVoice.Start();

    Console.WriteLine("Play sound");

    // Let it play for 60 seconds, printing a dot each second.
    for (int i = 0; i < 60; i++)
    {
        Console.Write(".");
        Console.Out.Flush();
        Thread.Sleep(1000);
    }
}
/// <summary>
/// Play a sound file. Supported format are Wav(pcm+adpcm) and XWMA
/// </summary>
/// <param name="device">The device.</param>
/// <param name="text">Text to display</param>
/// <param name="fileName">Name of the file.</param>
static void PLaySoundFile(XAudio2 device, string text, string fileName)
{
    Console.WriteLine("{0} => {1} (Press esc to skip)", text, fileName);

    // Decode the whole file into a single end-of-stream buffer.
    var stream = new SoundStream(File.OpenRead(fileName));
    var waveFormat = stream.Format;
    var buffer = new AudioBuffer
    {
        Stream = stream.ToDataStream(),
        AudioBytes = (int)stream.Length,
        Flags = BufferFlags.EndOfStream
    };
    stream.Close();

    var sourceVoice = new SourceVoice(device, waveFormat, true);

    // Adds a sample callback to check that they are working on source voices
    sourceVoice.BufferEnd += (context) => Console.WriteLine(" => event received: end of buffer");
    sourceVoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
    sourceVoice.Start();

    // Poll until the queue drains or escape is pressed, printing a dot
    // roughly every half second.
    int ticks = 0;
    while (sourceVoice.State.BuffersQueued > 0 && !IsKeyPressed(ConsoleKey.Escape))
    {
        if (ticks == 50)
        {
            Console.Write(".");
            Console.Out.Flush();
            ticks = 0;
        }
        Thread.Sleep(10);
        ticks++;
    }
    Console.WriteLine();

    sourceVoice.DestroyVoice();
    sourceVoice.Dispose();
    buffer.Stream.Dispose();
}
/// <summary>
/// Builds the single-shot and looped XAudio2 buffers over the given PCM data.
/// </summary>
/// <param name="format">Wave format of the data.</param>
/// <param name="buffer">PCM bytes backing both buffers.</param>
/// <param name="offset">Not used by this implementation.</param>
/// <param name="count">Number of valid bytes.</param>
/// <param name="loopStart">First sample of the loop region.</param>
/// <param name="loopLength">Length of the loop region in samples.</param>
private void Initialize(WaveFormat format, byte[] buffer, int offset, int count, int loopStart, int loopLength)
{
    _format = format;
    _dataStream = DataStream.Create<byte>(buffer, true, false);

    // Use the loopStart and loopLength also as the range
    // when playing this SoundEffect a single time / unlooped.
    _buffer = new AudioBuffer()
    {
        Stream = _dataStream,
        AudioBytes = count,
        Flags = BufferFlags.EndOfStream,
        PlayBegin = loopStart,
        PlayLength = loopLength,
        Context = new IntPtr(42),
    };

    // Same data, but repeating the loop region forever.
    _loopedBuffer = new AudioBuffer()
    {
        Stream = _dataStream,
        AudioBytes = count,
        Flags = BufferFlags.EndOfStream,
        LoopBegin = loopStart,
        LoopLength = loopLength,
        LoopCount = AudioBuffer.LoopInfinite,
        Context = new IntPtr(42),
    };
}
/// <summary>
/// Wraps the given bytes in a data stream and queues them on the voice.
/// </summary>
/// <param name="buffer">Sample bytes to submit.</param>
/// <param name="size">Not used by this implementation; the full array is submitted.</param>
public void SubmitBuffer(byte[] buffer, int size)
{
    Debug.Assert(m_dataStreams != null, "SourceVoice wasnt created with buffer support");

    var dataStream = DataStream.Create(buffer, true, false);

    // Track the stream so it can be disposed once playback has consumed it.
    m_dataStreams.Enqueue(dataStream);

    var audioBuffer = new AudioBuffer(dataStream) { Flags = BufferFlags.None };
    m_voice.SubmitSourceBuffer(audioBuffer, null);
}
/// <summary>Allocates the fixed set of reusable audio buffers.</summary>
private void CreateBuffers()
{
    buffers = new AudioBuffer[NumberOfBuffers];
    for (int n = 0; n < NumberOfBuffers; n++)
    {
        buffers[n] = new AudioBuffer();
    }
}
/// <summary>
/// Plays a sound file synchronously at the given volume, blocking until
/// the submitted buffer has fully drained.
/// </summary>
/// <param name="device">XAudio2 engine to play on.</param>
/// <param name="path">Path of the sound file.</param>
/// <param name="volume">Volume applied to the source voice.</param>
static void Playsound(XAudio2 device, string path, int volume/*, bool ambiance*/)
{
    var stream = new SoundStream(File.OpenRead(path));
    var waveformat = stream.Format;
    var buffer = new AudioBuffer
    {
        Stream = stream.ToDataStream(),
        AudioBytes = (int)stream.Length,
        Flags = BufferFlags.EndOfStream
    };
    stream.Close();

    var sourcevoice = new SourceVoice(device, waveformat, true);
    sourcevoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
    sourcevoice.SetVolume(volume);
    sourcevoice.Start();

    // BUG FIX: the original polled BuffersQueued in a tight loop with the
    // Thread.Sleep commented out (and an always-true `42 == 42` guard),
    // burning a full CPU core for the duration of playback. Sleep between
    // polls; the observable behavior (block until drained) is unchanged.
    while (sourcevoice.State.BuffersQueued > 0)
    {
        Thread.Sleep(10);
    }

    sourcevoice.DestroyVoice();
    sourcevoice.Dispose();
    buffer.Stream.Dispose();
}
/// <summary>
/// Queues the given sample bytes on the voice, tracking the backing stream.
/// </summary>
/// <param name="buffer">Sample bytes to submit.</param>
/// <param name="size">Not used by this implementation; the full array is submitted.</param>
public void SubmitBuffer(byte[] buffer, int size)
{
    var dataStream = DataStream.Create(buffer, true, false);

    // Track the stream so it can be disposed once playback has consumed it.
    m_dataStreams.Enqueue(dataStream);

    var audioBuffer = new AudioBuffer(dataStream) { Flags = BufferFlags.None };
    m_voice.SubmitSourceBuffer(audioBuffer, null);
}
// Rebuilds the XAudio2 buffer and voice after the managed audio buffer
// changed, pinning the raw sample array so native code streams it in place.
protected override void EndBufferChange()
{
    if (AudioBuffer != null)
    {
        if (xAudioBuffer == null) xAudioBuffer = new XAudioBuffer();

        // Pin the managed samples; the DataStream points straight at them.
        // NOTE(review): audioBufferHandle is overwritten without a visible
        // Free(), and sourceVoice is replaced without a visible Dispose —
        // assumes the matching Begin/teardown path (not in this chunk)
        // releases the old handle and voice; confirm to rule out leaks on
        // repeated buffer changes.
        audioBufferHandle = GCHandle.Alloc(AudioBuffer.RawBuffer, GCHandleType.Pinned);
        xAudioBuffer.Stream = new DataStream(audioBufferHandle.AddrOfPinnedObject(), AudioBuffer.SizeInBytes, true, true);
        xAudioBuffer.AudioBytes = (int)xAudioBuffer.Stream.Length;

        // Loop the whole buffer (length expressed in 16-bit samples).
        xAudioBuffer.LoopLength = AudioBuffer.RawBuffer.Length / 2;
        xAudioBuffer.LoopCount = XAudio2.MaximumLoopCount;

        sourceVoice = new SourceVoice(xAudio, waveFormat);
        sourceVoice.SubmitSourceBuffer(xAudioBuffer, null);
        sourceVoice.Start();
    }
}
/// <summary>
/// Drops the buffer references so they become eligible for collection.
/// </summary>
public void Dispose()
{
    XAudioBuffer = null;
    byteBuffer = null;
}