/// <summary>
/// Loads a sound file from the given resource.
/// </summary>
/// <param name="resource">The resource to load.</param>
/// <returns>A <see cref="CachedSoundFile"/> holding the decoded buffer, format and packet info.</returns>
public static async Task<CachedSoundFile> FromResourceAsync(ResourceLink resource)
{
    resource.EnsureNotNull(nameof(resource));

    CachedSoundFile result = new CachedSoundFile();
    using (Stream inStream = await resource.OpenInputStreamAsync())
    using (SDXM.SoundStream stream = new SDXM.SoundStream(inStream))
    {
        // Task.Run instead of Task.Factory.StartNew: StartNew without explicit
        // scheduler/options is a common pitfall; Task.Run has the intended defaults
        // for offloading this decode work to the thread pool.
        await Task.Run(() =>
        {
            // Read all data into the audio buffer
            SDXM.WaveFormat waveFormat = stream.Format;
            XA.AudioBuffer buffer = new XA.AudioBuffer
            {
                Stream = stream.ToDataStream(),
                AudioBytes = (int)stream.Length,
                Flags = XA.BufferFlags.EndOfStream
            };

            // Store members
            result.m_decodedPacketsInfo = stream.DecodedPacketsInfo;
            result.m_format = waveFormat;
            result.m_audioBuffer = buffer;
        });
    }
    return result;
}
/// <summary>
/// Creates an effect sound for the given file, reusing the cached stream and
/// buffer when the same file has already been loaded.
/// </summary>
/// <param name="filename">Path of the sound file to load.</param>
public EffectSound(string filename)
{
    // Fast path: share the stream and buffer of an already-loaded sound.
    lock (loadedSounds)
    {
        EffectSound cached;
        if (loadedSounds.TryGetValue(filename, out cached))
        {
            Stream = cached.Stream;
            Buffer = cached.Buffer;
            return;
        }
    }

    // Slow path: decode the file into an in-memory buffer. The sound stream
    // is closed right after ToDataStream() has produced the playable copy.
    using (var fileStream = File.OpenRead(filename))
    {
        Stream = new SoundStream(fileStream);
        Buffer = new AudioBuffer
        {
            Stream = Stream.ToDataStream(),
            AudioBytes = (int)Stream.Length,
            Flags = BufferFlags.EndOfStream
        };
        Stream.Close();
    }

    // Publish this instance so later loads of the same file reuse it.
    lock (loadedSounds)
    {
        loadedSounds[filename] = this;
    }
}
/// <summary>
/// Reads wave data and metadata (format, length, packet info) from the given stream.
/// </summary>
/// <param name="fileData">Stream containing the sound file data.</param>
private void TryLoadData(Stream fileData)
{
    var soundStream = new SoundStream(fileData);

    // Capture the format first; length and buffer are derived from the same stream.
    format = soundStream.Format;
    length = CalculateLengthInSeconds(format, (int)soundStream.Length);

    buffer = CreateAudioBuffer(soundStream.ToDataStream());
    decodedInfo = soundStream.DecodedPacketsInfo;
}
/// <summary>
/// Initializes the XAudio2 engine and loads the given sound file from the
/// Content directory into an audio buffer.
/// </summary>
/// <param name="fileName">File name relative to the "Content" folder.</param>
public Audio(String fileName)
{
    device = new XAudio2();
    masteringVoice = new MasteringVoice(device);

    // Scope the file stream with 'using' so the file handle is released even
    // if decoding throws; the buffer's data stream is an in-memory copy, so
    // the file is no longer needed afterwards. (Original leaked it on error.)
    using (var fileStream = File.OpenRead("Content/" + fileName))
    {
        stream = new SoundStream(fileStream);
        buffer = new AudioBuffer
        {
            Stream = stream.ToDataStream(),
            AudioBytes = (int)stream.Length,
            Flags = BufferFlags.EndOfStream
        };
        stream.Close();
    }
}
/// <summary>
/// Loads a sound into the given slot: wraps the source stream, builds the
/// audio buffer and creates a source voice matching the stream's format.
/// </summary>
/// <param name="index">Slot index to store the sound at.</param>
/// <param name="sourceStream">Stream containing the sound data.</param>
public void Add(int index, UnmanagedMemoryStream sourceStream)
{
    var sound = new SoundStream(sourceStream);
    streams[index] = sound;

    buffers[index] = new AudioBuffer
    {
        Stream = sound.ToDataStream(),
        AudioBytes = (int)sound.Length,
        Flags = BufferFlags.EndOfStream
    };

    voices[index] = new SourceVoice(audio, sound.Format);
}
/// <summary>
/// Adds the sound we want to play: wraps the stream, fills the audio buffer
/// and creates a source voice matching the stream's format, all at the given slot.
/// </summary>
/// <param name="index">Slot index to store the sound at.</param>
/// <param name="stream">Stream containing the sound data.</param>
public void Add(int index, UnmanagedMemoryStream stream)
{
    var sound = new SoundStream(stream);
    _soundStreams[index] = sound;

    _audioBuffers[index] = new AudioBuffer
    {
        Stream = sound.ToDataStream(),
        AudioBytes = (int)sound.Length,
        Flags = BufferFlags.EndOfStream
    };

    _sourceVoices[index] = new SourceVoice(_audio, sound.Format);
}
/// <summary>
/// Creates the XAudio2 engine with a mastering voice at half volume and
/// preallocates storage for the given number of sounds.
/// </summary>
/// <param name="sounds">Maximum number of sounds that can be registered.</param>
public SoundManager(int sounds)
{
    // Slot arrays are independent of the engine, so allocate them up front.
    _soundStreams = new SoundStream[sounds];
    _audioBuffers = new AudioBuffer[sounds];
    _sourceVoices = new SourceVoice[sounds];

    _audio = new XAudio2();
    _masteringVoice = new MasteringVoice(_audio);
    _masteringVoice.SetVolume(0.5f);
}
/// <summary>
/// Loads the given sound file into an audio buffer together with its wave
/// format and decoded packet info.
/// </summary>
/// <param name="soundfile">Path of the sound file to load.</param>
/// <returns>The filled audio buffer with metadata.</returns>
private AudioBufferAndMetaData GetBuffer(string soundfile)
{
    // Both streams can be released once ToDataStream() has produced the
    // in-memory copy held by the buffer; the original code leaked them.
    using (var nativefilestream = new NativeFileStream(
        soundfile,
        NativeFileMode.Open,
        NativeFileAccess.Read,
        NativeFileShare.Read))
    using (var soundstream = new SoundStream(nativefilestream))
    {
        return new AudioBufferAndMetaData
        {
            Stream = soundstream.ToDataStream(),
            AudioBytes = (int)soundstream.Length,
            Flags = BufferFlags.EndOfStream,
            WaveFormat = soundstream.Format,
            DecodedPacketsInfo = soundstream.DecodedPacketsInfo
        };
    }
}
/// <summary>
/// Constructor: decodes the given file and prepares a source voice plus a
/// single end-of-stream buffer for it on the given device.
/// </summary>
/// <param name="device">Device</param>
/// <param name="filename">Filename</param>
public SharpAudioVoice(SharpAudioDevice device, string filename)
{
    _stream = new SoundStream(File.OpenRead(filename));

    // The voice is created with the file's own wave format.
    _voice = new SourceVoice(device.Device, _stream.Format);

    _buffer = new AudioBuffer
    {
        Stream = _stream.ToDataStream(),
        AudioBytes = (int)_stream.Length,
        Flags = BufferFlags.EndOfStream
    };
}
/// <summary>
/// Loads a wave file completely into memory; the buffer loops forever when
/// the cue is marked loopable.
/// </summary>
/// <param name="cue">Cue definition controlling loop behavior.</param>
/// <param name="filename">Path of the wave file.</param>
public MyInMemoryWave(MyObjectBuilder_CueDefinition cue, string filename)
{
    m_stream = new SoundStream(File.OpenRead(filename));
    m_waveFormat = m_stream.Format;

    m_buffer = new AudioBuffer
    {
        Stream = m_stream.ToDataStream(),
        AudioBytes = (int)m_stream.Length,
        Flags = BufferFlags.None
    };

    // Loopable cues repeat indefinitely; all others keep the default loop count.
    if (cue.Loopable)
    {
        m_buffer.LoopCount = AudioBuffer.LoopInfinite;
    }

    // The buffer holds an in-memory copy, so the stream can be closed now.
    m_stream.Close();
}
/// <summary>
/// Function to play an mp3.
/// </summary>
/// <param name="path">The path to the mp3 file.</param>
public async Task PlayMp3Async(string path)
{
    // If a playback task is already running, await it instead of starting a
    // second one. NOTE(review): this check-then-assign on _currentPlayback is
    // not thread-safe — two concurrent callers could both see null; confirm
    // this method is only called from a single thread (e.g. the UI thread).
    if (_currentPlayback != null)
    {
        await _currentPlayback;
        return;
    }

    _tokenSource = new CancellationTokenSource();

    _currentPlayback = Task.Run(() =>
    {
        // FileShare.None: the file is opened exclusively for the duration of the decode.
        var stream = new Mm.SoundStream(File.Open(path, FileMode.Open, FileAccess.Read, FileShare.None));
        Mm.WaveFormat format = stream.Format;
        var buffer = new Xa.AudioBuffer
        {
            Stream = stream.ToDataStream(),
            AudioBytes = (int)stream.Length,
            Flags = Xa.BufferFlags.EndOfStream
        };
        // ToDataStream() produced the playable copy, so the sound stream is
        // closed before playback starts. DecodedPacketsInfo is still read
        // below after Close() — presumably it stays valid; same pattern as
        // the official SharpDX sample, but worth confirming.
        stream.Close();

        var source = new Xa.SourceVoice(_audio, format);
        source.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
        source.Start();

        try
        {
            // Poll every 10 ms until the buffer drains, cancellation is
            // requested, or the voice is disposed from elsewhere.
            while ((!_tokenSource.Token.IsCancellationRequested)
                && (!source.IsDisposed)
                && (source.State.BuffersQueued > 0))
            {
                Thread.Sleep(10);
            }

            source.Stop();
        }
        finally
        {
            // Release native resources regardless of how the loop exited.
            buffer.Stream?.Dispose();
            source.Dispose();
            stream?.Dispose();
        }
    }, _tokenSource.Token);

    await _currentPlayback;
    _currentPlayback = null;
}
/// <summary>
/// Reads a wave file from the file system into memory; the buffer loops
/// forever when the cue is marked loopable.
/// </summary>
/// <param name="cue">Sound data controlling loop behavior.</param>
/// <param name="path">Path of the wave file.</param>
public MyInMemoryWave(MySoundData cue, string path)
{
    using (var stream = MyFileSystem.OpenRead(path))
    {
        m_stream = new SoundStream(stream);
        m_waveFormat = m_stream.Format;

        m_buffer = new AudioBuffer
        {
            Stream = m_stream.ToDataStream(),
            AudioBytes = (int)m_stream.Length,
            Flags = BufferFlags.None
        };

        // Loopable cues repeat indefinitely.
        if (cue.Loopable)
        {
            m_buffer.LoopCount = AudioBuffer.LoopInfinite;
        }

        // The buffer holds an in-memory copy, so the stream can be closed now.
        m_stream.Close();
    }
}
/// <summary>
/// Load the sound data from the sound-file: opens the file read-only,
/// decodes it into an audio buffer and raises the loaded notification.
/// </summary>
public override void Load()
{
    var nativeFileStream = new NativeFileStream(
        File.Path,
        NativeFileMode.Open,
        NativeFileAccess.Read,
        NativeFileShare.Read);

    m_soundStream = new SoundStream(nativeFileStream);
    m_waveFormat = m_soundStream.Format;

    m_buffer = new AudioBuffer
    {
        Stream = m_soundStream.ToDataStream(),
        AudioBytes = (int)m_soundStream.Length,
        Flags = BufferFlags.EndOfStream
    };

    // Notify listeners that loading has completed.
    OnLoaded();
}
/// <summary>
/// Creates the XAudio2 engine and loads the given sound effect file into an
/// audio buffer ready for playback.
/// </summary>
/// <param name="soundFxPath">Path of the sound effect file.</param>
public SoundEffect(string soundFxPath)
{
    _xaudio = new XAudio2();

    // A mastering voice must be created for the engine to produce output;
    // the wrapper reference itself is not used afterwards.
    var masteringVoice = new MasteringVoice(_xaudio);

    var fileStream = new NativeFileStream(
        soundFxPath,
        NativeFileMode.Open,
        NativeFileAccess.Read,
        NativeFileShare.Read);

    _soundstream = new SoundStream(fileStream);
    _waveFormat = _soundstream.Format;

    _buffer = new AudioBuffer
    {
        Stream = _soundstream.ToDataStream(),
        AudioBytes = (int)_soundstream.Length,
        Flags = BufferFlags.EndOfStream
    };
}
/// <summary>
/// Play a sound file. Supported format are Wav(pcm+adpcm) and XWMA
/// </summary>
/// <param name="device">The device.</param>
/// <param name="text">Text to display</param>
/// <param name="fileName">Name of the file.</param>
static void PLaySoundFile(XAudio2 device, string text, string fileName)
{
    Console.WriteLine("{0} => {1} (Press esc to skip)", text, fileName);

    var stream = new SoundStream(File.OpenRead(fileName));
    var waveFormat = stream.Format;
    var buffer = new AudioBuffer
    {
        Stream = stream.ToDataStream(),
        AudioBytes = (int) stream.Length,
        Flags = BufferFlags.EndOfStream
    };
    stream.Close();

    var sourceVoice = new SourceVoice(device, waveFormat, true);

    // Adds a sample callback to check that they are working on source voices
    sourceVoice.BufferEnd += (context) => Console.WriteLine(" => event received: end of buffer");
    sourceVoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
    sourceVoice.Start();

    // Poll until the queue drains or the user presses escape; a progress dot
    // is printed every 50 polls (roughly twice per second at 10 ms per poll).
    int count = 0;
    while (sourceVoice.State.BuffersQueued > 0 && !IsKeyPressed(ConsoleKey.Escape))
    {
        if (count == 50)
        {
            Console.Write(".");
            Console.Out.Flush();
            count = 0;
        }
        Thread.Sleep(10);
        count++;
    }
    Console.WriteLine();

    // Tear down the voice and release the buffer's data stream.
    sourceVoice.DestroyVoice();
    sourceVoice.Dispose();
    buffer.Stream.Dispose();
}
/// <summary>
/// Decodes the given stream and creates playback buffers from it, deriving
/// the sample length from the byte length and the frame size.
/// </summary>
/// <param name="s">Stream containing the sound data.</param>
private void PlatformLoadAudioStream(Stream s)
{
    var soundStream = new SoundStream(s);
    var dataStream = soundStream.ToDataStream();

    // Bytes per frame = channels * (bits per sample / 8).
    int bytesPerFrame = (soundStream.Format.Channels * soundStream.Format.BitsPerSample) / 8;
    int sampleLength = (int)(dataStream.Length / bytesPerFrame);

    CreateBuffers(soundStream.Format, dataStream, 0, sampleLength);
}
/// <summary>
/// Plays the note sound asset at the given path. Any playback error is
/// swallowed so a missing or unreadable asset never crashes the caller.
/// </summary>
/// <param name="AssetPath">Path of the sound asset to play.</param>
public static void PlayNote(string AssetPath)
{
    // Special case: "Oud Maquam" is normalized (space removed, 'M' lowered)
    // to match the actual asset name.
    if (AssetPath.Equals("Oud Maquam"))
    {
        AssetPath = AssetPath.Replace(" ", "").Replace("M", "m");
    }

    try
    {
        XAudio2 xAudio = new XAudio2();
        var masteringVoice = new MasteringVoice(xAudio);

        var nativeFileStream = new NativeFileStream(AssetPath, NativeFileMode.Open, NativeFileAccess.Read, NativeFileShare.Read);
        SoundStream stream = new SoundStream(nativeFileStream);
        var waveFormat = stream.Format;
        AudioBuffer buffer = new AudioBuffer
        {
            Stream = stream.ToDataStream(),
            AudioBytes = (int)stream.Length,
            Flags = BufferFlags.EndOfStream
        };

        var sourceVoice = new SourceVoice(xAudio, waveFormat, true);
        sourceVoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
        sourceVoice.Start();
    }
    catch (Exception)
    {
        // Deliberate best-effort: failures silently play nothing.
        // NOTE(review): consider logging the exception instead of discarding it.
    }
}
/// <summary>
/// Opens the specified stream.
/// </summary>
///
/// <param name="stream">Stream to open.</param>
///
/// <returns>Returns number of frames found in the specified stream.</returns>
///
public int Open(SoundStream stream)
{
    var format = stream.Format;

    this.waveStream = stream;
    this.channels = format.Channels;
    this.blockAlign = format.BlockAlign;
    this.sampleRate = format.SampleRate;
    this.bitsPerSample = format.BitsPerSample;
    this.averageBitsPerSecond = format.AverageBytesPerSecond * 8;

    // Derived quantities: frames from total bytes / frame size, samples
    // across all channels, and duration in milliseconds.
    this.numberOfFrames = (int)stream.Length / blockAlign;
    this.numberOfSamples = numberOfFrames * Channels;
    this.duration = (int)(numberOfFrames / (double)sampleRate * 1000.0);

    return numberOfFrames;
}
/// <summary>
/// Plays the bundled one-second click wave through XAudio2.
/// NOTE(review): a new engine, mastering voice and source voice are created
/// on every click and never disposed — confirm whether they should be cached.
/// </summary>
private void btnXAudio2_Click(object sender, RoutedEventArgs e)
{
    var xaudio = new XAudio2();
    var masteringVoice = new MasteringVoice(xaudio);

    var nativefilestream = new NativeFileStream(
        @"Assets\Clk_1Sec1.wav",
        NativeFileMode.Open,
        NativeFileAccess.Read,
        NativeFileShare.Read);

    var soundstream = new SoundStream(nativefilestream);
    var buffer = new AudioBuffer
    {
        Stream = soundstream.ToDataStream(),
        AudioBytes = (int)soundstream.Length,
        Flags = BufferFlags.EndOfStream,
    };

    var sourceVoice = new SourceVoice(xaudio, soundstream.Format, true);

    // There is also support for shifting the frequency.
    sourceVoice.SetFrequencyRatio(1.0f);
    sourceVoice.SubmitSourceBuffer(buffer, soundstream.DecodedPacketsInfo);
    sourceVoice.Start();
}
/// <summary>
/// Decodes the given stream and creates playback buffers covering the whole
/// decoded data stream.
/// </summary>
/// <param name="s">Stream containing the sound data.</param>
private void PlatformLoadAudioStream(Stream s)
{
    var soundStream = new SoundStream(s);
    var dataStream = soundStream.ToDataStream();
    var byteCount = (int)dataStream.Length;

    CreateBuffers(soundStream.Format, dataStream, 0, byteCount);
}
/// <summary>
/// Wraps the given sound stream in an audio buffer together with its wave
/// format and decoded packet info.
/// </summary>
/// <param name="soundfile">Stream containing the sound file data.</param>
/// <returns>The filled audio buffer with metadata.</returns>
private static AudioBufferAndMetaData GetBuffer(Stream soundfile)
{
    var soundstream = new SoundStream(soundfile);

    return new AudioBufferAndMetaData
    {
        Stream = soundstream.ToDataStream(),
        AudioBytes = (int)soundstream.Length,
        Flags = BufferFlags.EndOfStream,
        WaveFormat = soundstream.Format,
        DecodedPacketsInfo = soundstream.DecodedPacketsInfo
    };
}
/// <summary>
/// Plays a sound file on the given device at the given volume and blocks
/// until playback has finished.
/// </summary>
/// <param name="device">XAudio2 device used to create the source voice.</param>
/// <param name="path">Path of the sound file.</param>
/// <param name="volume">Playback volume passed to SetVolume.</param>
static void Playsound(XAudio2 device, string path, int volume)
{
    var stream = new SoundStream(File.OpenRead(path));
    var waveformat = stream.Format;
    var buffer = new AudioBuffer
    {
        Stream = stream.ToDataStream(),
        AudioBytes = (int)stream.Length,
        Flags = BufferFlags.EndOfStream
    };
    stream.Close();

    var sourcevoice = new SourceVoice(device, waveformat, true);
    sourcevoice.SubmitSourceBuffer(buffer, stream.DecodedPacketsInfo);
    sourcevoice.SetVolume(volume);
    sourcevoice.Start();

    // Wait for the buffer to drain. The original spun in a tight loop with
    // Thread.Sleep commented out (and an always-true "42 == 42" condition),
    // burning a full CPU core; sleeping 10 ms between polls fixes that
    // without changing the outcome. Fully qualified to avoid relying on a
    // 'using System.Threading;' directive being present.
    while (sourcevoice.State.BuffersQueued > 0)
    {
        System.Threading.Thread.Sleep(10);
    }

    sourcevoice.DestroyVoice();
    sourcevoice.Dispose();
    buffer.Stream.Dispose();
}