/// <summary>
/// Plays a tone of the given frequency for the given duration.
/// Must marshal to the UI thread because DispatcherTimer and XNA's
/// FrameworkDispatcher are UI-thread-bound.
/// </summary>
/// <param name="tone">Tone (frequency) to play.</param>
/// <param name="duration">How long the tone should sound.</param>
public void PlaySound(Tone tone, TimeSpan duration)
{
    Deployment.Current.Dispatcher.BeginInvoke(() =>
    {
        // Lazily create the timer that pumps FrameworkDispatcher (~30 fps),
        // which XNA requires for DynamicSoundEffectInstance playback.
        if (_timer == null)
        {
            _timer = new DispatcherTimer { Interval = TimeSpan.FromMilliseconds(33) };
            _timer.Tick += delegate
            {
                // Best-effort pump; ignore dispatcher errors (e.g. during shutdown).
                try { FrameworkDispatcher.Update(); }
                catch { }
            };
        }

        if (_timer.IsEnabled)
        {
            _timer.Stop();
        }

        _timeLeft = duration;
        FrameworkDispatcher.Update();
        _frequency = tone;

        // FIX: release any instance left over from a previous call. The
        // original leaked the old DynamicSoundEffectInstance and left its
        // BufferNeeded handler subscribed on every PlaySound call.
        if (_dynamicSound != null)
        {
            _dynamicSound.BufferNeeded -= dynamicSound_BufferNeeded;
            _dynamicSound.Stop();
            _dynamicSound.Dispose();
        }

        _dynamicSound = new DynamicSoundEffectInstance(SampleRate, AudioChannels.Mono);
        _dynamicSound.BufferNeeded += dynamicSound_BufferNeeded;
        _dynamicSound.Play();

        // One second of audio per submitted buffer.
        _bufferSize = _dynamicSound.GetSampleSizeInBytes(TimeSpan.FromSeconds(1));
        _soundBuffer = new byte[_bufferSize];
        _timer.Start();
    });
}
/* ******************************************************
 *** BackgroundMusicPlayer
 *** Kevin Anderson
 ******************************************************
 *** Required constructor for BackgroundMusicPlayer.
 *** Parses the RIFF/WAVE header, reads the PCM data
 *** into memory, and prepares a DynamicSoundEffectInstance
 *** that streams it in 100 ms buffers.
 *** Method Inputs:
 ***   waveFileStream - Music file.
 *** Return value:
 ***   NA
 ******************************************************
 *** 11/13/2015
 ****************************************************** */
public BackgroundMusicPlayer(System.IO.Stream waveFileStream)
{
    this.waveFileStream = waveFileStream;
    reader = new BinaryReader(this.waveFileStream);

    // RIFF header: 'RIFF' <file size> 'WAVE'.
    chunkID = reader.ReadInt32();
    fileSize = reader.ReadInt32();
    riffType = reader.ReadInt32();

    // 'fmt ' chunk describing the PCM encoding.
    fmtID = reader.ReadInt32();
    fmtSize = reader.ReadInt32();
    fmtCode = reader.ReadInt16();
    channels = reader.ReadInt16();
    sampleRate = reader.ReadInt32();
    fmtAvgBPS = reader.ReadInt32();
    fmtBlockAlign = reader.ReadInt16();
    bitDepth = reader.ReadInt16();

    // FIX: the original only handled fmtSize == 18. Any fmt chunk longer
    // than the 16 mandatory bytes (18 for cbSize-only, 40 for
    // WAVE_FORMAT_EXTENSIBLE) carries a 2-byte cbSize field followed by
    // that many extra bytes, which we skip.
    if (fmtSize >= 18)
    {
        // Read any extra values.
        fmtExtraSize = reader.ReadInt16();
        reader.ReadBytes(fmtExtraSize);
    }

    // 'data' chunk: the raw PCM samples, read fully into memory.
    dataID = reader.ReadInt32();
    dataSize = reader.ReadInt32();
    byteArray = reader.ReadBytes(dataSize);

    dynamicSound = new DynamicSoundEffectInstance(sampleRate, (AudioChannels)channels);
    // Stream the audio in 100 ms chunks.
    count = dynamicSound.GetSampleSizeInBytes(TimeSpan.FromMilliseconds(100));
    dynamicSound.BufferNeeded += new EventHandler<EventArgs>(DynamicSound_BufferNeeded);
}
/// <summary>
/// Restarts output and keeps at least three buffers of the pending sample
/// pattern queued ahead of the playback cursor.
/// </summary>
private void TriggerWavePlay()
{
    // Each source sample is repeated sampleFactor times to stretch it to
    // the output rate.
    int sampleFactor = MultiplySamples();

    // Restart playback from a clean buffer queue.
    Channel_Out.Stop();
    Channel_Out.Play();

    // FIX: with no pending samples the loop below would never submit a
    // buffer, so PendingBufferCount could never reach 3 and the while loop
    // would spin forever. (Also removed the unused sampleLength local.)
    if (pendingSamples.Count == 0)
    {
        return;
    }

    while (Channel_Out.PendingBufferCount < 3)
    {
        // Ten repetitions of the pending pattern, each sample widened by
        // sampleFactor.
        waveBuffer = new short[10 * pendingSamples.Count * sampleFactor];
        for (int i = 0; i < 10 * pendingSamples.Count; i++)
        {
            for (int j = 0; j < sampleFactor; j++)
            {
                waveBuffer[sampleFactor * i + j] = pendingSamples[i % pendingSamples.Count];
            }
            wavePatternIndex++;
        }

        if (waveBuffer.Length > 0)
        {
            // 16-bit samples -> little-endian byte stream for SubmitBuffer.
            bBuffer = waveBuffer.SelectMany(x => BitConverter.GetBytes(x)).ToArray();
            Channel_Out.SubmitBuffer(bBuffer);
            bytesWritten += bBuffer.Length;
        }
    }

    // Stop requesting restarts once the requested amount has been queued.
    if (bytesWritten > bytesToWrite)
    {
        Restart = false;
    }
}
/// <summary>
/// Creates the XNA sound backend.
/// </summary>
/// <param name="sampleRate">Output sample rate in Hz. Must be a multiple of
/// 50 so one buffer holds exactly one 20 ms frame.</param>
/// <param name="bufferCount">Minimum number of fill buffers to preallocate.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <paramref name="sampleRate"/> is not a multiple of 50.
/// </exception>
public XnaSound(int sampleRate, int bufferCount)
{
    const int frameRate = 50;    // 50 frames/s -> 20 ms per buffer
    const int channelCount = 2;  // stereo, 16-bit samples

    if ((sampleRate % frameRate) != 0)
    {
        // FIX: use nameof instead of a string literal for the parameter
        // name (refactor-safe, identical runtime message).
        throw new ArgumentOutOfRangeException(nameof(sampleRate), "Sample rate must be a multiple of 50!");
    }

    m_sampleRate = sampleRate;
    // Bytes per 20 ms frame: samples * channels * 2 bytes per sample.
    m_bufferLength = (sampleRate / frameRate) * channelCount * 2;

    m_soundEffect = new DynamicSoundEffectInstance(
        sampleRate,
        channelCount == 1 ? AudioChannels.Mono : AudioChannels.Stereo);

    // Scale the requested buffer count by XNA's own 20 ms buffer size,
    // but never drop below the caller's request.
    var needSize = m_soundEffect.GetSampleSizeInBytes(TimeSpan.FromMilliseconds(20));
    var bufferCountReal = needSize * bufferCount / m_bufferLength;
    if (bufferCountReal < bufferCount)
    {
        bufferCountReal = bufferCount;
    }

    for (var i = 0; i < bufferCountReal; i++)
    {
        m_fillQueue.Enqueue(new byte[m_bufferLength]);
    }

    m_soundEffect.BufferNeeded += SoundEffect_OnBufferNeeded;
    m_soundEffect.Play();
}
/// <summary>
/// Creates a new instance.
/// The input stream stays open for streaming as long as this StreamingWave
/// exists; its Dispose method is called from this class's Dispose.
/// </summary>
/// <param name="input">Stream of the wave file.</param>
/// <param name="bufferDuration">Amount of playback time to buffer.</param>
public StreamingWave(Stream input, TimeSpan bufferDuration)
{
    if (input == null)
    {
        throw new ArgumentNullException("input");
    }

    this.input = input;
    reader = new BinaryReader(input);

    // Consume everything up to (but not including) the payload of the
    // 'data' chunk.
    riffChunk = RiffChunk.ReadFrom(reader);
    formatChunk = WaveFormatChunk.ReadFrom(reader);
    dataChunkHeader = ChunkHeader.ReadFrom(reader);

    // Remember where the 'data' chunk payload begins.
    dataOffset = input.Position;

    // Output format mirrors the file's format chunk.
    int sampleRate = (int)formatChunk.SampleRate;
    AudioChannels channels = (AudioChannels)formatChunk.Channels;
    dynamicSound = new DynamicSoundEffectInstance(sampleRate, channels);
    dynamicSound.BufferNeeded += new EventHandler<EventArgs>(OnDynamicSoundBufferNeeded);

    // One buffer covers bufferDuration of playback.
    bufferSize = dynamicSound.GetSampleSizeInBytes(bufferDuration);
    buffer = new byte[bufferSize];

    readDataAsyncCaller = new ReadDataAsyncCaller(ReadData);
}
/// <summary>
/// Sets up 22.05 kHz mono playback fed by the emulator's audio filter.
/// </summary>
private void InitializeAudio()
{
    dynamicSound = new DynamicSoundEffectInstance(22050, AudioChannels.Mono);

    // Scratch buffer sized to 250 ms of audio.
    soundBuffer = new byte[dynamicSound.GetSampleSizeInBytes(TimeSpan.FromMilliseconds(250))];

    // Audio is pushed by the emulator's filter (BufferReady) rather than
    // pulled via the instance's BufferNeeded event.
    emulator.Mikey.AudioFilter.BufferReady +=
        new EventHandler<BufferEventArgs>(OnAudioFilterBufferReady);

    dynamicSound.Play();
}
/// <summary>
/// Creates a mono audio source with a half-second sample buffer.
/// </summary>
public AudioSource()
{
    DSEI = new DynamicSoundEffectInstance(SampleRate, AudioChannels.Mono);

    // Each submitted buffer carries 500 ms of audio.
    BufferSize = DSEI.GetSampleSizeInBytes(TimeSpan.FromMilliseconds(500));
    Buffer = new byte[BufferSize];

    DSEI.Volume = 0.4f;
    DSEI.IsLooped = false;
}
// Wraps an already-open VorbisReader; the output format mirrors the stream.
OggSong(VorbisReader _reader)
{
    reader = _reader;
    effect = new DynamicSoundEffectInstance(reader.SampleRate, (AudioChannels)reader.Channels);

    // 500 ms of 16-bit PCM per buffer; the float buffer holds one entry
    // per 16-bit sample slot (two bytes each).
    buffer = new byte[effect.GetSampleSizeInBytes(TimeSpan.FromMilliseconds(500))];
    nvBuffer = new float[buffer.Length / 2];

    // Wake the helper thread whenever XNA asks for more data.
    effect.BufferNeeded += (s, e) => needBufferHandle.Set();
}
// Opens the Ogg file and prepares streaming playback matching its format.
public OggSong(string oggFile)
{
    reader = new VorbisReader(oggFile);
    sound = new DynamicSoundEffectInstance(reader.SampleRate, (AudioChannels)reader.Channels);

    // 500 ms of 16-bit PCM per buffer; the float buffer holds one entry
    // per 16-bit sample slot (two bytes each).
    buffer = new byte[sound.GetSampleSizeInBytes(TimeSpan.FromMilliseconds(500))];
    nvBuffer = new float[buffer.Length / 2];

    // Wake the helper thread whenever XNA asks for more data.
    sound.BufferNeeded += (s, e) => needBufferHandle.Set();
}
/// <summary>
/// Constructor. Wires up 100 ms microphone capture and a matching mono
/// playback instance.
/// </summary>
public XnaAudio()
{
    // Capture side: raise BufferReady for every 100 ms of recorded audio.
    microphone.BufferDuration = TimeSpan.FromMilliseconds(100);
    microphone.BufferReady += new EventHandler<EventArgs>(microphone_BufferReady);

    // Playback side: mono stream at the microphone's native sample rate.
    playback = new DynamicSoundEffectInstance(microphone.SampleRate, AudioChannels.Mono);
    playback.BufferNeeded += GetSamples;

    // Bytes in one 100 ms chunk — matches the capture buffer duration.
    sampleSize = playback.GetSampleSizeInBytes(TimeSpan.FromMilliseconds(100));
}
// Verifies GetSampleSizeInBytes for zero, positive, negative, and
// post-dispose durations (16-bit PCM: bytes = rate * channels * 2).
public void GetSampleSizeInBytes()
{
    var monoInstance = new DynamicSoundEffectInstance(48000, AudioChannels.Mono);
    var stereoInstance = new DynamicSoundEffectInstance(22050, AudioChannels.Stereo);

    // Zero length
    Assert.AreEqual(0, monoInstance.GetSampleSizeInBytes(TimeSpan.Zero));
    Assert.AreEqual(0, stereoInstance.GetSampleSizeInBytes(TimeSpan.Zero));

    // Nonzero length: 48000 * 1 * 2 and 22050 * 2 * 2
    Assert.AreEqual(96000, monoInstance.GetSampleSizeInBytes(TimeSpan.FromSeconds(1)));
    Assert.AreEqual(88200, stereoInstance.GetSampleSizeInBytes(TimeSpan.FromSeconds(1)));

    // Negative length
    Assert.Throws<ArgumentOutOfRangeException>(
        () => monoInstance.GetSampleSizeInBytes(TimeSpan.FromSeconds(-1)));

    // Disposed
    monoInstance.Dispose();
    Assert.Throws<ObjectDisposedException>(
        () => monoInstance.GetSampleSizeInBytes(TimeSpan.Zero));
    stereoInstance.Dispose();
}
/// <summary>
/// Opens a VGM file and prepares 44.1 kHz stereo streamed playback, then
/// initializes the emulated sound chips from the VGM header clocks.
/// </summary>
/// <param name="file">Path of the VGM file to open.</param>
public VGMSong(string file)
{
    // FIX: use the AudioChannels enum member instead of casting the magic
    // number 2 (identical value, clearer intent).
    _instance = new DynamicSoundEffectInstance(44100, AudioChannels.Stereo);

    // 150 ms of 16-bit PCM per buffer; _intBuffer has one entry per
    // 16-bit sample slot (two bytes each).
    _buffer = new byte[_instance.GetSampleSizeInBytes(TimeSpan.FromMilliseconds(150))];
    _intBuffer = new int[_buffer.Length / 2];
    _instance.BufferNeeded += StreamVGM;

    OpenVGMFile(file);

    // Chip clocks come from the VGM header; output rate matches playback.
    int Sound_Rate = 44100;
    int Clock_NTSC = (int)_VGMHead.lngHzYM2612;
    _chip.Initialize(Clock_NTSC, Sound_Rate);
    _psg.Initialize(_VGMHead.lngHzPSG);
}
// BufferNeeded handler: fills a one-second buffer with a sine tone derived
// from _currentToneFrequency and submits it to the dynamic sound instance.
private void GetSoundBuffer(object sender, EventArgs e)
{
    var buffer = new byte[_dynamicSound.GetSampleSizeInBytes(TimeSpan.FromSeconds(1))];
    // Phase step per byte index (48000 Hz, 2 bytes per sample).
    // NOTE(review): a full sine period is 2*PI; with PI here the phase
    // advances PI*f/48000 per sample, which produces a tone at half the
    // nominal frequency — confirm against the intended pitch.
    const double sampleSize = 48000 * 2;
    const double t = (Math.PI) / sampleSize;
    // Step by 2 bytes = one 16-bit sample per iteration.
    for (long i = 0; (i + 1) < buffer.Length; i += 2)
    {
        var theta = i * t * _currentToneFrequency;
        // Sample value truncated to an integer in [-64, 64].
        float amplitude = (int)(Math.Sin((float)theta) * 64);
        // NOTE(review): buffer was just allocated, so oldAmplitude is always
        // 0 and this average merely halves the amplitude — presumably a
        // leftover from mixing into a reused buffer; verify before changing.
        var oldAmplitude = (float)((buffer[i] << 8) | buffer[i + 1]);
        amplitude = (amplitude + oldAmplitude) * 0.5f;
        var data = (int)amplitude;
        // Bytes are written high-byte-first.
        // NOTE(review): XNA's SubmitBuffer expects little-endian 16-bit PCM;
        // confirm this byte order is intentional.
        buffer[i] = (byte)(data >> 8);
        buffer[i + 1] = (byte)data;
    }
    _dynamicSound.SubmitBuffer(buffer);
}
/// <summary>
/// Load a wav file into the stream to be played.
/// Parses the RIFF/WAVE header, copies the PCM data into byteArray, and
/// sets up a DynamicSoundEffectInstance that streams it in 10 s chunks.
/// </summary>
/// <param name="p">Path of the wave file to load.</param>
public void LoadWavFromFileToStream(string p)
{
    string file = p;

    // FIX: the original never closed the file; the using statements dispose
    // both the reader and the underlying stream once the data is in memory.
    using (System.IO.Stream waveFileStream = File.OpenRead(file)) //TitleContainer.OpenStream(file);
    using (BinaryReader reader = new BinaryReader(waveFileStream))
    {
        // RIFF header: 'RIFF' <file size> 'WAVE'.
        int chunkID = reader.ReadInt32();
        int fileSize = reader.ReadInt32();
        int riffType = reader.ReadInt32();

        // 'fmt ' chunk describing the PCM encoding.
        int fmtID = reader.ReadInt32();
        int fmtSize = reader.ReadInt32();
        int fmtCode = reader.ReadInt16();
        int channels = reader.ReadInt16();
        int sampleRate = reader.ReadInt32();
        int fmtAvgBPS = reader.ReadInt32();
        int fmtBlockAlign = reader.ReadInt16();
        int bitDepth = reader.ReadInt16();

        // FIX: handle any fmt chunk longer than the 16 mandatory bytes
        // (18 for cbSize-only, 40 for WAVE_FORMAT_EXTENSIBLE), not just 18.
        if (fmtSize >= 18)
        {
            // Read any extra values
            int fmtExtraSize = reader.ReadInt16();
            reader.ReadBytes(fmtExtraSize);
        }

        // 'data' chunk: raw PCM samples, read fully into memory.
        int dataID = reader.ReadInt32();
        int dataSize = reader.ReadInt32();
        byteArray = reader.ReadBytes(dataSize);

        dynamicSound = new DynamicSoundEffectInstance(sampleRate, (AudioChannels)channels);
        // 10 s of audio per streamed buffer.
        count = dynamicSound.GetSampleSizeInBytes(TimeSpan.FromMilliseconds(10000));
        dynamicSound.BufferNeeded += new EventHandler<EventArgs>(DynamicSound_BufferNeeded);
    }

    this.currentSong = new Song(p);
}
/// <summary>
/// Load the song from the path so that we can stream it.
/// Parses the RIFF/WAVE header, copies the PCM data into byteArray, and
/// prepares a DynamicSoundEffectInstance streaming it in 100 ms chunks.
/// </summary>
/// <returns>The configured DynamicSoundEffectInstance.</returns>
public DynamicSoundEffectInstance loadSongIntoStream()
{
    // FIX: the original never closed the stream; the using statements
    // dispose the reader and the stream once the data is in memory.
    using (System.IO.Stream waveFileStream = TitleContainer.OpenStream(this.fileLocation))
    using (BinaryReader reader = new BinaryReader(waveFileStream))
    {
        // RIFF header: 'RIFF' <file size> 'WAVE'.
        int chunkID = reader.ReadInt32();
        int fileSize = reader.ReadInt32();
        int riffType = reader.ReadInt32();

        // 'fmt ' chunk describing the PCM encoding.
        int fmtID = reader.ReadInt32();
        int fmtSize = reader.ReadInt32();
        int fmtCode = reader.ReadInt16();
        int channels = reader.ReadInt16();
        int sampleRate = reader.ReadInt32();
        int fmtAvgBPS = reader.ReadInt32();
        int fmtBlockAlign = reader.ReadInt16();
        int bitDepth = reader.ReadInt16();

        // FIX: handle any fmt chunk longer than the 16 mandatory bytes
        // (18 for cbSize-only, 40 for WAVE_FORMAT_EXTENSIBLE), not just 18.
        if (fmtSize >= 18)
        {
            // Read any extra values
            int fmtExtraSize = reader.ReadInt16();
            reader.ReadBytes(fmtExtraSize);
        }

        // 'data' chunk: raw PCM samples, read fully into memory.
        int dataID = reader.ReadInt32();
        int dataSize = reader.ReadInt32();
        byteArray = reader.ReadBytes(dataSize);

        dynamicSound = new DynamicSoundEffectInstance(sampleRate, (AudioChannels)channels);
        // Stream the audio in 100 ms chunks.
        count = dynamicSound.GetSampleSizeInBytes(TimeSpan.FromMilliseconds(100));
        bufferHandler = new EventHandler<EventArgs>(DynamicSound_BufferNeeded);
        dynamicSound.BufferNeeded += bufferHandler;
    }

    return (this.dynamicSound);
}