/// <summary>
/// Creates the IO driver for the given graphics device and immediately
/// starts the audio output: a mono dynamic sound instance running at
/// 15360 Hz (the BytePusher VM's fixed sample rate).
/// </summary>
/// <param name="gd">Graphics device used for rendering output.</param>
public XnaBytePusherIODriver(GraphicsDevice gd)
{
    this.gd = gd;

    // Start the audio device right away so the instance is ready to
    // accept submitted buffers as soon as the VM produces samples.
    soundEffect = new DynamicSoundEffectInstance(15360, AudioChannels.Mono);
    soundEffect.Play();
}
/// <summary>
/// Stress-tests disposal of the audio stack: verifies that disposing one
/// engine does not affect another live engine, that sounds/instances can be
/// disposed in any order, and that disposing the engine cascades disposal
/// and stop to every sound object created from it.
/// </summary>
public void TestDispose()
{
    // Dispose one engine up-front to check there are no Dispose problems
    // with several cross-disposed engine instances.
    var crossDisposedEngine = AudioEngineFactory.NewAudioEngine();
    var engine = AudioEngineFactory.NewAudioEngine();
    crossDisposedEngine.Dispose();

    // Create some SoundEffects: one kept alive, one disposed immediately.
    SoundEffect soundEffect;
    using (var wavStream = AssetManager.FileProvider.OpenStream("EffectBip", VirtualFileMode.Open, VirtualFileAccess.Read))
    {
        soundEffect = SoundEffect.Load(engine, wavStream);
    }
    SoundEffect dispSoundEffect;
    using (var wavStream = AssetManager.FileProvider.OpenStream("EffectBip", VirtualFileMode.Open, VirtualFileAccess.Read))
    {
        dispSoundEffect = SoundEffect.Load(engine, wavStream);
    }
    dispSoundEffect.Dispose();

    // Two instances of the live effect; one disposed before playback.
    var soundEffectInstance = soundEffect.CreateInstance();
    var dispInstance = soundEffect.CreateInstance();
    dispInstance.Dispose();

    // Create some SoundMusics; the second is disposed immediately.
    var soundMusic1 = SoundMusic.Load(engine, AssetManager.FileProvider.OpenStream("MusicBip", VirtualFileMode.Open, VirtualFileAccess.Read));
    var soundMusic2 = SoundMusic.Load(engine, AssetManager.FileProvider.OpenStream("MusicToneA", VirtualFileMode.Open, VirtualFileAccess.Read));
    soundMusic2.Dispose();

    // Create some dynamic sounds; one playing with a submitted buffer,
    // one disposed before ever playing.
    var generator = new SoundGenerator();
    var dynSound1 = new DynamicSoundEffectInstance(engine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    var dynSound2 = new DynamicSoundEffectInstance(engine, 20000, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    dynSound1.Play();
    dynSound1.SubmitBuffer(generator.Generate(44100, new[]{ 1000f }, 1, 120000));
    dynSound2.Dispose();

    // Start playing some of the surviving objects.
    soundEffectInstance.Play();
    soundMusic1.Play();

    // Pump the engine briefly so playback actually starts.
    for (int i = 0; i < 10; i++)
    {
        engine.Update();
        Utilities.Sleep(5);
    }

    // Disposing the engine must not crash even with live sounds...
    Assert.DoesNotThrow(engine.Dispose, "AudioEngine crashed during disposal.");
    Assert.IsTrue(soundEffect.IsDisposed, "SoundEffect is not disposed.");
    // ...and a second Dispose must be rejected.
    Assert.Throws<InvalidOperationException>(engine.Dispose, "AudioEngine did not threw invalid operation exception.");
    Assert.AreEqual(SoundPlayState.Stopped, soundEffectInstance.PlayState, "SoundEffectInstance has not been stopped properly.");
    Assert.IsTrue(soundEffectInstance.IsDisposed, "SoundEffectInstance has not been disposed properly.");
    Assert.AreEqual(SoundPlayState.Stopped, soundMusic1.PlayState, "soundMusic1 has not been stopped properly.");
    Assert.IsTrue(soundMusic1.IsDisposed, "soundMusic1 has not been disposed properly.");
    // NOTE(review): dynamic-sound disposal assertions are intentionally
    // disabled — presumably pending engine support; confirm before enabling.
    //Assert.AreEqual(SoundPlayState.Stopped, dynSound1.PlayState, "The dynamic sound 1 has not been stopped correctly.");
    //Assert.IsTrue(dynSound1.IsDisposed, "The dynamic sound 1 has not been disposed correctly.");
}
/// <summary>
/// Checks the DynamicSoundEffectInstance constructor: out-of-range sample
/// rates, channel counts and encodings must throw, while a valid parameter
/// set must construct without error.
/// </summary>
public void TestConstructor()
{
    /////////////////////////////////////////////////////////////////////////////////////////
    // 1. Test that Constructor throws ArgumentOutOfRangeException with invalid parameters

    // Sample rate below the supported minimum (8000 Hz).
    Assert.Throws<ArgumentOutOfRangeException>(
        () => { new DynamicSoundEffectInstance(defaultEngine, 7999, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits); },
        "Did not throw argumentOutOfRangeException with too low sample rate");

    // Sample rate above the supported maximum (48000 Hz).
    Assert.Throws<ArgumentOutOfRangeException>(
        () => { new DynamicSoundEffectInstance(defaultEngine, 48001, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits); },
        "Did not throw argumentOutOfRangeException with too high sample rate");

    // Zero is not a member of the AudioChannels enumeration.
    Assert.Throws<ArgumentOutOfRangeException>(
        () => { new DynamicSoundEffectInstance(defaultEngine, 44100, (AudioChannels)0, AudioDataEncoding.PCM_8Bits); },
        "Did not throw argumentOutOfRangeException with invalid AudioChannels");

    // Zero is not a member of the AudioDataEncoding enumeration.
    Assert.Throws<ArgumentOutOfRangeException>(
        () => { new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, (AudioDataEncoding)0), "Did not throw argumentOutOfRangeException with invalid AudioDataEncoding");

    ////////////////////////////////////////////////////
    // 2. Test that a valid construction does not crash
    DynamicSoundEffectInstance validInstance = null;
    Assert.DoesNotThrow(
        () => validInstance = new DynamicSoundEffectInstance(defaultEngine, 34567, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits));
    validInstance.Dispose();
}
/// <summary>
/// Checks the IsLooped property contract of DynamicSoundEffectInstance:
/// disposed instances throw ObjectDisposedException from both accessors,
/// setting it to true is not supported, and its value is always false.
/// </summary>
public void TestIsLooped()
{
    mono8Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    var disposedInstance = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Stereo, AudioDataEncoding.PCM_16Bits);
    disposedInstance.Dispose();
    bool ignored;

    /////////////////////////////////////////////////////////////////////////////////////////////
    // 1. Check that get and set IsLooped for an Disposed instance throw the 'ObjectDisposedException'
    Assert.Throws<ObjectDisposedException>(
        () => ignored = disposedInstance.IsLooped,
        "DynamicSoundEffectInstance.IsLooped { get } did not throw the 'ObjectDisposedException' when called from a disposed object.");
    Assert.Throws<ObjectDisposedException>(
        () => disposedInstance.IsLooped = false,
        "DynamicSoundEffectInstance.IsLooped { set } did not throw the 'ObjectDisposedException' when called from a disposed object.");

    //////////////////////////////////////////////////////////////////
    // 2. Check that IsLooped = true throws InvalidOperationException
    // (looping a dynamic instance makes no sense: its data is streamed).
    Assert.Throws<InvalidOperationException>(
        () => mono8Bits.IsLooped = true,
        "DynamicSoundEffectInstance.IsLooped { set } did not throw the 'InvalidOperationException' when called with 'true'.");

    ////////////////////////////////////////////////
    // 3. Check that the value of IsLooped is false
    Assert.IsFalse(mono8Bits.IsLooped, "DynamicSoundEffectInstance.IsLooped { get } did not return 'false'.");

    mono8Bits.Dispose();
}
/// <summary>
/// Advances video decoding up to the playback clock and returns the texture
/// holding the current frame. Also drives end-of-stream handling: restarts
/// decoding when looping, or stops and tears everything down otherwise.
/// </summary>
/// <returns>The render target containing the most recent decoded frame.</returns>
/// <exception cref="InvalidOperationException">No Video is attached to the player.</exception>
public Texture2D GetTexture()
{
    checkDisposed();

    if (Video == null)
    {
        throw new InvalidOperationException();
    }

    // Be sure we can even get something from TheoraPlay...
    if (State == MediaState.Stopped ||
        Video.theoraDecoder == IntPtr.Zero ||
        TheoraPlay.THEORAPLAY_isInitialized(Video.theoraDecoder) == 0 ||
        TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) == 0)
    {
        // Screw it, give them the old one.
        return (videoTexture[0].RenderTarget as Texture2D);
    }

    // Get the latest video frames: keep pulling while the next frame's
    // timestamp is at or behind the playback clock.
    bool missedFrame = false;
    while (nextVideo.playms <= timer.ElapsedMilliseconds && !missedFrame)
    {
        currentVideo = nextVideo;
        IntPtr nextFrame = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
        if (nextFrame != IntPtr.Zero)
        {
            // Free the oldest frame and shift the frame pipeline forward.
            TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
            previousFrame = Video.videoStream;
            Video.videoStream = nextFrame;
            nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);
            missedFrame = false;
        }
        else
        {
            // Don't mind me, just ignoring that complete failure above!
            missedFrame = true;
        }

        if (TheoraPlay.THEORAPLAY_isDecoding(Video.theoraDecoder) == 0)
        {
            // FIXME: This is part of the Duration hack!
            Video.Duration = new TimeSpan(0, 0, 0, 0, (int)currentVideo.playms);

            // Stop and reset the timer. If we're looping, the loop will start it again.
            timer.Stop();
            timer.Reset();

            // If looping, go back to the start. Otherwise, we'll be exiting.
            if (IsLooped && State == MediaState.Playing)
            {
                // Kill the audio, no matter what.
                if (audioStream != null)
                {
                    audioStream.Stop();
                    audioStream.Dispose();
                    audioStream = null;
                }

                // Free everything and start over.
                TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                previousFrame = IntPtr.Zero;
                Video.AttachedToPlayer = false;
                Video.Dispose();
                Video.AttachedToPlayer = true;
                Video.Initialize();

                // Grab the initial audio again.
                if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.theoraDecoder) != 0)
                {
                    InitAudioStream();
                }

                // Grab the initial video again.
                if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) != 0)
                {
                    currentVideo = TheoraPlay.getVideoFrame(Video.videoStream);
                    previousFrame = Video.videoStream;
                    do
                    {
                        // The decoder miiight not be ready yet.
                        Video.videoStream = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                    } while (Video.videoStream == IntPtr.Zero);
                    nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);
                }

                // Start! Again!
                timer.Start();
                if (audioStream != null)
                {
                    audioStream.Play();
                }
            }
            else
            {
                // Stop everything, clean up. We out.
                State = MediaState.Stopped;
                if (audioStream != null)
                {
                    audioStream.Stop();
                    audioStream.Dispose();
                    audioStream = null;
                }
                TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                Video.AttachedToPlayer = false;
                Video.Dispose();

                // We're done, so give them the last frame.
                return (videoTexture[0].RenderTarget as Texture2D);
            }
        }
    }

    // Set up an environment to muck about in.
    GL_pushState();

    // Prepare YUV GL textures with our current frame data. The frame buffer
    // is planar: Y plane first, then U and V quarter-size planes appended.
    currentDevice.GLDevice.SetTextureData2DPointer(
        yuvTextures[0],
        currentVideo.pixels
    );
    currentDevice.GLDevice.SetTextureData2DPointer(
        yuvTextures[1],
        new IntPtr(
            currentVideo.pixels.ToInt64() +
            (currentVideo.width * currentVideo.height)
        )
    );
    currentDevice.GLDevice.SetTextureData2DPointer(
        yuvTextures[2],
        new IntPtr(
            currentVideo.pixels.ToInt64() +
            (currentVideo.width * currentVideo.height) +
            (currentVideo.width / 2 * currentVideo.height / 2)
        )
    );

    // Draw the YUV textures to the framebuffer with our shader.
    currentDevice.DrawPrimitives(
        PrimitiveType.TriangleStrip,
        0,
        2
    );

    // Clean up after ourselves.
    GL_popState();

    // Finally.
    return (videoTexture[0].RenderTarget as Texture2D);
}
/// <summary>
/// Opens the MP3 file at the given path and prepares a stereo dynamic
/// sound instance whose sample rate matches the decoded stream.
/// </summary>
/// <param name="path">Path of the MP3 file to play.</param>
public XNAMP3(string path)
{
    var stream = new MP3Stream(path, NUMBER_OF_PCM_BYTES_TO_READ_PER_CHUNK);
    _Stream = stream;
    _Instance = new DynamicSoundEffectInstance(stream.Frequency, AudioChannels.Stereo);
}
/// <summary>
/// Stress-tests disposal of the audio stack: verifies that disposing one
/// engine does not affect another live engine, that sounds/instances can be
/// disposed in any order, and that disposing the engine cascades disposal
/// and stop to every sound object created from it.
/// </summary>
public void TestDispose()
{
    // Dispose one engine up-front to check there are no Dispose problems
    // with several cross-disposed engine instances.
    var crossDisposedEngine = new AudioEngine();
    var engine = new AudioEngine();
    crossDisposedEngine.Dispose();

    // Create some SoundEffects: one kept alive, one disposed immediately.
    SoundEffect soundEffect;
    using (var wavStream = AssetManager.FileProvider.OpenStream("EffectBip", VirtualFileMode.Open, VirtualFileAccess.Read))
    {
        soundEffect = SoundEffect.Load(engine, wavStream);
    }
    SoundEffect dispSoundEffect;
    using (var wavStream = AssetManager.FileProvider.OpenStream("EffectBip", VirtualFileMode.Open, VirtualFileAccess.Read))
    {
        dispSoundEffect = SoundEffect.Load(engine, wavStream);
    }
    dispSoundEffect.Dispose();

    // Two instances of the live effect; one disposed before playback.
    var soundEffectInstance = soundEffect.CreateInstance();
    var dispInstance = soundEffect.CreateInstance();
    dispInstance.Dispose();

    // Create some SoundMusics; the second is disposed immediately.
    var soundMusic1 = SoundMusic.Load(engine, AssetManager.FileProvider.OpenStream("MusicBip", VirtualFileMode.Open, VirtualFileAccess.Read));
    var soundMusic2 = SoundMusic.Load(engine, AssetManager.FileProvider.OpenStream("MusicToneA", VirtualFileMode.Open, VirtualFileAccess.Read));
    soundMusic2.Dispose();

    // Create some dynamic sounds; one playing with a submitted buffer,
    // one disposed before ever playing.
    var generator = new SoundGenerator();
    var dynSound1 = new DynamicSoundEffectInstance(engine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    var dynSound2 = new DynamicSoundEffectInstance(engine, 20000, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    dynSound1.Play();
    dynSound1.SubmitBuffer(generator.Generate(44100, new[] { 1000f }, 1, 120000));
    dynSound2.Dispose();

    // Start playing some of the surviving objects.
    soundEffectInstance.Play();
    soundMusic1.Play();

    // Pump the engine briefly so playback actually starts.
    for (int i = 0; i < 10; i++)
    {
        engine.Update();
        Utilities.Sleep(5);
    }

    // Disposing the engine must not crash even with live sounds...
    Assert.DoesNotThrow(engine.Dispose, "AudioEngine crashed during disposal.");
    Assert.IsTrue(soundEffect.IsDisposed, "SoundEffect is not disposed.");
    // ...and a second Dispose must be rejected.
    Assert.Throws <InvalidOperationException>(engine.Dispose, "AudioEngine did not threw invalid operation exception.");
    Assert.AreEqual(SoundPlayState.Stopped, soundEffectInstance.PlayState, "SoundEffectInstance has not been stopped properly.");
    Assert.IsTrue(soundEffectInstance.IsDisposed, "SoundEffectInstance has not been disposed properly.");
    Assert.AreEqual(SoundPlayState.Stopped, soundMusic1.PlayState, "soundMusic1 has not been stopped properly.");
    Assert.IsTrue(soundMusic1.IsDisposed, "soundMusic1 has not been disposed properly.");
    // NOTE(review): dynamic-sound disposal assertions are intentionally
    // disabled — presumably pending engine support; confirm before enabling.
    //Assert.AreEqual(SoundPlayState.Stopped, dynSound1.PlayState, "The dynamic sound 1 has not been stopped correctly.");
    //Assert.IsTrue(dynSound1.IsDisposed, "The dynamic sound 1 has not been disposed correctly.");
}
/// <summary>
/// Creates the noise channel bound to the given dynamic sound output and
/// starts playback immediately.
/// </summary>
/// <param name="soundOutput">Dynamic sound instance this channel writes to
/// (forwarded to the base channel, which exposes it as Channel_Out).</param>
public NoiseChannel(DynamicSoundEffectInstance soundOutput) : base(soundOutput)
{
    // Start the output right away; the channel streams buffers into it.
    Channel_Out.Play();
}
/// <summary>
/// Implementation-specific checks on the shared buffer-needed worker:
/// requests must be serviced for every live dynamic instance, survive
/// disposal of sibling instances, be recreated after the instance count
/// drops to zero, and cope with several instances playing at once.
/// </summary>
public void TestImplementationSpecific()
{
    bufferNeededHasBeenCalled = false;

    ////////////////////////////////////////////////////////////////////////////////////////////////
    // 1. Check that worker process buffer needed requests with the first instance of dynamic sound.
    var instance1 = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    instance1.BufferNeeded += SetBufferNeededHasBeenCalledToTrue;
    instance1.Play();
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called with a first single instance.");
    bufferNeededHasBeenCalled = false;
    instance1.Stop();

    ////////////////////////////////////////////////////////////////////////////////////////////////
    // 2. Check that worker process buffer needed requests with the second instance of dynamic sound.
    var instance2 = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    instance2.BufferNeeded += SetBufferNeededHasBeenCalledToTrue;
    instance2.Play();
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called with a second instance.");
    bufferNeededHasBeenCalled = false;
    instance2.Stop();

    //////////////////////////////////////////////////////////////////////////////////////////////////////////
    // 3. Check that worker process buffer needed requests of the second instance when the first is disposed.
    instance1.Dispose();
    instance2.Play();
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called with a second single instance.");
    bufferNeededHasBeenCalled = false;
    instance2.Stop();

    ///////////////////////////////////////////////////////////////////////////////////////////////////////
    // 4. Check that the worker is correctly recreated when the number of dynamic instances have reached 0
    instance2.Dispose();
    instance1 = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    instance1.BufferNeeded += SetBufferNeededHasBeenCalledToTrue;
    instance1.Play();
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called with a single instance after destruct of all instances.");
    bufferNeededHasBeenCalled = false;
    instance1.Stop();

    ///////////////////////////////////////////////////////////////////////////////////////////////////
    // 5. Play several dynamic at the same time to check that there is not problem with a single worker
    dynGenSound = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    dynGenSound.BufferNeeded += SubmitDynGenSound;

    // Pin the wave data so the native side can read it while playing.
    GCHandle pinnedDataWave1;
    GCHandle pinnedDataStereo;
    GCHandle pinnedDataSayuriPart;
    LoadWaveFileIntoBuffers(out pinnedDataWave1, out wave1, "EffectBip");
    LoadWaveFileIntoBuffers(out pinnedDataStereo, out stereo, "EffectStereo");
    LoadWaveFileIntoBuffers(out pinnedDataSayuriPart, out sayuriPart, "EffectFishLamp");
    wave1.Instance.BufferNeeded += SubmitWave1;
    stereo.Instance.BufferNeeded += SubmitStereo;
    sayuriPart.Instance.BufferNeeded += SubmitSayuriPart;

    // Plays all the instances together to see whether the single worker keeps up.
    wave1.Instance.Play();
    stereo.Instance.Play();
    sayuriPart.Instance.Play();
    dynGenSound.Play();

    Utilities.Sleep(5000);

    wave1.Instance.Stop();
    stereo.Instance.Stop();
    sayuriPart.Instance.Stop();
    dynGenSound.Stop();

    // Give the worker time to drain before disposing,
    // to avoid a crash due to ObjectDisposedException.
    Utilities.Sleep(100);

    dynGenSound.Dispose();
    wave1.Instance.Dispose();
    stereo.Instance.Dispose();
    sayuriPart.Instance.Dispose();
    pinnedDataWave1.Free();
    pinnedDataStereo.Free();
    pinnedDataSayuriPart.Free();
}
/// <summary>
/// Exercises the full IPlayableSound surface of DynamicSoundEffectInstance
/// against a real wave file: Play/Pause/Stop state transitions, ExitLoop,
/// Volume and Pan sweeps, and playing through to the end of the stream.
/// Timing-based: relies on sleeps rather than synchronization.
/// </summary>
public void TestPlayableInterface()
{
    // Load the raw PCM data of the test file and remember its format so the
    // dynamic instance below can be created with matching parameters.
    WaveFormat dataFormat;
    using (var stream = AssetManager.FileProvider.OpenStream("EffectFishLamp", VirtualFileMode.Open, VirtualFileAccess.Read))
    {
        var memoryStream = new MemoryStream((int)stream.Length);
        stream.CopyTo(memoryStream);
        memoryStream.Position = 0;
        var waveStreamReader = new SoundStream(memoryStream);
        dataFormat = waveStreamReader.Format;
        bufferData = new byte[waveStreamReader.Length];
        if (waveStreamReader.Read(bufferData, 0, (int)waveStreamReader.Length) != waveStreamReader.Length)
            throw new AudioSystemInternalException("The data length read in wave soundStream does not correspond to the stream's length.");
    }
    dynSEInstance = new DynamicSoundEffectInstance(defaultEngine, dataFormat.SampleRate, (AudioChannels)dataFormat.Channels, (AudioDataEncoding)dataFormat.BitsPerSample);
    dynSEInstance.BufferNeeded += SubmitBuffer;

    //////////////////
    // 1. Test Play
    dynSEInstance.Play();
    Utilities.Sleep(2000);
    Assert.AreEqual(SoundPlayState.Playing, dynSEInstance.PlayState, "Music is not playing");

    //////////////////
    // 2. Test Pause
    dynSEInstance.Pause();
    Utilities.Sleep(600);
    Assert.AreEqual(SoundPlayState.Paused, dynSEInstance.PlayState, "Music is not Paused");
    dynSEInstance.Play();
    Utilities.Sleep(1000);

    //////////////////
    // 3. Test Stop
    dynSEInstance.Stop();
    bufferCount = 0; // restart submission from the top of the data on replay
    Utilities.Sleep(600);
    Assert.AreEqual(SoundPlayState.Stopped, dynSEInstance.PlayState, "Music is not Stopped");
    dynSEInstance.Play();
    Utilities.Sleep(9000);

    ///////////////////
    // 4. Test ExitLoop (a no-op for dynamic instances; must not throw)
    Assert.DoesNotThrow(dynSEInstance.ExitLoop, "ExitLoop crached");

    ///////////////
    // 5. Volume: sweep down below zero then back up; listen for glitches.
    var value = 1f;
    var sign = -1f;
    while (value <= 1f)
    {
        dynSEInstance.Volume = value;
        value += sign * 0.01f;
        Utilities.Sleep(30);
        if (value < -0.2)
            sign = 1f;
    }
    Utilities.Sleep(2000);

    //////////////////
    // 6. Pan: sweep left then right; listen for glitches.
    value = 0;
    sign = -1f;
    while (value <= 1f)
    {
        dynSEInstance.Pan = value;
        value += sign * 0.01f;
        Utilities.Sleep(30);
        if (value < -1.2)
            sign = 1f;
    }
    dynSEInstance.Pan = 0;
    Utilities.Sleep(2000);

    ////////////////////////////////////////////////////////////////////////////
    // 7. Wait until the end of the stream to check that there are not crashes
    Utilities.Sleep(50000);

    dynSEInstance.Dispose();
}
/// <summary>
/// Verifies the BufferNeeded event contract: it must fire when playback
/// starts with too few queued buffers, after each SubmitBuffer while the
/// queue is below capacity, each time the pending-buffer count drops, and
/// its handler must not be able to stall playback by blocking.
/// </summary>
public void TestBufferNeeded()
{
    mono8Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    mono8Bits.BufferNeeded += SetBufferNeededHasBeenCalledToTrue;

    // 200 ms of mono 8-bit samples at 44100 Hz — one sub-buffer's worth.
    var sizeOfOneSubBuffer = 44100 * 200 / 1000;
#if SILICONSTUDIO_PLATFORM_ANDROID
    // Android uses its own sub-buffer size; query it from the instance.
    sizeOfOneSubBuffer = mono8Bits.SubBufferSize;
#endif

    ////////////////////////////////////////////////////////////////////////////////////////////////////////
    // 1. Check that BufferNeeded is thrown when the user call plays with insufficient number of audio data
    mono8Bits.Play();
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called when the user played without any buffers");
    bufferNeededHasBeenCalled = false;
    mono8Bits.Stop();
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, 1000));
    Utilities.Sleep(50);
    bufferNeededHasBeenCalled = false;
    mono8Bits.Play();
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called when the user played wit one buffers");
    bufferNeededHasBeenCalled = false;
    mono8Bits.Stop();

    ///////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // 2. Check that BufferNeeded is thrown when the user call SubmitBuffer with insufficient number of audio data
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, sizeOfOneSubBuffer));
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called when the user submit the first buffer");
    bufferNeededHasBeenCalled = false;
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, sizeOfOneSubBuffer));
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called when the user submit the second buffer");
    bufferNeededHasBeenCalled = false;
    mono8Bits.Stop();

    ////////////////////////////////////////////////////////////////////////////////////////////////////
    // 3. Check that BufferNeeded is thrown when the number of buffers falls from 3 to 2, 2 to 1, 1 to 0
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, sizeOfOneSubBuffer));
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, sizeOfOneSubBuffer));
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, sizeOfOneSubBuffer));
    Utilities.Sleep(50);
    bufferNeededHasBeenCalled = false;
    mono8Bits.Play();
    var lastBufferCount = mono8Bits.PendingBufferCount;
    var loopCount = 0; // safety valve so the test cannot hang forever
    while (true)
    {
        Utilities.Sleep(10);

        if (lastBufferCount != mono8Bits.PendingBufferCount)
        {
            lastBufferCount = mono8Bits.PendingBufferCount;
            Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called when number of buffer pass from "+(lastBufferCount+1)+" to "+lastBufferCount);
            bufferNeededHasBeenCalled = false;
        }
        if (lastBufferCount == 0)
            break;

        ++loopCount;
        if (loopCount > 100)
            Assert.Fail("The test process is block in the loop.");
    }
    mono8Bits.Stop();

    ///////////////////////////////////////////////////////////////////////////
    // 4. Check that invocation of BufferNeeded does not block audio playback
    mono8Bits.BufferNeeded -= SetBufferNeededHasBeenCalledToTrue;
    mono8Bits.BufferNeeded += GenerateNextDataAndBlockThead;
    mono8Bits.Play();
    Utilities.Sleep(2000);
    mono8Bits.Stop();
    mono8Bits.BufferNeeded -= GenerateNextDataAndBlockThead;
    mono8Bits.Dispose();
}
/// <summary>
/// Stops playback, detaches the buffer handler, disposes the dynamic sound
/// and nulls it out so the song can be removed from memory and another one
/// loaded in its place. Safe to call when no song is loaded.
/// </summary>
public void unloadSongFromStream()
{
    // Guard: the original threw NullReferenceException when called twice
    // (or before a song was loaded); make repeated unloads a no-op.
    if (dynamicSound == null)
    {
        return;
    }

    dynamicSound.Stop();
    dynamicSound.BufferNeeded -= bufferHandler;
    // Dispose explicitly so the instance's native audio buffers are released
    // now, instead of leaking until the finalizer runs.
    dynamicSound.Dispose();
    dynamicSound = null;
}
/// <summary>
/// Advances video decoding up to the playback clock and returns the texture
/// holding the current frame. Handles end-of-stream by either restarting the
/// decoder (when looping) or stopping and tearing everything down. Frame
/// upload is done via GL YUV textures + shader when VIDEOPLAYER_OPENGL is
/// defined, otherwise by a plain RGBA pixel copy.
/// </summary>
/// <returns>The texture containing the most recent decoded frame.</returns>
public Texture2D GetTexture()
{
    checkDisposed();

    // Be sure we can even get something from TheoraPlay...
    if (State == MediaState.Stopped ||
        Video.theoraDecoder == IntPtr.Zero ||
        TheoraPlay.THEORAPLAY_isInitialized(Video.theoraDecoder) == 0 ||
        TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) == 0)
    {
        return (videoTexture); // Screw it, give them the old one.
    }

    // Get the latest video frames: keep pulling while the next frame's
    // timestamp is at or behind the playback clock.
    bool missedFrame = false;
    while (nextVideo.playms <= timer.ElapsedMilliseconds && !missedFrame)
    {
        currentVideo = nextVideo;
        IntPtr nextFrame = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
        if (nextFrame != IntPtr.Zero)
        {
            // Free the oldest frame and shift the frame pipeline forward.
            TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
            previousFrame = Video.videoStream;
            Video.videoStream = nextFrame;
            nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);
            missedFrame = false;
        }
        else
        {
            // Don't mind me, just ignoring that complete failure above!
            missedFrame = true;
        }

        if (TheoraPlay.THEORAPLAY_isDecoding(Video.theoraDecoder) == 0)
        {
            // FIXME: This is part of the Duration hack!
            Video.Duration = new TimeSpan(0, 0, 0, 0, (int)currentVideo.playms);

            // Stop and reset the timer. If we're looping, the loop will start it again.
            timer.Stop();
            timer.Reset();

            // If looping, go back to the start. Otherwise, we'll be exiting.
            if (IsLooped && State == MediaState.Playing)
            {
                // Kill the audio, no matter what.
                if (audioStream != null)
                {
                    audioStream.Stop();
                    audioStream.Dispose();
                    audioStream = null;
                }

                // Free everything and start over.
                TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                previousFrame = IntPtr.Zero;
                Video.AttachedToPlayer = false;
                Video.Dispose();
                Video.AttachedToPlayer = true;
                Video.Initialize();

                // Grab the initial audio again.
                if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.theoraDecoder) != 0)
                {
                    InitAudioStream();
                }

                // Grab the initial video again.
                if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) != 0)
                {
                    currentVideo = TheoraPlay.getVideoFrame(Video.videoStream);
                    previousFrame = Video.videoStream;
                    do
                    {
                        // The decoder miiight not be ready yet.
                        Video.videoStream = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                    } while (Video.videoStream == IntPtr.Zero);
                    nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);
                }

                // Start! Again!
                timer.Start();
                if (audioStream != null)
                {
                    audioStream.Play();
                }
            }
            else
            {
                // Stop everything, clean up. We out.
                State = MediaState.Stopped;
                if (audioStream != null)
                {
                    audioStream.Stop();
                    audioStream.Dispose();
                    audioStream = null;
                }
                TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                Video.AttachedToPlayer = false;
                Video.Dispose();

                // We're done, so give them the last frame.
                return (videoTexture);
            }
        }
    }

#if VIDEOPLAYER_OPENGL
    // Set up an environment to muck about in.
    GL_pushState();

    // Bind our shader program.
    currentDevice.GLDevice.glUseProgram(shaderProgram);

    // We're using client-side arrays like CAVEMEN
    currentDevice.GLDevice.BindVertexBuffer(OpenGLDevice.OpenGLVertexBuffer.NullBuffer);

    // Set up the vertex pointers/arrays: only attributes 0 (position)
    // and 1 (texcoord) are used, everything else is disabled.
    currentDevice.GLDevice.AttributeEnabled[0] = true;
    currentDevice.GLDevice.AttributeEnabled[1] = true;
    for (int i = 2; i < currentDevice.GLDevice.AttributeEnabled.Length; i += 1)
    {
        currentDevice.GLDevice.AttributeEnabled[i] = false;
    }
    currentDevice.GLDevice.FlushGLVertexAttributes();
    currentDevice.GLDevice.VertexAttribPointer(
        0,
        2,
        VertexElementFormat.Single,
        false,
        2 * sizeof(float),
        vertPosPtr
    );
    currentDevice.GLDevice.VertexAttribPointer(
        1,
        2,
        VertexElementFormat.Single,
        false,
        2 * sizeof(float),
        vertTexPtr
    );

    // Bind our target framebuffer.
    currentDevice.GLDevice.BindDrawFramebuffer(rgbaFramebuffer);

    // Prepare YUV GL textures with our current frame data. The frame buffer
    // is planar: full-size Y plane, then half-size U and V planes appended.
    currentDevice.GLDevice.glActiveTexture(
        OpenGLDevice.GLenum.GL_TEXTURE0
    );
    currentDevice.GLDevice.glBindTexture(
        OpenGLDevice.GLenum.GL_TEXTURE_2D,
        yuvTextures[0]
    );
    currentDevice.GLDevice.glTexSubImage2D(
        OpenGLDevice.GLenum.GL_TEXTURE_2D,
        0,
        0,
        0,
        (int)currentVideo.width,
        (int)currentVideo.height,
        OpenGLDevice.GLenum.GL_LUMINANCE,
        OpenGLDevice.GLenum.GL_UNSIGNED_BYTE,
        currentVideo.pixels
    );
    currentDevice.GLDevice.glActiveTexture(
        OpenGLDevice.GLenum.GL_TEXTURE0 + 1
    );
    currentDevice.GLDevice.glBindTexture(
        OpenGLDevice.GLenum.GL_TEXTURE_2D,
        yuvTextures[1]
    );
    currentDevice.GLDevice.glTexSubImage2D(
        OpenGLDevice.GLenum.GL_TEXTURE_2D,
        0,
        0,
        0,
        (int)(currentVideo.width / 2),
        (int)(currentVideo.height / 2),
        OpenGLDevice.GLenum.GL_LUMINANCE,
        OpenGLDevice.GLenum.GL_UNSIGNED_BYTE,
        new IntPtr(
            currentVideo.pixels.ToInt64() +
            (currentVideo.width * currentVideo.height)
        )
    );
    currentDevice.GLDevice.glActiveTexture(
        OpenGLDevice.GLenum.GL_TEXTURE0 + 2
    );
    currentDevice.GLDevice.glBindTexture(
        OpenGLDevice.GLenum.GL_TEXTURE_2D,
        yuvTextures[2]
    );
    currentDevice.GLDevice.glTexSubImage2D(
        OpenGLDevice.GLenum.GL_TEXTURE_2D,
        0,
        0,
        0,
        (int)(currentVideo.width / 2),
        (int)(currentVideo.height / 2),
        OpenGLDevice.GLenum.GL_LUMINANCE,
        OpenGLDevice.GLenum.GL_UNSIGNED_BYTE,
        new IntPtr(
            currentVideo.pixels.ToInt64() +
            (currentVideo.width * currentVideo.height) +
            (currentVideo.width / 2 * currentVideo.height / 2)
        )
    );

    // Flip the viewport, because loldirectx
    currentDevice.GLDevice.glViewport(
        0,
        0,
        (int)currentVideo.width,
        (int)currentVideo.height
    );

    // Draw the YUV textures to the framebuffer with our shader.
    currentDevice.GLDevice.glDrawArrays(
        OpenGLDevice.GLenum.GL_TRIANGLE_STRIP,
        0,
        4
    );

    // Clean up after ourselves.
    GL_popState();
#else
    // Just copy it to an array, since it's RGBA anyway.
    try
    {
        byte[] theoraPixels = TheoraPlay.getPixels(
            currentVideo.pixels,
            (int)currentVideo.width * (int)currentVideo.height * 4
        );

        // TexImage2D.
        videoTexture.SetData<byte>(theoraPixels);
    }
    catch (Exception e)
    {
        // I hope we've still got something in videoTexture!
        System.Console.WriteLine(
            "WARNING: THEORA FRAME COPY FAILED: " +
            e.Message
        );
    }
#endif

    return (videoTexture);
}
/// <summary>
/// LoadContent will be called once per game and is the place to load
/// all of your content: the sprite batch, a 1x1 white texture, a JPEG
/// image via StbImage, a baked TTF font atlas, and an OGG audio track
/// decoded via StbVorbis and submitted to a dynamic sound instance.
/// </summary>
protected override void LoadContent()
{
    // Create a new SpriteBatch, which can be used to draw textures.
    _spriteBatch = new SpriteBatch(GraphicsDevice);

    // Create white texture (1x1, used for solid-color fills).
    _white = new Texture2D(GraphicsDevice, 1, 1);
    _white.SetData(new[] { Color.White });

    // Load image data into memory
    var path = Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().Location);
    path = Path.Combine(path, "image.jpg");
    var buffer = File.ReadAllBytes(path);
    var image = StbImage.LoadFromMemory(buffer, StbImage.STBI_rgb_alpha);
    _image = new Texture2D(GraphicsDevice, image.Width, image.Height, false, SurfaceFormat.Color);
    _image.SetData(image.Data);

    // Load ttf fonts and bake Latin/Cyrillic + Japanese glyphs into one atlas.
    buffer = File.ReadAllBytes("Fonts/DroidSans.ttf");
    var buffer2 = File.ReadAllBytes("Fonts/DroidSansJapanese.ttf");
    var tempBitmap = new byte[FontBitmapWidth * FontBitmapHeight];
    var fontBaker = new FontBaker();
    fontBaker.Begin(tempBitmap, FontBitmapWidth, FontBitmapHeight);
    fontBaker.Add(buffer, 32, new[]
    {
        FontBakerCharacterRange.BasicLatin,
        FontBakerCharacterRange.Latin1Supplement,
        FontBakerCharacterRange.LatinExtendedA,
        FontBakerCharacterRange.Cyrillic,
    });
    fontBaker.Add(buffer2, 32, new[]
    {
        FontBakerCharacterRange.Hiragana,
        FontBakerCharacterRange.Katakana
    });
    _charData = fontBaker.End();

    // Offset all glyphs by the minimal vertical offset so the topmost
    // glyph starts at y = 0.
    float minimumOffsetY = 10000;
    foreach (var pair in _charData)
    {
        if (pair.Value.yoff < minimumOffsetY)
        {
            minimumOffsetY = pair.Value.yoff;
        }
    }
    var keys = _charData.Keys.ToArray();
    foreach (var key in keys)
    {
        var pc = _charData[key];
        pc.yoff -= minimumOffsetY;
        _charData[key] = pc;
    }

    // Expand the 8-bit coverage bitmap into an RGBA texture (same value in
    // every channel so the font can be tinted by vertex color).
    var rgb = new Color[FontBitmapWidth * FontBitmapHeight];
    for (var i = 0; i < tempBitmap.Length; ++i)
    {
        var b = tempBitmap[i];
        rgb[i].R = b;
        rgb[i].G = b;
        rgb[i].B = b;
        rgb[i].A = b;
    }
    _fontTexture = new Texture2D(GraphicsDevice, FontBitmapWidth, FontBitmapHeight);
    _fontTexture.SetData(rgb);

    // Load ogg and convert the decoded 16-bit samples to little-endian bytes.
    path = Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().Location);
    path = Path.Combine(path, "Adeste_Fideles.ogg");
    buffer = File.ReadAllBytes(path);
    int chan, sampleRate;
    var audioShort = StbVorbis.decode_vorbis_from_memory(buffer, out sampleRate, out chan);
    byte[] audioData = new byte[audioShort.Length / 2 * 4];
    for (var i = 0; i < audioShort.Length; ++i)
    {
        if (i * 2 >= audioData.Length)
        {
            break;
        }

        var b1 = (byte)(audioShort[i] >> 8);
        // BUG FIX: mask with 0xFF to get the low byte. The original used
        // `& 256`, which isolates bit 8 only and always becomes 0 after the
        // byte cast, silencing the low byte of every sample.
        var b2 = (byte)(audioShort[i] & 0xFF);

        audioData[i * 2 + 0] = b2; // low byte first: little-endian PCM16
        audioData[i * 2 + 1] = b1;
    }

    _effect = new DynamicSoundEffectInstance(sampleRate, AudioChannels.Stereo)
    {
        Volume = 0.5f
    };
    _effect.SubmitBuffer(audioData);

    GC.Collect();
}
/// <summary>
/// Wraps the given dynamic sound instance and subscribes to its
/// BufferNeeded event so the renderer can feed it audio data on demand.
/// </summary>
/// <param name="dsei">The dynamic sound instance to render into.</param>
public XnaSoundRenderer(DynamicSoundEffectInstance dsei)
{
    _dsei = dsei;
    // Method-group subscription; the compiler builds the same
    // EventHandler<EventArgs> delegate the explicit form did.
    _dsei.BufferNeeded += _dsei_BufferNeeded;
}
/// <summary>
/// Returns the texture holding the current video frame. Advances the
/// TheoraPlay decoder until it catches up with the playback timer, handles
/// loop-restart and stop when the decoder finishes, then uploads the frame
/// (YUV via GL shader, or RGBA via SetData depending on VIDEOPLAYER_OPENGL).
/// Falls back to the previously rendered texture when nothing new is ready.
/// </summary>
public Texture2D GetTexture()
{
    checkDisposed();

    // Be sure we can even get something from TheoraPlay...
    if (State == MediaState.Stopped ||
        Video.theoraDecoder == IntPtr.Zero ||
        TheoraPlay.THEORAPLAY_isInitialized(Video.theoraDecoder) == 0 ||
        TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) == 0)
    {
        return(videoTexture); // Screw it, give them the old one.
    }

    // Get the latest video frames.
    bool missedFrame = false;
    while (nextVideo.playms <= timer.ElapsedMilliseconds && !missedFrame)
    {
        currentVideo = nextVideo;
        IntPtr nextFrame = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
        if (nextFrame != IntPtr.Zero)
        {
            // Release the stale frame before taking ownership of the new one.
            TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
            previousFrame = Video.videoStream;
            Video.videoStream = nextFrame;
            nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);
            missedFrame = false;
        }
        else
        {
            // Don't mind me, just ignoring that complete failure above!
            missedFrame = true;
        }

        if (TheoraPlay.THEORAPLAY_isDecoding(Video.theoraDecoder) == 0)
        {
            // FIXME: This is part of the Duration hack!
            Video.Duration = new TimeSpan(0, 0, 0, 0, (int)currentVideo.playms);

            // Stop and reset the timer. If we're looping, the loop will start it again.
            timer.Stop();
            timer.Reset();

            // If looping, go back to the start. Otherwise, we'll be exiting.
            if (IsLooped && State == MediaState.Playing)
            {
                // Kill the audio, no matter what.
                if (audioStream != null)
                {
                    audioStream.Stop();
                    audioStream.Dispose();
                    audioStream = null;
                }

                // Free everything and start over.
                TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                previousFrame = IntPtr.Zero;
                Video.AttachedToPlayer = false;
                Video.Dispose();
                Video.AttachedToPlayer = true;
                Video.Initialize();

                // Grab the initial audio again.
                if (TheoraPlay.THEORAPLAY_hasAudioStream(Video.theoraDecoder) != 0)
                {
                    InitAudioStream();
                }

                // Grab the initial video again.
                if (TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) != 0)
                {
                    currentVideo = TheoraPlay.getVideoFrame(Video.videoStream);
                    previousFrame = Video.videoStream;
                    do
                    {
                        // The decoder miiight not be ready yet.
                        Video.videoStream = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
                    } while (Video.videoStream == IntPtr.Zero);
                    nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);
                }

                // Start! Again!
                timer.Start();
                if (audioStream != null)
                {
                    audioStream.Play();
                }
            }
            else
            {
                // Stop everything, clean up. We out.
                State = MediaState.Stopped;
                if (audioStream != null)
                {
                    audioStream.Stop();
                    audioStream.Dispose();
                    audioStream = null;
                }
                TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
                Video.AttachedToPlayer = false;
                Video.Dispose();

                // We're done, so give them the last frame.
                return(videoTexture);
            }
        }
    }

#if VIDEOPLAYER_OPENGL
    // Set up an environment to muck about in.
    GL_pushState();

    // Bind our shader program.
    GL.UseProgram(shaderProgram);

    // Set up the vertex pointers/arrays.
    OpenGLDevice.Instance.Attributes[0].CurrentBuffer = int.MaxValue;
    OpenGLDevice.Instance.Attributes[1].CurrentBuffer = int.MaxValue;
    GL.VertexAttribPointer(
        0,
        2,
        VertexAttribPointerType.Float,
        false,
        2 * sizeof(float),
        vert_pos
    );
    GL.VertexAttribPointer(
        1,
        2,
        VertexAttribPointerType.Float,
        false,
        2 * sizeof(float),
        vert_tex
    );
    GL.EnableVertexAttribArray(0);
    GL.EnableVertexAttribArray(1);

    // Bind our target framebuffer.
    OpenGLDevice.Framebuffer.BindFramebuffer(rgbaFramebuffer);

    // Prepare YUV GL textures with our current frame data.
    // NOTE(review): the pointer arithmetic assumes planar frames where the
    // half-size U and V planes follow the full-size Y plane — confirm against
    // the TheoraPlay frame layout.
    GL.ActiveTexture(TextureUnit.Texture0);
    GL.BindTexture(TextureTarget.Texture2D, yuvTextures[0]);
    GL.TexSubImage2D(
        TextureTarget.Texture2D,
        0,
        0,
        0,
        (int)currentVideo.width,
        (int)currentVideo.height,
        PixelFormat.Luminance,
        PixelType.UnsignedByte,
        currentVideo.pixels
    );
    GL.ActiveTexture(TextureUnit.Texture1);
    GL.BindTexture(TextureTarget.Texture2D, yuvTextures[1]);
    GL.TexSubImage2D(
        TextureTarget.Texture2D,
        0,
        0,
        0,
        (int)currentVideo.width / 2,
        (int)currentVideo.height / 2,
        PixelFormat.Luminance,
        PixelType.UnsignedByte,
        new IntPtr(
            currentVideo.pixels.ToInt64() +
            (currentVideo.width * currentVideo.height)
        )
    );
    GL.ActiveTexture(TextureUnit.Texture2);
    GL.BindTexture(TextureTarget.Texture2D, yuvTextures[2]);
    GL.TexSubImage2D(
        TextureTarget.Texture2D,
        0,
        0,
        0,
        (int)currentVideo.width / 2,
        (int)currentVideo.height / 2,
        PixelFormat.Luminance,
        PixelType.UnsignedByte,
        new IntPtr(
            currentVideo.pixels.ToInt64() +
            (currentVideo.width * currentVideo.height) +
            (currentVideo.width / 2 * currentVideo.height / 2)
        )
    );

    // Flip the viewport, because loldirectx
    GL.Viewport(
        0,
        0,
        (int)currentVideo.width,
        (int)currentVideo.height
    );

    // Draw the YUV textures to the framebuffer with our shader.
    GL.DrawArrays(BeginMode.TriangleStrip, 0, 4);

    // Clean up after ourselves.
    GL_popState();
#else
    // Just copy it to an array, since it's RGBA anyway.
    try
    {
        byte[] theoraPixels = TheoraPlay.getPixels(
            currentVideo.pixels,
            (int)currentVideo.width * (int)currentVideo.height * 4
        );
        // TexImage2D.
        videoTexture.SetData<byte>(theoraPixels);
    }
    catch (Exception e)
    {
        // I hope we've still got something in videoTexture!
        System.Console.WriteLine(
            "WARNING: THEORA FRAME COPY FAILED: " + e.Message
        );
    }
#endif

    return(videoTexture);
}
/// <summary>
/// Creates the playback instance for the emulated audio source and starts it
/// immediately so submitted buffers begin draining right away.
/// </summary>
public override void Initialize(AudioRenderInitializeArgs renderArgs)
{
    // Pick the channel layout that matches the source's channel count.
    var layout = (Channels == 2) ? AudioChannels.Stereo : AudioChannels.Mono;
    Playback = new DynamicSoundEffectInstance(ClockRate, layout);
    Playback.Play();
}
/// <summary>
/// Creates an Ogg/Vorbis sound backed by a background decoder thread. The
/// thread first scans a seekable stream for the final Ogg page to learn the
/// total decoded length, primes a DynamicSoundEffectInstance with two
/// buffers, flags IsReady, and then keeps the playback queue fed until the
/// sound ends, is stopped, or the decoder runs dry.
/// </summary>
public OggSound(Stream OggStream)
{
    IsReady = false;
    DecoderThread = new Thread(() => {
        Debug.WriteLine("SOUND DECODER STARTED!");
        InfoHolder = new InfoHolderClass();
        if (OggStream.CanSeek)
        {
            // Scan for the end-of-stream Ogg page header: "OggS", stream
            // structure version 0 ('\0'), header-type flag 0x04 (EOS). Its
            // granule position gives the total decoded sample count.
            var Data = new BinaryReader(OggStream).ReadBytes((int)OggStream.Length);
            int StartIndex = 0;
            while (true)
            {
                int Pos = Array.IndexOf(Data, (byte)'O', StartIndex);
                if (Pos < 0)
                {
                    break;
                }
                if (
                    (Data[Pos + 0] == 'O') &&
                    (Data[Pos + 1] == 'g') &&
                    (Data[Pos + 2] == 'g') &&
                    (Data[Pos + 3] == 'S') &&
                    (Data[Pos + 4] == '\0') &&
                    ((Data[Pos + 5] & 0x04) != 0)
                    )
                {
                    // Granule position starts 6 bytes into the page header.
                    InfoHolder.LastGranulePos = Page._ReadGranulePosition(Data, Pos + 6);
                    break;
                }
                StartIndex = Pos + 1;
            }
            if (InfoHolder.LastGranulePos == 0)
            {
                throw (new Exception("Can't find the decoded length of the Ogg Stream (LastGranulePos)"));
            }
            // Rewind so the actual decode starts from the beginning.
            OggStream.Position = 0;
        }
        //var Stopwatch = new Stopwatch();
        //Stopwatch.Start();
        OggReader = DecodeTo(OggStream, InfoHolder).GetEnumerator();
        // First MoveNext is expected to yield a header-only (null-array)
        // segment; a non-null array here means the stream is malformed.
        OggReader.MoveNext();
        if (OggReader.Current.Array != null)
        {
            throw (new Exception("Invalid"));
        }
        //WaitAtLeastBuffer(4);
        DynamicSoundEffect = new DynamicSoundEffectInstance(InfoHolder.Info.Rate, (InfoHolder.Info.Channels == 2) ? AudioChannels.Stereo : AudioChannels.Mono);
        //Debug.WriteLine("FORMAT: channels:{0}, rate:{1}", InfoHolder.Info.channels, InfoHolder.Info.rate);
        //DynamicSoundEffect = new DynamicSoundEffectInstance(22050, AudioChannels.Mono);
        DynamicSoundEffect.BufferNeeded += new EventHandler<EventArgs>(DynamicSoundEffect_BufferNeeded);
        //Debug.WriteLine("START TIME: {0}", Stopwatch.ElapsedMilliseconds);
        //FillBuffer();
        //WaitAtLeastBuffer(4);
        // Prime two buffers before declaring the sound ready to play.
        EnqueueBuffer();
        EnqueueBuffer();
        Debug.WriteLine("SOUND DECODER ISREADY!");
        IsReady = true;
        // Feeder loop: exit when playback ended, was stopped, or the decoder
        // has no more data to enqueue.
        while (true)
        {
            if (Ended)
            {
                return;
            }
            if (Stopped)
            {
                return;
            }
            if (!EnqueueBuffer())
            {
                return;
            }
            Thread.Sleep(1);
        }
    });
    DecoderThread.Start();
}
/// <summary>
/// Opens an Ogg/Vorbis file through FAudio's stb_vorbis bindings, reads its
/// format and vorbis comments, extracts LOOPSTART/LOOPLENGTH loop tags
/// (sample-based loop-point convention), and prepares a streaming
/// DynamicSoundEffectInstance for playback.
/// </summary>
public LoopedSong(string fileName)
{
    stbVorbisData = FAudio.stb_vorbis_open_filename(fileName, out int error, IntPtr.Zero);
    FAudio.stb_vorbis_info fileInfo = FAudio.stb_vorbis_get_info(stbVorbisData);
    Channels = fileInfo.channels;
    SampleRate = (int)fileInfo.sample_rate;
    Name = Path.GetFileNameWithoutExtension(fileName);
    long total_samples = FAudio.stb_vorbis_stream_length_in_samples(stbVorbisData);
    FAudio.stb_vorbis_comment comments = FAudio.stb_vorbis_get_comment(stbVorbisData);
    // Defaults: loop over the whole track unless tags say otherwise.
    loopStart = 0;
    loopEnd = (int)total_samples;
    int loopLength = 0;
    Tags = new Dictionary<string, string>();
    for (int ii = 0; ii < comments.comment_list_length; ii++)
    {
        // comment_list is a native array of char* — step by pointer size and
        // marshal each entry to a managed "KEY=value" string.
        IntPtr ptr = new IntPtr(comments.comment_list.ToInt64() + IntPtr.Size * ii);
        IntPtr strPtr = (IntPtr)Marshal.PtrToStructure(ptr, typeof(IntPtr));
        string comment = Marshal.PtrToStringUTF8(strPtr);
        // Split on the first '=' only; values may themselves contain '='.
        string[] split = comment.Split('=', 2);
        string label = split.Length > 0 ? split[0] : "";
        string val = split.Length > 1 ? split[1] : "";
        if (label != "")
        {
            Tags.Add(label, val);
        }
        if (label == "LOOPSTART")
        {
            loopStart = Convert.ToInt32(val);
        }
        else if (label == "LOOPLENGTH")
        {
            loopLength = Convert.ToInt32(val);
        }
    }
    // NOTE(review): loopStart is initialized to 0 above, so this outer guard
    // is always true as written; only the loopLength check has any effect.
    if (loopStart > -1)
    {
        if (loopLength > 0)
        {
            loopEnd = loopStart + loopLength;
        }
    }
    pcmPosition = 0;
    soundStream = new DynamicSoundEffectInstance(
        SampleRate,
        (Channels == 1) ? AudioChannels.Mono : AudioChannels.Stereo
        );
    chunk = new float[chunkSize];
}
/// <summary>
/// Returns the render target holding the current video frame. Computes which
/// frame the timer says we should be on, asks Theorafile to skip forward to
/// it, and re-uploads the texture only when a new frame was actually read.
/// Handles end-of-stream by stopping or looping as configured.
/// </summary>
public Texture2D GetTexture()
{
    checkDisposed();
    if (Video == null)
    {
        throw new InvalidOperationException();
    }

    // Be sure we can even get something from Theorafile...
    if (State == MediaState.Stopped ||
        Video.theora == IntPtr.Zero ||
        Theorafile.tf_hasvideo(Video.theora) == 0)
    {
        // Screw it, give them the old one.
        return(videoTexture[0].RenderTarget as Texture2D);
    }

    // Frame index implied by wall-clock playback position.
    int thisFrame = (int)(timer.Elapsed.TotalMilliseconds / (1000.0 / Video.FramesPerSecond));
    if (thisFrame > currentFrame)
    {
        // Only update the textures if we need to!
        // currentFrame == -1 means nothing was uploaded yet, so force it.
        if (Theorafile.tf_readvideo(
            Video.theora,
            yuvData,
            thisFrame - currentFrame
            ) == 1 || currentFrame == -1)
        {
            UpdateTexture();
        }
        currentFrame = thisFrame;
    }

    // Check for the end... video EOS, and audio queue fully drained too.
    bool ended = Theorafile.tf_eos(Video.theora) == 1;
    if (audioStream != null)
    {
        ended &= audioStream.PendingBufferCount == 0;
    }
    if (ended)
    {
        // FIXME: This is part of the Duration hack!
        if (Video.needsDurationHack)
        {
            Video.Duration = timer.Elapsed; // FIXME: Frames * FPS? -flibit
        }

        // Stop and reset the timer. If we're looping, the loop will start it again.
        timer.Stop();
        timer.Reset();

        // Kill whatever audio/video we've got
        if (audioStream != null)
        {
            audioStream.Stop();
            audioStream.Dispose();
            audioStream = null;
        }

        // Reset the stream no matter what happens next
        Theorafile.tf_reset(Video.theora);

        // If looping, go back to the start. Otherwise, we'll be exiting.
        if (IsLooped)
        {
            // Starting over!
            // NOTE(review): presumably InitializeTheoraStream recreates
            // audioStream (it was nulled above) — confirm, otherwise the
            // Play() below is dead code.
            InitializeTheoraStream();

            // Start! Again!
            timer.Start();
            if (audioStream != null)
            {
                audioStream.Play();
            }
        }
        else
        {
            // We out
            State = MediaState.Stopped;
        }
    }

    // Finally.
    return(videoTexture[0].RenderTarget as Texture2D);
}
/// <summary>
/// Checks that DynamicSoundEffectInstance raises BufferNeeded at the expected
/// moments: on Play with too few queued buffers, on each SubmitBuffer while
/// under-buffered, each time the pending-buffer count drops, and that a
/// blocking handler does not stall playback.
/// </summary>
public void TestBufferNeeded()
{
    mono8Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    mono8Bits.BufferNeeded += SetBufferNeededHasBeenCalledToTrue;

    // 44.1 kHz mono 8-bit: 200 ms of samples per sub-buffer.
    var sizeOfOneSubBuffer = 44100 * 200 / 1000;
#if SILICONSTUDIO_PLATFORM_ANDROID
    sizeOfOneSubBuffer = mono8Bits.SubBufferSize;
#endif

    ////////////////////////////////////////////////////////////////////////////////////////////////////////
    // 1. Check that BufferNeeded is thrown when the user call plays with insufficient number of audio data
    mono8Bits.Play();
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called when the user played without any buffers");
    bufferNeededHasBeenCalled = false;
    mono8Bits.Stop();
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, 1000));
    Utilities.Sleep(50);
    bufferNeededHasBeenCalled = false;
    mono8Bits.Play();
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called when the user played wit one buffers");
    bufferNeededHasBeenCalled = false;
    mono8Bits.Stop();

    ///////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // 2. Check that BufferNeeded is thrown when the user call SubmitBuffer with insufficient number of audio data
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, sizeOfOneSubBuffer));
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called when the user submit the first buffer");
    bufferNeededHasBeenCalled = false;
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, sizeOfOneSubBuffer));
    Utilities.Sleep(50);
    Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called when the user submit the second buffer");
    bufferNeededHasBeenCalled = false;
    mono8Bits.Stop();

    ////////////////////////////////////////////////////////////////////////////////////////////////////
    // 3. Check that BufferNeeded is thrown when the number of buffers falls from 3 to 2, 2 to 1, 1 to 0
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, sizeOfOneSubBuffer));
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, sizeOfOneSubBuffer));
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, sizeOfOneSubBuffer));
    Utilities.Sleep(50);
    bufferNeededHasBeenCalled = false;
    mono8Bits.Play();
    // Poll until the queue drains; the loopCount cap keeps a stalled playback
    // from hanging the test forever.
    var lastBufferCount = mono8Bits.PendingBufferCount;
    var loopCount = 0;
    while (true)
    {
        Utilities.Sleep(10);
        if (lastBufferCount != mono8Bits.PendingBufferCount)
        {
            lastBufferCount = mono8Bits.PendingBufferCount;
            Assert.IsTrue(bufferNeededHasBeenCalled, "Buffer Needed has not been called when number of buffer pass from " + (lastBufferCount + 1) + " to " + lastBufferCount);
            bufferNeededHasBeenCalled = false;
        }
        if (lastBufferCount == 0)
        {
            break;
        }
        ++loopCount;
        if (loopCount > 100)
        {
            Assert.Fail("The test process is block in the loop.");
        }
    }
    mono8Bits.Stop();

    ///////////////////////////////////////////////////////////////////////////
    // 4. Check that invocation of BufferNeeded does not block audio playback
    mono8Bits.BufferNeeded -= SetBufferNeededHasBeenCalledToTrue;
    mono8Bits.BufferNeeded += GenerateNextDataAndBlockThead;
    mono8Bits.Play();
    Utilities.Sleep(2000);
    mono8Bits.Stop();
    mono8Bits.BufferNeeded -= GenerateNextDataAndBlockThead;

    mono8Bits.Dispose();
}
/// <summary>
/// Exercises DynamicSoundEffectInstance.SubmitBuffer: argument validation
/// (disposed instance, null/empty/misaligned buffers, out-of-range
/// offset/byteCount) and audible playback for every format combination
/// (mono/stereo x 8/16 bits), including the offset+byteCount overload.
/// </summary>
public void TestSubmitBuffer()
{
    mono8Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    var mono16Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_16Bits);
    var stereo8Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Stereo, AudioDataEncoding.PCM_8Bits);
    var stereo16Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Stereo, AudioDataEncoding.PCM_16Bits);
    var dispInstance = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Stereo, AudioDataEncoding.PCM_16Bits);
    dispInstance.Dispose();

    ///////////////////////////////////////////////////////////////////////
    // 1. Test that it throws ObjectDisposedException with disposed instance
    Assert.Throws<ObjectDisposedException>(() => dispInstance.SubmitBuffer(new byte[16]), "SubmitBuffer did not throw ObjectDisposedException");

    ///////////////////////////////////////////////////////////
    // 2. Test that ArgumentNullException is correctly thrown
    Assert.Throws<ArgumentNullException>(() => mono8Bits.SubmitBuffer(null), "SubmitBuffer did not throw ArgumentNullException");

    /////////////////////////////////////////////////////////////////////////////
    // 3. Test that ArgumentException is correctly thrown when buffer length is 0
    Assert.Throws<ArgumentException>(() => mono8Bits.SubmitBuffer(new byte[0]), "SubmitBuffer did not throw ArgumentException with 0 length buffer");

    //////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // 4. Test that ArgumentException is correctly thrown when buffer length do not respect alignment restrictions
    // (3 bytes is not a whole number of 16-bit samples.)
    Assert.Throws<ArgumentException>(() => mono16Bits.SubmitBuffer(new byte[3]), "SubmitBuffer did not throw ArgumentException with 3 length buffer");

    ///////////////////////////////////////////////////////////////////////////
    // 5. Test that ArgumentOutOfRangeException is thrown with negative offset
    Assert.Throws<ArgumentOutOfRangeException>(() => mono8Bits.SubmitBuffer(new byte[16], -1, 3), "SubmitBuffer did not throw ArgumentOutOfRangeException with -1 offset");

    //////////////////////////////////////////////////////////////////////////////////////////////
    // 6. Test that ArgumentOutOfRangeException is thrown with offset greater than buffer length
    Assert.Throws<ArgumentOutOfRangeException>(() => mono8Bits.SubmitBuffer(new byte[16], 16, 3), "SubmitBuffer did not throw ArgumentOutOfRangeException with 16 offset");

    //////////////////////////////////////////////////////////////////////////////////
    // 7. Test that ArgumentOutOfRangeException is thrown with byteCount is negative
    Assert.Throws<ArgumentOutOfRangeException>(() => mono8Bits.SubmitBuffer(new byte[16], 16, -1), "SubmitBuffer did not throw ArgumentOutOfRangeException with -1 bytecount");

    ////////////////////////////////////////////////////////////////////////////////////////////
    // 8. Test that ArgumentOutOfRangeException is thrown when offset+byteCount exceeds buffer length
    Assert.Throws<ArgumentOutOfRangeException>(() => mono8Bits.SubmitBuffer(new byte[16], 10, 7), "SubmitBuffer did not throw ArgumentOutOfRangeException with offset+bytecount greater than buffer length.");

    /////////////////////////////////////////////////////////////////////////////////////////
    // 9. Check that submitting mono-8bits signals does not crash and has the good behaviour
    Assert.DoesNotThrow(() => mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 40000f }, 1, 88200)), "SubmitBuffer on mono8Bits crached.");
    mono8Bits.Play();
    Utilities.Sleep(2500);

    /////////////////////////////////////////////////////////////////////////////////////////
    // 10. Check that submitting mono-16bits signals does not crash and has the good behaviour
    Assert.DoesNotThrow(() => mono16Bits.SubmitBuffer(generator.Generate(44100, new[] { 40000f }, 2, 176400)), "SubmitBuffer on mono16Bits crached.");
    mono16Bits.Play();
    Utilities.Sleep(2500);

    ///////////////////////////////////////////////////////////////////////////////////////////
    // 11. Check that submitting stereo-8bits signals does not crash and has the good behaviour
    Assert.DoesNotThrow(() => stereo8Bits.SubmitBuffer(generator.Generate(44100, new[] { 40000f, 20000f }, 1, 176400)), "SubmitBuffer on stereo8Bits crached.");
    stereo8Bits.Play();
    Utilities.Sleep(2500);

    ///////////////////////////////////////////////////////////////////////////////////////////
    // 12 Check that submitting stereo-16bits signals does not crash and has the good behaviour
    Assert.DoesNotThrow(() => stereo16Bits.SubmitBuffer(generator.Generate(44100, new[] { 40000f, 10000f }, 2, 352800)), "SubmitBuffer on stereo16Bits crached.");
    stereo16Bits.Play();
    Utilities.Sleep(2500);

    /////////////////////////////////////////////////////////////////////
    // 13. Check that offset and byte count works in SubmitBuffer method
    // Concatenate three distinct tones and submit only the middle one.
    var buffer1 = generator.Generate(44100, new[] { 10000f }, 1, 44100);
    var buffer2 = generator.Generate(44100, new[] { 40000f }, 1, 44100);
    var buffer3 = generator.Generate(44100, new[] { 80000f }, 1, 44100);
    var totalBuffer = new byte[132300];
    Array.Copy(buffer1, totalBuffer, 44100);
    Array.Copy(buffer2, 0, totalBuffer, 44100, 44100);
    Array.Copy(buffer3, 0, totalBuffer, 88200, 44100);
    Assert.DoesNotThrow(() => mono8Bits.SubmitBuffer(totalBuffer, 44100, 44100), "SubmitBuffer with offset and bytecount crached.");
    mono8Bits.Play();
    Utilities.Sleep(1500);

    mono8Bits.Dispose();
    mono16Bits.Dispose();
    stereo8Bits.Dispose();
    stereo16Bits.Dispose();
}
/// <summary>
/// Checks the PendingBufferCount property: throws on a disposed instance,
/// defaults to 0, counts submitted buffers, resets to 0 on Stop, and drains
/// back to 0 after all queued buffers have been played.
/// </summary>
public void TestPendingBufferCount()
{
    mono8Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    var dispInstance = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Stereo, AudioDataEncoding.PCM_16Bits);
    dispInstance.Dispose();

    var pendingCount = 0;

    ///////////////////////////////////////////////////////////////////////////
    // 1. Test that it throws ObjectDisposedException with disposed instance
    Assert.Throws<ObjectDisposedException>(() => pendingCount = dispInstance.PendingBufferCount, "PendingBufferCount did not throw ObjectDisposedException");

    //////////////////////////////////
    // 2. Test that it does not crash
    Assert.DoesNotThrow(() => pendingCount = mono8Bits.PendingBufferCount, "PendingBufferCount crashed with valid instance");

    ////////////////////////////
    // 3. Test the default value
    mono8Bits.Stop();
    Assert.AreEqual(0, mono8Bits.PendingBufferCount, "PendingBufferCount default value is not 0");

    //////////////////////////////////////////////////
    // 4. Check the value after adding some buffers
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, 10000));
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, 10000));
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, 10000));
    Assert.AreEqual(3, mono8Bits.PendingBufferCount, "PendingBufferCount value is not 3 after adding buffers");

    //////////////////////////////////
    // 5. Check the value after stop
    mono8Bits.Stop();
    Assert.AreEqual(0, mono8Bits.PendingBufferCount, "PendingBufferCount default value is not 0");

    //////////////////////////////////
    // 6 Check the value after play
    // Two short buffers should fully drain well within the 1 s sleep.
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, 1000));
    mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 0f }, 1, 1000));
    mono8Bits.Play();
    Utilities.Sleep(1000);
    Assert.AreEqual(0, mono8Bits.PendingBufferCount, "PendingBufferCount value is not 0 after play");
    mono8Bits.Stop();

    mono8Bits.Dispose();
}
/// <summary>
/// Sets up the mono chiptune output stream: subscribes the refill handler
/// first, then primes the instance with an initial generated buffer.
/// </summary>
public ChipSound()
{
    chipSound = new DynamicSoundEffectInstance(SampleRate, AudioChannels.Mono);
    // Explicit delegate form; identical to the method-group subscription.
    chipSound.BufferNeeded += new EventHandler<EventArgs>(ChipSound_BufferNeeded);
    chipSound.SubmitBuffer(GetBuffer());
}
/// <summary>
/// Exercises DynamicSoundEffectInstance.SubmitBuffer: argument validation
/// (disposed instance, null/empty/misaligned buffers, out-of-range
/// offset/byteCount) and audible playback for every format combination
/// (mono/stereo x 8/16 bits), including the offset+byteCount overload.
/// </summary>
public void TestSubmitBuffer()
{
    mono8Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_8Bits);
    var mono16Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Mono, AudioDataEncoding.PCM_16Bits);
    var stereo8Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Stereo, AudioDataEncoding.PCM_8Bits);
    var stereo16Bits = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Stereo, AudioDataEncoding.PCM_16Bits);
    var dispInstance = new DynamicSoundEffectInstance(defaultEngine, 44100, AudioChannels.Stereo, AudioDataEncoding.PCM_16Bits);
    dispInstance.Dispose();

    ///////////////////////////////////////////////////////////////////////
    // 1. Test that it throws ObjectDisposedException with disposed instance
    Assert.Throws<ObjectDisposedException>(() => dispInstance.SubmitBuffer(new byte[16]), "SubmitBuffer did not throw ObjectDisposedException");

    ///////////////////////////////////////////////////////////
    // 2. Test that ArgumentNullException is correctly thrown
    Assert.Throws<ArgumentNullException>(() => mono8Bits.SubmitBuffer(null), "SubmitBuffer did not throw ArgumentNullException");

    /////////////////////////////////////////////////////////////////////////////
    // 3. Test that ArgumentException is correctly thrown when buffer length is 0
    Assert.Throws<ArgumentException>(() => mono8Bits.SubmitBuffer(new byte[0]), "SubmitBuffer did not throw ArgumentException with 0 length buffer");

    //////////////////////////////////////////////////////////////////////////////////////////////////////////////
    // 4. Test that ArgumentException is correctly thrown when buffer length do not respect alignment restrictions
    // (3 bytes is not a whole number of 16-bit samples.)
    Assert.Throws<ArgumentException>(() => mono16Bits.SubmitBuffer(new byte[3]), "SubmitBuffer did not throw ArgumentException with 3 length buffer");

    ///////////////////////////////////////////////////////////////////////////
    // 5. Test that ArgumentOutOfRangeException is thrown with negative offset
    Assert.Throws<ArgumentOutOfRangeException>(() => mono8Bits.SubmitBuffer(new byte[16], -1, 3), "SubmitBuffer did not throw ArgumentOutOfRangeException with -1 offset");

    //////////////////////////////////////////////////////////////////////////////////////////////
    // 6. Test that ArgumentOutOfRangeException is thrown with offset greater than buffer length
    Assert.Throws<ArgumentOutOfRangeException>(() => mono8Bits.SubmitBuffer(new byte[16], 16, 3), "SubmitBuffer did not throw ArgumentOutOfRangeException with 16 offset");

    //////////////////////////////////////////////////////////////////////////////////
    // 7. Test that ArgumentOutOfRangeException is thrown with byteCount is negative
    Assert.Throws<ArgumentOutOfRangeException>(() => mono8Bits.SubmitBuffer(new byte[16], 16, -1), "SubmitBuffer did not throw ArgumentOutOfRangeException with -1 bytecount");

    ////////////////////////////////////////////////////////////////////////////////////////////
    // 8. Test that ArgumentOutOfRangeException is thrown when offset+byteCount exceeds buffer length
    Assert.Throws<ArgumentOutOfRangeException>(() => mono8Bits.SubmitBuffer(new byte[16], 10, 7), "SubmitBuffer did not throw ArgumentOutOfRangeException with offset+bytecount greater than buffer length.");

    /////////////////////////////////////////////////////////////////////////////////////////
    // 9. Check that submitting mono-8bits signals does not crash and has the good behaviour
    Assert.DoesNotThrow(() => mono8Bits.SubmitBuffer(generator.Generate(44100, new[] { 40000f }, 1, 88200)), "SubmitBuffer on mono8Bits crached.");
    mono8Bits.Play();
    Utilities.Sleep(2500);

    /////////////////////////////////////////////////////////////////////////////////////////
    // 10. Check that submitting mono-16bits signals does not crash and has the good behaviour
    Assert.DoesNotThrow(() => mono16Bits.SubmitBuffer(generator.Generate(44100, new[] { 40000f }, 2, 176400)), "SubmitBuffer on mono16Bits crached.");
    mono16Bits.Play();
    Utilities.Sleep(2500);

    ///////////////////////////////////////////////////////////////////////////////////////////
    // 11. Check that submitting stereo-8bits signals does not crash and has the good behaviour
    Assert.DoesNotThrow(() => stereo8Bits.SubmitBuffer(generator.Generate(44100, new[] { 40000f, 20000f }, 1, 176400)), "SubmitBuffer on stereo8Bits crached.");
    stereo8Bits.Play();
    Utilities.Sleep(2500);

    ///////////////////////////////////////////////////////////////////////////////////////////
    // 12 Check that submitting stereo-16bits signals does not crash and has the good behaviour
    Assert.DoesNotThrow(() => stereo16Bits.SubmitBuffer(generator.Generate(44100, new[] { 40000f, 10000f }, 2, 352800)), "SubmitBuffer on stereo16Bits crached.");
    stereo16Bits.Play();
    Utilities.Sleep(2500);

    /////////////////////////////////////////////////////////////////////
    // 13. Check that offset and byte count works in SubmitBuffer method
    // Concatenate three distinct tones and submit only the middle one.
    var buffer1 = generator.Generate(44100, new[] { 10000f }, 1, 44100);
    var buffer2 = generator.Generate(44100, new[] { 40000f }, 1, 44100);
    var buffer3 = generator.Generate(44100, new[] { 80000f }, 1, 44100);
    var totalBuffer = new byte[132300];
    Array.Copy(buffer1, totalBuffer, 44100);
    Array.Copy(buffer2, 0, totalBuffer, 44100, 44100);
    Array.Copy(buffer3, 0, totalBuffer, 88200, 44100);
    Assert.DoesNotThrow(() => mono8Bits.SubmitBuffer(totalBuffer, 44100, 44100), "SubmitBuffer with offset and bytecount crached.");
    mono8Bits.Play();
    Utilities.Sleep(1500);

    mono8Bits.Dispose();
    mono16Bits.Dispose();
    stereo8Bits.Dispose();
    stereo16Bits.Dispose();
}
/// <summary>
/// Starts MP3 playback: opens the stream, creates the playback instance
/// under ControlLock (restoring the saved play/pause state, volume and pan),
/// then launches a decoder thread that waits for buffer requests and
/// pre-decodes 512 KiB chunks into NextBuffers until end of stream.
/// </summary>
public void Start()
{
    Stream = new Mp3Stream(Path);
    // Decode one frame up-front so Stream.Frequency is valid below.
    Stream.DecodeFrames(1);
    lock (ControlLock)
    {
        if (Disposed)
        {
            return;
        }
        Inst = new DynamicSoundEffectInstance(Stream.Frequency, AudioChannels.Stereo);
        Inst.IsLooped = false;
        Inst.BufferNeeded += SubmitBufferAsync;
        // Re-apply whatever state the sound was in before (re)starting.
        if (_State == SoundState.Playing)
        {
            Inst.Play();
        }
        else if (_State == SoundState.Paused)
        {
            // Play then immediately Pause to land in the Paused state.
            Inst.Play();
            Inst.Pause();
        }
        Inst.Volume = _Volume;
        Inst.Pan = _Pan;
        Requests = 1;
    }
    //SubmitBuffer(null, null);
    //SubmitBuffer(null, null);
    DecoderThread = new Thread(() => {
        try
        {
            // Run only while the owning thread is alive so the decoder dies
            // with the application.
            while (MainThread.IsAlive)
            {
                // Wake on request, or every 128 ms to re-check liveness.
                DecodeNext.WaitOne(128);
                bool go;
                // NOTE(review): lock(this) is an anti-pattern (external code
                // can lock the same object) — a private lock object would be
                // safer, but the consumer side must use the same monitor.
                lock (this) go = Requests > 0;
                while (go)
                {
                    // 512 KiB decode chunk.
                    var buf = new byte[524288];
                    var read = Stream.Read(buf, 0, buf.Length);
                    lock (this)
                    {
                        Requests--;
                        NextBuffers.Add(buf);
                        NextSizes.Add(read);
                        if (read == 0)
                        {
                            // Zero bytes read: stream exhausted, thread exits.
                            EndOfStream = true;
                            return;
                        }
                        BufferDone.Set();
                    }
                    lock (this) go = Requests > 0;
                }
            }
        }
        // NOTE(review): any decoder exception is swallowed silently here,
        // leaving playback starved with no diagnostics — consider at least
        // logging before exiting the thread.
        catch (Exception e) { }
    });
    DecoderThread.Start();
}
/// <summary>
/// Exercises the playable interface of DynamicSoundEffectInstance end to end
/// using a wave file streamed through BufferNeeded: Play/Pause/Stop state
/// transitions, ExitLoop, Volume and Pan sweeps, and playing to completion.
/// </summary>
public void TestPlayableInterface()
{
    // Load the raw PCM data and its format from the test wave file.
    WaveFormat dataFormat;
    using (var stream = ContentManager.FileProvider.OpenStream("EffectFishLamp", VirtualFileMode.Open, VirtualFileAccess.Read))
    {
        var memoryStream = new MemoryStream((int)stream.Length);
        stream.CopyTo(memoryStream);
        memoryStream.Position = 0;
        var waveStreamReader = new SoundStream(memoryStream);
        dataFormat = waveStreamReader.Format;
        bufferData = new byte[waveStreamReader.Length];
        if (waveStreamReader.Read(bufferData, 0, (int)waveStreamReader.Length) != waveStreamReader.Length)
        {
            throw new AudioSystemInternalException("The data length read in wave soundStream does not correspond to the stream's length.");
        }
    }
    dynSEInstance = new DynamicSoundEffectInstance(defaultEngine, dataFormat.SampleRate, (AudioChannels)dataFormat.Channels, (AudioDataEncoding)dataFormat.BitsPerSample);
    dynSEInstance.BufferNeeded += SubmitBuffer;

    //////////////////
    // 1. Test play
    dynSEInstance.Play();
    Utilities.Sleep(2000);
    Assert.AreEqual(SoundPlayState.Playing, dynSEInstance.PlayState, "Music is not playing");

    //////////////////
    // 2. Test Pause
    dynSEInstance.Pause();
    Utilities.Sleep(600);
    Assert.AreEqual(SoundPlayState.Paused, dynSEInstance.PlayState, "Music is not Paused");
    dynSEInstance.Play();
    Utilities.Sleep(1000);

    //////////////////
    // 3. Test Stop
    dynSEInstance.Stop();
    bufferCount = 0;
    Utilities.Sleep(600);
    Assert.AreEqual(SoundPlayState.Stopped, dynSEInstance.PlayState, "Music is not Stopped");
    dynSEInstance.Play();
    Utilities.Sleep(9000);

    ///////////////////
    // 4. Test ExitLoop
    Assert.DoesNotThrow(dynSEInstance.ExitLoop, "ExitLoop crached");

    ///////////////
    // 5. Volume
    // Sweep the volume down then back up in 1% steps.
    var value = 1f;
    var sign = -1f;
    while (value <= 1f)
    {
        dynSEInstance.Volume = value;
        value += sign * 0.01f;
        Utilities.Sleep(30);
        if (value < -0.2)
        {
            sign = 1f;
        }
    }
    Utilities.Sleep(2000);

    //////////////////
    // 6. Pan
    // Sweep the pan from center to full left and back to full right.
    value = 0;
    sign = -1f;
    while (value <= 1f)
    {
        dynSEInstance.Pan = value;
        value += sign * 0.01f;
        Utilities.Sleep(30);
        if (value < -1.2)
        {
            sign = 1f;
        }
    }
    dynSEInstance.Pan = 0;
    Utilities.Sleep(2000);

    ////////////////////////////////////////////////////////////////////////////
    // 7. Wait until the end of the stream to check that there are not crashes
    Utilities.Sleep(50000);

    dynSEInstance.Dispose();
}
/// <summary>
/// Returns the render target holding the current video frame. Drains
/// TheoraPlay frames until the decoder catches up with the playback timer,
/// handles end-of-stream (loop restart or stop), then uploads the YUV planes
/// and color-converts them on the GPU.
/// </summary>
public Texture2D GetTexture()
{
    checkDisposed();
    if (Video == null)
    {
        throw new InvalidOperationException();
    }

    // Be sure we can even get something from TheoraPlay...
    if (State == MediaState.Stopped ||
        Video.theoraDecoder == IntPtr.Zero ||
        TheoraPlay.THEORAPLAY_isInitialized(Video.theoraDecoder) == 0 ||
        TheoraPlay.THEORAPLAY_hasVideoStream(Video.theoraDecoder) == 0)
    {
        // Screw it, give them the old one.
        return(videoTexture[0].RenderTarget as Texture2D);
    }

    // Get the latest video frames.
    bool hasFrames = true;
    while (nextVideo.playms <= timer.ElapsedMilliseconds && hasFrames)
    {
        currentVideo = nextVideo;
        hasFrames = TheoraPlay.THEORAPLAY_availableVideo(Video.theoraDecoder) > 0;
        if (hasFrames)
        {
            // Release the stale frame before taking ownership of the new one.
            IntPtr nextFrame = TheoraPlay.THEORAPLAY_getVideo(Video.theoraDecoder);
            TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
            previousFrame = Video.videoStream;
            Video.videoStream = nextFrame;
            nextVideo = TheoraPlay.getVideoFrame(Video.videoStream);
        }
    }

    // Check for the end...
    if (TheoraPlay.THEORAPLAY_isDecoding(Video.theoraDecoder) == 0)
    {
        // FIXME: This is part of the Duration hack!
        if (Video.needsDurationHack)
        {
            Video.Duration = new TimeSpan(0, 0, 0, 0, (int)currentVideo.playms);
        }

        // Stop and reset the timer. If we're looping, the loop will start it again.
        timer.Stop();
        timer.Reset();

        // Kill whatever audio/video we've got
        if (audioStream != null)
        {
            audioStream.Stop();
            audioStream.Dispose();
            audioStream = null;
        }
        TheoraPlay.THEORAPLAY_freeVideo(previousFrame);
        Video.AttachedToPlayer = false;
        Video.Dispose();

        // If looping, go back to the start. Otherwise, we'll be exiting.
        if (IsLooped && State == MediaState.Playing)
        {
            // Starting over!
            // NOTE(review): presumably InitializeTheoraStream recreates
            // audioStream (it was nulled above) — confirm, otherwise the
            // Play() below is dead code.
            Video.AttachedToPlayer = true;
            InitializeTheoraStream();

            // Start! Again!
            timer.Start();
            if (audioStream != null)
            {
                audioStream.Play();
            }
        }
        else
        {
            // We out, give them the last frame.
            State = MediaState.Stopped;
            return(videoTexture[0].RenderTarget as Texture2D);
        }
    }

    // Set up an environment to muck about in.
    GL_pushState();

    // Prepare YUV GL textures with our current frame data.
    // NOTE(review): the pointer arithmetic assumes planar frames where the
    // half-size U and V planes follow the full-size Y plane — confirm against
    // the TheoraPlay frame layout.
    currentDevice.GLDevice.SetTextureData2DPointer(
        yuvTextures[0],
        currentVideo.pixels
    );
    currentDevice.GLDevice.SetTextureData2DPointer(
        yuvTextures[1],
        new IntPtr(
            currentVideo.pixels.ToInt64() +
            (currentVideo.width * currentVideo.height)
        )
    );
    currentDevice.GLDevice.SetTextureData2DPointer(
        yuvTextures[2],
        new IntPtr(
            currentVideo.pixels.ToInt64() +
            (currentVideo.width * currentVideo.height) +
            (currentVideo.width / 2 * currentVideo.height / 2)
        )
    );

    // Draw the YUV textures to the framebuffer with our shader.
    currentDevice.DrawPrimitives(
        PrimitiveType.TriangleStrip,
        0,
        2
    );

    // Clean up after ourselves.
    GL_popState();

    // Finally.
    return(videoTexture[0].RenderTarget as Texture2D);
}