/// <summary>
/// Create an instance of soundBase.
/// </summary>
/// <param name="engine">A valid AudioEngine</param>
/// <exception cref="ArgumentNullException">The engine argument is null</exception>
internal SoundBase(AudioEngine engine)
{
    if (engine == null)
    {
        throw new ArgumentNullException("engine");
    }

    AudioEngine = engine;
}
/// <summary>
/// Create a new instance of AudioSystem.
/// </summary>
/// <param name="registry">The service registry in which to register the <see cref="AudioSystem"/> services</param>
/// <exception cref="ArgumentNullException"><paramref name="registry"/> is null.</exception>
public AudioSystem(IServiceRegistry registry)
    : base(registry)
{
    // Fail fast with the documented exception instead of a NullReferenceException
    // at the AddService call below.
    if (registry == null)
        throw new ArgumentNullException("registry");

    Enabled = true;
    AudioEngine = new AudioEngine();

    registry.AddService(typeof(AudioSystem), this);
}
/// <summary>
/// Create and Load a sound effect from an input wav stream.
/// </summary>
/// <param name="engine">Name of the audio engine in which to create the sound effect</param>
/// <param name="stream">A stream corresponding to a wav file.</param>
/// <returns>A new instance soundEffect ready to be played</returns>
/// <exception cref="ArgumentNullException"><paramref name="engine"/> or <paramref name="stream"/> is null.</exception>
/// <exception cref="NotSupportedException">The wave file has more than 2 channels or is not encoded in 16bits.</exception>
/// <exception cref="InvalidOperationException">The content of the stream does not correspond to a valid wave file.</exception>
/// <exception cref="OutOfMemoryException">There is not enough memory anymore to load the specified file in memory. </exception>
/// <exception cref="ObjectDisposedException">The audio engine has already been disposed</exception>
/// <remarks>Supported WAV files' audio format is the 16bits PCM format.</remarks>
public static SoundEffect Load(AudioEngine engine, Stream stream)
{
    if (engine == null)
        throw new ArgumentNullException("engine");

    // The docs promise ArgumentNullException for a null stream; enforce it here
    // (consistent with SoundMusic.Load) instead of failing later inside Load(stream).
    if (stream == null)
        throw new ArgumentNullException("stream");

    var newSdEff = new SoundEffect(engine);
    newSdEff.Load(stream);

    return newSdEff;
}
/// <summary>
/// Create and Load a sound music from an input stream.
/// </summary>
/// <param name="engine">The audio engine in which to load the soundMusic</param>
/// <param name="stream">The stream containing the sound data.</param>
/// <returns>A new instance of soundMusic ready to be played</returns>
/// <exception cref="ArgumentNullException"><paramref name="engine"/> or <paramref name="stream"/> is null.</exception>
/// <exception cref="ObjectDisposedException">The AudioEngine in which to create the voice is disposed.</exception>
/// <remarks>On all platforms the wav format is supported.
/// For compressed formats, it is the task of the build engine to automatically adapt the original files to the best hardware specific format.</remarks>
public static SoundMusic Load(AudioEngine engine, Stream stream)
{
    if (engine == null)
        throw new ArgumentNullException("engine");

    if (stream == null)
        throw new ArgumentNullException("stream");

    if (engine.IsDisposed)
        throw new ObjectDisposedException("The AudioEngine in which to create the voice is disposed.");

    // TODO: Not portable on WindowsStore

    var ret = new SoundMusic(engine);
    ret.Load(stream);

    return ret;
}
/// <summary>
/// Queue a Play request for this music and immediately mark the previously
/// playing music (if any) as stopped.
/// </summary>
internal override void PlayImpl()
{
    AudioEngine.SubmitMusicActionRequest(new SoundMusicActionRequest(this, SoundMusicAction.Play));

    // Actual playing happens during the audio engine update, but we cannot wait
    // that long to update the PlayState of the currently playing SoundMusic:
    // right after this call to Play, the previous music's PlayState must read as
    // Stopped. The static previousPlayingInstance field tracks which one that is.
    lock (PreviousPlayingInstanceLock) // protection against possible future multithreading
    {
        if (previousPlayingInstance == this)
            return;

        if (previousPlayingInstance != null)
            previousPlayingInstance.SetStateToStopped();

        previousPlayingInstance = this;
    }
}
/// <summary>
/// Create a dynamic sound effect instance with the given sound properties.
/// </summary>
/// <param name="engine">The engine in which the dynamicSoundEffectInstance is created</param>
/// <param name="sampleRate">Sample rate, in Hertz (Hz), of audio content. Must between 8000 Hz and 48000 Hz</param>
/// <param name="channels">Number of channels in the audio data.</param>
/// <param name="encoding">Encoding of a sound data sample</param>
/// <returns>A new DynamicSoundEffectInstance instance ready to filled with data and then played</returns>
/// <exception cref="ArgumentOutOfRangeException">This exception is thrown for one of the following reason:
/// <list type="bullet">
/// <item>The value specified for sampleRate is less than 8000 Hz or greater than 48000 Hz. </item>
/// <item>The value specified for channels is something other than mono or stereo. </item>
/// <item>The value specified for data encoding is something other than 8 or 16 bits. </item>
/// </list>
/// </exception>
/// <exception cref="ArgumentNullException"><paramref name="engine"/> is null.</exception>
public DynamicSoundEffectInstance(AudioEngine engine, int sampleRate, AudioChannels channels, AudioDataEncoding encoding)
    : base(engine)
{
    if (engine == null)
        throw new ArgumentNullException("engine");

    if (sampleRate < 8000 || 48000 < sampleRate)
        throw new ArgumentOutOfRangeException("sampleRate");

    if (channels != AudioChannels.Mono && channels != AudioChannels.Stereo)
        throw new ArgumentOutOfRangeException("channels");

    if (encoding != AudioDataEncoding.PCM_8Bits && encoding != AudioDataEncoding.PCM_16Bits)
        throw new ArgumentOutOfRangeException("encoding");

    waveFormat = new WaveFormat(sampleRate, (int)encoding, (int)channels);

    Interlocked.Increment(ref totalNbOfInstances);

    // Use the value returned by Interlocked.Increment rather than re-reading the
    // field afterwards: the unsynchronized re-read is racy when two instances are
    // created concurrently (both could observe a value != 1, or both could see 1).
    if (Interlocked.Increment(ref numberOfInstances) == 1)
    {
        // first instance of dynamic sound effect instance => we create the worker thread and the associated event.
        instancesNeedingBuffer = new ThreadSafeQueue<DynamicSoundEffectInstance>(); // to be sure that there is no remaining request from previous sessions
        awakeWorkerThread = new AutoResetEvent(false);
        CreateWorkerThread();
    }

    Name = "Dynamic Sound Effect Instance - " + totalNbOfInstances;

    CreateVoice(WaveFormat);

    InitializeDynamicSound();

    AudioEngine.RegisterSound(this);

    ResetStateToDefault();
}
/// <summary>
/// Destroy this dynamic instance; the last destroyed instance also shuts down
/// the shared worker thread.
/// </summary>
/// <exception cref="AudioSystemInternalException">The worker thread did not terminate within 500 ms.</exception>
internal override void DestroyImpl()
{
    AudioEngine.UnregisterSound(this);

    IsDisposing = true;
    lock (WorkerLock) // avoid to have simultaneous destroy and submit buffer (via BufferNeeded of working thread).
    {
        base.DestroyImpl();
    }

    // Use the value returned by Interlocked.Decrement rather than re-reading the
    // field afterwards: the unsynchronized re-read is racy when two instances are
    // destroyed concurrently (the shutdown branch could run twice or not at all).
    if (Interlocked.Decrement(ref numberOfInstances) == 0)
    {
        // wake the worker so it can observe IsDisposing / shutdown and exit
        awakeWorkerThread.Set();
        if (!workerTask.Wait(500))
        {
            throw new AudioSystemInternalException("The DynamicSoundEffectInstance worker did not complete in allowed time.");
        }
        awakeWorkerThread.Dispose();
    }
}
/// <summary>
/// Create a new audio voice. The first voice created also builds and starts the
/// shared AudioUnit graph (remote IO output, multi-channel mixer, 3D mixer) used
/// by all voices; subsequent voices only allocate their per-voice renderer info.
/// </summary>
/// <param name="engine">The audio engine the voice belongs to.</param>
/// <param name="effectInstance">The sound effect instance rendered by this voice (not validated here — TODO confirm null is a legal value for callers).</param>
/// <param name="desiredFormat">The wave format of the audio data; only 16 bits per sample is supported.</param>
/// <exception cref="ArgumentNullException"><paramref name="engine"/> or <paramref name="desiredFormat"/> is null.</exception>
/// <exception cref="AudioSystemInternalException">The format is not 16 bits per sample, or a graph/unit call failed (via CheckGraphError/CheckUnitStatus — TODO confirm exception type thrown by those helpers).</exception>
public AudioVoice(AudioEngine engine, SoundEffectInstance effectInstance, WaveFormat desiredFormat)
{
    if (engine == null) { throw new ArgumentNullException("engine"); }
    if (desiredFormat == null) { throw new ArgumentNullException("desiredFormat"); }

    audioEngine = engine;
    soundEffectInstance = effectInstance;
    waveFormat = desiredFormat;
    // uint.MaxValue marks "no mixer bus allocated yet" for this voice.
    BusIndexMixer = uint.MaxValue;

    if (desiredFormat.BitsPerSample != 16) { throw new AudioSystemInternalException("Invalid Audio Format. " + desiredFormat.BitsPerSample + " bits by sample is not supported."); }

    lock (StaticMembersLock)
    {
        // The AudioUnit graph is shared by all voices: build it only for the first instance.
        if (nbOfInstances == 0)
        {
            // Create the Audio Graph
            audioGraph = new AUGraph();

            // Open the graph (does not initialize it yet)
            audioGraph.Open();

            // Create the AudioComponentDescrition corresponding to the IO Remote output and MultiChannelMixer
            var remoteInOutComponentDesc = AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote);
            var mixerMultiChannelComponentDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);
            var mixer3DComponentDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.Spacial);

            // Add the Audio Unit nodes to the AudioGraph
            var outputUnitNodeId = audioGraph.AddNode(remoteInOutComponentDesc);
            var idChannelMixerNode = audioGraph.AddNode(mixerMultiChannelComponentDesc);
            var id3DMixerNode = audioGraph.AddNode(mixer3DComponentDesc);

            // Connect the nodes together: channel mixer feeds the output unit, and the
            // 3D mixer feeds the channel mixer's extra input bus (index MaxNumberOfTracks).
            CheckGraphError(audioGraph.ConnnectNodeInput(idChannelMixerNode, 0, outputUnitNodeId, 0), "Connection of the graph node failed.");
            CheckGraphError(audioGraph.ConnnectNodeInput(id3DMixerNode, 0, idChannelMixerNode, MaxNumberOfTracks), "Connection of the graph node failed.");

            // Get the MixerUnit objects
            unitChannelMixer = audioGraph.GetNodeInfo(idChannelMixerNode);
            unit3DMixer = audioGraph.GetNodeInfo(id3DMixerNode);

            // Set the mixers' output formats (the stream format is propagated along the linked input during the graph initialization)
            var desiredSampleRate = (engine.AudioSampleRate != 0) ? engine.AudioSampleRate : AudioUnitOutputSampleRate;
            unit3DMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);
            unitChannelMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);

            // set the element count to the max number of possible tracks before initializing the audio graph
            CheckUnitStatus(unitChannelMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks + 1), string.Format("Failed to set element count on ChannelMixer [{0}]", MaxNumberOfTracks + 1)); // +1 for the 3DMixer output
            CheckUnitStatus(unit3DMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks), string.Format("Failed to set element count on 3DMixer [{0}]", MaxNumberOfTracks));

            // set a null renderer callback to the channel and 3d mixer input bus
            for (uint i = 0; i < MaxNumberOfTracks; i++)
            {
                CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unit3DMixer.Handle, i), "Failed to set the render callback");
                CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unitChannelMixer.Handle, i), "Failed to set the render callback");
            }

            // Initialize the graph (validation of the topology)
            CheckGraphError(audioGraph.Initialize(), "The audio graph initialization failed.");

            // Start audio rendering
            CheckGraphError(audioGraph.Start(), "Audio Graph could not start.");

            // disable all the input bus at the beginning
            for (uint i = 0; i < MaxNumberOfTracks; i++)
            {
                CheckUnitStatus(unitChannelMixer.SetParameter(AudioUnitParameterType.MultiChannelMixerEnable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the ChannelMixerInput.");
                CheckUnitStatus(unit3DMixer.SetParameter((AudioUnitParameterType)_3DMixerParametersIds.Enable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the 3DMixerInput.");
            }

            // At initialization all UnitElement are available.
            availableMixerBusIndices = new Queue<uint>();
            for (uint i = 0; i < MaxNumberOfTracks; i++)
            {
                availableMixerBusIndices.Enqueue(i);
            }
        }
        ++nbOfInstances;

        // Create a AudioDataRendererInfo for the sounds.
        // Unmanaged allocation: presumably freed in the voice's dispose/destroy path — TODO confirm.
        pAudioDataRendererInfo = (AudioDataRendererInfo*)Utilities.AllocateClearedMemory(sizeof(AudioDataRendererInfo));
        pAudioDataRendererInfo->HandleChannelMixer = unitChannelMixer.Handle;
        pAudioDataRendererInfo->Handle3DMixer = unit3DMixer.Handle;
    }
}
/// <summary>
/// Tear down the audio system: detach game focus handlers and dispose the engine.
/// Called on dispose.
/// </summary>
protected override void Destroy()
{
    // stop reacting to game activation changes before tearing anything down
    Game.Activated -= OnActivated;
    Game.Deactivated -= OnDeactivated;

    base.Destroy();

    // release the underlying engine and drop the reference
    AudioEngine.Dispose();
    AudioEngine = null;
}
/// <summary>
/// Create a new sound instance: no data loaded yet, initial state is Stopped.
/// </summary>
/// <param name="engine">The audio engine this instance belongs to.</param>
internal SoundInstanceBase(AudioEngine engine)
    : base(engine)
{
    PlayState = SoundPlayState.Stopped;
    DataBufferLoaded = false;
}
// for serialization
// Creates an empty SoundEffect bound to the given engine; the sound data is
// loaded separately (e.g. via Load).
internal SoundEffect(AudioEngine engine) :base(engine)
{
}
internal override void DestroyImpl()
{
    // Remove this sound from the engine's registry; no other resources are
    // released here.
    AudioEngine.UnregisterSound(this);
    // mediaInputStream is disposed by AudioEngine.ProcessPlayerClosed()
}
internal override void StopImpl()
{
    // NOTE(review): resets the exit-loop request on stop — presumably so that a
    // subsequent Play loops again; confirm against the ExitLoop implementation.
    ShouldExitLoop = false;

    // Stopping is asynchronous: the request is queued and processed during the
    // audio engine update.
    AudioEngine.SubmitMusicActionRequest(new SoundMusicActionRequest(this, SoundMusicAction.Stop));
}
/// <summary>
/// Create a new audio voice. The first voice created also builds and starts the
/// shared AudioUnit graph (remote IO output, multi-channel mixer, 3D mixer) used
/// by all voices; subsequent voices only allocate their per-voice renderer info.
/// </summary>
/// <param name="engine">The audio engine the voice belongs to.</param>
/// <param name="effectInstance">The sound effect instance rendered by this voice (not validated here — TODO confirm null is a legal value for callers).</param>
/// <param name="desiredFormat">The wave format of the audio data; only 16 bits per sample is supported.</param>
/// <exception cref="ArgumentNullException"><paramref name="engine"/> or <paramref name="desiredFormat"/> is null.</exception>
/// <exception cref="AudioSystemInternalException">The format is not 16 bits per sample, or a graph/unit call failed (via CheckGraphError/CheckUnitStatus — TODO confirm exception type thrown by those helpers).</exception>
public AudioVoice(AudioEngine engine, SoundEffectInstance effectInstance, WaveFormat desiredFormat)
{
    if (engine == null)
        throw new ArgumentNullException("engine");

    if (desiredFormat == null)
        throw new ArgumentNullException("desiredFormat");

    audioEngine = engine;
    soundEffectInstance = effectInstance;
    waveFormat = desiredFormat;
    // uint.MaxValue marks "no mixer bus allocated yet" for this voice.
    BusIndexMixer = uint.MaxValue;

    if (desiredFormat.BitsPerSample != 16)
        throw new AudioSystemInternalException("Invalid Audio Format. " + desiredFormat.BitsPerSample + " bits by sample is not supported.");

    lock (StaticMembersLock)
    {
        // The AudioUnit graph is shared by all voices: build it only for the first instance.
        if (nbOfInstances == 0)
        {
            // Create the Audio Graph
            audioGraph = new AUGraph();

            // Open the graph (does not initialize it yet)
            audioGraph.Open();

            // Create the AudioComponentDescrition corresponding to the IO Remote output and MultiChannelMixer
            var remoteInOutComponentDesc = AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote);
            var mixerMultiChannelComponentDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);
            var mixer3DComponentDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.Spacial);

            // Add the Audio Unit nodes to the AudioGraph
            var outputUnitNodeId = audioGraph.AddNode(remoteInOutComponentDesc);
            var idChannelMixerNode = audioGraph.AddNode(mixerMultiChannelComponentDesc);
            var id3DMixerNode = audioGraph.AddNode(mixer3DComponentDesc);

            // Connect the nodes together: channel mixer feeds the output unit, and the
            // 3D mixer feeds the channel mixer's extra input bus (index MaxNumberOfTracks).
            CheckGraphError(audioGraph.ConnnectNodeInput(idChannelMixerNode, 0, outputUnitNodeId, 0), "Connection of the graph node failed.");
            CheckGraphError(audioGraph.ConnnectNodeInput(id3DMixerNode, 0, idChannelMixerNode, MaxNumberOfTracks), "Connection of the graph node failed.");

            // Get the MixerUnit objects
            unitChannelMixer = audioGraph.GetNodeInfo(idChannelMixerNode);
            unit3DMixer = audioGraph.GetNodeInfo(id3DMixerNode);

            // Set the mixers' output formats (the stream format is propagated along the linked input during the graph initialization)
            var desiredSampleRate = (engine.AudioSampleRate != 0) ? engine.AudioSampleRate : AudioUnitOutputSampleRate;
            unit3DMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);
            unitChannelMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);

            // set the element count to the max number of possible tracks before initializing the audio graph
            CheckUnitStatus(unitChannelMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks+1), string.Format("Failed to set element count on ChannelMixer [{0}]", MaxNumberOfTracks+1)); // +1 for the 3DMixer output
            CheckUnitStatus(unit3DMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks), string.Format("Failed to set element count on 3DMixer [{0}]", MaxNumberOfTracks));

            // set a null renderer callback to the channel and 3d mixer input bus
            for (uint i = 0; i < MaxNumberOfTracks; i++)
            {
                CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unit3DMixer.Handle, i), "Failed to set the render callback");
                CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unitChannelMixer.Handle, i), "Failed to set the render callback");
            }

            // Initialize the graph (validation of the topology)
            CheckGraphError(audioGraph.Initialize(), "The audio graph initialization failed.");

            // Start audio rendering
            CheckGraphError(audioGraph.Start(), "Audio Graph could not start.");

            // disable all the input bus at the beginning
            for (uint i = 0; i < MaxNumberOfTracks; i++)
            {
                CheckUnitStatus(unitChannelMixer.SetParameter(AudioUnitParameterType.MultiChannelMixerEnable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the ChannelMixerInput.");
                CheckUnitStatus(unit3DMixer.SetParameter((AudioUnitParameterType)_3DMixerParametersIds.Enable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the 3DMixerInput.");
            }

            // At initialization all UnitElement are available.
            availableMixerBusIndices = new Queue<uint>();
            for (uint i = 0; i < MaxNumberOfTracks; i++)
                availableMixerBusIndices.Enqueue(i);
        }
        ++nbOfInstances;

        // Create a AudioDataRendererInfo for the sounds.
        // Unmanaged allocation: presumably freed in the voice's dispose/destroy path — TODO confirm.
        pAudioDataRendererInfo = (AudioDataRendererInfo*)Utilities.AllocateClearedMemory(sizeof(AudioDataRendererInfo));
        pAudioDataRendererInfo->HandleChannelMixer = unitChannelMixer.Handle;
        pAudioDataRendererInfo->Handle3DMixer = unit3DMixer.Handle;
    }
}
// for serialization
// Creates an empty SoundMusic bound to the given engine; the sound data is
// loaded separately (e.g. via Load).
internal SoundMusic(AudioEngine engine) : base(engine)
{
}
// for serialization
// Creates an empty SoundEffect bound to the given engine; the sound data is
// loaded separately (e.g. via Load).
internal SoundEffect(AudioEngine engine) : base(engine)
{
}
internal override void PauseImpl()
{
    // Pausing is asynchronous: the request is queued here and processed during
    // the audio engine update.
    AudioEngine.SubmitMusicActionRequest(new SoundMusicActionRequest(this, SoundMusicAction.Pause));
}
// Private constructor: instances are created through the static factory
// methods (e.g. Load), never directly by callers.
private SoundEffect(AudioEngine engine) : base(engine)
{
}
internal override void UpdateVolume()
{
    // Volume changes are asynchronous: the request is queued here and processed
    // during the audio engine update.
    AudioEngine.SubmitMusicActionRequest(new SoundMusicActionRequest(this, SoundMusicAction.Volume));
}
// prevent creation of SoundEffectInstance to the user and other classes
internal SoundEffectInstance(AudioEngine engine) : base(engine)
{
    // No parent SoundEffect for instances created this way; presumably set
    // later (e.g. during deserialization) — TODO confirm with callers.
    soundEffect = null;
}