/// <summary>
/// Verifies that AudioComponent.CopyIcon (a manual binding) does not throw.
/// The returned icon itself is deliberately not inspected.
/// </summary>
public void CopyIconTest ()
{
	TestRuntime.AssertXcodeVersion (12, TestRuntime.MinorXcode12APIMismatch);

	// Describe the platform's default I/O output unit.
	var description = new AudioComponentDescription () {
		ComponentType = AudioComponentType.Output,
#if MONOMAC
#if NET
		ComponentSubType = AudioUnitSubType.VoiceProcessingIO,
#else
		ComponentSubType = (int) AudioUnitSubType.VoiceProcessingIO,
#endif
#else
#if NET
		ComponentSubType = (AudioUnitSubType) AudioTypeOutput.Remote,
#else
		ComponentSubType = 0x72696f63, // Remote_IO
#endif
#endif
		ComponentManufacturer = AudioComponentManufacturerType.Apple,
	};

	var component = AudioComponent.FindComponent (ref description);

	// ensuring that the manual binding does not throw, we do not care about the result
	Assert.DoesNotThrow (() => component.CopyIcon ());
}
/// <summary>
/// Regression test for bxc #5410: disposing an AudioUnit created from a
/// found AudioComponent must not throw.
/// </summary>
public void DisposeMethodTest ()
{
	// Describe the platform's default I/O output unit.
	var description = new AudioComponentDescription () {
		ComponentType = AudioComponentType.Output,
#if MONOMAC
#if NET
		ComponentSubType = AudioUnitSubType.VoiceProcessingIO,
#else
		ComponentSubType = (int) AudioUnitSubType.VoiceProcessingIO,
#endif
#else
#if NET
		ComponentSubType = (AudioUnitSubType) AudioTypeOutput.Remote,
#else
		ComponentSubType = 0x72696f63, // Remote_IO
#endif
#endif
		ComponentManufacturer = AudioComponentManufacturerType.Apple,
	};

	var component = AudioComponent.FindComponent (ref description);
	var unit = component.CreateAudioUnit ();

	// Explicit Dispose is the behavior under test.
	unit.Dispose ();
}
/// <summary>
/// Assigning null to AudioComponent.ComponentList must raise
/// ArgumentNullException for every output type present on this device.
/// </summary>
public void GetSetNullComponentList()
{
	TestRuntime.AssertXcodeVersion(9, 0);

	var outputTypes = new [] { AudioTypeOutput.Generic, AudioTypeOutput.Remote, AudioTypeOutput.VoiceProcessingIO };

	foreach (var outputType in outputTypes)
	{
		var usage = new ResourceUsageInfo
		{
			IOKitUserClient = new [] { "CustomUserClient1" },
			MachLookUpGlobalName = new [] { "MachServiceName1" },
			NetworkClient = false,
			TemporaryExceptionReadWrite = false,
		};
		var info = new AudioComponentInfo
		{
			Type = outputType.ToString(),
			Subtype = "XMPL",
			Name = "XMPL",
			Version = 1,
			ResourceUsage = usage,
		};

		var component = AudioComponent.FindComponent(outputType);
		if (component == null)
			continue; // type not available on this device

		// monotouchtests does not have permissions to deal with the hwd.
		Assert.Throws<ArgumentNullException> (() => component.ComponentList = null);
	}
}
// Sets up the RemoteIO audio unit for output: finds the component, creates
// the unit instance, applies the destination format (_dstFormat) and hooks
// up the render callback.  The unit is initialized but NOT started here.
void prepareAudioUnit()
{
	// creating an AudioComponentDescription of the RemoteIO AudioUnit
	AudioComponentDescription cd = new AudioComponentDescription()
	{
		componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
		componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
		componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
		componentFlags = 0,
		componentFlagsMask = 0
	};

	// Getting AudioComponent using the audio component description
	_audioComponent = AudioComponent.FindComponent(cd);

	// creating an audio unit instance
	_audioUnit = AudioUnit.CreateInstance(_audioComponent);

	// setting audio format on the input scope of the output element
	_audioUnit.SetAudioFormat(_dstFormat,
		AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
		0 // Remote Output
		);

	// setting callback method that will supply the audio data to render
	_audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(_audioUnit_RenderCallback);

	_audioUnit.Initialize();
}
// Sets up the RemoteIO audio unit for output rendering: finds the component,
// creates the unit, builds and applies a canonical linear-PCM stream format
// and attaches the render callback.  NOTE: the unit is neither Initialize()d
// nor Start()ed in this method.
void prepareAudioUnit()
{
	// Creating AudioComponentDescription instance of RemoteIO Audio Unit
	AudioComponentDescription cd = new AudioComponentDescription()
	{
		componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
		componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
		componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
		componentFlags = 0,
		componentFlagsMask = 0
	};

	// Getting AudioComponent from the description
	_component = AudioComponent.FindComponent(cd);

	// Getting Audiounit
	_audioUnit = AudioUnit.CreateInstance(_component);

	// setting AudioStreamBasicDescription
	// The canonical sample element size differs between the simulator
	// (float) and the device (fixed-point integer), per the canonical-flags
	// formula quoted below.
	int AudioUnitSampleTypeSize;
	if (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR)
	{
		AudioUnitSampleTypeSize = sizeof(float);
	}
	else
	{
		AudioUnitSampleTypeSize = sizeof(int);
	}
	AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription()
	{
		SampleRate = _sampleRate,
		Format = AudioFormatType.LinearPCM,
		//kAudioFormatFlagsAudioUnitCanonical = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved | (kAudioUnitSampleFractionBits << kLinearPCMFormatFlagsSampleFractionShift),
		FormatFlags = (AudioFormatFlags)((int)AudioFormatFlags.IsSignedInteger | (int)AudioFormatFlags.IsPacked | (int)AudioFormatFlags.IsNonInterleaved | (int)(kAudioUnitSampleFractionBits << (int)AudioFormatFlags.LinearPCMSampleFractionShift)),
		ChannelsPerFrame = 2,
		BytesPerPacket = AudioUnitSampleTypeSize,
		BytesPerFrame = AudioUnitSampleTypeSize,
		FramesPerPacket = 1,
		BitsPerChannel = 8 * AudioUnitSampleTypeSize,
		Reserved = 0
	};
	_audioUnit.SetAudioFormat(audioFormat,
		AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
		0
		);

	// setting callback
	/*
	if (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR)
		_audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(simulator_callback);
	else
		_audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(device_callback);
	*/
	_audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(device_callback);
}
/// <summary>
/// Builds the audio graph, initializes the mixer.
/// The mixer unit is kept outside the graph and driven from the output
/// node's render callback (OutputRenderDelegate) rather than being
/// connected as a graph node — see the commented-out node wiring below.
/// </summary>
protected void BuildAUGraph()
{
	Graph = new AUGraph();

	// use splitter sub-type to create file writer tap

	// output unit. output to default audio device
	int outputNode = Graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Default));

	// mixer unit
	//int mixerNode = Graph.AddNode(AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel));
	//var mixerDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);
	MixerNode = AudioComponent.FindComponent(AudioTypeMixer.MultiChannel).CreateAudioUnit();

	// connect the mixer's output to the output's input
	//if (Graph.ConnnectNodeInput(mixerNode, 0, outputNode, 0) != AUGraphError.OK)
	//{
	//    throw new ApplicationException();
	//}

	// open the graph
	if (Graph.TryOpen() != 0)
	{
		throw new ApplicationException();
	}

	Graph.SetNodeInputCallback(outputNode, 0, OutputRenderDelegate);

	Output = Graph.GetNodeInfo(outputNode);
	//MixerNode = Graph.GetNodeInfo(mixerNode);

	// must set output volume because it defaults to 0
	MixerNode.SetParameter(AudioUnitParameterType.MultiChannelMixerVolume, 1, AudioUnitScopeType.Output, 0);
	//MixerNode.SetMaximumFramesPerSlice(4096, AudioUnitScopeType.Global);

	ConfigureMixerInputs();

	AudioStreamBasicDescription desc;

	// set output stream format to match the project's sample rate
	desc = MixerNode.GetAudioFormat(AudioUnitScopeType.Output);
	desc.SampleRate = Metronome.SampleRate;
	if (MixerNode.SetFormat(desc, AudioUnitScopeType.Output) != AudioUnitStatus.OK)
	{
		throw new ApplicationException();
	}

	// now that we've set everything up we can initialize the graph, this will also validate the connections
	if (Graph.Initialize() != AUGraphError.OK)
	{
		throw new ApplicationException();
	}

	MixerNode.Initialize();
}
/// <summary>
/// Creates the audio backend: locates the platform's output AudioComponent
/// (Default output on OSX, Remote output elsewhere) and fails loudly if it
/// cannot be found.
/// </summary>
public Audio(DisposableI parent)
	: base(parent)
{
	// Pick the platform-appropriate output type once, then do a single lookup.
#if OSX
	const AudioTypeOutput outputType = AudioTypeOutput.Default;
#else
	const AudioTypeOutput outputType = AudioTypeOutput.Remote;
#endif
	component = AudioComponent.FindComponent(outputType);
	if (component == null)
	{
		Debug.ThrowError("Audio", "Failed to find AudioComponent");
	}
}
/// <summary>
/// Configures the AVAudioSession for play-and-record and sets up the
/// RemoteIO audio unit for simultaneous microphone capture and playback,
/// then initializes and starts the unit.
/// If no audio session can be obtained, an alert is shown and the method
/// returns without touching the (null) session.
/// </summary>
void prepareAudioUnit()
{
	// Updated for deprecated AudioSession
	var session = AVAudioSession.SharedInstance();
	NSError error;
	if (session == null)
	{
		var alert = new UIAlertView("Session error", "Unable to create audio session", null, "Cancel");
		alert.Show();
		alert.Clicked += delegate
		{
			alert.DismissWithClickedButtonIndex(0, true);
		};
		// BUGFIX: previously execution fell through and dereferenced the
		// null session below, crashing with a NullReferenceException.
		return;
	}
	session.SetActive(true);
	session.SetCategory(AVAudioSessionCategory.PlayAndRecord);
	session.SetPreferredIOBufferDuration(0.005, out error);

	// Getting AudioComponent Remote output
	_audioComponent = AudioComponent.FindComponent(AudioTypeOutput.Remote);

	// creating an audio unit instance
	_audioUnit = new AudioUnit(_audioComponent);

	// turning on microphone
	_audioUnit.SetEnableIO(true,
		AudioUnitScopeType.Input,
		1 // Remote Input
		);

	// setting audio format
	_audioUnit.SetAudioFormat(_dstFormat,
		AudioUnitScopeType.Input,
		0 // Remote Output
		);

	// 32-bit float LPCM for the data pulled from the input element
	var format = AudioStreamBasicDescription.CreateLinearPCM(_sampleRate, bitsPerChannel: 32);
	format.FormatFlags = AudioStreamBasicDescription.AudioFormatFlagsNativeFloat;
	_audioUnit.SetAudioFormat(format, AudioUnitScopeType.Output, 1);

	// setting callback method
	_audioUnit.SetRenderCallback(_audioUnit_RenderCallback, AudioUnitScopeType.Global);

	_audioUnit.Initialize();
	_audioUnit.Start();
}
/// <summary>
/// Regression test for bxc #5410: an AudioUnit created from a found
/// component can be disposed without throwing.
/// </summary>
public void DisposeMethodTest()
{
	// Remote I/O output unit from Apple.
	var description = new AudioComponentDescription()
	{
		ComponentType = AudioComponentType.Output,
		ComponentSubType = 0x72696f63, // Remote_IO
		ComponentManufacturer = AudioComponentManufacturerType.Apple,
	};

	var component = AudioComponent.FindComponent(ref description);
	var unit = component.CreateAudioUnit();

	// Explicit Dispose is the behavior under test.
	unit.Dispose();
}
// Configures the (deprecated) AudioSession for play-and-record, sets up the
// RemoteIO audio unit for simultaneous microphone capture and playback,
// installs the render callback, then initializes and starts the unit.
void prepareAudioUnit()
{
	// AudioSession
	AudioSession.Initialize();
	AudioSession.SetActive(true);
	AudioSession.Category = AudioSessionCategory.PlayAndRecord;
	AudioSession.PreferredHardwareIOBufferDuration = 0.005f;

	// creating an AudioComponentDescription of the RemoteIO AudioUnit
	AudioComponentDescription cd = new AudioComponentDescription()
	{
		componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
		componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
		componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
		componentFlags = 0,
		componentFlagsMask = 0
	};

	// Getting AudioComponent using the audio component description
	_audioComponent = AudioComponent.FindComponent(cd);

	// creating an audio unit instance
	_audioUnit = AudioUnit.CreateInstance(_audioComponent);

	// turning on microphone
	_audioUnit.SetEnableIO(true,
		AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
		1 // Remote Input
		);

	// setting audio format for the rendered (playback) data
	_audioUnit.SetAudioFormat(_dstFormat,
		AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
		0 // Remote Output
		);

	// canonical stereo format for the data captured from the microphone
	_audioUnit.SetAudioFormat(AudioUnitUtils.AUCanonicalASBD(_sampleRate, 2),
		AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,
		1 // Remote input
		);

	// setting callback method
	_audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(_audioUnit_RenderCallback);

	_audioUnit.Initialize();
	_audioUnit.Start();
}
/// <summary>
/// Prepares the RemoteIO audio unit: enables microphone input and speaker
/// output, applies a 16-bit signed LPCM format on both elements and
/// installs the render callback.  Bails out with an alert when no audio
/// input device is present.
/// </summary>
void PrepareAudioUnit()
{
	// All iPhones and iPads have microphones, but early iPod touches did not
	if (!AudioSession.AudioInputAvailable)
	{
		var noInputAlert = new UIAlertView("No audio input", "No audio input device is currently attached", null, "Ok");
		noInputAlert.Show();
		return;
	}

	// Getting AudioComponent Remote output
	audioComponent = AudioComponent.FindComponent(AudioTypeOutput.Remote);
	CheckValue(audioComponent);

	// creating an audio unit instance
	audioUnit = new AudioUnit.AudioUnit(audioComponent);

	// element 1 carries hardware input, element 0 carries hardware output
	CheckStatus(audioUnit.SetEnableIO(true, AudioUnitScopeType.Input, 1));
	CheckStatus(audioUnit.SetEnableIO(true, AudioUnitScopeType.Output, 0));

	dstFormat = new AudioStreamBasicDescription
	{
		SampleRate = AudioSession.CurrentHardwareSampleRate,
		Format = AudioFormatType.LinearPCM,
		FormatFlags = AudioFormatFlags.IsSignedInteger | AudioFormatFlags.IsNonInterleaved,
		BytesPerPacket = 4,
		FramesPerPacket = 1,
		BytesPerFrame = 4,
		ChannelsPerFrame = 2,
		BitsPerChannel = 16,
	};

	// same format for rendering (input scope of element 0) and capture
	// (output scope of element 1)
	audioUnit.SetAudioFormat(dstFormat, AudioUnitScopeType.Input, 0);
	audioUnit.SetAudioFormat(dstFormat, AudioUnitScopeType.Output, 1);

	CheckStatus(audioUnit.SetRenderCallback(RenderCallback, AudioUnitScopeType.Input, 0));
}
/// <summary>
/// Prepares the RemoteIO audio unit: enables microphone input, applies the
/// canonical 44.1 kHz stereo format to both elements, attaches the render
/// callback and initializes the unit (it is not started here).
/// </summary>
void prepareAudioUnit()
{
	// Remote I/O output unit from Apple.
	var description = new AudioComponentDescription()
	{
		componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
		componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
		componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
		componentFlags = 0,
		componentFlagsMask = 0
	};

	_component = AudioComponent.FindComponent(description);
	_audioUnit = AudioUnit.CreateInstance(_component);

	// enable the microphone (element 1 = Remote Input)
	_audioUnit.SetEnableIO(true, AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input, 1);

	// canonical 44.1 kHz stereo stream description for both directions
	var format = AudioUnitUtils.AUCanonicalASBD(44100.0, 2);
	_audioUnit.SetAudioFormat(format, AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input, 0);  // Remote output
	_audioUnit.SetAudioFormat(format, AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output, 1); // Remote input

	// render callback supplies/consumes the audio data
	_audioUnit.RenderCallback += _audioUnit_RenderCallback;

	_audioUnit.Initialize();
}
/// <summary>
/// (Re)creates the RemoteIO output audio unit for a stereo format and hooks
/// up the render callback.  If any configuration step fails, the partially
/// constructed unit is disposed before the exception propagates, so no
/// native resources leak.
/// </summary>
void CreateAudioUnit()
{
	// Tear down any previously created unit first.
	if (_audioUnit != null)
	{
		_audioUnit.Dispose();
		_audioUnit = null;
	}

	ResetFormat(2);

	var audioUnit = new AudioUnit.AudioUnit(AudioComponent.FindComponent(AudioTypeOutput.Remote));
	try
	{
		if (_numInputChannels > 0)
		{
			audioUnit.SetEnableIO(true, AudioUnitScopeType.Input, 1);
		}

		audioUnit.SetRenderCallback(AudioUnit_RenderCallback, AudioUnitScopeType.Input);

		AudioUnitStatus status;
		status = audioUnit.SetFormat(_format, AudioUnitScopeType.Input, 0);
		if (status != AudioUnitStatus.OK)
		{
			throw new Exception("Could not initialize audio unit: " + status);
		}

		status = audioUnit.SetFormat(_format, AudioUnitScopeType.Output, 1);
		if (status != AudioUnitStatus.OK)
		{
			throw new Exception("Could not initialize audio unit: " + status);
		}

		var osStatus = audioUnit.Initialize();
		if (osStatus != 0)
		{
			throw new Exception("Could not initialize audio unit: " + osStatus);
		}
	}
	catch
	{
		// BUGFIX: a failed configuration previously leaked the native audio
		// unit; dispose it before rethrowing.
		audioUnit.Dispose();
		throw;
	}

	_audioUnit = audioUnit;
}
/// <summary>
/// Verifies that an input callback set on a VoiceProcessingIO audio unit is
/// actually invoked within one second of starting the unit.
/// </summary>
public void Callbacks()
{
	var component = AudioComponent.FindComponent(AudioTypeOutput.VoiceProcessingIO);
	using var audioUnit = new global::AudioUnit.AudioUnit(component);

	var status = audioUnit.SetInputCallback(InputCallback, AudioUnitScopeType.Input, 1);
	if (status == AudioUnitStatus.CannotDoInCurrentContext)
		Assert.Ignore("Can't set input callback"); // No microphone? In a VM? This seems to happen often on bots.
	Assert.AreEqual(AudioUnitStatus.OK, status, "SetInputCallback");

	Assert.AreEqual(AudioUnitStatus.OK, audioUnit.Initialize(), "Initialize");
	try
	{
		Assert.AreEqual(AudioUnitStatus.OK, audioUnit.Start(), "Start");
		Assert.IsTrue(inputCallbackEvent.WaitOne(TimeSpan.FromSeconds(1)), "No input callback for 1 second");
	}
	finally
	{
		// Always stop the unit, even if the callback never fired.
		Assert.AreEqual(AudioUnitStatus.OK, audioUnit.Stop(), "Stop");
	}
}
/// <summary>
/// For every available output type: ComponentList must read back as null,
/// and assigning a non-null list must throw InvalidOperationException
/// (the test process lacks the required permissions).
/// </summary>
public void GetSetComponentList()
{
	TestRuntime.AssertXcodeVersion(9, 0);

#if !MONOMAC
	var outputTypes = new [] { AudioTypeOutput.Generic, AudioTypeOutput.Remote, AudioTypeOutput.VoiceProcessingIO };
#else
	var outputTypes = new [] { AudioTypeOutput.Generic, AudioTypeOutput.Default, AudioTypeOutput.HAL, AudioTypeOutput.System, AudioTypeOutput.VoiceProcessingIO };
#endif

	foreach (var outputType in outputTypes)
	{
		var usage = new ResourceUsageInfo
		{
			IOKitUserClient = new [] { "CustomUserClient1" },
			MachLookUpGlobalName = new [] { "MachServiceName1" },
			NetworkClient = false,
			TemporaryExceptionReadWrite = false,
		};
		var info = new AudioComponentInfo
		{
			Type = outputType.ToString(),
			Subtype = "XMPL",
			Name = "XMPL",
			Version = 1,
			ResourceUsage = usage,
		};

		var component = AudioComponent.FindComponent(outputType);
		if (component == null)
			continue; // type not available on this device

		var list = component.ComponentList;
		Assert.IsNull(list, "List is not null.");

		list = new [] { info };
		// monotouchtests does not have permissions to deal with the hwd.
		Assert.Throws<InvalidOperationException> (() => component.ComponentList = list);
	}
}
/// <summary>
/// Configures the (deprecated) AudioSession for play-and-record, prepares
/// the RemoteIO audio unit for microphone capture and playback, installs
/// the render callback, then initializes and starts the unit.
/// </summary>
void prepareAudioUnit()
{
	// AudioSession: play+record with a 5 ms preferred hardware buffer
	AudioSession.Initialize();
	AudioSession.SetActive(true);
	AudioSession.Category = AudioSessionCategory.PlayAndRecord;
	AudioSession.PreferredHardwareIOBufferDuration = 0.005f;

	// Getting AudioComponent Remote output
	_audioComponent = AudioComponent.FindComponent(AudioTypeOutput.Remote);

	// creating an audio unit instance
	_audioUnit = new AudioUnit(_audioComponent);

	// turning on microphone (element 1 = Remote Input)
	_audioUnit.SetEnableIO(true, AudioUnitScopeType.Input, 1);

	// destination format for the output element (element 0 = Remote Output)
	_audioUnit.SetAudioFormat(_dstFormat, AudioUnitScopeType.Input, 0);

	// canonical 32-bit LPCM for the data captured from element 1
	var captureFormat = AudioStreamBasicDescription.CreateLinearPCM(_sampleRate, bitsPerChannel: 32);
	captureFormat.FormatFlags = AudioStreamBasicDescription.AudioFormatFlagsAudioUnitCanonical;
	_audioUnit.SetAudioFormat(captureFormat, AudioUnitScopeType.Output, 1);

	// setting callback method
	_audioUnit.SetRenderCallback(_audioUnit_RenderCallback, AudioUnitScopeType.Global);

	_audioUnit.Initialize();
	_audioUnit.Start();
}
// Configures the (deprecated) AudioSession for play-and-record with a 10 ms
// preferred buffer, prepares the RemoteIO unit for microphone capture and
// playback using the canonical sample format, installs the render callback
// and initializes the unit (it is not started here).
void prepareAudioUnit()
{
	// AudioSession
	AudioSession.Initialize();
	AudioSession.SetActive(true);
	AudioSession.Category = AudioSessionCategory.PlayAndRecord;
	AudioSession.PreferredHardwareIOBufferDuration = 0.01f;

	// Creating AudioComponentDescription instance of RemoteIO Audio Unit
	var cd = new AudioComponentDescription()
	{
		componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
		componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
		componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
		componentFlags = 0,
		componentFlagsMask = 0
	};

	// Getting AudioComponent from the description
	_component = AudioComponent.FindComponent(cd);

	// Getting Audiounit
	_audioUnit = AudioUnit.CreateInstance(_component);

	// turning on microphone
	_audioUnit.SetEnableIO(true,
		AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
		1 // Remote Input
		);

	// setting AudioStreamBasicDescription
	// canonical sample element size: float on the simulator, fixed-point on
	// device (see the canonical-flags formula quoted below)
	int AudioUnitSampleTypeSize = (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR) ? sizeof(float) : sizeof(uint);
	AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription()
	{
		SampleRate = _sampleRate,
		Format = AudioFormatType.LinearPCM,
		//kAudioFormatFlagsAudioUnitCanonical = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved | (kAudioUnitSampleFractionBits << kLinearPCMFormatFlagsSampleFractionShift),
		FormatFlags = (AudioFormatFlags)((int)AudioFormatFlags.IsSignedInteger | (int)AudioFormatFlags.IsPacked | (int)AudioFormatFlags.IsNonInterleaved | (int)(kAudioUnitSampleFractionBits << (int)AudioFormatFlags.LinearPCMSampleFractionShift)),
		ChannelsPerFrame = 2,
		BytesPerPacket = AudioUnitSampleTypeSize,
		BytesPerFrame = AudioUnitSampleTypeSize,
		FramesPerPacket = 1,
		BitsPerChannel = 8 * AudioUnitSampleTypeSize,
		Reserved = 0
	};
	_audioUnit.SetAudioFormat(audioFormat,
		AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
		0 // Remote output
		);
	_audioUnit.SetAudioFormat(audioFormat,
		AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,
		1 // Remote input
		);

	// setting callback
	/*
	if (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR)
		_audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(simulator_callback);
	else
		_audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(device_callback);
	*/
	_audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(_callback);

	// initialize
	_audioUnit.Initialize();
}
/// <summary>
/// Configures the AVAudioSession for playback, registers an interruption
/// observer, sets up the RemoteIO audio unit with a 32-bit float LPCM
/// format and installs the render callback.  The unit is initialized but
/// left stopped.
/// If no audio session can be obtained, an alert is shown and the method
/// returns without touching the (null) session.
/// </summary>
public void InitAudio()
{
	var session = AVAudioSession.SharedInstance();
	NSError error;
	if (session == null)
	{
		var alert = new UIAlertView("Session error", "Unable to create audio session", null, "Cancel");
		alert.Show();
		alert.Clicked += delegate
		{
			alert.DismissWithClickedButtonIndex(0, true);
		};
		// BUGFIX: previously execution fell through and dereferenced the
		// null session below, crashing with a NullReferenceException.
		return;
	}

	session.SetActive(false);
	session.SetCategory(AVAudioSessionCategory.Playback,
		AVAudioSessionCategoryOptions.AllowBluetooth | AVAudioSessionCategoryOptions.DefaultToSpeaker | AVAudioSessionCategoryOptions.DuckOthers);

	// Needed so we can listen to remote events
	notification = AVAudioSession.Notifications.ObserveInterruption((sender, args) =>
	{
		// Handling audio interruption here
		if (args.InterruptionType == AVAudioSessionInterruptionType.Began)
		{
			if (_audioUnit != null && _audioUnit.IsPlaying)
			{
				_audioUnit.Stop();
			}
		}
		System.Diagnostics.Debug.WriteLine("Notification: {0}", args.Notification);
		System.Diagnostics.Debug.WriteLine("InterruptionType: {0}", args.InterruptionType);
		System.Diagnostics.Debug.WriteLine("Option: {0}", args.Option);
	});

	session.SetPreferredIOBufferDuration(0.01, out error);
	session.SetActive(true);

	// 32-bit non-interleaved float LPCM
	_audioFormat = AudioStreamBasicDescription.CreateLinearPCM(_sampleRate, bitsPerChannel: 32);
	_audioFormat.FormatFlags |= AudioFormatFlags.IsNonInterleaved | AudioFormatFlags.IsFloat;

	_audioComponent = AudioComponent.FindComponent(AudioTypeOutput.Remote);

	// creating an audio unit instance
	_audioUnit = new AudioUnit.AudioUnit(_audioComponent);

	// setting audio format
	_audioUnit.SetAudioFormat(_audioFormat,
		AudioUnitScopeType.Input,
		0 // Remote Output
		);
	//_audioFormat.FormatFlags = AudioStreamBasicDescription.AudioFormatFlagsNativeFloat;
	_audioUnit.SetAudioFormat(_audioFormat, AudioUnitScopeType.Output, 1);

	// setting callback method
	_audioUnit.SetRenderCallback(_audioUnit_RenderCallback, AudioUnitScopeType.Global);

	_audioUnit.Initialize();
	// NOTE(review): Stop() immediately after Initialize() appears intentional
	// (the unit is prepared here; playback is presumably started elsewhere) —
	// confirm against callers.
	_audioUnit.Stop();
}