Example #1
        /// <summary>
        /// Builds the audio graph, initializes the mixer.
        /// </summary>
        protected void BuildAUGraph()
        {
            Graph = new AUGraph();

            // use splitter sub-type to create file writer tap

            // output unit. output to default audio device
            int outputNode = Graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Default));

            // mixer unit
            //int mixerNode = Graph.AddNode(AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel));

            //var mixerDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);
            MixerNode = AudioComponent.FindComponent(AudioTypeMixer.MultiChannel).CreateAudioUnit();

            // connect the mixer's output to the output's input
            //if (Graph.ConnnectNodeInput(mixerNode, 0, outputNode, 0) != AUGraphError.OK)
            //{
            //    throw new ApplicationException();
            //}

            // open the graph
            if (Graph.TryOpen() != 0)
            {
                throw new ApplicationException();
            }

            Graph.SetNodeInputCallback(outputNode, 0, OutputRenderDelegate);

            Output = Graph.GetNodeInfo(outputNode);
            //MixerNode = Graph.GetNodeInfo(mixerNode);
            // must set output volume because it defaults to 0
            MixerNode.SetParameter(AudioUnitParameterType.MultiChannelMixerVolume, 1, AudioUnitScopeType.Output, 0);
            //MixerNode.SetMaximumFramesPerSlice(4096, AudioUnitScopeType.Global);

            ConfigureMixerInputs();

            AudioStreamBasicDescription desc;

            // set output stream format
            desc            = MixerNode.GetAudioFormat(AudioUnitScopeType.Output);
            desc.SampleRate = Metronome.SampleRate;
            if (MixerNode.SetFormat(desc, AudioUnitScopeType.Output) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }

            // now that we've set everything up we can initialize the graph; this will also validate the connections
            if (Graph.Initialize() != AUGraphError.OK)
            {
                throw new ApplicationException();
            }

            MixerNode.Initialize();
        }
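
The OutputRenderDelegate wired in with SetNodeInputCallback is not part of this snippet. Since the mixer here is a standalone unit rather than a graph node, the callback presumably pulls from it directly; a minimal sketch under that assumption:

        AudioUnitStatus OutputRenderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            // Sketch only: pull the mixed samples out of the standalone mixer
            // unit into the output unit's buffers.
            return MixerNode.Render(ref actionFlags, timeStamp, 0, numberFrames, data);
        }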
Example #2
        void createAudioProcessGraph()
        {
            graph = new AUGraph();

            var sampler = graph.AddNode(AudioComponentDescription.CreateMusicDevice(AudioTypeMusicDevice.Sampler));
            var inout   = graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote));

            graph.Open();
            graph.ConnnectNodeInput(sampler, 0, inout, 0);

            samplerUnit = graph.GetNodeInfo(sampler);

            graph.Initialize();
            graph.Start();
        }
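
Once the graph is running, the sampler can be driven over MIDI; a hypothetical usage sketch using the AudioUnit.MusicDeviceMIDIEvent binding (note and velocity values are illustrative):

        // note-on for middle C on channel 0 (0x90 = note-on), velocity 127
        samplerUnit.MusicDeviceMIDIEvent(0x90, 60, 127);
        // ... and the matching note-off later
        samplerUnit.MusicDeviceMIDIEvent(0x80, 60, 0);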
Example #3
        public void Connections()
        {
            using (var aug = new AUGraph()) {
                aug.Open();

                var node_1 = aug.AddNode(AudioComponentDescription.CreateGenerator(AudioTypeGenerator.AudioFilePlayer));
                var node_2 = aug.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Generic));

                Assert.AreEqual(AUGraphError.OK, aug.ConnnectNodeInput(node_1, 0, node_2, 0), "#1");
                uint count;
                aug.GetNumberOfInteractions(out count);
                Assert.AreEqual(1, count, "#2");

                Assert.AreEqual(AUGraphError.OK, aug.Initialize(), "#3");

                Assert.AreEqual(AUGraphError.OK, aug.ClearConnections(), "#4");
                aug.GetNumberOfInteractions(out count);
                Assert.AreEqual(0, count, "#5");
            }
        }
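
GetNumberOfInteractions counts input callbacks as well as node-to-node connections, so registering a render callback instead of a connection would bump the count too; an illustrative fragment (same aug and node_2 as above):

        // an input callback is an interaction too; the delegate body here is a stub
        aug.SetNodeInputCallback(node_2, 0, (flags, ts, bus, frames, data) => AudioUnitStatus.NoError);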
Example #4
        AUGraph CreateAudioGraph()
        {
            var graph = new AUGraph();

            var ioNode = graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote));
            var mix    = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);

            mixNode = graph.AddNode(mix);

            graph.ConnnectNodeInput(mixNode, 0, ioNode, 0);

            graph.Open();

            var mixUnit = graph.GetNodeInfo(mixNode);

            mixUnit.SetElementCount(AudioUnitScopeType.Input, 5);
//			mixUnit.SetParameter (AudioUnitParameterType.MultiChannelMixerVolume, 1, AudioUnitScopeType.Input, 0);
//			mixUnit.SetParameter (AudioUnitParameterType.MultiChannelMixerVolume, 1, AudioUnitScopeType.Input, 1);
            mixUnit.SetMaximumFramesPerSlice(4096, AudioUnitScopeType.Global, 0);

            return(graph);
        }
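
The five mixer inputs opened by SetElementCount still need sources. A minimal sketch of feeding bus 0 through the graph, with a hypothetical RenderBus0 delegate and an explicit input volume (the commented-out SetParameter lines above hint at the same step):

        graph.SetNodeInputCallback(mixNode, 0, RenderBus0); // RenderBus0 is a hypothetical RenderDelegate
        mixUnit.SetParameter(AudioUnitParameterType.MultiChannelMixerVolume, 1, AudioUnitScopeType.Input, 0);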
Example #5
        public void BasicOperations()
        {
            using (var aug = new AUGraph()) {
                aug.Open();
                Assert.IsTrue(aug.IsOpen, "#0");
                Assert.IsFalse(aug.IsInitialized, "#0a");
                Assert.IsFalse(aug.IsRunning, "#0b");

                var node = aug.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Generic));
                int count;
                Assert.AreEqual(AUGraphError.OK, aug.GetNodeCount(out count), "#1");
                Assert.AreEqual(1, count, "#2");

                var info = aug.GetNodeInfo(node);
                Assert.IsNotNull(info, "#3");

                int node2;
                Assert.AreEqual(AUGraphError.OK, aug.GetNode(0, out node2), "#4");
                Assert.AreEqual(1, node2, "#4a");

                float max_load;
                Assert.AreEqual(AUGraphError.OK, aug.GetMaxCPULoad(out max_load));
            }
        }
Example #6
        public static AudioComponent FindComponent(AudioTypeOutput output)
        {
            var cd = AudioComponentDescription.CreateOutput(output);

            return(FindComponent(ref cd));
        }
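
This overload is a thin convenience wrapper over FindComponent(ref AudioComponentDescription). Typical usage resolves the component and instantiates its audio unit, as in this sketch:

        var component = AudioComponent.FindComponent(AudioTypeOutput.Default);
        using (var outputUnit = component.CreateAudioUnit())
        {
            outputUnit.Initialize();
            // ... render with the unit, then let Dispose tear it down
        }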
Example #7
        public AudioVoice(AudioEngine engine, SoundEffectInstance effectInstance, WaveFormat desiredFormat)
        {
            if (engine == null)
            {
                throw new ArgumentNullException("engine");
            }
            if (desiredFormat == null)
            {
                throw new ArgumentNullException("desiredFormat");
            }

            audioEngine         = engine;
            soundEffectInstance = effectInstance;
            waveFormat          = desiredFormat;
            BusIndexMixer       = uint.MaxValue;

            if (desiredFormat.BitsPerSample != 16)
            {
                throw new AudioSystemInternalException("Invalid Audio Format. " + desiredFormat.BitsPerSample + " bits per sample is not supported.");
            }

            lock (StaticMembersLock)
            {
                if (nbOfInstances == 0)
                {
                    // Create the Audio Graph
                    audioGraph = new AUGraph();

                    // Open the graph (does not initialize it yet)
                    audioGraph.Open();

                    // Create the AudioComponentDescriptions corresponding to the Remote IO output and the two mixers
                    var remoteInOutComponentDesc       = AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote);
                    var mixerMultiChannelComponentDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);
                    var mixer3DComponentDesc           = AudioComponentDescription.CreateMixer(AudioTypeMixer.Spacial);

                    // Add the Audio Unit nodes to the AudioGraph
                    var outputUnitNodeId   = audioGraph.AddNode(remoteInOutComponentDesc);
                    var idChannelMixerNode = audioGraph.AddNode(mixerMultiChannelComponentDesc);
                    var id3DMixerNode      = audioGraph.AddNode(mixer3DComponentDesc);

                    // Connect the nodes together
                    CheckGraphError(audioGraph.ConnnectNodeInput(idChannelMixerNode, 0, outputUnitNodeId, 0), "Connection of the graph node failed.");
                    CheckGraphError(audioGraph.ConnnectNodeInput(id3DMixerNode, 0, idChannelMixerNode, MaxNumberOfTracks), "Connection of the graph node failed.");

                    // Get the MixerUnit objects
                    unitChannelMixer = audioGraph.GetNodeInfo(idChannelMixerNode);
                    unit3DMixer      = audioGraph.GetNodeInfo(id3DMixerNode);

                    // Set the mixers' output formats (the stream format is propagated to the linked inputs during graph initialization)
                    var desiredSampleRate = (engine.AudioSampleRate != 0) ? engine.AudioSampleRate : AudioUnitOutputSampleRate;
                    unit3DMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);
                    unitChannelMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);

                    // set the element count to the max number of possible tracks before initializing the audio graph
                    CheckUnitStatus(unitChannelMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks + 1), string.Format("Failed to set element count on ChannelMixer [{0}]", MaxNumberOfTracks + 1)); // +1 for the 3DMixer output
                    CheckUnitStatus(unit3DMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks), string.Format("Failed to set element count on 3DMixer [{0}]", MaxNumberOfTracks));

                    // set a null render callback on the channel and 3D mixer input buses
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unit3DMixer.Handle, i), "Failed to set the render callback");
                        CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unitChannelMixer.Handle, i), "Failed to set the render callback");
                    }

                    // Initialize the graph (validation of the topology)
                    CheckGraphError(audioGraph.Initialize(), "The audio graph initialization failed.");

                    // Start audio rendering
                    CheckGraphError(audioGraph.Start(), "Audio Graph could not start.");

                    // disable all the input buses at the beginning
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        CheckUnitStatus(unitChannelMixer.SetParameter(AudioUnitParameterType.MultiChannelMixerEnable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the ChannelMixerInput.");
                        CheckUnitStatus(unit3DMixer.SetParameter((AudioUnitParameterType)_3DMixerParametersIds.Enable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the 3DMixerInput.");
                    }

                    // At initialization, all unit elements are available.
                    availableMixerBusIndices = new Queue <uint>();
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        availableMixerBusIndices.Enqueue(i);
                    }
                }
                ++nbOfInstances;

                // Create an AudioDataRendererInfo for the sounds.
                pAudioDataRendererInfo = (AudioDataRendererInfo *)Utilities.AllocateClearedMemory(sizeof(AudioDataRendererInfo));
                pAudioDataRendererInfo->HandleChannelMixer = unitChannelMixer.Handle;
                pAudioDataRendererInfo->Handle3DMixer      = unit3DMixer.Handle;
            }
        }
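
CheckGraphError and CheckUnitStatus are helpers that are not shown in this example; a plausible sketch of what they do, reusing the AudioSystemInternalException seen above:

        private static void CheckGraphError(AUGraphError error, string message)
        {
            // Assumed helper: turn a graph error code into a descriptive exception.
            if (error != AUGraphError.OK)
                throw new AudioSystemInternalException(message + " [Error=" + error + "].");
        }

        private static void CheckUnitStatus(AudioUnitStatus status, string message)
        {
            // Assumed helper: turn an audio unit status into a descriptive exception.
            if (status != AudioUnitStatus.OK)
                throw new AudioSystemInternalException(message + " [Status=" + status + "].");
        }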
        void StreamPropertyListenerProc(object sender, PropertyFoundEventArgs args)
        {
            if (args.Property == AudioFileStreamProperty.DataFormat)
            {
                dataFormat = audioFileStream.DataFormat;
                return;
            }

            if (args.Property != AudioFileStreamProperty.ReadyToProducePackets)
            {
                return;
            }

            if (audioQueue != null)
            {
                // TODO: Dispose
                throw new NotImplementedException();
            }

            audioQueue = new OutputAudioQueue(dataFormat);
            audioQueue.BufferCompleted += HandleBufferCompleted;

            AudioQueueStatus status;

            aqTap = audioQueue.CreateProcessingTap(TapProc, AudioQueueProcessingTapFlags.PreEffects, out status);
            if (status != AudioQueueStatus.Ok)
            {
                throw new ApplicationException("Could not create AQ tap");
            }

            // create an augraph to process in the tap. needs to convert from tapFormat to effect format and back

            /* note: this is invalidname's recipe to do an in-place effect when a format conversion is needed
             * before and after the effect, usually because effects want floats, and everything else in iOS
             * core audio works with ints (or, in rare cases, fixed-point).
             * the graph looks like this:
             * [render-callback] -> [converter] -> [effect] -> [converter] -> [generic-output]
             * prior to calling AudioUnitRender() on generic-output, we copy the ioData to a pointer that
             * render-callback knows about, and NULL the ioData provided to AudioUnitRender(). the NULL tells
             * generic-output to pull from its upstream units (ie, the augraph), and copying off the ioData
             * pointer allows the render-callback to provide it to the front of the stream. in some locales,
             * this kind of shell game is described as "batshit crazy", but it seems to work pretty well in practice.
             */

            auGraph = new AUGraph();
            auGraph.Open();
            var effectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.NewTimePitch));

            effectUnit = auGraph.GetNodeInfo(effectNode);

            var convertToEffectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.AU));
            var convertToEffectUnit = auGraph.GetNodeInfo(convertToEffectNode);

            var convertFromEffectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.AU));
            var convertFromEffectUnit = auGraph.GetNodeInfo(convertFromEffectNode);

            var genericOutputNode = auGraph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Generic));

            genericOutputUnit = auGraph.GetNodeInfo(genericOutputNode);

            // set the format conversions throughout the graph
            var effectFormat = effectUnit.GetAudioFormat(AudioUnitScopeType.Output);
            var tapFormat    = aqTap.ProcessingFormat;

            convertToEffectUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Input);
            convertToEffectUnit.SetAudioFormat(effectFormat, AudioUnitScopeType.Output);

            convertFromEffectUnit.SetAudioFormat(effectFormat, AudioUnitScopeType.Input);
            convertFromEffectUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Output);

            genericOutputUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Input);
            genericOutputUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Output);

            // set maximum frames per slice higher (4096) so we don't get kAudioUnitErr_TooManyFramesToProcess
            const uint maxFramesPerSlice = 4096;

            if (convertToEffectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (effectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (convertFromEffectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (genericOutputUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }

            // connect the nodes
            auGraph.ConnnectNodeInput(convertToEffectNode, 0, effectNode, 0);
            auGraph.ConnnectNodeInput(effectNode, 0, convertFromEffectNode, 0);
            auGraph.ConnnectNodeInput(convertFromEffectNode, 0, genericOutputNode, 0);

            // set up the callback into the first convert unit
            if (convertToEffectUnit.SetRenderCallback(ConvertInputRenderCallback, AudioUnitScopeType.Global) != AudioUnitStatus.NoError)
            {
                throw new ApplicationException();
            }

            var res = auGraph.Initialize();

            if (res != AUGraphError.OK)
            {
                throw new ApplicationException();
            }
        }
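
TapProc and ConvertInputRenderCallback implement the pointer shell game the comment describes. A minimal sketch of the pair, with preRenderData as an assumed field that stashes the tap's buffer pointer between the two callbacks:

        IntPtr preRenderData; // assumed field: carries the tap's source buffer into the render callback

        uint TapProc(AudioQueueProcessingTap audioQueueTap, uint numberOfFrames, ref AudioTimeStamp timeStamp, ref AudioQueueProcessingTapFlags flags, AudioBuffers data)
        {
            AudioQueueProcessingTapFlags sourceFlags;
            uint sourceFrames;

            // pull the source audio from the queue into the tap's buffers
            audioQueueTap.GetSourceAudio(numberOfFrames, ref timeStamp, out sourceFlags, out sourceFrames, data);

            // stash the buffer pointer and NULL it so generic-output pulls from the graph instead
            preRenderData = data[0].Data;
            data.SetData(0, IntPtr.Zero);

            var renderTimeStamp = new AudioTimeStamp { Flags = AudioTimeStamp.AtsFlags.SampleTimeValid };
            AudioUnitRenderActionFlags actionFlags = 0;
            genericOutputUnit.Render(ref actionFlags, renderTimeStamp, 0, numberOfFrames, data);

            return numberOfFrames;
        }

        AudioUnitStatus ConvertInputRenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            // hand the stashed tap buffer back to the front of the graph
            data.SetData(0, preRenderData);
            return AudioUnitStatus.NoError;
        }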