Example #1
        void prepareAUGraph()
        {
            // Creating audio graph instance
            _auGraph = AUGraph.CreateInstance();

            // getting audio node and audio unit
            AudioComponentDescription cd = new AudioComponentDescription()
            {
                componentType         = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType      = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags        = 0,
                componentFlagsMask    = 0
            };
            int       remoteIONode = _auGraph.AddNode(cd);
            AudioUnit remoteIOUnit = _auGraph.GetNodeInfo(remoteIONode);

            // enable IO on the input scope of bus 1 (turn on the microphone)
            remoteIOUnit.SetEnableIO(true,
                                     AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                     1 // remote input
                                     );

            // audio canonical format
            AudioStreamBasicDescription audioFormat = CanonicalASBD(44100, 1);

            remoteIOUnit.SetAudioFormat(audioFormat,
                                        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output, // output scope of the input element
                                        1                                                    // input element (bus 1, the microphone)
                                        );
            remoteIOUnit.SetAudioFormat(audioFormat,
                                        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input, // input scope of the output element
                                        0                                                   // output element (bus 0, the speaker)
                                        );

            // Connecting Remote Input to Remote Output
            _auGraph.ConnnectNodeInput(
                remoteIONode, 1,
                remoteIONode, 0);

            // getting output audio format
            _audioUnitOutputFormat = remoteIOUnit.GetAudioFormat(
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output, // Remote output bus
                0                                                    // Remote output
                );

            _auGraph.RenderCallback += _auGraph_RenderCallback;
            // graph initialization
            _auGraph.Initialize();
        }
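
CanonicalASBD() is a helper of this sample that is not shown above. A minimal sketch of what it might build, assuming the canonical iOS I/O format of the time (16-bit signed, packed, interleaved linear PCM) and the Xamarin.iOS AudioStreamBasicDescription field names; the older wrapper used in this example may name these fields differently:

        AudioStreamBasicDescription CanonicalASBD(double sampleRate, int channels)
        {
            // assumption: 16-bit signed integer, packed, interleaved linear PCM
            return new AudioStreamBasicDescription
            {
                SampleRate       = sampleRate,
                Format           = AudioFormatType.LinearPCM,
                FormatFlags      = AudioFormatFlags.IsSignedInteger | AudioFormatFlags.IsPacked,
                ChannelsPerFrame = channels,
                BitsPerChannel   = 16,
                BytesPerFrame    = 2 * channels,
                FramesPerPacket  = 1,
                BytesPerPacket   = 2 * channels
            };
        }
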
        public void InitializeAUGraph()
        {
            Debug.Print("Initialize");

            LoadFiles();

            graph = new AUGraph();

            // create two AudioComponentDescriptions for the AUs we want in the graph

            // output unit
            var outputNode = graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote));

            // mixer node
            var mixerNode = graph.AddNode(AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel));

            // connect a node's output to a node's input
            if (graph.ConnnectNodeInput(mixerNode, 0, outputNode, 0) != AUGraphError.OK)
            {
                throw new ApplicationException();
            }

            // open the graph; the AudioUnits are opened but not initialized (no resource allocation occurs here)
            if (graph.TryOpen() != 0)
            {
                throw new ApplicationException();
            }

            mixer = graph.GetNodeInfo(mixerNode);

            // set bus count
            const uint numbuses = 2;

            Debug.Print("Set input bus count {0}", numbuses);

            if (mixer.SetElementCount(AudioUnitScopeType.Input, numbuses) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }

            AudioStreamBasicDescription desc;

            for (uint i = 0; i < numbuses; ++i)
            {
                // set up the render callback
                if (graph.SetNodeInputCallback(mixerNode, i, HandleRenderDelegate) != AUGraphError.OK)
                {
                    throw new ApplicationException();
                }

                // set input stream format to what we want
                desc = mixer.GetAudioFormat(AudioUnitScopeType.Input, i);
                //desc.ChangeNumberChannels(2, false);
                desc.SampleRate = GraphSampleRate;

                mixer.SetAudioFormat(desc, AudioUnitScopeType.Input, i);
            }

            // set output stream format to what we want
            desc = mixer.GetAudioFormat(AudioUnitScopeType.Output);

            //desc.ChangeNumberChannels(2, false);
            desc.SampleRate = GraphSampleRate;

            mixer.SetAudioFormat(desc, AudioUnitScopeType.Output);

            // now that everything is set up, initialize the graph; this also validates the connections
            if (graph.Initialize() != AUGraphError.OK)
            {
                throw new ApplicationException();
            }
        }
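
HandleRenderDelegate, wired into each mixer input bus above, is not shown. A minimal sketch that just renders silence, assuming Xamarin.iOS's AudioUnit.RenderDelegate signature; the real sample instead copies sample data loaded by LoadFiles() for the given busNumber:

        AudioUnitStatus HandleRenderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            // zero every output buffer; a real implementation would copy numberFrames
            // of audio for this busNumber out of the preloaded sound buffers instead
            for (int i = 0; i < data.Count; i++)
            {
                var buffer  = data[i];
                var silence = new byte[buffer.DataByteSize];
                System.Runtime.InteropServices.Marshal.Copy(silence, 0, buffer.Data, buffer.DataByteSize);
            }
            return AudioUnitStatus.NoError;
        }
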
        void StreamPropertyListenerProc(object sender, PropertyFoundEventArgs args)
        {
            if (args.Property == AudioFileStreamProperty.DataFormat)
            {
                dataFormat = audioFileStream.DataFormat;
                return;
            }

            if (args.Property != AudioFileStreamProperty.ReadyToProducePackets)
            {
                return;
            }

            if (audioQueue != null)
            {
                // TODO: Dispose
                throw new NotImplementedException();
            }

            audioQueue = new OutputAudioQueue(dataFormat);
            audioQueue.OutputCompleted += HandleOutputCompleted;

            AudioQueueStatus status;

            aqTap = audioQueue.CreateProcessingTap(TapProc, AudioQueueProcessingTapFlags.PreEffects, out status);
            if (status != AudioQueueStatus.Ok)
            {
                throw new ApplicationException("Could not create AQ tap");
            }

            // create an AUGraph to process audio in the tap; it needs to convert from the tap format to the effect format and back

            /* note: this is invalidname's recipe to do an in-place effect when a format conversion is needed
             * before and after the effect, usually because effects want floats, and everything else in iOS
             * core audio works with ints (or, in rare cases, fixed-point).
             * the graph looks like this:
             * [render-callback] -> [converter] -> [effect] -> [converter] -> [generic-output]
             * prior to calling AudioUnitRender() on generic-output, the tap proc copies the ioData pointer to a
             * place that render-callback knows about, and NULLs the ioData provided to AudioUnitRender(). the NULL
             * tells generic-output to pull from its upstream units (ie, the augraph), and copying off the ioData
             * pointer allows the render-callback to provide it to the front of the stream. in some locales, this kind of shell game
             * is described as "batshit crazy", but it seems to work pretty well in practice.
             */

            auGraph = new AUGraph();
            auGraph.Open();
            var effectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.NewTimePitch));

            effectUnit = auGraph.GetNodeInfo(effectNode);

            var convertToEffectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.AU));
            var convertToEffectUnit = auGraph.GetNodeInfo(convertToEffectNode);

            var convertFromEffectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.AU));
            var convertFromEffectUnit = auGraph.GetNodeInfo(convertFromEffectNode);

            var genericOutputNode = auGraph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Generic));

            genericOutputUnit = auGraph.GetNodeInfo(genericOutputNode);

            // set the format conversions throughout the graph
            var effectFormat = effectUnit.GetAudioFormat(AudioUnitScopeType.Output);
            var tapFormat    = aqTap.ProcessingFormat;

            convertToEffectUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Input);
            convertToEffectUnit.SetAudioFormat(effectFormat, AudioUnitScopeType.Output);

            convertFromEffectUnit.SetAudioFormat(effectFormat, AudioUnitScopeType.Input);
            convertFromEffectUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Output);

            genericOutputUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Input);
            genericOutputUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Output);

            // set maximum frames per slice higher (4096) so we don't get kAudioUnitErr_TooManyFramesToProcess
            const uint maxFramesPerSlice = 4096;

            if (convertToEffectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (effectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (convertFromEffectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (genericOutputUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }

            // connect the nodes
            auGraph.ConnnectNodeInput(convertToEffectNode, 0, effectNode, 0);
            auGraph.ConnnectNodeInput(effectNode, 0, convertFromEffectNode, 0);
            auGraph.ConnnectNodeInput(convertFromEffectNode, 0, genericOutputNode, 0);

            // set up the callback into the first convert unit
            if (convertToEffectUnit.SetRenderCallback(ConvertInputRenderCallback, AudioUnitScopeType.Global) != AudioUnitStatus.NoError)
            {
                throw new ApplicationException();
            }

            var res = auGraph.Initialize();

            if (res != AUGraphError.OK)
            {
                throw new ApplicationException();
            }
        }
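
TapProc and ConvertInputRenderCallback are referenced above but not shown. The sketch below illustrates the shell game the comment block describes; the preRenderData field, the exact delegate signatures, and the timestamp handling are assumptions based on the Xamarin.iOS bindings rather than code from this example:

        // assumption: a field that carries the source buffer pointer from TapProc
        // into ConvertInputRenderCallback
        IntPtr preRenderData;

        uint TapProc(AudioQueueProcessingTap audioQueueTap, uint numberOfFrames,
                     ref AudioTimeStamp timeStamp, ref AudioQueueProcessingTapFlags flags,
                     AudioBuffers data)
        {
            AudioQueueProcessingTapFlags sourceFlags;
            uint sourceFrames;

            // pull the audio the queue is about to play
            if (audioQueueTap.GetSourceAudio(numberOfFrames, ref timeStamp, out sourceFlags, out sourceFrames, data) != AudioQueueStatus.Ok)
                throw new ApplicationException();

            // stash the real buffer and NULL it so generic-output pulls from the graph
            preRenderData = data[0].Data;
            data.SetData(0, IntPtr.Zero);

            // a real implementation keeps a running SampleTime across calls
            var renderTimeStamp = new AudioTimeStamp { Flags = AudioTimeStamp.AtsFlags.SampleTimeValid };
            var actionFlags     = (AudioUnitRenderActionFlags)0;

            // rendering the generic output drives the whole graph; the processed audio
            // lands back in the queue's buffers (in-place effect)
            if (genericOutputUnit.Render(ref actionFlags, renderTimeStamp, 0, numberOfFrames, data) != AudioUnitStatus.NoError)
                throw new ApplicationException();

            return sourceFrames;
        }

        // front of the graph: hand the stashed source buffer to the first converter
        AudioUnitStatus ConvertInputRenderCallback(AudioUnitRenderActionFlags actionFlags,
                                                   AudioTimeStamp timeStamp, uint busNumber,
                                                   uint numberFrames, AudioBuffers data)
        {
            data.SetData(0, preRenderData);
            return AudioUnitStatus.NoError;
        }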