Example #1
		void configureAndStartAudioProcessingGraph (AUGraph graph)
		{
			int result = 0;
			uint framesPerSlice = 0;

			result = ioUnit.Initialize ();
			if (result != 0)
				throw new Exception ("Unable to Initialize the I/O unit.  Error code: " + result);

			var status = ioUnit.SetSampleRate (graphSampleRate, AudioUnitScopeType.Output);
			if (status != AudioUnitStatus.NoError)
				throw new Exception ("AudioUnitSetProperty (set output stream sample rate).  Error code: " + (int)status);

			framesPerSlice = ioUnit.GetMaximumFramesPerSlice (AudioUnitScopeType.Global);

			samplerUnit.SetSampleRate (graphSampleRate, AudioUnitScopeType.Output);
			samplerUnit.SetMaximumFramesPerSlice (framesPerSlice, AudioUnitScopeType.Global);

			if (graph != null) {
				result = (int)graph.Initialize ();
				if (result != (int)AUGraphError.OK)
					throw new Exception ("Unable to initialize AUGraph object.  Error code: " + result);

				result = (int)graph.Start ();
				if (result != (int)AUGraphError.OK)
					throw new Exception ("Unable to start audio processing graph.  Error code: " + result);

//				TODO: CAShow
				Console.WriteLine (graph);
			}
		}
Example #2
        void SetupAUGraph()
        {
            graph = new AUGraph();

            AudioComponentDescription mixerDescription = new AudioComponentDescription();

            mixerDescription.ComponentType         = AudioComponentType.Mixer;
            mixerDescription.ComponentSubType      = (int)AudioTypeMixer.MultiChannel;
            mixerDescription.ComponentFlags        = 0;
            mixerDescription.ComponentFlagsMask    = 0;
            mixerDescription.ComponentManufacturer = AudioComponentManufacturerType.Apple;

            AudioComponentDescription outputDescription = new AudioComponentDescription();

            outputDescription.ComponentType         = AudioComponentType.Output;
            outputDescription.ComponentSubType      = (int)AudioTypeOutput.System;
            outputDescription.ComponentFlags        = 0;
            outputDescription.ComponentFlagsMask    = 0;
            outputDescription.ComponentManufacturer = AudioComponentManufacturerType.Apple;

            int mixerNode  = graph.AddNode(mixerDescription);
            int outputNode = graph.AddNode(outputDescription);

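            // Note: "ConnnectNodeInput" (with three n's) is the actual, misspelled method name in the Xamarin binding.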
            AUGraphError error = graph.ConnnectNodeInput(mixerNode, 0, outputNode, 0);

            Assert.AreEqual(AUGraphError.OK, error);

            graph.Open();

            mMixer = graph.GetNodeInfo(mixerNode);

            AudioUnitStatus status = mMixer.SetElementCount(AudioUnitScopeType.Input, 0);

            Assert.AreEqual(AudioUnitStatus.OK, status);
        }
Example #3
		bool CreateAUGraph ()
		{
			processingGraph = new AUGraph ();

			int samplerNode, ioNode;

			var musicSampler = new AudioComponentDescription () {
				ComponentManufacturer = AudioComponentManufacturerType.Apple,
				ComponentType = AudioComponentType.MusicDevice,
				ComponentSubType = (int)AudioTypeMusicDevice.Sampler
			};
			samplerNode = processingGraph.AddNode (musicSampler);

			var remoteOutput = new AudioComponentDescription () {
				ComponentManufacturer = AudioComponentManufacturerType.Apple,
				ComponentType = AudioComponentType.Output,
				ComponentSubType = (int)AudioTypeOutput.Remote
			};
			ioNode = processingGraph.AddNode (remoteOutput);

			processingGraph.Open ();

			processingGraph.ConnnectNodeInput (
				sourceNode: samplerNode,
				sourceOutputNumber: 0,
				destNode: ioNode,
				destInputNumber: 0);

			samplerUnit = processingGraph.GetNodeInfo (samplerNode);


			return true;
		}
Example #4
        public ArrangementInstruments(Random r, AUGraph graph, int ioNode)
        {
            var mp = ArrangementPresets.Melody [r.Next(ArrangementPresets.Melody.Length)];

            Melody = new Instrument(mp, 0, graph, ioNode);

            var mp2 = mp;

            while (mp2.Preset == mp.Preset)
            {
                mp2 = ArrangementPresets.Changes [r.Next(ArrangementPresets.Changes.Length)];
            }
            Melody2 = new Instrument(mp2, 1, graph, ioNode);

            var cp = mp;

            while (cp.Preset == mp.Preset || cp.Preset == mp2.Preset)
            {
                cp = ArrangementPresets.Changes [r.Next(ArrangementPresets.Changes.Length)];
            }
            Changes = new Instrument(cp, 2, graph, ioNode);

            var bp = ArrangementPresets.Basses [r.Next(ArrangementPresets.Basses.Length)];

            Bass = new Instrument(bp, 3, graph, ioNode);

            Percussion = new Instrument(ArrangementPresets.Percussion, 4, graph, ioNode);
        }
Example #5
		bool createAUGraph ()
		{
			AUGraphError result = 0;
			int samplerNode, ioNode;

			var cd = new AudioComponentDescription () {
				ComponentManufacturer = AudioComponentManufacturerType.Apple,
				ComponentFlags = 0,
				ComponentFlagsMask = 0
			};

			processingGraph = new AUGraph ();

			cd.ComponentType = AudioComponentType.MusicDevice;
			cd.ComponentSubType = (int)AudioTypeMusicDevice.Sampler; //0x73616d70;

			samplerNode = processingGraph.AddNode (cd);

			cd.ComponentType = AudioComponentType.Output;
			cd.ComponentSubType = (int)AudioTypeOutput.Remote; //0x72696f63;

			ioNode = processingGraph.AddNode (cd);

			processingGraph.Open ();

			result = processingGraph.ConnnectNodeInput (samplerNode, 0, ioNode, 0);
			if (result != AUGraphError.OK)
				throw new Exception ("Unable to connect the sampler node to the I/O node.  Error code: " + result);
			samplerUnit = processingGraph.GetNodeInfo (samplerNode);
			ioUnit = processingGraph.GetNodeInfo (ioNode);

			return true;
		}
Example #6
		public void GetElementCount ()
		{
			var graph = new AUGraph ();
			var mixerNode = graph.AddNode (AudioComponentDescription.CreateMixer (AudioTypeMixer.MultiChannel));
			graph.Open ();
			var mixer = graph.GetNodeInfo (mixerNode);
			Assert.AreEqual (1, mixer.GetElementCount (AudioUnitScopeType.Global));
		}
Example #7
        void prepareAUGraph()
        {
            // Creating audio graph instance
            _auGraph = AUGraph.CreateInstance();

            // Adding Remote IO node  to AUGraph
            AudioComponentDescription cd = new AudioComponentDescription()
            {
                componentType         = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType      = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags        = 0,
                componentFlagsMask    = 0
            };
            int remoteIONode = _auGraph.AddNode(cd);

            // Preparing the AudioComponentDescription of the MultiChannelMixer
            cd.componentType    = AudioComponentDescription.AudioComponentType.kAudioUnitType_Mixer;
            cd.componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_MultiChannelMixer;
            int multiChannelMixerNode = _auGraph.AddNode(cd);

            // Setting the render callback for each bus, as in the plain Audio Unit case
            for (int i = 0; i < _waveDef.Length; i++)
            {
                var callbackStruct = new AudioUnit.AURenderCallbackStrct();
                callbackStruct.inputProc       = device_renderCallback;      // setting callback function
                callbackStruct.inputProcRefCon = GCHandle.ToIntPtr(_handle); // a pointer that is passed to the renderCallback (IntPtr inRefCon)
                _auGraph.AUGraphSetNodeInputCallback(
                    multiChannelMixerNode,
                    (uint)i, // bus number
                    callbackStruct);
            }

            var _remoteIO = _auGraph.GetNodeInfo(remoteIONode);
            var multiChannelMixerAudioUnit = _auGraph.GetNodeInfo(multiChannelMixerNode);


            // Getting an AudioUnit canonical description
            var audioFormat = AudioUnitUtils.AUCanonicalASBD(44100.0, 2);

            // applying the audio format to each audio unit
            _remoteIO.SetAudioFormat(audioFormat, AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input, 0);
            multiChannelMixerAudioUnit.SetAudioFormat(audioFormat, AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input, 0);

            // connecting multiChannelMixerNode(bus:0) to remoteIONode(bus:0)
            _auGraph.ConnnectNodeInput(
                multiChannelMixerNode,
                0, // output bus
                remoteIONode,
                0  // input bus
                );

            // graph initialization
            _remoteIO.Initialize();
            _auGraph.Initialize();

            // mic setting
        }
Example #8
        /// <summary>
        /// Builds the audio graph, initializes the mixer.
        /// </summary>
        protected void BuildAUGraph()
        {
            Graph = new AUGraph();

            // use splitter sub-type to create file writer tap

            // output unit. output to default audio device
            int outputNode = Graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Default));

            // mixer unit
            //int mixerNode = Graph.AddNode(AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel));

            //var mixerDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);
            MixerNode = AudioComponent.FindComponent(AudioTypeMixer.MultiChannel).CreateAudioUnit();

            // connect the mixer's output to the output's input
            //if (Graph.ConnnectNodeInput(mixerNode, 0, outputNode, 0) != AUGraphError.OK)
            //{
            //    throw new ApplicationException();
            //}

            // open the graph
            if (Graph.TryOpen() != 0)
            {
                throw new ApplicationException();
            }

            Graph.SetNodeInputCallback(outputNode, 0, OutputRenderDelegate);

            Output = Graph.GetNodeInfo(outputNode);
            //MixerNode = Graph.GetNodeInfo(mixerNode);
            // must set output volume because it defaults to 0
            MixerNode.SetParameter(AudioUnitParameterType.MultiChannelMixerVolume, 1, AudioUnitScopeType.Output, 0);
            //MixerNode.SetMaximumFramesPerSlice(4096, AudioUnitScopeType.Global);

            ConfigureMixerInputs();

            AudioStreamBasicDescription desc;

            // set output stream format
            desc            = MixerNode.GetAudioFormat(AudioUnitScopeType.Output);
            desc.SampleRate = Metronome.SampleRate;
            if (MixerNode.SetFormat(desc, AudioUnitScopeType.Output) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }

            // now that we've set everything up we can initialize the graph; this will also validate the connections
            if (Graph.Initialize() != AUGraphError.OK)
            {
                throw new ApplicationException();
            }

            MixerNode.Initialize();
        }
Example #9
		void ConfigureAndStartAudioProcessingGraph (AUGraph graph)
		{
			if (graph == null)
				return;

			var error = graph.Initialize ();
			if (error != AUGraphError.OK)
				throw new Exception ("Unable to initialize AUGraph object.  Error code: " + error);

			error = graph.Start ();
			if (error != AUGraphError.OK)
				throw new Exception ("Unable to start audio processing graph.  Error code: " + error);
		}
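For symmetry, a hedged teardown sketch (not part of the original source): stopping and disposing the same graph. It assumes only members shown elsewhere in this listing, namely the binding's IsRunning property, Stop method, and IDisposable implementation on AUGraph.

		void StopAndDisposeAudioProcessingGraph (AUGraph graph)
		{
			if (graph == null)
				return;

			// stop rendering before releasing the graph
			if (graph.IsRunning) {
				var error = graph.Stop ();
				if (error != AUGraphError.OK)
					throw new Exception ("Unable to stop audio processing graph.  Error code: " + error);
			}

			graph.Dispose ();
		}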
Example #10
        void prepareAUGraph()
        {
            // Creating audio graph instance
            _auGraph = AUGraph.CreateInstance();

            // getting audio node and audio unit
            AudioComponentDescription cd = new AudioComponentDescription()
            {
                componentType         = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType      = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags        = 0,
                componentFlagsMask    = 0
            };
            int       remoteIONode = _auGraph.AddNode(cd);
            AudioUnit remoteIOUnit = _auGraph.GetNodeInfo(remoteIONode);

            // turning on microphone

            remoteIOUnit.SetEnableIO(true,
                                     AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                     1 // remote input
                                     );

            // audio canonical format
            AudioStreamBasicDescription audioFormat = CanonicalASBD(44100, 1);

            remoteIOUnit.SetAudioFormat(audioFormat,
                                        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output, // output bus of Remote input
                                        1                                                    // Remote input
                                        );
            remoteIOUnit.SetAudioFormat(audioFormat,
                                        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                        0 // Remote output,
                                        );

            // Connecting Remote Input to Remote Output
            _auGraph.ConnnectNodeInput(
                remoteIONode, 1,
                remoteIONode, 0);

            // getting output audio format
            _audioUnitOutputFormat = remoteIOUnit.GetAudioFormat(
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output, // Remote output bus
                0                                                    // Remote output
                );

            _auGraph.RenderCallback += new EventHandler <AudioGraphEventArgs>(_auGraph_RenderCallback);
            // graph initialization
            _auGraph.Initialize();
        }
Example #11
        public void CreateTest()
        {
            int errCode;

            using (var aug = AUGraph.Create(out errCode)) {
                Assert.NotNull(aug, "CreateTest");
                Assert.AreEqual(0, errCode, "CreateTest");

                // Make sure it is a working instance
                aug.Open();
                Assert.IsTrue(aug.IsOpen, "CreateTest #0");
                Assert.IsFalse(aug.IsInitialized, "CreateTest #0a");
                Assert.IsFalse(aug.IsRunning, "CreateTest #0b");
            }
        }
Example #12
        void createAudioProcessGraph()
        {
            graph = new AUGraph();

            var sampler = graph.AddNode(AudioComponentDescription.CreateMusicDevice(AudioTypeMusicDevice.Sampler));
            var inout   = graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote));

            graph.Open();
            graph.ConnnectNodeInput(sampler, 0, inout, 0);

            samplerUnit = graph.GetNodeInfo(sampler);

            graph.Initialize();
            graph.Start();
        }
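Once a graph like this is running, the sampler unit can be driven with MIDI messages. A minimal usage sketch (not from the original source), assuming the binding's MusicDeviceMIDIEvent wrapper for the Core Audio function of the same name; the note and velocity values are illustrative:

        void playMiddleC()
        {
            // note-on, MIDI channel 0: middle C (note 60) at velocity 100
            samplerUnit.MusicDeviceMIDIEvent(0x90, 60, 100, 0);

            // ... later, note-off on the same channel to release it
            samplerUnit.MusicDeviceMIDIEvent(0x80, 60, 0, 0);
        }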
Example #13
        public Instrument(InstrumentInfo info, int channel, AUGraph graph, int ioNode)
        {
            Info = info;

            var samplerNode = graph.AddNode(AudioComponentDescription.CreateMusicDevice(AudioTypeMusicDevice.Sampler));

            graph.ConnnectNodeInput(samplerNode, 0, ioNode, (uint)channel);

            samplerUnit = graph.GetNodeInfo(samplerNode);
            samplerUnit.SetMaximumFramesPerSlice(4096, AudioUnitScopeType.Global, 0);

            this.channel = channel;

            LoadInstrument(info);
        }
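LoadInstrument itself is not shown in this example. A plausible, hypothetical sketch under stated assumptions: InstrumentInfo exposes the Preset number seen in Example #4, and the preset is selected with a MIDI program-change on the instrument's channel. This is not the original author's implementation.

        void LoadInstrument(InstrumentInfo info)
        {
            // MIDI program change: status byte 0xC0 | channel, data1 = preset number (assumed field)
            var status = samplerUnit.MusicDeviceMIDIEvent((uint)(0xC0 | channel), (uint)info.Preset, 0, 0);

            if (status != AudioUnitStatus.NoError)
                throw new Exception("Unable to select preset.  Error code: " + status);
        }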
Example #14
        public void Connections()
        {
            using (var aug = new AUGraph()) {
                aug.Open();

                var node_1 = aug.AddNode(AudioComponentDescription.CreateGenerator(AudioTypeGenerator.AudioFilePlayer));
                var node_2 = aug.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Generic));

                Assert.AreEqual(AUGraphError.OK, aug.ConnnectNodeInput(node_1, 0, node_2, 0), "#1");
                uint count;
                aug.GetNumberOfInteractions(out count);
                Assert.AreEqual(1, count, "#2");

                Assert.AreEqual(AUGraphError.OK, aug.Initialize(), "#3");

                Assert.AreEqual(AUGraphError.OK, aug.ClearConnections(), "#4");
                aug.GetNumberOfInteractions(out count);
                Assert.AreEqual(0, count, "#5");
            }
        }
Example #15
        void ConfigureAndStartAudioProcessingGraph(AUGraph graph)
        {
            if (graph == null)
            {
                return;
            }

            var error = graph.Initialize();

            if (error != AUGraphError.OK)
            {
                throw new Exception("Unable to initialize AUGraph object.  Error code: " + error);
            }

            error = graph.Start();
            if (error != AUGraphError.OK)
            {
                throw new Exception("Unable to start audio processing graph.  Error code: " + error);
            }
        }
Example #16
        void configureAndStartAudioProcessingGraph(AUGraph graph)
        {
            int  result         = 0;
            uint framesPerSlice = 0;

            result = ioUnit.Initialize();
            if (result != 0)
            {
                throw new Exception("Unable to Initialize the I/O unit.  Error code: " + result);
            }

            var status = ioUnit.SetSampleRate(graphSampleRate, AudioUnitScopeType.Output);

            if (status != AudioUnitStatus.NoError)
            {
                throw new Exception("AudioUnitSetProperty (set output stream sample rate).  Error code: " + (int)status);
            }

            framesPerSlice = ioUnit.GetMaximumFramesPerSlice(AudioUnitScopeType.Global);

            samplerUnit.SetSampleRate(graphSampleRate, AudioUnitScopeType.Output);
            samplerUnit.SetMaximumFramesPerSlice(framesPerSlice, AudioUnitScopeType.Global);

            if (graph != null)
            {
                result = (int)graph.Initialize();
                if (result != (int)AUGraphError.OK)
                {
                    throw new Exception("Unable to initialize AUGraph object.  Error code: " + result);
                }

                result = (int)graph.Start();
                if (result != (int)AUGraphError.OK)
                {
                    throw new Exception("Unable to start audio processing graph.  Error code: " + result);
                }

//				TODO: CAShow
                Console.WriteLine(graph);
            }
        }
Example #17
        AUGraph CreateAudioGraph()
        {
            var graph = new AUGraph();

            var ioNode = graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote));
            var mix    = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);

            mixNode = graph.AddNode(mix);

            graph.ConnnectNodeInput(mixNode, 0, ioNode, 0);

            graph.Open();

            var mixUnit = graph.GetNodeInfo(mixNode);

            mixUnit.SetElementCount(AudioUnitScopeType.Input, 5);
//			mixUnit.SetParameter (AudioUnitParameterType.MultiChannelMixerVolume, 1, AudioUnitScopeType.Input, 0);
//			mixUnit.SetParameter (AudioUnitParameterType.MultiChannelMixerVolume, 1, AudioUnitScopeType.Input, 1);
            mixUnit.SetMaximumFramesPerSlice(4096, AudioUnitScopeType.Global, 0);

            return graph;
        }
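The commented-out SetParameter lines hint at the usual follow-up: MultiChannelMixer buses default to volume 0 (as Example #8's comment notes), so here is a hedged sketch (not from the original source) of unmuting the five inputs configured above plus the output bus, using the same call the comments show:

        void UnmuteMixer(AudioUnit.AudioUnit mixUnit)
        {
            // raise each of the five input buses from the default volume of 0 to unity gain
            for (uint i = 0; i < 5; i++)
                mixUnit.SetParameter(AudioUnitParameterType.MultiChannelMixerVolume, 1, AudioUnitScopeType.Input, i);

            // and the mixer's single output bus
            mixUnit.SetParameter(AudioUnitParameterType.MultiChannelMixerVolume, 1, AudioUnitScopeType.Output, 0);
        }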
Example #18
        bool CreateAUGraph()
        {
            processingGraph = new AUGraph();

            int samplerNode, ioNode;

            var musicSampler = new AudioComponentDescription()
            {
                ComponentManufacturer = AudioComponentManufacturerType.Apple,
                ComponentType         = AudioComponentType.MusicDevice,
                ComponentSubType      = (int)AudioTypeMusicDevice.Sampler
            };

            samplerNode = processingGraph.AddNode(musicSampler);

            var remoteOutput = new AudioComponentDescription()
            {
                ComponentManufacturer = AudioComponentManufacturerType.Apple,
                ComponentType         = AudioComponentType.Output,
                ComponentSubType      = (int)AudioTypeOutput.Remote
            };

            ioNode = processingGraph.AddNode(remoteOutput);

            processingGraph.Open();

            processingGraph.ConnnectNodeInput(
                sourceNode: samplerNode,
                sourceOutputNumber: 0,
                destNode: ioNode,
                destInputNumber: 0);

            samplerUnit = processingGraph.GetNodeInfo(samplerNode);

            ioUnit = processingGraph.GetNodeInfo(ioNode);

            return true;
        }
Example #19
        bool createAUGraph()
        {
            AUGraphError result = 0;
            int          samplerNode, ioNode;

            var cd = new AudioComponentDescription()
            {
                ComponentManufacturer = AudioComponentManufacturerType.Apple,
                ComponentFlags        = 0,
                ComponentFlagsMask    = 0
            };

            processingGraph = new AUGraph();

            cd.ComponentType    = AudioComponentType.MusicDevice;
            cd.ComponentSubType = (int)AudioTypeMusicDevice.Sampler;             //0x73616d70;

            samplerNode = processingGraph.AddNode(cd);

            cd.ComponentType    = AudioComponentType.Output;
            cd.ComponentSubType = (int)AudioTypeOutput.Remote;             //0x72696f63;

            ioNode = processingGraph.AddNode(cd);

            processingGraph.Open();

            result = processingGraph.ConnnectNodeInput(samplerNode, 0, ioNode, 0);
            if (result != AUGraphError.OK)
            {
                throw new Exception("Unable to connect the sampler node to the I/O node.  Error code: " + result);
            }
            samplerUnit = processingGraph.GetNodeInfo(samplerNode);
            ioUnit      = processingGraph.GetNodeInfo(ioNode);

            return true;
        }
Example #20
        public void BasicOperations()
        {
            using (var aug = new AUGraph()) {
                aug.Open();
                Assert.IsTrue(aug.IsOpen, "#0");
                Assert.IsFalse(aug.IsInitialized, "#0a");
                Assert.IsFalse(aug.IsRunning, "#0b");

                var node = aug.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Generic));
                int count;
                Assert.AreEqual(AUGraphError.OK, aug.GetNodeCount(out count), "#1");
                Assert.AreEqual(1, count, "#2");

                var info = aug.GetNodeInfo(node);
                Assert.IsNotNull(info, "#3");

                int node2;
                Assert.AreEqual(AUGraphError.OK, aug.GetNode(0, out node2), "#4");
                Assert.AreEqual(1, node2, "#4a");

                float max_load;
                Assert.AreEqual(AUGraphError.OK, aug.GetMaxCPULoad(out max_load));
            }
        }
Example #21
        public void InitializeAUGraph()
        {
            Debug.Print("Initialize");

            LoadFiles();

            graph = new AUGraph();

            // create two AudioComponentDescriptions for the AUs we want in the graph

            // output unit
            var outputNode = graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote));

            // mixer node
            var mixerNode = graph.AddNode(AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel));

            // connect a node's output to a node's input
            if (graph.ConnnectNodeInput(mixerNode, 0, outputNode, 0) != AUGraphError.OK)
            {
                throw new ApplicationException();
            }

            // open the graph AudioUnits are open but not initialized (no resource allocation occurs here)
            if (graph.TryOpen() != 0)
            {
                throw new ApplicationException();
            }

            mixer = graph.GetNodeInfo(mixerNode);

            // set bus count
            const uint numbuses = 2;

            Debug.Print("Set input bus count {0}", numbuses);

            if (mixer.SetElementCount(AudioUnitScopeType.Input, numbuses) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }

            AudioStreamBasicDescription desc;

            for (uint i = 0; i < numbuses; ++i)
            {
                // setup render callback
                if (graph.SetNodeInputCallback(mixerNode, i, HandleRenderDelegate) != AUGraphError.OK)
                {
                    throw new ApplicationException();
                }

                // set input stream format to what we want
                desc = mixer.GetAudioFormat(AudioUnitScopeType.Input, i);
                //desc.ChangeNumberChannels(2, false);
                desc.SampleRate = GraphSampleRate;

                mixer.SetAudioFormat(desc, AudioUnitScopeType.Input, i);
            }

            // set output stream format to what we want
            desc = mixer.GetAudioFormat(AudioUnitScopeType.Output);

            //desc.ChangeNumberChannels(2, false);
            desc.SampleRate = GraphSampleRate;

            mixer.SetAudioFormat(desc, AudioUnitScopeType.Output);

            // now that we've set everything up we can initialize the graph, this will also validate the connections
            if (graph.Initialize() != AUGraphError.OK)
            {
                throw new ApplicationException();
            }
        }
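Example #21 registers HandleRenderDelegate on each mixer bus but does not show it. A minimal sketch with the binding's render-delegate signature that simply renders silence; a real callback would synthesize or copy samples into the buffers, and zero-filling is assumed here only as a placeholder:

        AudioUnitStatus HandleRenderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            // zero every output buffer so this bus contributes silence
            for (int i = 0; i < data.Count; i++)
            {
                var buffer = data[i];
                var zeros = new byte[buffer.DataByteSize];
                System.Runtime.InteropServices.Marshal.Copy(zeros, 0, buffer.Data, buffer.DataByteSize);
            }

            return AudioUnitStatus.NoError;
        }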
Example #22
        // TODO: Find a way to assign the IntPtr to AUGraph class
        //		public AUGraph Graph {
        //			get {
        //				var pGraph = _Graph;
        //				var test = AUGraph.Create ();
        //			}
        //		}


        //		public AudioUnit.AudioUnit ConverterAudioUnit {
        //			get {
        //
        //			}
        //		}
        //
        //		public AudioUnit.AudioUnit MixerAudioUnit {
        //			get {
        //
        //			}
        //		}
        //
        //		public AudioUnit.AudioUnit OutputAudioUnit {
        //			get {
        //
        //			}
        //
        //				}

        public int ConnectOutputOfSourceNode(int sourceNode, uint sourceNodeOutputBus, int destinationNode, uint destinationNodeInputBus, AUGraph graph)
        {
            return _ConnectOutputOfSourceNode(sourceNode, sourceNodeOutputBus, destinationNode, destinationNodeInputBus, graph.Handle);
        }
Example #23
		void StreamPropertyListenerProc (object sender, PropertyFoundEventArgs args)
		{
			if (args.Property == AudioFileStreamProperty.DataFormat) {
				dataFormat = audioFileStream.DataFormat;
				return;
			}

			if (args.Property != AudioFileStreamProperty.ReadyToProducePackets)
				return;

			if (audioQueue != null) {
				// TODO: dispose old queue and its tap
				throw new NotImplementedException ();
			}

			audioQueue = new OutputAudioQueue (dataFormat);
			audioQueue.BufferCompleted += HandleBufferCompleted;

			AudioQueueStatus status;
			aqTap = audioQueue.CreateProcessingTap (TapProc, AudioQueueProcessingTapFlags.PreEffects, out status);
			if (status != AudioQueueStatus.Ok)
				throw new ApplicationException ("Could not create AQ tap");

			// create an augraph to process in the tap. needs to convert from tapFormat to effect format and back
			/* note: this is invalidname's recipe to do an in-place effect when a format conversion is needed
			before and after the effect, usually because effects want floats, and everything else in iOS
			core audio works with ints (or, in rare cases, fixed-point).
			the graph looks like this:
			[render-callback] -> [converter] -> [effect] -> [converter] -> [generic-output]
			prior to calling AudioUnitRender() on generic-output, the tap proc copies the ioData to a pointer that render-callback
			knows about, and NULLs the ioData provided to AudioUnitRender(). the NULL tells generic-output to
			pull from its upstream units (ie, the augraph), and copying off the ioData pointer allows the
			render-callback	to provide it to the front of the stream. in some locales, this kind of shell game
			is described as "batshit crazy", but it seems to work pretty well in practice.
			*/

			auGraph = new AUGraph ();
			auGraph.Open ();
			int effectNode = auGraph.AddNode (AudioComponentDescription.CreateConverter (AudioTypeConverter.NewTimePitch));
			effectUnit = auGraph.GetNodeInfo (effectNode);

			int convertToEffectNode = auGraph.AddNode (AudioComponentDescription.CreateConverter (AudioTypeConverter.AU));
			convertToEffectUnit = auGraph.GetNodeInfo (convertToEffectNode);

			int convertFromEffectNode = auGraph.AddNode (AudioComponentDescription.CreateConverter (AudioTypeConverter.AU));
			convertFromEffectUnit = auGraph.GetNodeInfo (convertFromEffectNode);

			int genericOutputNode = auGraph.AddNode (AudioComponentDescription.CreateOutput (AudioTypeOutput.Generic));
			genericOutputUnit = auGraph.GetNodeInfo (genericOutputNode);

			// set the format conversions throughout the graph
			AudioStreamBasicDescription effectFormat = effectUnit.GetAudioFormat (AudioUnitScopeType.Output);
			var tapFormat = aqTap.ProcessingFormat;

			convertToEffectUnit.SetAudioFormat (tapFormat, AudioUnitScopeType.Input);
			convertToEffectUnit.SetAudioFormat (effectFormat, AudioUnitScopeType.Output);

			convertFromEffectUnit.SetAudioFormat (effectFormat, AudioUnitScopeType.Input);
			convertFromEffectUnit.SetAudioFormat (tapFormat, AudioUnitScopeType.Output);

			genericOutputUnit.SetAudioFormat (tapFormat, AudioUnitScopeType.Input);
			genericOutputUnit.SetAudioFormat (tapFormat, AudioUnitScopeType.Output);

			// set maximum frames per slice higher (4096) so we don't get kAudioUnitErr_TooManyFramesToProcess
			const uint maxFramesPerSlice = 4096;
			if (convertToEffectUnit.SetMaximumFramesPerSlice (maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
				throw new ApplicationException ();
			if (effectUnit.SetMaximumFramesPerSlice (maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
				throw new ApplicationException ();
			if (convertFromEffectUnit.SetMaximumFramesPerSlice (maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
				throw new ApplicationException ();
			if (genericOutputUnit.SetMaximumFramesPerSlice (maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
				throw new ApplicationException ();

			// connect the nodes
			AUGraphError err = auGraph.ConnnectNodeInput (convertToEffectNode, 0, effectNode, 0);
			if (err != AUGraphError.OK)
				throw new InvalidOperationException ();

			err = auGraph.ConnnectNodeInput (effectNode, 0, convertFromEffectNode, 0);
			if (err != AUGraphError.OK)
				throw new InvalidOperationException ();

			err = auGraph.ConnnectNodeInput (convertFromEffectNode, 0, genericOutputNode, 0);
			if (err != AUGraphError.OK)
				throw new InvalidOperationException ();

			renderTimeStamp.SampleTime = 0;
			renderTimeStamp.Flags = AudioTimeStamp.AtsFlags.SampleTimeValid;

			// set up the callback into the first convert unit
			if (convertToEffectUnit.SetRenderCallback (ConvertInputRenderCallback, AudioUnitScopeType.Global) != AudioUnitStatus.NoError)
				throw new ApplicationException ();

			var res = auGraph.Initialize ();
			if (res != AUGraphError.OK)
				throw new ApplicationException ();
		}
Example #24
        public AudioVoice(AudioEngine engine, SoundEffectInstance effectInstance, WaveFormat desiredFormat)
        {
            if (engine == null) throw new ArgumentNullException("engine");
            if (desiredFormat == null) throw new ArgumentNullException("desiredFormat");

            audioEngine = engine;
            soundEffectInstance = effectInstance;
            waveFormat = desiredFormat;
            BusIndexMixer = uint.MaxValue;

            if (desiredFormat.BitsPerSample != 16)
                throw new AudioSystemInternalException("Invalid Audio Format. " + desiredFormat.BitsPerSample + " bits per sample is not supported.");

            lock (StaticMembersLock)
            {
                if (nbOfInstances == 0)
                {
                    // Create the Audio Graph
                    audioGraph = new AUGraph();

                    // Open the graph (does not initialize it yet)
                    audioGraph.Open();
                    
                    // Create the AudioComponentDescription corresponding to the IO Remote output and MultiChannelMixer
                    var remoteInOutComponentDesc = AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote);
                    var mixerMultiChannelComponentDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);
                    var mixer3DComponentDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.Spacial);

                    // Add the Audio Unit nodes to the AudioGraph
                    var outputUnitNodeId = audioGraph.AddNode(remoteInOutComponentDesc);
                    var idChannelMixerNode = audioGraph.AddNode(mixerMultiChannelComponentDesc);
                    var id3DMixerNode = audioGraph.AddNode(mixer3DComponentDesc);

                    // Connect the nodes together
                    CheckGraphError(audioGraph.ConnnectNodeInput(idChannelMixerNode, 0, outputUnitNodeId, 0), "Connection of the graph node failed.");
                    CheckGraphError(audioGraph.ConnnectNodeInput(id3DMixerNode, 0, idChannelMixerNode, MaxNumberOfTracks), "Connection of the graph node failed.");

                    // Get the MixerUnit objects
                    unitChannelMixer = audioGraph.GetNodeInfo(idChannelMixerNode);
                    unit3DMixer = audioGraph.GetNodeInfo(id3DMixerNode);
                    
                    // Set the mixers' output formats (the stream format is propagated along the linked input during the graph initialization)
                    var desiredSampleRate = (engine.AudioSampleRate != 0) ? engine.AudioSampleRate : AudioUnitOutputSampleRate;
                    unit3DMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);
                    unitChannelMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);

                    // set the element count to the max number of possible tracks before initializing the audio graph
                    CheckUnitStatus(unitChannelMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks+1), string.Format("Failed to set element count on ChannelMixer [{0}]", MaxNumberOfTracks+1)); // +1 for the 3DMixer output
                    CheckUnitStatus(unit3DMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks), string.Format("Failed to set element count on 3DMixer [{0}]", MaxNumberOfTracks));

                    // set a null renderer callback to the channel and 3d mixer input bus
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unit3DMixer.Handle, i), "Failed to set the render callback");
                        CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unitChannelMixer.Handle, i), "Failed to set the render callback");
                    }
                    
                    // Initialize the graph (validation of the topology)
                    CheckGraphError(audioGraph.Initialize(), "The audio graph initialization failed.");

                    // Start audio rendering
                    CheckGraphError(audioGraph.Start(), "Audio Graph could not start.");

                    // disable all the input buses at the beginning
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        CheckUnitStatus(unitChannelMixer.SetParameter(AudioUnitParameterType.MultiChannelMixerEnable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the ChannelMixerInput.");
                        CheckUnitStatus(unit3DMixer.SetParameter(AudioUnitParameterType.Mixer3DEnable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the 3DMixerInput.");
                    }

                    // At initialization, all mixer bus indices are available.
                    availableMixerBusIndices = new Queue<uint>();
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                        availableMixerBusIndices.Enqueue(i);
                }
                ++nbOfInstances;

                // Create an AudioDataRendererInfo for the sounds.
                pAudioDataRendererInfo = (AudioDataRendererInfo*)Utilities.AllocateClearedMemory(sizeof(AudioDataRendererInfo));
                pAudioDataRendererInfo->HandleChannelMixer = unitChannelMixer.Handle;
                pAudioDataRendererInfo->Handle3DMixer = unit3DMixer.Handle;
            }
        }
Example #25
        public AudioVoice(AudioEngine engine, SoundEffectInstance effectInstance, WaveFormat desiredFormat)
        {
            if (engine == null)
            {
                throw new ArgumentNullException("engine");
            }
            if (desiredFormat == null)
            {
                throw new ArgumentNullException("desiredFormat");
            }

            audioEngine         = engine;
            soundEffectInstance = effectInstance;
            waveFormat          = desiredFormat;
            BusIndexMixer       = uint.MaxValue;

            if (desiredFormat.BitsPerSample != 16)
            {
                throw new AudioSystemInternalException("Invalid Audio Format. " + desiredFormat.BitsPerSample + " bits per sample is not supported.");
            }

            lock (StaticMembersLock)
            {
                if (nbOfInstances == 0)
                {
                    // Create the Audio Graph
                    audioGraph = new AUGraph();

                    // Open the graph (does not initialize it yet)
                    audioGraph.Open();

                    // Create the AudioComponentDescription corresponding to the IO Remote output and MultiChannelMixer
                    var remoteInOutComponentDesc       = AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote);
                    var mixerMultiChannelComponentDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);
                    var mixer3DComponentDesc           = AudioComponentDescription.CreateMixer(AudioTypeMixer.Spacial);

                    // Add the Audio Unit nodes to the AudioGraph
                    var outputUnitNodeId   = audioGraph.AddNode(remoteInOutComponentDesc);
                    var idChannelMixerNode = audioGraph.AddNode(mixerMultiChannelComponentDesc);
                    var id3DMixerNode      = audioGraph.AddNode(mixer3DComponentDesc);

                    // Connect the nodes together
                    CheckGraphError(audioGraph.ConnnectNodeInput(idChannelMixerNode, 0, outputUnitNodeId, 0), "Connection of the graph node failed.");
                    CheckGraphError(audioGraph.ConnnectNodeInput(id3DMixerNode, 0, idChannelMixerNode, MaxNumberOfTracks), "Connection of the graph node failed.");

                    // Get the MixerUnit objects
                    unitChannelMixer = audioGraph.GetNodeInfo(idChannelMixerNode);
                    unit3DMixer      = audioGraph.GetNodeInfo(id3DMixerNode);

                    // Set the mixers' output formats (the stream format is propagated along the linked input during the graph initialization)
                    var desiredSampleRate = (engine.AudioSampleRate != 0) ? engine.AudioSampleRate : AudioUnitOutputSampleRate;
                    unit3DMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);
                    unitChannelMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);

                    // set the element count to the max number of possible tracks before initializing the audio graph
                    CheckUnitStatus(unitChannelMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks + 1), string.Format("Failed to set element count on ChannelMixer [{0}]", MaxNumberOfTracks + 1)); // +1 for the 3DMixer output
                    CheckUnitStatus(unit3DMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks), string.Format("Failed to set element count on 3DMixer [{0}]", MaxNumberOfTracks));

                    // set a null renderer callback to the channel and 3d mixer input bus
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unit3DMixer.Handle, i), "Failed to set the render callback");
                        CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unitChannelMixer.Handle, i), "Failed to set the render callback");
                    }

                    // Initialize the graph (validation of the topology)
                    CheckGraphError(audioGraph.Initialize(), "The audio graph initialization failed.");

                    // Start audio rendering
                    CheckGraphError(audioGraph.Start(), "Audio Graph could not start.");

                    // disable all the input buses at the beginning
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        CheckUnitStatus(unitChannelMixer.SetParameter(AudioUnitParameterType.MultiChannelMixerEnable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the ChannelMixerInput.");
                        CheckUnitStatus(unit3DMixer.SetParameter((AudioUnitParameterType)_3DMixerParametersIds.Enable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the 3DMixerInput.");
                    }

                    // At initialization, all mixer bus indices are available.
                    availableMixerBusIndices = new Queue <uint>();
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        availableMixerBusIndices.Enqueue(i);
                    }
                }
                ++nbOfInstances;

                // Create an AudioDataRendererInfo for the sounds.
                pAudioDataRendererInfo = (AudioDataRendererInfo *)Utilities.AllocateClearedMemory(sizeof(AudioDataRendererInfo));
                pAudioDataRendererInfo->HandleChannelMixer = unitChannelMixer.Handle;
                pAudioDataRendererInfo->Handle3DMixer      = unit3DMixer.Handle;
            }
        }
Example #26
        void StreamPropertyListenerProc(object sender, PropertyFoundEventArgs args)
        {
            if (args.Property == AudioFileStreamProperty.DataFormat)
            {
                dataFormat = audioFileStream.DataFormat;
                return;
            }

            if (args.Property != AudioFileStreamProperty.ReadyToProducePackets)
            {
                return;
            }

            if (audioQueue != null)
            {
                // TODO: Dispose
                throw new NotImplementedException();
            }

            audioQueue = new OutputAudioQueue(dataFormat);
            audioQueue.BufferCompleted += HandleBufferCompleted;

            AudioQueueStatus status;

            aqTap = audioQueue.CreateProcessingTap(TapProc, AudioQueueProcessingTapFlags.PreEffects, out status);
            if (status != AudioQueueStatus.Ok)
            {
                throw new ApplicationException("Could not create AQ tap");
            }

            // create an augraph to process in the tap. needs to convert from tapFormat to effect format and back

            /* note: this is invalidname's recipe to do an in-place effect when a format conversion is needed
             * before and after the effect, usually because effects want floats, and everything else in iOS
             * core audio works with ints (or, in rare cases, fixed-point).
             * the graph looks like this:
             * [render-callback] -> [converter] -> [effect] -> [converter] -> [generic-output]
             * prior to calling AudioUnitRender() on generic-output, the tap proc copies the ioData to a pointer that render-callback
             * knows about, and NULLs the ioData provided to AudioUnitRender(). the NULL tells generic-output to
             * pull from its upstream units (ie, the augraph), and copying off the ioData pointer allows the
             * render-callback	to provide it to the front of the stream. in some locales, this kind of shell game
             * is described as "batshit crazy", but it seems to work pretty well in practice.
             */

            auGraph = new AUGraph();
            auGraph.Open();
            var effectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.NewTimePitch));

            effectUnit = auGraph.GetNodeInfo(effectNode);

            var convertToEffectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.AU));
            var convertToEffectUnit = auGraph.GetNodeInfo(convertToEffectNode);

            var convertFromEffectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.AU));
            var convertFromEffectUnit = auGraph.GetNodeInfo(convertFromEffectNode);

            var genericOutputNode = auGraph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Generic));

            genericOutputUnit = auGraph.GetNodeInfo(genericOutputNode);

            // set the format conversions throughout the graph
            var effectFormat = effectUnit.GetAudioFormat(AudioUnitScopeType.Output);
            var tapFormat    = aqTap.ProcessingFormat;

            convertToEffectUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Input);
            convertToEffectUnit.SetAudioFormat(effectFormat, AudioUnitScopeType.Output);

            convertFromEffectUnit.SetAudioFormat(effectFormat, AudioUnitScopeType.Input);
            convertFromEffectUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Output);

            genericOutputUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Input);
            genericOutputUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Output);

            // set maximum frames per slice higher (4096) so we don't get kAudioUnitErr_TooManyFramesToProcess
            const uint maxFramesPerSlice = 4096;

            if (convertToEffectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (effectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (convertFromEffectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (genericOutputUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }

            // connect the nodes
            auGraph.ConnnectNodeInput(convertToEffectNode, 0, effectNode, 0);
            auGraph.ConnnectNodeInput(effectNode, 0, convertFromEffectNode, 0);
            auGraph.ConnnectNodeInput(convertFromEffectNode, 0, genericOutputNode, 0);

            // set up the callback into the first convert unit
            if (convertToEffectUnit.SetRenderCallback(ConvertInputRenderCallback, AudioUnitScopeType.Global) != AudioUnitStatus.NoError)
            {
                throw new ApplicationException();
            }

            var res = auGraph.Initialize();

            if (res != AUGraphError.OK)
            {
                throw new ApplicationException();
            }
        }
Example #27
		public void InitializeAUGraph ()
		{
			Debug.Print ("Initialize");

			LoadFiles ();

			graph = new AUGraph ();

			// create two AudioComponentDescriptions for the AUs we want in the graph

			// output unit
			var outputNode = graph.AddNode (AudioComponentDescription.CreateOutput (AudioTypeOutput.Remote));

			// mixer node
			var mixerNode = graph.AddNode (AudioComponentDescription.CreateMixer (AudioTypeMixer.MultiChannel));

			// connect a node's output to a node's input
			if (graph.ConnnectNodeInput (mixerNode, 0, outputNode, 0) != AUGraphError.OK)
				throw new ApplicationException ();

			// open the graph AudioUnits are open but not initialized (no resource allocation occurs here)
			if (graph.TryOpen () != 0)
				throw new ApplicationException ();

			mixer = graph.GetNodeInfo (mixerNode);

			// set bus count
			const uint numbuses = 2;

			Debug.Print ("Set input bus count {0}", numbuses);

			if (mixer.SetElementCount (AudioUnitScopeType.Input, numbuses) != AudioUnitStatus.OK)
				throw new ApplicationException ();

			AudioStreamBasicDescription desc;

			for (uint i = 0; i < numbuses; ++i) {
				// setup render callback
				if (graph.SetNodeInputCallback (mixerNode, i, HandleRenderDelegate) != AUGraphError.OK)
					throw new ApplicationException ();

				// set input stream format to what we want
				desc = mixer.GetAudioFormat (AudioUnitScopeType.Input, i);
				//desc.ChangeNumberChannels(2, false);
				desc.SampleRate = GraphSampleRate;

				mixer.SetAudioFormat (desc, AudioUnitScopeType.Input, i);
			}

			// set output stream format to what we want
			desc = mixer.GetAudioFormat (AudioUnitScopeType.Output);

			//desc.ChangeNumberChannels(2, false);
			desc.SampleRate = GraphSampleRate;

			mixer.SetAudioFormat (desc, AudioUnitScopeType.Output);

			// now that we've set everything up we can initialize the graph, this will also validate the connections
			if (graph.Initialize () != AUGraphError.OK)
				throw new ApplicationException ();
		}