コード例 #1
0
		/// <summary>
		/// Builds the audio processing graph: a sampler music-device node feeding
		/// the RemoteIO output node. Stores the resulting audio units in the
		/// samplerUnit and ioUnit fields.
		/// </summary>
		/// <returns>true when the graph was assembled successfully.</returns>
		/// <exception cref="Exception">Thrown when connecting the nodes fails.</exception>
		bool createAUGraph ()
		{
			AUGraphError result = 0;
			int samplerNode, ioNode;

			// Shared description: manufacturer and flags are common to both nodes;
			// type/subtype are filled in before each AddNode call.
			var cd = new AudioComponentDescription () {
				ComponentManufacturer = AudioComponentManufacturerType.Apple,
				ComponentFlags = 0,
				ComponentFlagsMask = 0
			};

			processingGraph = new AUGraph ();

			// Sampler music device ('samp').
			cd.ComponentType = AudioComponentType.MusicDevice;
			cd.ComponentSubType = (int)AudioTypeMusicDevice.Sampler; //0x73616d70;
		
			samplerNode = processingGraph.AddNode (cd);

			// RemoteIO output unit ('rioc').
			cd.ComponentType = AudioComponentType.Output;
			cd.ComponentSubType = (int)AudioTypeOutput.Remote; //0x72696f63;

			ioNode = processingGraph.AddNode (cd);
		
			processingGraph.Open ();

			result = processingGraph.ConnnectNodeInput (samplerNode, 0, ioNode, 0);
			if (result != AUGraphError.OK)
				// BUG FIX: the original message blamed "open the audio processing
				// graph", but the failing call here is the node connection.
				throw new Exception ("Unable to connect the sampler node to the output node. Error code: " + result);
			samplerUnit = processingGraph.GetNodeInfo (samplerNode);
			ioUnit = processingGraph.GetNodeInfo (ioNode);

			return true;
		}
コード例 #2
0
		/// <summary>
		/// Builds the audio processing graph: a sampler music device feeding the
		/// RemoteIO output. Stores the created units in samplerUnit and ioUnit.
		/// </summary>
		/// <returns>true when the graph was assembled successfully.</returns>
		/// <exception cref="Exception">Thrown when connecting the nodes fails.</exception>
		bool CreateAUGraph ()
		{
			processingGraph = new AUGraph ();

			int samplerNode, ioNode;

			// Apple sampler music device ('samp').
			var musicSampler = new AudioComponentDescription () {
				ComponentManufacturer = AudioComponentManufacturerType.Apple,
				ComponentType = AudioComponentType.MusicDevice,
				ComponentSubType = (int)AudioTypeMusicDevice.Sampler
			};
			samplerNode = processingGraph.AddNode (musicSampler);

			// RemoteIO output unit ('rioc').
			var remoteOutput = new AudioComponentDescription () {
				ComponentManufacturer = AudioComponentManufacturerType.Apple,
				ComponentType = AudioComponentType.Output,
				ComponentSubType = (int)AudioTypeOutput.Remote
			};
			ioNode = processingGraph.AddNode (remoteOutput);

			processingGraph.Open ();

			// BUG FIX: the connection result was silently discarded, so a failed
			// connection previously left a half-built graph with no diagnostics.
			AUGraphError result = processingGraph.ConnnectNodeInput (
				sourceNode: samplerNode, 
				sourceOutputNumber: 0, 
				destNode: ioNode, 
				destInputNumber: 0);
			if (result != AUGraphError.OK)
				throw new Exception ("Unable to connect the sampler node to the output node. Error code: " + result);

			samplerUnit = processingGraph.GetNodeInfo (samplerNode);

			ioUnit = processingGraph.GetNodeInfo (ioNode);

			return true;
		}
コード例 #3
0
ファイル: AUGraph.cs プロジェクト: 9drops/MonoTouch.AudioUnit
 /// <summary>
 /// Adds a node for the component described by <paramref name="cd"/> to the
 /// native graph.
 /// </summary>
 /// <param name="cd">Component description of the audio unit to add.</param>
 /// <returns>The id of the newly created node.</returns>
 /// <exception cref="ArgumentException">Thrown when AUGraphAddNode reports a non-zero status.</exception>
 public int AddNode(AudioComponentDescription cd)
 {
     int node = 0;
     int err = AUGraphAddNode(_auGraph, cd, ref node);
     if (err != 0)
         // BUG FIX: the original format string had no {0} placeholder, so the
         // error code was never included in the exception message.
         throw new ArgumentException(String.Format("Error code: {0}", err));
     
     return node;
 }
コード例 #4
0
        /// <summary>
        /// Builds a one-node AUGraph that routes the RemoteIO microphone input
        /// (bus 1) back to the RemoteIO output (bus 0), applies a 44.1 kHz mono
        /// canonical format, registers a render callback and initializes the graph.
        /// </summary>
        void prepareAUGraph()
        {
            // Creating audio graph instance
            _auGraph = AUGraph.CreateInstance();

            // getting audio node and audio unit
            // Description of the RemoteIO unit: output type, RemoteIO subtype, Apple.
            AudioComponentDescription cd = new AudioComponentDescription()
            {
                componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags = 0,
                componentFlagsMask = 0
            };
            int remoteIONode = _auGraph.AddNode(cd);
            AudioUnit remoteIOUnit = _auGraph.GetNodeInfo(remoteIONode);

            // turning on microphone    
            // RemoteIO input is off by default; enable IO on the input scope of
            // bus 1 (the microphone side).
            remoteIOUnit.SetEnableIO(true,                
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                1 // remote input                
                );

            // audio canonical format
            // 44.1 kHz, 1 channel; CanonicalASBD is a helper defined elsewhere in this class.
            AudioStreamBasicDescription audioFormat = CanonicalASBD(44100, 1);
            remoteIOUnit.SetAudioFormat(audioFormat,
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output, // output bus of Remote input
                1 // Remote input
                );
            remoteIOUnit.SetAudioFormat(audioFormat,
                 AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                 0 // Remote output,
                 );

            // Connecting Remote Input to Remote Output
            // (output of bus 1 feeds the input of bus 0 on the same node).
            _auGraph.ConnnectNodeInput(
                remoteIONode, 1,
                remoteIONode, 0);

            // getting output audio format
            _audioUnitOutputFormat = remoteIOUnit.GetAudioFormat(
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,  // Remote output bus
                0 // Remote output
                );
            
            // The render callback fires for every buffer the graph pulls.
            _auGraph.RenderCallback += new EventHandler<AudioGraphEventArgs>(_auGraph_RenderCallback);
            // graph initialization
            // NOTE(review): the graph is initialized but not started here —
            // presumably Start() is called elsewhere; confirm against the caller.
            _auGraph.Initialize();
        }
コード例 #5
0
        /// <summary>
        /// Finds the audio component that follows <paramref name="cmp"/> and
        /// matches <paramref name="cd"/>. Pass null to search from the beginning.
        /// </summary>
        /// <returns>The matching component, or null when no further match exists.</returns>
        public static AudioComponent FindNextComponent(AudioComponent cmp, AudioComponentDescription cd)
        {
            // Resolve the native starting handle; IntPtr.Zero means "from the start".
            var startHandle = (cmp == null) ? IntPtr.Zero : cmp.Handler;
            var handler = AudioComponentFindNext(startHandle, cd);

            // Mirror the native API: a zero handle means no component was found.
            if (handler == IntPtr.Zero)
                return null;

            return new AudioComponent(handler);
        }
コード例 #6
0
 // P/Invoke into the AudioUnit framework: returns the handle of the next audio
 // component after inComponent that matches inDesc, or IntPtr.Zero when none.
 // NOTE(review): the [DllImport] attribute is presumably on an adjacent line — confirm.
 static extern IntPtr AudioComponentFindNext(IntPtr inComponent, AudioComponentDescription inDesc);
コード例 #7
0
 /// <summary>
 /// Finds the first registered audio component matching <paramref name="cd"/>.
 /// </summary>
 /// <returns>The matching component, or null when none exists.</returns>
 public static AudioComponent FindComponent(AudioComponentDescription cd)
 {
     // A null start position makes the search begin at the first component.
     return FindNextComponent(null, cd);
 }
コード例 #8
0
        /// <summary>
        /// Configures the audio session, creates the RemoteIO audio unit, enables
        /// microphone input, applies a LinearPCM format (float sample size on the
        /// simulator, 32-bit fixed point on the device) to both buses, and hooks
        /// up the render callback before initializing the unit.
        /// </summary>
        void prepareAudioUnit()
        {
            // AudioSession: activate playback + recording with a short (10 ms)
            // hardware IO buffer for low latency.
            AudioSession.Initialize();
            AudioSession.SetActive(true);
            AudioSession.Category = AudioSessionCategory.PlayAndRecord;
            AudioSession.PreferredHardwareIOBufferDuration = 0.01f;            

            // Creating AudioComponentDescription instance of RemoteIO Audio Unit
            var cd = new AudioComponentDescription()
            {
                componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags = 0,
                componentFlagsMask = 0
            };

            // Getting AudioComponent from the description
            _component = AudioComponent.FindComponent(cd);

            // Getting Audiounit
            _audioUnit = AudioUnit.CreateInstance(_component);

            // turning on microphone: enable IO on the input scope of bus 1
            _audioUnit.SetEnableIO(true,
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                1 // Remote Input
                );

            // Sample size is 4 bytes on both architectures (float on the simulator,
            // 32-bit fixed point on the device). CONSISTENCY: sizeof(int) instead of
            // sizeof(uint), matching the sibling prepareAudioUnit implementations;
            // both are 4, so behavior is unchanged.
            int AudioUnitSampleTypeSize = (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR) ? sizeof(float) : sizeof(int);
            AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription()
            {
                SampleRate = _sampleRate,
                Format = AudioFormatType.LinearPCM,
                //kAudioFormatFlagsAudioUnitCanonical = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved | (kAudioUnitSampleFractionBits << kLinearPCMFormatFlagsSampleFractionShift),
                FormatFlags = (AudioFormatFlags)((int)AudioFormatFlags.IsSignedInteger | (int)AudioFormatFlags.IsPacked | (int)AudioFormatFlags.IsNonInterleaved | (int)(kAudioUnitSampleFractionBits << (int)AudioFormatFlags.LinearPCMSampleFractionShift)),
                ChannelsPerFrame = 2,
                // Non-interleaved layout: packet/frame sizes count a single channel.
                BytesPerPacket = AudioUnitSampleTypeSize,
                BytesPerFrame = AudioUnitSampleTypeSize,
                FramesPerPacket = 1,
                BitsPerChannel = 8 * AudioUnitSampleTypeSize,
                Reserved = 0
            };
            // Same format on the input scope of the output bus (0) and the output
            // scope of the input bus (1).
            _audioUnit.SetAudioFormat(audioFormat, 
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input, 
                0 // Remote output
                );
            _audioUnit.SetAudioFormat(audioFormat, 
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output, 
                1 // Remote input
                );

            // setting callback (dead commented-out simulator/device branch removed;
            // the single _callback handler was already the live code path)
            _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(_callback);
            // initialize
            _audioUnit.Initialize();
        }
コード例 #9
0
ファイル: AUGraph.cs プロジェクト: 9drops/MonoTouch.AudioUnit
 // P/Invoke into AudioToolbox: creates a node in inGraph for the component
 // described by inDescription and writes its id to outNode. Returns an
 // OSStatus where 0 means success.
 static extern int AUGraphAddNode(IntPtr inGraph, AudioComponentDescription inDescription, ref int outNode);
コード例 #10
0
        /// <summary>
        /// Creates the RemoteIO audio unit, applies the destination format to the
        /// input scope of the output bus and wires up the render callback before
        /// initializing the unit.
        /// </summary>
        void prepareAudioUnit()
        {
            // Description of the RemoteIO output unit.
            var description = new AudioComponentDescription()
            {
                componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags = 0,
                componentFlagsMask = 0
            };

            // Resolve the component, then instantiate the audio unit from it.
            _audioComponent = AudioComponent.FindComponent(description);
            _audioUnit = AudioUnit.CreateInstance(_audioComponent);

            // Apply the destination format on the input scope of bus 0.
            _audioUnit.SetAudioFormat(_dstFormat,
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                0);

            // The render callback supplies the audio data to play.
            _audioUnit.RenderCallback += _audioUnit_RenderCallback;

            _audioUnit.Initialize();
        }
コード例 #11
0
 // P/Invoke into AudioToolbox: adds a node for the described component to
 // inGraph, returning the node id via outNode; result is an OSStatus (0 = OK).
 static extern int AUGraphAddNode(IntPtr inGraph, AudioComponentDescription inDescription, ref int outNode);
コード例 #12
0
        /// <summary>
        /// Creates the RemoteIO audio unit and applies a LinearPCM format (float
        /// sample size on the simulator, 32-bit fixed point on the device) to the
        /// input scope of the output bus, then attaches the render callback.
        /// </summary>
        void prepareAudioUnit()
        {
            // Creating AudioComponentDescription instance of RemoteIO Audio Unit
            AudioComponentDescription cd = new AudioComponentDescription()
            {
                componentType    = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags = 0,
                componentFlagsMask = 0
            };
            
            // Getting AudioComponent from the description
            _component = AudioComponent.FindComponent(cd);
           
            // Getting Audiounit
            _audioUnit = AudioUnit.CreateInstance(_component);

            // setting AudioStreamBasicDescription
            // Sample size: 4 bytes either way (float on the simulator, 32-bit
            // fixed-point integer on the device).
            int AudioUnitSampleTypeSize;
            if (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR)
            {
                AudioUnitSampleTypeSize = sizeof(float);
            }
            else
            {
                AudioUnitSampleTypeSize = sizeof(int);
            }
            AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription()
            {
                SampleRate = _sampleRate,
                Format = AudioFormatType.LinearPCM,
                //kAudioFormatFlagsAudioUnitCanonical = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved | (kAudioUnitSampleFractionBits << kLinearPCMFormatFlagsSampleFractionShift),
                FormatFlags = (AudioFormatFlags)((int)AudioFormatFlags.IsSignedInteger | (int)AudioFormatFlags.IsPacked | (int)AudioFormatFlags.IsNonInterleaved | (int)(kAudioUnitSampleFractionBits << (int)AudioFormatFlags.LinearPCMSampleFractionShift)),
                ChannelsPerFrame = 2,
                // Non-interleaved layout: packet/frame sizes count a single channel.
                BytesPerPacket = AudioUnitSampleTypeSize,
                BytesPerFrame = AudioUnitSampleTypeSize,
                FramesPerPacket = 1,
                BitsPerChannel = 8 * AudioUnitSampleTypeSize,
                Reserved = 0
            };
            _audioUnit.SetAudioFormat(audioFormat, AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input, 0);            

            // setting callback
            /*
            if (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR)
                _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(simulator_callback);
            else
                _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(device_callback);
             * */
            _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(device_callback);
            // NOTE(review): unlike the sibling prepareAudioUnit implementations,
            // this one never calls _audioUnit.Initialize() — confirm whether the
            // caller initializes the unit, or whether a call is missing here.
        }
コード例 #13
0
        /// <summary>
        /// Sets up the audio session, creates a RemoteIO audio unit with the
        /// microphone enabled, configures formats on both buses, attaches the
        /// render callback, then initializes and starts the unit.
        /// </summary>
        void prepareAudioUnit()
        {
            // Audio session: recording + playback with a 5 ms hardware IO buffer.
            AudioSession.Initialize();
            AudioSession.SetActive(true);
            AudioSession.Category = AudioSessionCategory.PlayAndRecord;
            AudioSession.PreferredHardwareIOBufferDuration = 0.005f;

            // Description of the RemoteIO audio unit.
            var ioDescription = new AudioComponentDescription()
            {
                componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags = 0,
                componentFlagsMask = 0
            };

            // Resolve the component and create the audio unit instance.
            _audioComponent = AudioComponent.FindComponent(ioDescription);
            _audioUnit = AudioUnit.CreateInstance(_audioComponent);

            // Turn the microphone on: enable IO on the input scope of bus 1.
            _audioUnit.SetEnableIO(true,
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                1);

            // Destination format on the input scope of the output bus (0)...
            _audioUnit.SetAudioFormat(_dstFormat,
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                0);
            // ...and the canonical stereo format on the output scope of the
            // input bus (1).
            _audioUnit.SetAudioFormat(AudioUnitUtils.AUCanonicalASBD(_sampleRate, 2),
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,
                1);

            // The render callback moves the audio buffers.
            _audioUnit.RenderCallback += _audioUnit_RenderCallback;

            _audioUnit.Initialize();
            _audioUnit.Start();
        }
コード例 #14
0
ファイル: AUGraph.cs プロジェクト: 9drops/MonoTouch.AudioUnit
 // P/Invoke into AudioToolbox: retrieves the component description and audio
 // unit handle for inNode of inGraph; returns an OSStatus (0 = success).
 // NOTE(review): outAudioUnit is passed as a raw IntPtr — confirm callers
 // allocate/marshal it correctly.
 static extern int AUGraphNodeInfo(IntPtr inGraph, int inNode, AudioComponentDescription outDescription, IntPtr outAudioUnit);
コード例 #15
0
 /// <summary>
 /// Finds the first registered audio component that matches
 /// <paramref name="cd"/>, or returns null when none exists.
 /// </summary>
 public static AudioComponent FindComponent(AudioComponentDescription cd)
 {
     // Passing null starts the search from the first registered component.
     var firstMatch = FindNextComponent(null, cd);
     return firstMatch;
 }
コード例 #16
0
 // P/Invoke into AudioToolbox: fetches node information (component description
 // and audio unit handle) for inNode; returns an OSStatus (0 = success).
 static extern int AUGraphNodeInfo(IntPtr inGraph, int inNode, AudioComponentDescription outDescription, IntPtr outAudioUnit);
コード例 #17
0
 // P/Invoke into the AudioUnit framework: finds the next audio component after
 // inComponent matching inDesc; IntPtr.Zero means no (further) match.
 static extern IntPtr AudioComponentFindNext(IntPtr inComponent, AudioComponentDescription inDesc);
コード例 #18
0
        /// <summary>
        /// Creates a RemoteIO audio unit with the microphone enabled, applies the
        /// canonical 44.1 kHz stereo format to both buses, attaches the render
        /// callback and initializes the unit.
        /// </summary>
        void prepareAudioUnit()
        {
            // Description of the RemoteIO output unit.
            var description = new AudioComponentDescription()
            {
                componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags = 0,
                componentFlagsMask = 0
            };

            // Resolve the component, then instantiate the audio unit.
            _component = AudioComponent.FindComponent(description);
            _audioUnit = AudioUnit.CreateInstance(_component);

            // Turn the microphone on: enable IO on the input scope of bus 1.
            _audioUnit.SetEnableIO(true,
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                1);

            // Canonical 44.1 kHz / 2-channel format, applied to the input scope of
            // the output bus (0) and the output scope of the input bus (1).
            var canonicalFormat = AudioUnitUtils.AUCanonicalASBD(44100.0, 2);
            _audioUnit.SetAudioFormat(canonicalFormat,
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                0);
            _audioUnit.SetAudioFormat(canonicalFormat,
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,
                1);

            // Hook the render callback, then initialize the unit.
            _audioUnit.RenderCallback += _audioUnit_RenderCallback;
            _audioUnit.Initialize();
        }
コード例 #19
0
        /// <summary>
        /// Builds an AUGraph of a multi-channel mixer feeding the RemoteIO output.
        /// Each wave in _waveDef gets its own mixer input bus driven by a node
        /// input callback; the graph is initialized at the end.
        /// </summary>
        void prepareAUGraph()
        {
            // Creating audio graph instance
            _auGraph = AUGraph.CreateInstance();

            // Adding Remote IO node  to AUGraph
            AudioComponentDescription cd = new AudioComponentDescription()
            {
                componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags = 0,
                componentFlagsMask = 0
            };
            int remoteIONode = _auGraph.AddNode(cd);

            // Preparing AudioComponentDescrption of MultiChannelMixer
            // (cd is reused: only type and subtype change; manufacturer/flags stay).
            cd.componentType = AudioComponentDescription.AudioComponentType.kAudioUnitType_Mixer;
            cd.componentSubType = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_MultiChannelMixer;
            int multiChannelMixerNode = _auGraph.AddNode(cd);

            // Setting callback method as the case of Audio Unit            
            // One render callback per wave definition, one mixer input bus each.
            for (int i = 0; i < _waveDef.Length; i++)
            {
                var callbackStruct = new AudioUnit.AURenderCallbackStrct();
                callbackStruct.inputProc = device_renderCallback; // setting callback function
                callbackStruct.inputProcRefCon = GCHandle.ToIntPtr(_handle); // a pointer that passed to the renderCallback (IntPtr inRefCon) 
                _auGraph.AUGraphSetNodeInputCallback(
                    multiChannelMixerNode,
                    (uint)i, // bus number
                    callbackStruct);
            }            

            var remoteIOAudioUnit = _auGraph.GetNodeInfo(remoteIONode);
            var multiChannelMixerAudioUnit = _auGraph.GetNodeInfo(multiChannelMixerNode);

            // Getting an AudioUnit canonical description
            var audioFormat = AudioUnitUtils.AUCanonicalASBD(44100.0, 2);

            // applying the audio format to each audio units
            remoteIOAudioUnit.SetAudioFormat(audioFormat, AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input, 0);
            multiChannelMixerAudioUnit.SetAudioFormat(audioFormat, AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input, 0);
            
            // connecting multiChannelMixerNode(bus:0) to remoteIONode(bus:0)
            _auGraph.ConnnectNodeInput(
                multiChannelMixerNode, 
                0, // output bus                
                remoteIONode, 
                0  // input bus
                );
            
            // graph initialization
            // NOTE(review): the graph is initialized but not started here —
            // presumably Start() is invoked elsewhere; confirm against the caller.
            _auGraph.Initialize();
        }
コード例 #20
0
ファイル: AppDelegate.cs プロジェクト: nagyist/Mozart
        /// <summary>
        /// Creates and starts a RemoteIO audio unit with microphone input enabled,
        /// a canonical LinearPCM stereo format on both buses, a render callback,
        /// and an FFT buffer manager for analysis.
        /// </summary>
        void SetupRemoteIO()
        {
            AudioComponentDescription desc = new AudioComponentDescription ();
            desc.ComponentType = AudioComponentType.Output;
            // FourCC 'rioc' — the RemoteIO subtype.
            desc.ComponentSubType = 0x72696f63;

            desc.ComponentManufacturer = AudioComponentManufacturerType.Apple;

            desc.ComponentFlags = 0;
            desc.ComponentFlagsMask = 0;

            // null start position: search from the first registered component.
            var component = AudioComponent.FindNextComponent (null, desc);

            rioUnit = new AudioUnit (component);

            // Enable microphone input on the input scope of bus 1.
            rioUnit.SetEnableIO (true, AudioUnitScopeType.Input, 1);
            // NOTE(review): callback registered on the input scope of bus 0 —
            // confirm this is the intended scope/bus for renderDelegate.
            rioUnit.SetRenderCallback (renderDelegate, AudioUnitScopeType.Input, 0);

            audioFormat = new AudioStreamBasicDescription();

            audioFormat.Format = AudioFormatType.LinearPCM;
            audioFormat.SampleRate = sampleRate;

            audioFormat.ChannelsPerFrame = 2;
            audioFormat.FramesPerPacket = 1;
            // 32-bit samples: packet/frame sizes count a single channel
            // (non-interleaved canonical layout).
            audioFormat.BitsPerChannel = 8 * sizeof(int);
            audioFormat.BytesPerPacket = sizeof(int);
            audioFormat.BytesPerFrame = sizeof(int);

            audioFormat.FormatFlags = AudioStreamBasicDescription.AudioFormatFlagsAudioUnitCanonical;

            // Same format on the input scope of the output bus (0) and the output
            // scope of the input bus (1).
            rioUnit.SetAudioFormat (audioFormat, AudioUnitScopeType.Input, 0);
            rioUnit.SetAudioFormat (audioFormat, AudioUnitScopeType.Output, 1);
            rioUnit.Initialize ();

            unitCreated = true;

            // FFT analysis buffers sized from the maximum frames-per-slice.
            FFTBufferManager = new FFTBufferManager (maxFPS, this);
            FFTBufferManager.Setup ();

            rioUnit.Start ();

            unitIsRunning = true;
        }