Example #1
        void prepareAUGraph()
        {
            // Creating audio graph instance
            _auGraph = AUGraph.CreateInstance();

            // getting audio node and audio unit
            AudioComponentDescription cd = new AudioComponentDescription()
            {
                componentType         = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType      = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags        = 0,
                componentFlagsMask    = 0
            };
            int       remoteIONode = _auGraph.AddNode(cd);
            AudioUnit remoteIOUnit = _auGraph.GetNodeInfo(remoteIONode);

            // turning on microphone

            remoteIOUnit.SetEnableIO(true,
                                     AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                     1 // remote input
                                     );

            // audio canonical format
            AudioStreamBasicDescription audioFormat = CanonicalASBD(44100, 1);

            remoteIOUnit.SetAudioFormat(audioFormat,
                                        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output, // output bus of Remote input
                                        1                                                    // Remote input
                                        );
            remoteIOUnit.SetAudioFormat(audioFormat,
                                        AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                        0 // Remote output
                                        );

            // Connecting Remote Input to Remote Output
            _auGraph.ConnnectNodeInput(
                remoteIONode, 1,
                remoteIONode, 0);

            // getting output audio format
            _audioUnitOutputFormat = remoteIOUnit.GetAudioFormat(
                AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output, // Remote output bus
                0                                                    // Remote output
                );

            _auGraph.RenderCallback += new EventHandler<AudioGraphEventArgs>(_auGraph_RenderCallback);
            // graph initialization
            _auGraph.Initialize();
        }
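
The CanonicalASBD helper called above is defined elsewhere in the class. A minimal sketch, assuming it simply wraps AudioUnitUtils.AUCanonicalASBD (the utility that later examples call directly) to build the canonical PCM stream format:

        AudioStreamBasicDescription CanonicalASBD(double sampleRate, int channelsPerFrame)
        {
            // Assumed implementation: delegate to the MonoTouch helper that fills in
            // the canonical sample format (8.24 fixed-point on device, float on simulator).
            return AudioUnitUtils.AUCanonicalASBD(sampleRate, channelsPerFrame);
        }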
Example #2
        void prepareAudioUnit()
        {
            // Use AVAudioSession (the old AudioSession API is deprecated)
            var     session = AVAudioSession.SharedInstance();
            NSError error;

            if (session == null)
            {
                var alert = new UIAlertView("Session error", "Unable to create audio session", null, "Cancel");
                alert.Clicked += delegate
                {
                    alert.DismissWithClickedButtonIndex(0, true);
                };
                alert.Show();
                return; // without a session there is nothing to configure
            }
            session.SetActive(true);
            session.SetCategory(AVAudioSessionCategory.PlayAndRecord);
            session.SetPreferredIOBufferDuration(0.005, out error);

            // Getting AudioComponent Remote output
            _audioComponent = AudioComponent.FindComponent(AudioTypeOutput.Remote);

            // creating an audio unit instance
            _audioUnit = new AudioUnit(_audioComponent);

            // turning on microphone
            _audioUnit.SetEnableIO(true,
                                   AudioUnitScopeType.Input,
                                   1 // Remote Input
                                   );

            // setting audio format
            _audioUnit.SetAudioFormat(_dstFormat,
                                      AudioUnitScopeType.Input,
                                      0 // Remote Output
                                      );

            var format = AudioStreamBasicDescription.CreateLinearPCM(_sampleRate, bitsPerChannel: 32);

            format.FormatFlags = AudioStreamBasicDescription.AudioFormatFlagsNativeFloat;
            _audioUnit.SetAudioFormat(format, AudioUnitScopeType.Output, 1);

            // setting callback method
            _audioUnit.SetRenderCallback(_audioUnit_RenderCallback, AudioUnitScopeType.Global);

            _audioUnit.Initialize();
            _audioUnit.Start();
        }
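
The _audioUnit_RenderCallback handler is referenced above but not shown. A hedged sketch using the Xamarin.iOS AudioUnit.RenderDelegate signature; the body is only a placeholder that renders silence, where a real handler would write captured or synthesized samples into data:

        AudioUnitStatus _audioUnit_RenderCallback(AudioUnitRenderActionFlags actionFlags,
                                                  AudioTimeStamp timeStamp,
                                                  uint busNumber,
                                                  uint numberFrames,
                                                  AudioBuffers data)
        {
            // Placeholder: zero every output buffer for this slice (silence).
            for (int i = 0; i < data.Count; i++)
            {
                var buffer = data[i];
                for (int b = 0; b < buffer.DataByteSize; b += 4)
                    System.Runtime.InteropServices.Marshal.WriteInt32(buffer.Data, b, 0);
            }
            return AudioUnitStatus.NoError;
        }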
Example #3
        void prepareAudioUnit()
        {
            // creating an AudioComponentDescription of the RemoteIO AudioUnit
            AudioComponentDescription cd = new AudioComponentDescription()
            {
                componentType         = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType      = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags        = 0,
                componentFlagsMask    = 0
            };

            // Getting AudioComponent using the audio component description
            _audioComponent = AudioComponent.FindComponent(cd);

            // creating an audio unit instance
            _audioUnit = AudioUnit.CreateInstance(_audioComponent);

            // setting audio format
            _audioUnit.SetAudioFormat(_dstFormat,
                                      AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                      0 // Remote Output
                                      );

            // setting callback method
            _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(_audioUnit_RenderCallback);

            _audioUnit.Initialize();
        }
Example #4
        void prepareAudioUnit()
        {
            // AudioSession
            AudioSession.Initialize();
            AudioSession.SetActive(true);
            AudioSession.Category = AudioSessionCategory.PlayAndRecord;
            AudioSession.PreferredHardwareIOBufferDuration = 0.005f;

            // creating an AudioComponentDescription of the RemoteIO AudioUnit
            AudioComponentDescription cd = new AudioComponentDescription()
            {
                componentType         = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType      = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags        = 0,
                componentFlagsMask    = 0
            };

            // Getting AudioComponent using the audio component description
            _audioComponent = AudioComponent.FindComponent(cd);

            // creating an audio unit instance
            _audioUnit = AudioUnit.CreateInstance(_audioComponent);

            // turning on microphone
            _audioUnit.SetEnableIO(true,
                                   AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                   1 // Remote Input
                                   );

            // setting audio format
            _audioUnit.SetAudioFormat(_dstFormat,
                                      AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                      0 // Remote Output
                                      );
            _audioUnit.SetAudioFormat(AudioUnitUtils.AUCanonicalASBD(_sampleRate, 2),
                                      AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,
                                      1 // Remote input
                                      );


            // setting callback method
            _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(_audioUnit_RenderCallback);

            _audioUnit.Initialize();
            _audioUnit.Start();
        }
Example #5
        void prepareAudioUnit()
        {
            // Creating AudioComponentDescription instance of RemoteIO Audio Unit
            AudioComponentDescription cd = new AudioComponentDescription()
            {
                componentType         = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType      = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags        = 0,
                componentFlagsMask    = 0
            };

            // Getting AudioComponent from the description
            _component = AudioComponent.FindComponent(cd);

            // Getting Audiounit
            _audioUnit = AudioUnit.CreateInstance(_component);

            // setting AudioStreamBasicDescription
            int AudioUnitSampleTypeSize;

            if (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR)
            {
                AudioUnitSampleTypeSize = sizeof(float);
            }
            else
            {
                AudioUnitSampleTypeSize = sizeof(int);
            }
            AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription()
            {
                SampleRate = _sampleRate,
                Format     = AudioFormatType.LinearPCM,
                //kAudioFormatFlagsAudioUnitCanonical = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved | (kAudioUnitSampleFractionBits << kLinearPCMFormatFlagsSampleFractionShift),
                FormatFlags      = (AudioFormatFlags)((int)AudioFormatFlags.IsSignedInteger | (int)AudioFormatFlags.IsPacked | (int)AudioFormatFlags.IsNonInterleaved | (int)(kAudioUnitSampleFractionBits << (int)AudioFormatFlags.LinearPCMSampleFractionShift)),
                ChannelsPerFrame = 2,
                BytesPerPacket   = AudioUnitSampleTypeSize,
                BytesPerFrame    = AudioUnitSampleTypeSize,
                FramesPerPacket  = 1,
                BitsPerChannel   = 8 * AudioUnitSampleTypeSize,
                Reserved         = 0
            };

            _audioUnit.SetAudioFormat(audioFormat, AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input, 0);

            // setting callback

            /*
             * if (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR)
             *  _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(simulator_callback);
             * else
             *  _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(device_callback);
             * */
            _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(device_callback);
        }
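
The kAudioUnitSampleFractionBits constant used in the FormatFlags expression is defined elsewhere in the class. In the Core Audio headers it is the fraction-bit count of the canonical 8.24 fixed-point sample format, so the matching definition would be:

        // Core Audio's canonical fixed-point samples are 8.24: 24 fraction bits.
        const int kAudioUnitSampleFractionBits = 24;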
Example #6
        public IOSAudioProcessor()
        {
            var inputComponent = AudioComponent.FindNextComponent(
                null,
                new AudioComponentDescription
            {
                ComponentFlags        = 0,
                ComponentFlagsMask    = 0,
                ComponentManufacturer = AudioComponentManufacturerType.Apple,
                ComponentSubType      = (int)AudioTypeOutput.Remote,
                ComponentType         = AudioComponentType.Output
            });

            recorder = inputComponent.CreateAudioUnit();
            recorder.SetEnableIO(true, AudioUnitScopeType.Input, inputBus);
            recorder.SetEnableIO(false, AudioUnitScopeType.Output, outputBus);

            var audioFormat = new AudioStreamBasicDescription
            {
                SampleRate       = StudentDemo.Globals.SAMPLERATE,
                Format           = AudioFormatType.LinearPCM,
                FormatFlags      = AudioFormatFlags.IsSignedInteger | AudioFormatFlags.IsPacked,
                FramesPerPacket  = 1,
                ChannelsPerFrame = 1,
                BitsPerChannel   = 16,
                BytesPerPacket   = 2,
                BytesPerFrame    = 2
            };

            recorder.SetAudioFormat(audioFormat, AudioUnitScopeType.Output, inputBus);
            recorder.SetAudioFormat(audioFormat, AudioUnitScopeType.Input, outputBus);

            recorder.SetInputCallback(AudioInputCallBack, AudioUnitScopeType.Global, inputBus);

            // TODO: Disable buffers (requires interop)
            aBuffer = new AudioBuffer
            {
                NumberChannels = 1,
                DataByteSize   = 512 * 2,
                Data           = System.Runtime.InteropServices.Marshal.AllocHGlobal(512 * 2)
            };
        }
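
AudioInputCallBack is wired up above but not shown. A sketch assuming the Xamarin.iOS AudioUnit.InputDelegate signature and the usual pull pattern: wrap the preallocated aBuffer in an AudioBuffers list (constructor and indexer assumed) and call Render to fill it with this slice of 16-bit mono mic samples:

        AudioUnitStatus AudioInputCallBack(AudioUnitRenderActionFlags actionFlags,
                                           AudioTimeStamp timeStamp,
                                           uint busNumber,
                                           uint numberFrames,
                                           AudioUnit audioUnit)
        {
            // Wrap the preallocated buffer in a buffer list and pull the
            // captured samples into it.
            var buffers = new AudioBuffers(1);
            buffers[0] = aBuffer;

            var status = recorder.Render(ref actionFlags, timeStamp, busNumber, numberFrames, buffers);

            // On success, aBuffer.Data now holds numberFrames 16-bit samples,
            // ready to hand off (network send, file write, ...).
            return status;
        }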
Example #7
        void prepareAudioUnit()
        {
            // Creating AudioComponentDescription instance of RemoteIO Audio Unit
            var cd = new AudioComponentDescription()
            {
                componentType         = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType      = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags        = 0,
                componentFlagsMask    = 0
            };

            // Getting AudioComponent from the description
            _component = AudioComponent.FindComponent(cd);

            // Getting Audiounit
            _audioUnit = AudioUnit.CreateInstance(_component);

            // turning on microphone
            _audioUnit.SetEnableIO(true,
                                   AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                   1 // Remote Input
                                   );

            // setting AudioStreamBasicDescription
            var audioFormat = AudioUnitUtils.AUCanonicalASBD(44100.0, 2);

            _audioUnit.SetAudioFormat(audioFormat,
                                      AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                      0 // Remote output
                                      );
            _audioUnit.SetAudioFormat(audioFormat,
                                      AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,
                                      1 // Remote input
                                      );

            // setting callback
            _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(_audioUnit_RenderCallback);
            // initialize
            _audioUnit.Initialize();
        }
Example #8
        void prepareAudioUnit()
        {
            // AudioSession
            AudioSession.Initialize();
            AudioSession.SetActive(true);
            AudioSession.Category = AudioSessionCategory.PlayAndRecord;
            AudioSession.PreferredHardwareIOBufferDuration = 0.005f;

            // Getting AudioComponent Remote output
            _audioComponent = AudioComponent.FindComponent(AudioTypeOutput.Remote);

            // creating an audio unit instance
            _audioUnit = new AudioUnit(_audioComponent);

            // turning on microphone
            _audioUnit.SetEnableIO(true,
                                   AudioUnitScopeType.Input,
                                   1 // Remote Input
                                   );

            // setting audio format
            _audioUnit.SetAudioFormat(_dstFormat,
                                      AudioUnitScopeType.Input,
                                      0 // Remote Output
                                      );

            var format = AudioStreamBasicDescription.CreateLinearPCM(_sampleRate, bitsPerChannel: 32);

            format.FormatFlags = AudioStreamBasicDescription.AudioFormatFlagsAudioUnitCanonical;
            _audioUnit.SetAudioFormat(format, AudioUnitScopeType.Output, 1);

            // setting callback method
            _audioUnit.SetRenderCallback(_audioUnit_RenderCallback, AudioUnitScopeType.Global);

            _audioUnit.Initialize();
            _audioUnit.Start();
        }
Example #9
        public SoundWAVInstance(SoundWAV sound, bool looped)
            : base(sound)
        {
            this.sound  = sound;
            this.looped = looped;
            State       = SoundStates.Stopped;
            volume      = 1;

            instance = new AudioUnit(sound.audio.component);
            instance.SetAudioFormat(sound.desc, AudioUnitScopeType.Input, 0);
            if (sound.channels == 2)
            {
                switch (sound.bitDepth)
                {
                case 8: instance.RenderCallback += render2Channel8BitCallback; break;

                case 16: instance.RenderCallback += render2Channel16BitCallback; break;

                default: Debug.ThrowError("SoundWAVInstance", "Unsupported WAV bit depth"); break;
                }
            }
            else
            {
                switch (sound.bitDepth)
                {
                case 8: instance.RenderCallback += render1Channel8BitCallback; break;

                case 16: instance.RenderCallback += render1Channel16BitCallback; break;

                default: Debug.ThrowError("SoundWAVInstance", "Unsupported WAV bit depth"); break;
                }
            }
            instance.Initialize();

            #if iOS
            OS.CurrentApplication.PauseCallback  += Pause;
            OS.CurrentApplication.ResumeCallback += resume;
            #endif
        }
Example #10
        private void startTalking(UdpClient audioCaller)
        {
            //Stop old recording session

            //Generate new WaveFormat
            //    recorder.WaveFormat = new WaveFormat(16000, 16, 1);
            //    recorder.BufferMilliseconds = 50;
            //    recorder.DataAvailable += SendAudio; //Add event to SendAudio


//			recorder = new InputAudioQueue (playerFormat);
//
//
//			for (int i = 0; i < BUFFERCOUNT; i++) {
//				IntPtr aBUff;
//				//recorder.AllocateBuffer (AUDIOBUFFERSIZE, out aBUff);
//				byteSize = AUDIOBUFFERSIZE * playerFormat.BytesPerPacket;
//				recorder.AllocateBufferWithPacketDescriptors (byteSize, AUDIOBUFFERSIZE, out aBUff);
//				recorder.EnqueueBuffer (aBUff, byteSize, null);
//				Console.WriteLine ("Buffer allocated, enqueueing");
//			}

            // New RemoteIO-based setup

            var inputComponent = AudioComponent.FindNextComponent(
                null,
                new AudioComponentDescription
            {
                ComponentFlags        = 0,
                ComponentFlagsMask    = 0,
                ComponentManufacturer = AudioComponentManufacturerType.Apple,
                ComponentSubType      = (int)AudioTypeOutput.Remote,
                ComponentType         = AudioComponentType.Output
            });

            recorder = inputComponent.CreateAudioUnit();
            recorder.SetEnableIO(true, AudioUnitScopeType.Input, inputBus);
            recorder.SetEnableIO(false, AudioUnitScopeType.Output, outputBus);

            var audioFormat = new AudioStreamBasicDescription
            {
                SampleRate       = Globals.SAMPLERATE,
                Format           = AudioFormatType.LinearPCM,
                FormatFlags      = AudioFormatFlags.IsSignedInteger | AudioFormatFlags.IsPacked,
                FramesPerPacket  = 1,
                ChannelsPerFrame = 1,
                BitsPerChannel   = 16,
                BytesPerPacket   = 2,
                BytesPerFrame    = 2
            };

            recorder.SetAudioFormat(audioFormat, AudioUnitScopeType.Output, inputBus);
            recorder.SetAudioFormat(audioFormat, AudioUnitScopeType.Input, outputBus);

            recorder.SetInputCallback(AudioInputCallBack, AudioUnitScopeType.Global, inputBus);

            // TODO: Disable buffers (requires interop)
            aBuffer = new AudioBuffer
            {
                NumberChannels = 1,
                DataByteSize   = 512 * 2,
                Data           = System.Runtime.InteropServices.Marshal.AllocHGlobal(512 * 2)
            };
            isTalking = true;
            //recorder.InputCompleted += SendAudio;
            //recorder.Start ();

            recorder.Initialize();
            recorder.Start();
        }
Example #11
        public AudioVoice(AudioEngine engine, SoundEffectInstance effectInstance, WaveFormat desiredFormat)
        {
            if (engine == null)
            {
                throw new ArgumentNullException("engine");
            }
            if (desiredFormat == null)
            {
                throw new ArgumentNullException("desiredFormat");
            }

            audioEngine         = engine;
            soundEffectInstance = effectInstance;
            waveFormat          = desiredFormat;
            BusIndexMixer       = uint.MaxValue;

            if (desiredFormat.BitsPerSample != 16)
            {
                throw new AudioSystemInternalException("Invalid Audio Format. " + desiredFormat.BitsPerSample + " bits per sample is not supported.");
            }

            lock (StaticMembersLock)
            {
                if (nbOfInstances == 0)
                {
                    // Create the Audio Graph
                    audioGraph = new AUGraph();

                    // Open the graph (does not initialize it yet)
                    audioGraph.Open();

                    // Create the AudioComponentDescriptions corresponding to the Remote I/O output and the mixers
                    var remoteInOutComponentDesc       = AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote);
                    var mixerMultiChannelComponentDesc = AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel);
                    var mixer3DComponentDesc           = AudioComponentDescription.CreateMixer(AudioTypeMixer.Spacial);

                    // Add the Audio Unit nodes to the AudioGraph
                    var outputUnitNodeId   = audioGraph.AddNode(remoteInOutComponentDesc);
                    var idChannelMixerNode = audioGraph.AddNode(mixerMultiChannelComponentDesc);
                    var id3DMixerNode      = audioGraph.AddNode(mixer3DComponentDesc);

                    // Connect the nodes together
                    CheckGraphError(audioGraph.ConnnectNodeInput(idChannelMixerNode, 0, outputUnitNodeId, 0), "Connection of the graph node failed.");
                    CheckGraphError(audioGraph.ConnnectNodeInput(id3DMixerNode, 0, idChannelMixerNode, MaxNumberOfTracks), "Connection of the graph node failed.");

                    // Get the MixerUnit objects
                    unitChannelMixer = audioGraph.GetNodeInfo(idChannelMixerNode);
                    unit3DMixer      = audioGraph.GetNodeInfo(id3DMixerNode);

                    // Set the mixers' output formats (the stream format is propagated along the linked input during the graph initialization)
                    var desiredSampleRate = (engine.AudioSampleRate != 0) ? engine.AudioSampleRate : AudioUnitOutputSampleRate;
                    unit3DMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);
                    unitChannelMixer.SetAudioFormat(CreateLinear16BitsPcm(2, desiredSampleRate), AudioUnitScopeType.Output);

                    // set the element count to the max number of possible tracks before initializing the audio graph
                    CheckUnitStatus(unitChannelMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks + 1), string.Format("Failed to set element count on ChannelMixer [{0}]", MaxNumberOfTracks + 1)); // +1 for the 3DMixer output
                    CheckUnitStatus(unit3DMixer.SetElementCount(AudioUnitScopeType.Input, MaxNumberOfTracks), string.Format("Failed to set element count on 3DMixer [{0}]", MaxNumberOfTracks));

                    // set a null renderer callback to the channel and 3d mixer input bus
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unit3DMixer.Handle, i), "Failed to set the render callback");
                        CheckUnitStatus((AudioUnitStatus)SetInputRenderCallbackToNull(unitChannelMixer.Handle, i), "Failed to set the render callback");
                    }

                    // Initialize the graph (validation of the topology)
                    CheckGraphError(audioGraph.Initialize(), "The audio graph initialization failed.");

                    // Start audio rendering
                    CheckGraphError(audioGraph.Start(), "Audio Graph could not start.");

                    // disable all the input bus at the beginning
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        CheckUnitStatus(unitChannelMixer.SetParameter(AudioUnitParameterType.MultiChannelMixerEnable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the ChannelMixerInput.");
                        CheckUnitStatus(unit3DMixer.SetParameter((AudioUnitParameterType)_3DMixerParametersIds.Enable, 0f, AudioUnitScopeType.Input, i), "Failed to enable/disable the 3DMixerInput.");
                    }

                    // At initialization all UnitElement are available.
                    availableMixerBusIndices = new Queue<uint>();
                    for (uint i = 0; i < MaxNumberOfTracks; i++)
                    {
                        availableMixerBusIndices.Enqueue(i);
                    }
                }
                ++nbOfInstances;

                // Create an AudioDataRendererInfo for the sounds.
                pAudioDataRendererInfo = (AudioDataRendererInfo *)Utilities.AllocateClearedMemory(sizeof(AudioDataRendererInfo));
                pAudioDataRendererInfo->HandleChannelMixer = unitChannelMixer.Handle;
                pAudioDataRendererInfo->Handle3DMixer      = unit3DMixer.Handle;
            }
        }
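
SetInputRenderCallbackToNull, used above, is an interop helper that is not shown. A hedged sketch assuming a straight P/Invoke of AudioUnitSetProperty with a zeroed AURenderCallbackStruct (kAudioUnitProperty_SetRenderCallback = 23, kAudioUnitScope_Input = 1, per the C headers); the struct layout and constants are assumptions, not part of the original listing:

        [StructLayout(LayoutKind.Sequential)]
        struct AURenderCallbackStruct
        {
            public IntPtr InputProc;       // AURenderCallback function pointer
            public IntPtr InputProcRefCon; // user data handed back to the callback
        }

        [DllImport("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
        static extern int AudioUnitSetProperty(IntPtr inUnit, uint inId, uint inScope,
                                               uint inElement, ref AURenderCallbackStruct inData, uint inDataSize);

        static int SetInputRenderCallbackToNull(IntPtr inUnit, uint busNumber)
        {
            // Both pointers stay IntPtr.Zero, which clears the bus's render callback.
            var callback = new AURenderCallbackStruct();
            return AudioUnitSetProperty(inUnit, 23 /* SetRenderCallback */, 1 /* Input scope */,
                                        busNumber, ref callback, (uint)Marshal.SizeOf(typeof(AURenderCallbackStruct)));
        }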
Example #12
        public void InitializeAUGraph()
        {
            Debug.Print("Initialize");

            LoadFiles();

            graph = new AUGraph();

            // create two AudioComponentDescriptions for the AUs we want in the graph

            // output unit
            var outputNode = graph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Remote));

            // mixer node
            var mixerNode = graph.AddNode(AudioComponentDescription.CreateMixer(AudioTypeMixer.MultiChannel));

            // connect a node's output to a node's input
            if (graph.ConnnectNodeInput(mixerNode, 0, outputNode, 0) != AUGraphError.OK)
            {
                throw new ApplicationException();
            }

            // open the graph; the AudioUnits are opened but not initialized (no resource allocation occurs here)
            if (graph.TryOpen() != 0)
            {
                throw new ApplicationException();
            }

            mixer = graph.GetNodeInfo(mixerNode);

            // set bus count
            const uint numbuses = 2;

            Debug.Print("Set input bus count {0}", numbuses);

            if (mixer.SetElementCount(AudioUnitScopeType.Input, numbuses) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }

            AudioStreamBasicDescription desc;

            for (uint i = 0; i < numbuses; ++i)
            {
                // setup render callback
                if (graph.SetNodeInputCallback(mixerNode, i, HandleRenderDelegate) != AUGraphError.OK)
                {
                    throw new ApplicationException();
                }

                // set input stream format to what we want
                desc = mixer.GetAudioFormat(AudioUnitScopeType.Input, i);
                //desc.ChangeNumberChannels(2, false);
                desc.SampleRate = GraphSampleRate;

                mixer.SetAudioFormat(desc, AudioUnitScopeType.Input, i);
            }

            // set output stream format to what we want
            desc = mixer.GetAudioFormat(AudioUnitScopeType.Output);

            //desc.ChangeNumberChannels(2, false);
            desc.SampleRate = GraphSampleRate;

            mixer.SetAudioFormat(desc, AudioUnitScopeType.Output);

            // now that we've set everything up we can initialize the graph, this will also validate the connections
            if (graph.Initialize() != AUGraphError.OK)
            {
                throw new ApplicationException();
            }
        }
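
HandleRenderDelegate, registered on each mixer input bus above, is not shown. A placeholder sketch with the AudioUnit.RenderDelegate signature; it assumes the mixer input format is native 32-bit float and a class-level phase field, and renders a test tone where the real handler would play the data prepared by LoadFiles(), keyed on busNumber:

        double phase; // assumed oscillator state, not in the original

        AudioUnitStatus HandleRenderDelegate(AudioUnitRenderActionFlags actionFlags,
                                             AudioTimeStamp timeStamp,
                                             uint busNumber,
                                             uint numberFrames,
                                             AudioBuffers data)
        {
            // Test tone: 440 Hz sine written to every buffer of the slice.
            // (A real render handler would avoid allocating on the audio thread.)
            var samples = new float[numberFrames];
            double step = 2 * Math.PI * 440.0 / GraphSampleRate;
            for (int n = 0; n < samples.Length; n++)
            {
                samples[n] = (float)Math.Sin(phase);
                phase += step;
            }
            for (int i = 0; i < data.Count; i++)
                System.Runtime.InteropServices.Marshal.Copy(samples, 0, data[i].Data, samples.Length);
            return AudioUnitStatus.OK;
        }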
Example #13
        void StreamPropertyListenerProc(object sender, PropertyFoundEventArgs args)
        {
            if (args.Property == AudioFileStreamProperty.DataFormat)
            {
                dataFormat = audioFileStream.DataFormat;
                return;
            }

            if (args.Property != AudioFileStreamProperty.ReadyToProducePackets)
            {
                return;
            }

            if (audioQueue != null)
            {
                // TODO: Dispose
                throw new NotImplementedException();
            }

            audioQueue = new OutputAudioQueue(dataFormat);
            audioQueue.OutputCompleted += HandleOutputCompleted;

            AudioQueueStatus status;

            aqTap = audioQueue.CreateProcessingTap(TapProc, AudioQueueProcessingTapFlags.PreEffects, out status);
            if (status != AudioQueueStatus.Ok)
            {
                throw new ApplicationException("Could not create AQ tap");
            }

            // create an augraph to process in the tap. needs to convert from tapFormat to effect format and back

            /* note: this is invalidname's recipe to do an in-place effect when a format conversion is needed
             * before and after the effect, usually because effects want floats, and everything else in iOS
             * core audio works with ints (or, in rare cases, fixed-point).
             * the graph looks like this:
             * [render-callback] -> [converter] -> [effect] -> [converter] -> [generic-output]
             * prior to calling AudioUnitRender() on generic-output, the tap proc copies the ioData to a pointer
             * that render-callback knows about, and NULLs the ioData provided to AudioUnitRender(). the NULL
             * tells generic-output to pull from its upstream units (ie, the augraph), and copying off the ioData
             * pointer allows the render-callback to provide it to the front of the stream. in some locales, this
             * kind of shell game is described as "batshit crazy", but it seems to work pretty well in practice.
             */

            auGraph = new AUGraph();
            auGraph.Open();
            var effectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.NewTimePitch));

            effectUnit = auGraph.GetNodeInfo(effectNode);

            var convertToEffectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.AU));
            var convertToEffectUnit = auGraph.GetNodeInfo(convertToEffectNode);

            var convertFromEffectNode = auGraph.AddNode(AudioComponentDescription.CreateConverter(AudioTypeConverter.AU));
            var convertFromEffectUnit = auGraph.GetNodeInfo(convertFromEffectNode);

            var genericOutputNode = auGraph.AddNode(AudioComponentDescription.CreateOutput(AudioTypeOutput.Generic));

            genericOutputUnit = auGraph.GetNodeInfo(genericOutputNode);

            // set the format conversions throughout the graph
            var effectFormat = effectUnit.GetAudioFormat(AudioUnitScopeType.Output);
            var tapFormat    = aqTap.ProcessingFormat;

            convertToEffectUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Input);
            convertToEffectUnit.SetAudioFormat(effectFormat, AudioUnitScopeType.Output);

            convertFromEffectUnit.SetAudioFormat(effectFormat, AudioUnitScopeType.Input);
            convertFromEffectUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Output);

            genericOutputUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Input);
            genericOutputUnit.SetAudioFormat(tapFormat, AudioUnitScopeType.Output);

            // set maximum frames per slice higher (4096) so we don't get kAudioUnitErr_TooManyFramesToProcess
            const uint maxFramesPerSlice = 4096;

            if (convertToEffectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (effectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (convertFromEffectUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }
            if (genericOutputUnit.SetMaximumFramesPerSlice(maxFramesPerSlice, AudioUnitScopeType.Global) != AudioUnitStatus.OK)
            {
                throw new ApplicationException();
            }

            // connect the nodes
            auGraph.ConnnectNodeInput(convertToEffectNode, 0, effectNode, 0);
            auGraph.ConnnectNodeInput(effectNode, 0, convertFromEffectNode, 0);
            auGraph.ConnnectNodeInput(convertFromEffectNode, 0, genericOutputNode, 0);

            // set up the callback into the first convert unit
            if (convertToEffectUnit.SetRenderCallback(ConvertInputRenderCallback, AudioUnitScopeType.Global) != AudioUnitStatus.NoError)
            {
                throw new ApplicationException();
            }

            var res = auGraph.Initialize();

            if (res != AUGraphError.OK)
            {
                throw new ApplicationException();
            }
        }
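
ConvertInputRenderCallback is the render callback described in the recipe comment above. A hedged sketch of its half of the pointer shell game, assuming a preRenderData field (hypothetical name) where the tap proc stashes the ioData pointer before NULLing it, and assuming AudioBuffers.SetData to point the callback's buffer list at it:

        IntPtr preRenderData; // hypothetical field: ioData stashed by the tap proc

        AudioUnitStatus ConvertInputRenderCallback(AudioUnitRenderActionFlags actionFlags,
                                                   AudioTimeStamp timeStamp,
                                                   uint busNumber,
                                                   uint numberFrames,
                                                   AudioBuffers data)
        {
            // Front of the stream: hand the converter the buffer that the tap
            // proc saved before it NULLed AudioUnitRender's ioData.
            data.SetData(0, preRenderData);
            return AudioUnitStatus.NoError;
        }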
Example #15
        void prepareAudioUnit()
        {
            // AudioSession
            AudioSession.Initialize();
            AudioSession.SetActive(true);
            AudioSession.Category = AudioSessionCategory.PlayAndRecord;
            AudioSession.PreferredHardwareIOBufferDuration = 0.01f;

            // Creating AudioComponentDescription instance of RemoteIO Audio Unit
            var cd = new AudioComponentDescription()
            {
                componentType         = AudioComponentDescription.AudioComponentType.kAudioUnitType_Output,
                componentSubType      = AudioComponentDescription.AudioComponentSubType.kAudioUnitSubType_RemoteIO,
                componentManufacturer = AudioComponentDescription.AudioComponentManufacturerType.kAudioUnitManufacturer_Apple,
                componentFlags        = 0,
                componentFlagsMask    = 0
            };

            // Getting AudioComponent from the description
            _component = AudioComponent.FindComponent(cd);

            // Getting Audiounit
            _audioUnit = AudioUnit.CreateInstance(_component);

            // turning on microphone
            _audioUnit.SetEnableIO(true,
                                   AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                   1 // Remote Input
                                   );

            // setting AudioStreamBasicDescription
            int AudioUnitSampleTypeSize             = (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR) ? sizeof(float) : sizeof(uint);
            AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription()
            {
                SampleRate = _sampleRate,
                Format     = AudioFormatType.LinearPCM,
                //kAudioFormatFlagsAudioUnitCanonical = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved | (kAudioUnitSampleFractionBits << kLinearPCMFormatFlagsSampleFractionShift),
                FormatFlags      = (AudioFormatFlags)((int)AudioFormatFlags.IsSignedInteger | (int)AudioFormatFlags.IsPacked | (int)AudioFormatFlags.IsNonInterleaved | (int)(kAudioUnitSampleFractionBits << (int)AudioFormatFlags.LinearPCMSampleFractionShift)),
                ChannelsPerFrame = 2,
                BytesPerPacket   = AudioUnitSampleTypeSize,
                BytesPerFrame    = AudioUnitSampleTypeSize,
                FramesPerPacket  = 1,
                BitsPerChannel   = 8 * AudioUnitSampleTypeSize,
                Reserved         = 0
            };

            _audioUnit.SetAudioFormat(audioFormat,
                                      AudioUnit.AudioUnitScopeType.kAudioUnitScope_Input,
                                      0 // Remote output
                                      );
            _audioUnit.SetAudioFormat(audioFormat,
                                      AudioUnit.AudioUnitScopeType.kAudioUnitScope_Output,
                                      1 // Remote input
                                      );

            // setting callback

            /*
             * if (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR)
             *  _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(simulator_callback);
             * else
             *  _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(device_callback);
             */
            _audioUnit.RenderCallback += new EventHandler<AudioUnitEventArgs>(_callback);
            // initialize
            _audioUnit.Initialize();
        }