unsafe void TapPrepare(MTAudioProcessingTap tap, nint maxFrames, ref AudioStreamBasicDescription processingFormat)
        {
            // Store sample rate for CenterFrequency property
            context.SampleRate = processingFormat.SampleRate;

            /* Verify the processing format (not needed by the Audio Unit itself, but required for the RMS calculation). */
            VerifyProcessingFormat(processingFormat);

            if (processingFormat.FormatFlags.HasFlag(AudioFormatFlags.IsNonInterleaved))
            {
                context.IsNonInterleaved = true;
            }

            /* Create bandpass filter Audio Unit */

            var audioComponentDescription = AudioComponentDescription.CreateEffect(AudioTypeEffect.BandPassFilter);
            // TODO: https://trello.com/c/GZUGUyH0
            var audioComponent = AudioComponent.FindNextComponent(null, ref audioComponentDescription);

            if (audioComponent == null)
            {
                return;
            }

            AudioUnitStatus error = AudioUnitStatus.NoError;

            AudioUnit.AudioUnit audioUnit = audioComponent.CreateAudioUnit();

            try
            {
                audioUnit.SetAudioFormat(processingFormat, AudioUnitScopeType.Input);
                audioUnit.SetAudioFormat(processingFormat, AudioUnitScopeType.Output);
            }
            catch (AudioUnitException)
            {
                error = AudioUnitStatus.FormatNotSupported;
            }

            if (error == AudioUnitStatus.NoError)
            {
                error = audioUnit.SetRenderCallback(Render, AudioUnitScopeType.Input);
            }

            if (error == AudioUnitStatus.NoError)
            {
                error = audioUnit.SetMaximumFramesPerSlice((uint)maxFrames, AudioUnitScopeType.Global);
            }

            if (error == AudioUnitStatus.NoError)
            {
                error = (AudioUnitStatus)audioUnit.Initialize();
            }

            if (error != AudioUnitStatus.NoError)
            {
                audioUnit.Dispose();
                audioUnit = null;
            }

            context.AudioUnit = audioUnit;
        }
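The bandpass unit created above keeps Apple's default center frequency and bandwidth. A minimal sketch of retuning it afterwards, assuming the binding exposes the native kBandpassParam_* parameters as AudioUnitParameterType.BandpassCenterFrequency and AudioUnitParameterType.BandpassBandwidth (the method name below is hypothetical, not part of the original sample):

        // Sketch only: adjust the bandpass filter created in TapPrepare.
        // The enum member names are assumed to mirror kBandpassParam_CenterFrequency
        // and kBandpassParam_Bandwidth from AudioUnitParameters.h.
        void RetuneBandpass(float centerHz, float bandwidthCents)
        {
            if (context.AudioUnit == null)
                return;

            context.AudioUnit.SetParameter(AudioUnitParameterType.BandpassCenterFrequency, centerHz,
                                           AudioUnitScopeType.Global);
            context.AudioUnit.SetParameter(AudioUnitParameterType.BandpassBandwidth, bandwidthCents,
                                           AudioUnitScopeType.Global);
        }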
Example 2
        private AudioComponent GetHALAudioComponent()
        {
            var componentDescription = new AudioComponentDescription()
            {
                ComponentType         = AudioComponentType.Output,
                ComponentSubType      = (int)AudioUnitSubType.HALOutput,
                ComponentManufacturer = AudioComponentManufacturerType.Apple,
            };

            return(AudioComponent.FindNextComponent(null, ref componentDescription));
        }
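A possible way to use the returned component, mirroring the RemoteIO examples further down (the wrapper method is illustrative, not part of the original sample):

        // Sketch: build an input-capable unit from the HAL component.
        // Bus 1 is the input element, bus 0 the output element, per the usual I/O unit convention.
        AudioUnit.AudioUnit CreateHALInputUnit()
        {
            var component = GetHALAudioComponent();
            if (component == null)
                return null;

            var unit = component.CreateAudioUnit();
            unit.SetEnableIO(true, AudioUnitScopeType.Input, 1);   // capture from the hardware input
            unit.SetEnableIO(false, AudioUnitScopeType.Output, 0); // no rendering to hardware output
            return unit;
        }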
Example 3
        theUnit GetAudioUnitForTest()
        {
            AudioComponentDescription desc = new AudioComponentDescription();

            desc.ComponentType         = AudioComponentType.Output;
            desc.ComponentSubType      = 1634230636;        // 'ahal'
            desc.ComponentFlags        = 0;
            desc.ComponentFlagsMask    = 0;
            desc.ComponentManufacturer = AudioComponentManufacturerType.Apple;

            AudioComponent comp = AudioComponent.FindNextComponent(null, ref desc);
            theUnit        unit = new theUnit(comp);

            return(unit);
        }
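The literal 1634230636 is just the FourCC 'ahal' packed into an integer. A small helper (hypothetical, not in the original test code) makes that explicit:

        // FourCC("ahal") == 0x6168616C == 1634230636, the same value as
        // (int)AudioUnitSubType.HALOutput used in the previous example.
        static int FourCC(string code)
        {
            return (code[0] << 24) | (code[1] << 16) | (code[2] << 8) | code[3];
        }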
Example 4
        public IOSAudioProcessor()
        {
            var inputComponent = AudioComponent.FindNextComponent(
                null,
                new AudioComponentDescription
            {
                ComponentFlags        = 0,
                ComponentFlagsMask    = 0,
                ComponentManufacturer = AudioComponentManufacturerType.Apple,
                ComponentSubType      = (int)AudioTypeOutput.Remote,
                ComponentType         = AudioComponentType.Output
            });

            recorder = inputComponent.CreateAudioUnit();
            recorder.SetEnableIO(true, AudioUnitScopeType.Input, inputBus);
            recorder.SetEnableIO(false, AudioUnitScopeType.Output, outputBus);

            var audioFormat = new AudioStreamBasicDescription
            {
                SampleRate       = StudentDemo.Globals.SAMPLERATE,
                Format           = AudioFormatType.LinearPCM,
                FormatFlags      = AudioFormatFlags.IsSignedInteger | AudioFormatFlags.IsPacked,
                FramesPerPacket  = 1,
                ChannelsPerFrame = 1,
                BitsPerChannel   = 16,
                BytesPerPacket   = 2,
                BytesPerFrame    = 2
            };

            recorder.SetAudioFormat(audioFormat, AudioUnitScopeType.Output, inputBus);
            recorder.SetAudioFormat(audioFormat, AudioUnitScopeType.Input, outputBus);

            recorder.SetInputCallback(AudioInputCallBack, AudioUnitScopeType.Global, inputBus);

            // TODO: Disable buffers (requires interop)
            aBuffer = new AudioBuffer
            {
                NumberChannels = 1,
                DataByteSize   = 512 * 2,
                Data           = System.Runtime.InteropServices.Marshal.AllocHGlobal(512 * 2)
            };
        }
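AudioInputCallBack itself is not shown in this snippet. A sketch of what it might look like, assuming the binding's input-callback delegate signature and that numberFrames never exceeds the 512 frames preallocated in aBuffer:

        // Sketch of the input callback registered above. It renders the captured frames
        // into the preallocated aBuffer and copies them out as 16-bit mono PCM bytes.
        AudioUnitStatus AudioInputCallBack(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp,
                                           uint busNumber, uint numberFrames, AudioUnit.AudioUnit audioUnit)
        {
            using (var buffers = new AudioBuffers(1))
            {
                // Point the buffer list at the preallocated aBuffer and let the unit render into it.
                buffers[0] = aBuffer;
                var status = audioUnit.Render(ref actionFlags, timeStamp, busNumber, numberFrames, buffers);
                if (status != AudioUnitStatus.NoError)
                    return status;

                // 2 bytes per frame: mono, 16-bit signed samples.
                var pcm = new byte[numberFrames * 2];
                System.Runtime.InteropServices.Marshal.Copy(aBuffer.Data, pcm, 0, pcm.Length);
                // ... hand "pcm" to whatever consumes the audio ...
            }

            return AudioUnitStatus.NoError;
        }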
Example 5
            unsafe void TapPrepare(MTAudioProcessingTap tap, nint maxFrames, ref AudioStreamBasicDescription processingFormat)
            {
                // Store sample rate for CenterFrequency property
                context.SampleRate = processingFormat.SampleRate;

                /* Verify the processing format (not needed by the Audio Unit itself, but required for the RMS calculation). */
                VerifyProcessingFormat(processingFormat);

                if (processingFormat.FormatFlags.HasFlag(AudioFormatFlags.IsNonInterleaved))
                {
                    context.IsNonInterleaved = true;
                }

                /* Create N-band EQ Audio Unit */

                var audioComponentDescription = AudioComponentDescription.CreateEffect(AudioTypeEffect.NBandEq);
                // TODO: https://trello.com/c/GZUGUyH0
                var audioComponent = AudioComponent.FindNextComponent(null, ref audioComponentDescription);

                if (audioComponent == null)
                {
                    return;
                }

                var error     = AudioUnitStatus.NoError;
                var audioUnit = audioComponent.CreateAudioUnit();

                try
                {
                    audioUnit.SetFormat(processingFormat, AudioUnitScopeType.Input);
                    audioUnit.SetFormat(processingFormat, AudioUnitScopeType.Output);
                }
                catch (AudioUnitException)
                {
                    error = AudioUnitStatus.FormatNotSupported;
                }

                if (error == AudioUnitStatus.NoError)
                {
                    error = audioUnit.SetRenderCallback(Render, AudioUnitScopeType.Input);
                }

                if (error == AudioUnitStatus.NoError)
                {
                    error = audioUnit.SetMaximumFramesPerSlice((uint)maxFrames, AudioUnitScopeType.Global);
                }

                if (error == AudioUnitStatus.NoError)
                {
                    error = (AudioUnitStatus)audioUnit.Initialize();
                }

                if (error != AudioUnitStatus.NoError)
                {
                    audioUnit.Dispose();
                    audioUnit = null;
                }

                context.AudioUnit = audioUnit;

                // Guard: on failure above, audioUnit is null and must not be configured further.
                if (audioUnit == null)
                    return;

                uint value = 10;
                uint size  = sizeof(uint);
                var  stat  = AudioUnitSetProperty(audioUnit.Handle, AUNGraphicParams.NumberOfBands, AudioUnitScopeType.Global, 0,
                                                  ref value, size);

                for (var i = 0; i < Parent.Bands.Length; i++)
                {
                    var band = Parent.Bands[i];
                    // SetParameter returns an AudioUnitStatus, not the value that was set.
                    var freqStatus = context.AudioUnit.SetParameter(AudioUnitParameterType.AUNBandEQFrequency + i, band.Center,
                                                                    AudioUnitScopeType.Global);
                    var bypassStatus = context.AudioUnit.SetParameter(AudioUnitParameterType.AUNBandEQBypassBand + i, 0,
                                                                      AudioUnitScopeType.Global);
                    SetBand(i, band.Gain);
                    Console.WriteLine(freqStatus);
                }
            }
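AudioUnitSetProperty is not part of the managed AudioUnit class, so the sample presumably declares its own P/Invoke. A sketch of what that declaration could look like, assuming AUNGraphicParams is an enum defined elsewhere in the project whose NumberOfBands member carries the native kAUNBandEQProperty_NumberOfBands value:

            // Hypothetical P/Invoke backing the AudioUnitSetProperty call above.
            [System.Runtime.InteropServices.DllImport("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
            static extern AudioUnitStatus AudioUnitSetProperty(IntPtr inUnit, AUNGraphicParams inID,
                                                               AudioUnitScopeType inScope, uint inElement,
                                                               ref uint inData, uint inDataSize);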
Example 6
        private void startTalking(UdpClient audioCaller)
        {
            //Stop old recording session

            //Generate new WaveFormat
            //    recorder.WaveFormat = new WaveFormat(16000, 16, 1);
            //    recorder.BufferMilliseconds = 50;
            //    recorder.DataAvailable += SendAudio; //Add event to SendAudio


//			recorder = new InputAudioQueue (playerFormat);
//
//
//			for (int i = 0; i < BUFFERCOUNT; i++) {
//				IntPtr aBUff;
//				//recorder.AllocateBuffer (AUDIOBUFFERSIZE, out aBUff);
//				byteSize = AUDIOBUFFERSIZE * playerFormat.BytesPerPacket;
//				recorder.AllocateBufferWithPacketDescriptors (byteSize, AUDIOBUFFERSIZE, out aBUff);
//				recorder.EnqueueBuffer (aBUff, byteSize, null);
//				Console.WriteLine ("Buffer allocated, enqueueing");
//			}

            //New stuffs

            var inputComponent = AudioComponent.FindNextComponent(
                null,
                new AudioComponentDescription
            {
                ComponentFlags        = 0,
                ComponentFlagsMask    = 0,
                ComponentManufacturer = AudioComponentManufacturerType.Apple,
                ComponentSubType      = (int)AudioTypeOutput.Remote,
                ComponentType         = AudioComponentType.Output
            });

            recorder = inputComponent.CreateAudioUnit();
            recorder.SetEnableIO(true, AudioUnitScopeType.Input, inputBus);
            recorder.SetEnableIO(false, AudioUnitScopeType.Output, outputBus);

            var audioFormat = new AudioStreamBasicDescription
            {
                SampleRate       = Globals.SAMPLERATE,
                Format           = AudioFormatType.LinearPCM,
                FormatFlags      = AudioFormatFlags.IsSignedInteger | AudioFormatFlags.IsPacked,
                FramesPerPacket  = 1,
                ChannelsPerFrame = 1,
                BitsPerChannel   = 16,
                BytesPerPacket   = 2,
                BytesPerFrame    = 2
            };

            recorder.SetAudioFormat(audioFormat, AudioUnitScopeType.Output, inputBus);
            recorder.SetAudioFormat(audioFormat, AudioUnitScopeType.Input, outputBus);

            recorder.SetInputCallback(AudioInputCallBack, AudioUnitScopeType.Global, inputBus);

            // TODO: Disable buffers (requires interop)
            aBuffer = new AudioBuffer
            {
                NumberChannels = 1,
                DataByteSize   = 512 * 2,
                Data           = System.Runtime.InteropServices.Marshal.AllocHGlobal(512 * 2)
            };
            isTalking = true;
            //recorder.InputCompleted += SendAudio;
            //recorder.Start ();

            recorder.Initialize();
            recorder.Start();
        }
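A hypothetical teardown counterpart (none is shown in the sample): stop the unit and release the unmanaged buffer allocated in startTalking.

        // Illustrative only: stopTalking is not part of the original sample.
        private void stopTalking()
        {
            isTalking = false;

            if (recorder != null)
            {
                recorder.Stop();
                recorder.Dispose();
                recorder = null;
            }

            // Free the unmanaged memory backing aBuffer.
            if (aBuffer.Data != System.IntPtr.Zero)
            {
                System.Runtime.InteropServices.Marshal.FreeHGlobal(aBuffer.Data);
                aBuffer.Data = System.IntPtr.Zero;
            }
        }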