// AudioQueue processing-tap callback: pulls source audio from the queue,
// routes it through genericOutputUnit for (offline) rendering back into
// `data`, and returns the number of source frames actually obtained.
// Throws ApplicationException on any source/render failure.
uint TapProc(AudioQueueProcessingTap audioQueueTap, uint inNumberOfFrames, ref AudioTimeStamp timeStamp, ref AudioQueueProcessingTapFlags flags, AudioBuffers data)
        {
            AudioQueueProcessingTapFlags sourceFlags;
            uint sourceFrames;

            // Fill `data` with up to inNumberOfFrames of source audio from the tap.
            if (audioQueueTap.GetSourceAudio(inNumberOfFrames, ref timeStamp, out sourceFlags, out sourceFrames, data) != AudioQueueStatus.Ok)
            {
                throw new ApplicationException();
            }

            // Stash each channel's buffer pointer in preRenderData (presumably
            // consumed later by the unit's render callback — confirm against the
            // rest of this class), then clear the buffer so the render pass
            // supplies/fills its own data.
            for (int channel = 0; channel < data.Count; channel++)
            {
                preRenderData[channel] = data [channel].Data;
                data.SetData(channel, IntPtr.Zero);
            }

            // Only the sample time of renderTimeStamp is meaningful here.
            renderTimeStamp.Flags = AudioTimeStamp.AtsFlags.SampleTimeValid;
            AudioUnitRenderActionFlags actionFlags = 0;

            // Render through the generic output unit back into `data`.
            AudioUnitStatus res = genericOutputUnit.Render(ref actionFlags, renderTimeStamp, 0, inNumberOfFrames, data);

            if (res != AudioUnitStatus.NoError)
            {
                throw new ApplicationException();
            }

            return(sourceFrames);
        }
示例#2
0
        // Builds an AUGraph with a multichannel mixer feeding the system
        // output, opens it, grabs the mixer AudioUnit, and sets its input
        // element count for this test scenario.
        void SetupAUGraph()
        {
            graph = new AUGraph();

            var mixerDesc = new AudioComponentDescription {
                ComponentType         = AudioComponentType.Mixer,
                ComponentSubType      = (int)AudioTypeMixer.MultiChannel,
                ComponentFlags        = 0,
                ComponentFlagsMask    = 0,
                ComponentManufacturer = AudioComponentManufacturerType.Apple
            };

            var outputDesc = new AudioComponentDescription {
                ComponentType         = AudioComponentType.Output,
                ComponentSubType      = (int)AudioTypeOutput.System,
                ComponentFlags        = 0,
                ComponentFlagsMask    = 0,
                ComponentManufacturer = AudioComponentManufacturerType.Apple
            };

            int mixerNode  = graph.AddNode(mixerDesc);
            int outputNode = graph.AddNode(outputDesc);

            // Mixer output bus 0 -> system-output input bus 0.
            AUGraphError connectError = graph.ConnnectNodeInput(mixerNode, 0, outputNode, 0);
            Assert.AreEqual(AUGraphError.OK, connectError);

            graph.Open();

            mMixer = graph.GetNodeInfo(mixerNode);

            // Zero input elements on the mixer for this test.
            AudioUnitStatus elementStatus = mMixer.SetElementCount(AudioUnitScopeType.Input, 0);
            Assert.AreEqual(AudioUnitStatus.OK, elementStatus);
        }
 // Guard helper: escalates any non-success AudioUnit status into a hard failure.
 void CheckStatus(AudioUnitStatus status)
 {
     if (status == AudioUnitStatus.NoError)
     {
         return;
     }

     throw new InvalidProgramException();
 }
示例#4
0
 // Validates an AudioUnit call result: logs the failure (with the current
 // voice count) and raises an internal audio-system exception on any
 // non-OK status; no-op on success.
 private static void CheckUnitStatus(AudioUnitStatus status, string msg)
 {
     if (status == AudioUnitStatus.OK)
     {
         return;
     }

     Log.Error("Audio Error [{0} / {1}]. Voices: {2}", msg, status, nbOfInstances);
     throw new AudioSystemInternalException($"{msg} [Error={status}].");
 }
示例#5
0
 // Validates an AudioUnit call result: on any non-OK status, logs the
 // failure and raises an internal audio-system exception carrying `msg`
 // and the status code; no-op on success.
 private static void CheckUnitStatus(AudioUnitStatus status, string msg)
 {
     if (status != AudioUnitStatus.OK)
     {
         // The log line includes nbOfInstances so failures can be
         // correlated with the current voice load.
         Log.Error("Audio Error [{0} / {1}]. Voices: {2}", msg, status, nbOfInstances);
         throw new AudioSystemInternalException(msg + " [Error=" + status + "].");
     }   
 }
 // Reports a non-OK status via ReportError and passes the original status
 // through unchanged, so calls can be wrapped inline.
 public static AudioUnitStatus CheckError(AudioUnitStatus status)
 {
     if (status == AudioUnitStatus.OK)
     {
         return status;
     }

     ReportError(status);
     return status;
 }
        // MTAudioProcessingTap "prepare" stage: records the processing format
        // details on the context, then creates and initializes a bandpass
        // filter Audio Unit for the tap to render through. On any failure the
        // unit is disposed and context.AudioUnit ends up null.
        unsafe void TapPrepare(MTAudioProcessingTap tap, nint maxFrames, ref AudioStreamBasicDescription processingFormat)
        {
            // The CenterFrequency property reads this back later.
            context.SampleRate = processingFormat.SampleRate;

            // Format verification is only needed for the RMS calculation,
            // not for the Audio Unit itself.
            VerifyProcessingFormat(processingFormat);

            if (processingFormat.FormatFlags.HasFlag(AudioFormatFlags.IsNonInterleaved))
            {
                context.IsNonInterleaved = true;
            }

            // Locate the bandpass-filter effect component.
            var description = AudioComponentDescription.CreateEffect(AudioTypeEffect.BandPassFilter);
            // TODO: https://trello.com/c/GZUGUyH0
            var component = AudioComponent.FindNextComponent(null, ref description);

            if (component == null)
            {
                return;
            }

            var status = AudioUnitStatus.NoError;

            AudioUnit.AudioUnit unit = component.CreateAudioUnit();

            // The managed wrapper reports format failures as exceptions;
            // fold them back into a status code for the checks below.
            try
            {
                unit.SetAudioFormat(processingFormat, AudioUnitScopeType.Input);
                unit.SetAudioFormat(processingFormat, AudioUnitScopeType.Output);
            }
            catch (AudioUnitException)
            {
                status = AudioUnitStatus.FormatNotSupported;
            }

            if (status == AudioUnitStatus.NoError)
            {
                status = unit.SetRenderCallback(Render, AudioUnitScopeType.Input);
            }

            if (status == AudioUnitStatus.NoError)
            {
                status = unit.SetMaximumFramesPerSlice((uint)maxFrames, AudioUnitScopeType.Global);
            }

            if (status == AudioUnitStatus.NoError)
            {
                status = (AudioUnitStatus)unit.Initialize();
            }

            if (status != AudioUnitStatus.NoError)
            {
                // Setup failed somewhere along the chain: discard the unit.
                unit.Dispose();
                unit = null;
            }

            context.AudioUnit = unit;
        }
示例#8
0
		// Bridges the native region-completion callback back to the managed
		// AUScheduledAudioFileRegion instance carried in userData (a GCHandle).
		static void ScheduledAudioFileRegionCallback (IntPtr userData, ref ScheduledAudioFileRegion fileRegion, AudioUnitStatus status)
		{
			// No managed state attached — nothing to notify.
			if (userData == IntPtr.Zero) {
				return;
			}

			var gch = GCHandle.FromIntPtr (userData);
			var region = (AUScheduledAudioFileRegion) gch.Target;
			region?.completionHandler (region, status);
		}
示例#9
0
        // Pulls `frameCount` frames from the upstream pull block into this
        // bus's mutable buffer list. Returns NoConnection when there is no
        // upstream block to pull from.
        public AudioUnitStatus PullInput(ref AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timestamp, uint frameCount, int inputBusNumber, AURenderPullInputBlock pullInputBlock)
        {
            if (pullInputBlock == null)
            {
                return AudioUnitStatus.NoConnection;
            }

            // Make sure the buffer list is sized/reset before the pull writes into it.
            PrepareInputBufferList();

            return pullInputBlock(ref actionFlags, ref timestamp, frameCount, inputBusNumber, MutableAudioBufferList);
        }
        // Pushes a new bandpass center frequency to the filter Audio Unit,
        // logging (but not throwing) on failure. No-op until the unit exists.
        void SetCenterFrequency(float newFrequency)
        {
            var unit = context.AudioUnit;

            if (unit == null)
            {
                return;
            }

            var result = unit.SetParameter(AudioUnitParameterType.BandpassCenterFrequency, newFrequency, AudioUnitScopeType.Global);

            if (result != AudioUnitStatus.NoError)
            {
                Console.WriteLine("AudioUnit.SetParameter(AudioUnitParameterType.BandpassCenterFrequency): {0}", result);
            }
        }
示例#11
0
        // End-to-end graph test: builds the graph, registers the render
        // notifications and mixer render callback, then waits until both
        // callbacks have fired.
        public void DoTest()
        {
            SetupAUGraph();

            // One of these has to be commented out depending on old\new build
            graph.AddRenderNotify(GraphRenderCallback);
            //graph.RenderCallback += HandleRenderCallback;

            Assert.AreEqual(AudioUnitStatus.OK, mMixer.SetRenderCallback(MixerRenderCallback));

            WaitOnGraphAndMixerCallbacks();
        }
        // Pushes a new bandpass bandwidth value to the filter Audio Unit,
        // logging (but not throwing) on failure. No-op until the unit exists.
        void SetBandwidth(float newBandwidth)
        {
            var unit = context.AudioUnit;

            if (unit == null)
            {
                return;
            }

            // Console.WriteLine ("newBandwidth={0}", newBandwidth);
            var result = unit.SetParameter(AudioUnitParameterType.BandpassBandwidth, newBandwidth, AudioUnitScopeType.Global);

            if (result != AudioUnitStatus.NoError)
            {
                Console.WriteLine("AudioUnit.SetParameter(AudioUnitParameterType.BandpassBandwidth): {0}", result);
            }
        }
示例#13
0
#pragma warning restore 612
#endif

        // Registers a render-notification callback on the graph. The native
        // notify hook is installed only once — when the first managed callback
        // is added; subsequent callbacks simply join graphUserCallbacks and
        // are dispatched from the single native hook.
        //
        // Returns the AUGraph status of the native registration (OK when the
        // hook was already installed).
        public AudioUnitStatus AddRenderNotify(RenderDelegate callback)
        {
            if (callback == null)
            {
                // Fix: a null argument warrants ArgumentNullException, not the
                // broader ArgumentException. ArgumentNullException derives from
                // ArgumentException, so existing catch blocks still match.
                throw new ArgumentNullException(nameof(callback), "Callback can not be null");
            }

            AudioUnitStatus error = AudioUnitStatus.OK;

            // First subscriber: hook the native render notify into the graph.
            if (graphUserCallbacks.Count == 0)
            {
                error = (AudioUnitStatus)AUGraphAddRenderNotify(handle, renderCallback, GCHandle.ToIntPtr(gcHandle));
            }

            // Only track the managed callback if the native side is healthy.
            if (error == AudioUnitStatus.OK)
            {
                graphUserCallbacks.Add(callback);
            }

            return error;
        }
示例#14
0
        // Registers a render-notification callback on the graph. The native
        // notify hook is installed only once — when the first managed callback
        // is added; later callbacks just join graphUserCallbacks.
        public AudioUnitStatus AddRenderNotify(RenderDelegate callback)
        {
            if (callback is null)
            {
                ObjCRuntime.ThrowHelper.ThrowArgumentNullException(nameof(callback));
            }

            var status = AudioUnitStatus.OK;

            // First subscriber: hook the native render notify into the graph.
            if (graphUserCallbacks.Count == 0)
            {
                status = (AudioUnitStatus)AUGraphAddRenderNotify(Handle, renderCallback, GCHandle.ToIntPtr(gcHandle));
            }

            // Only track the managed callback if the native side is healthy.
            if (status == AudioUnitStatus.OK)
            {
                graphUserCallbacks.Add(callback);
            }

            return status;
        }
示例#15
0
        // AUAudioUnit internal render block: queries host transport state,
        // pulls input from the upstream block, wires up in-place output
        // buffers when the host passed null data pointers, and hands both
        // buffer lists to the DSP kernel for event-aware processing.
        public AudioUnitStatus InternalRenderBlockProc(ref AudioUnitRenderActionFlags actionFlags, ref AudioTimeStamp timestamp, uint frameCount, nint outputBusNumber, AudioBuffers outputData, AURenderEventEnumerator realtimeEventListHead, AURenderPullInputBlock pullInputBlock)
        {
            var transportStateFlags = (AUHostTransportStateFlags)0;

            double currentSamplePosition  = 0;
            double cycleStartBeatPosition = 0;
            double cycleEndBeatPosition   = 0;

            // Ask the host (if it provided a transport block) for the current
            // transport state; the values are obtained but not otherwise used
            // in this visible body — presumably the kernel/subclass consumes
            // them elsewhere (confirm).
            var callBack = TransportStateBlock;

            if (callBack != null)
            {
                callBack(ref transportStateFlags, ref currentSamplePosition, ref cycleStartBeatPosition, ref cycleEndBeatPosition);
            }

            var state = Kernel;
            var input = inputBus;

            // Pull frameCount frames of input from bus 0; bail out with the
            // upstream error so the host can react.
            var             pullFlags = (AudioUnitRenderActionFlags)0;
            AudioUnitStatus err       = input.PullInput(ref pullFlags, timestamp, frameCount, 0, pullInputBlock);

            if (err != AudioUnitStatus.NoError)
            {
                return(err);
            }

            AudioBuffers inAudioBufferList = input.MutableAudioBufferList;

            // Null output pointers mean the host wants in-place processing:
            // alias the output buffers onto the pulled input buffers.
            if (outputData [0].Data == IntPtr.Zero)
            {
                for (int i = 0; i < outputData.Count; i++)
                {
                    outputData.SetData(i, inAudioBufferList [i].Data);
                }
            }

            // Run the DSP kernel over the frames, interleaving any scheduled
            // realtime events (parameter changes, MIDI) at their sample offsets.
            state.SetBuffers(inAudioBufferList, outputData);
            state.ProcessWithEvents(timestamp, (int)frameCount, realtimeEventListHead);

            return(AudioUnitStatus.NoError);
        }
示例#16
0
        // Unregisters a previously added render-notification callback. When
        // the last managed callback is removed, the native notify hook is
        // detached from the graph as well.
        //
        // Throws ArgumentNullException for a null callback and
        // ArgumentException when the callback was never registered.
        public AudioUnitStatus RemoveRenderNotify(RenderDelegate callback)
        {
            if (callback == null)
            {
                // ArgumentNullException (derives from ArgumentException, so
                // existing catch blocks still match) is the correct type here.
                throw new ArgumentNullException(nameof(callback), "Callback can not be null");
            }
            if (!graphUserCallbacks.Contains(callback))
            {
                throw new ArgumentException("Cannot unregister a callback that has not been registered");
            }

            AudioUnitStatus error = AudioUnitStatus.OK;

            // Remove from the list first (preserves the original "remove even
            // on error" intent), then detach the native hook once no managed
            // callbacks remain.
            //
            // Fix: the original tested Count == 0 *before* removal, which can
            // never hold — the Contains() guard above guarantees at least one
            // entry — so AUGraphRemoveRenderNotify was unreachable and the
            // native notify hook was never detached.
            graphUserCallbacks.Remove(callback);

            if (graphUserCallbacks.Count == 0)
            {
                error = (AudioUnitStatus)AUGraphRemoveRenderNotify(handle, renderCallback, GCHandle.ToIntPtr(gcHandle));
            }

            return error;
        }
		// Guard helper: escalates any non-success AudioUnit status into a hard failure.
		void CheckStatus (AudioUnitStatus status)
		{
			if (status == AudioUnitStatus.NoError)
				return;

			throw new InvalidProgramException ();
		}
        // AudioUnit render callback bridging the device's interleaved 16-bit
        // buffer (`data[0]`) and the managed float channel arrays: converts
        // captured input short->float, runs the user IO callback, then writes
        // the produced float output back short-interleaved into the same
        // buffer. _clock measures the time spent per render cycle.
        // NOTE(review): the output path assumes the device buffer is
        // 2-channel interleaved 16-bit — confirm against the stream format
        // configured elsewhere.
        unsafe AudioUnitStatus AudioUnit_RenderCallback(AudioUnitRenderActionFlags actionFlags,
                                                        AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            _clock.Restart();
            AudioUnitStatus err = AudioUnitStatus.OK;

            // Capture microphone/input audio into `data` via the input
            // element (bus 1) before any conversion.
            if (_audioInputIsAvailable && _numInputChannels > 0)
            {
                err = _audioUnit.Render(ref actionFlags, timeStamp, 1, numberFrames, data);
            }

            var dataPtr = data[0].Data;

            if (_callback != null)
            {
                // Grow the float scratch buffers if the host asks for more
                // frames than we have room for.
                if (numberFrames > _sampleBuffer.NumSamples)
                {
                    PrepareFloatBuffers((int)numberFrames);
                }

                if (_audioInputIsAvailable && _numInputChannels > 0)
                {
                    // De-interleave and scale shorts into float channel buffers.
                    var shortData = (short *)dataPtr.ToPointer();
                    if (_numInputChannels >= 2)
                    {
                        float *leftInput  = _inputChannels[0];
                        float *rightInput = _inputChannels[1];
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *leftInput++  = *shortData++ *ShortToFloat;
                            *rightInput++ = *shortData++ *ShortToFloat;
                        }
                    }
                    else
                    {
                        // Mono input: takes the first sample of each frame and
                        // skips the second — assumes the buffer is still
                        // stereo-interleaved (TODO confirm).
                        float *leftInput = _inputChannels[0];
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *leftInput++ = *shortData++ *ShortToFloat;
                            ++shortData;
                        }
                    }
                }
                else
                {
                    // No usable input: hand the callback silence.
                    for (var i = _numInputChannels; --i >= 0;)
                    {
                        _sampleBuffer.ClearChannel(i);
                    }
                }

                // User processing: reads _inputChannels, writes _outputChannels.
                _callback.AudioDeviceIOCallback(
                    _inputChannels, _numInputChannels,
                    _outputChannels, _numOutputChannels,
                    (int)numberFrames
                    );

                // Optional debug dump: raw bytes of output channel 0.
                if (_out != null)
                {
                    byte *bytes = (byte *)_outputChannels[0];
                    for (var i = 0; i < numberFrames * sizeof(float); ++i)
                    {
                        _out.WriteByte(*bytes++);
                    }
                }

                // Re-interleave float output back into the 16-bit device buffer.
                {
                    var    shortData   = (short *)dataPtr.ToPointer();
                    float *leftOutput  = _outputChannels[0];
                    float *rightOutput = _outputChannels[1];
                    if (_numOutputChannels >= 2)
                    {
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *shortData++ = (short)(*leftOutput++ *FloatToShort);
                            *shortData++ = (short)(*rightOutput++ *FloatToShort);
                        }
                    }
                    else if (_numOutputChannels == 1)
                    {
                        // Mono output: duplicate the single channel to both sides.
                        float *output = _outputChannels[0];
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            short sample      = (short)(*output++ *FloatToShort);
                            *     shortData++ = sample;
                            *     shortData++ = sample;
                        }
                    }
                    else
                    {
                        // No output channels: emit silence.
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *shortData++ = 0;
                            *shortData++ = 0;
                        }
                    }
                }
            }
            else
            {
                // No user callback registered: fill the device buffer with silence.
                var shortData = (short *)dataPtr.ToPointer();
                for (var i = 0; i < numberFrames; ++i)
                {
                    *shortData++ = 0;
                    *shortData++ = 0;
                }
            }

            _clock.Stop();

            return(err);
        }
示例#19
0
        // Native completion callback for a scheduled audio file region:
        // recovers the managed AUScheduledAudioFileRegion from the GCHandle
        // packed into userData and forwards the completion status to it.
        static void ScheduledAudioFileRegionCallback(IntPtr userData, ref ScheduledAudioFileRegion fileRegion, AudioUnitStatus status)
        {
            // No managed state attached — nothing to notify.
            if (userData == IntPtr.Zero)
            {
                return;
            }

            var handle = GCHandle.FromIntPtr(userData);
            var inst   = (AUScheduledAudioFileRegion)handle.Target;

            // completionHandler may be unset; null-propagate.
            inst?.completionHandler(inst, status);
        }