uint TapProc(AudioQueueProcessingTap audioQueueTap, uint inNumberOfFrames, ref AudioTimeStamp timeStamp, ref AudioQueueProcessingTapFlags flags, AudioBuffers data)
        {
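            // AudioQueueProcessingTap callback: pull the tap's source audio, stash the channel
            // buffers in preRenderData, then re-render them through genericOutputUnit.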
            AudioQueueProcessingTapFlags sourceFlags;
            uint sourceFrames;

            if (audioQueueTap.GetSourceAudio(inNumberOfFrames, ref timeStamp, out sourceFlags, out sourceFrames, data) != AudioQueueStatus.Ok)
            {
                throw new ApplicationException();
            }

            for (int channel = 0; channel < data.Count; channel++)
            {
                preRenderData[channel] = data [channel].Data;
                data.SetData(channel, IntPtr.Zero);
            }

            renderTimeStamp.Flags = AudioTimeStamp.AtsFlags.SampleTimeValid;
            AudioUnitRenderActionFlags actionFlags = 0;

            AudioUnitStatus res = genericOutputUnit.Render(ref actionFlags, renderTimeStamp, 0, inNumberOfFrames, data);

            if (res != AudioUnitStatus.NoError)
            {
                throw new ApplicationException();
            }

            return(sourceFrames);
        }
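 // Raw P/Invoke signature for AudioUnitRender; the [DllImport] attribute binding it to the
 // AudioUnit framework is not shown in this excerpt.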
 static extern int AudioUnitRender(IntPtr inUnit,
                                   ref AudioUnitRenderActionFlags ioActionFlags,
                                   ref AudioTimeStamp inTimeStamp,
                                   UInt32 inOutputBusNumber,
                                   UInt32 inNumberFrames,
                                   AudioBufferList ioData
                                   );
Example #3
        static AudioUnitStatus renderCallback(IntPtr inRefCon,
                                              ref AudioUnitRenderActionFlags _ioActionFlags,
                                              ref AudioTimeStamp _inTimeStamp,
                                              uint _inBusNumber,
                                              uint _inNumberFrames,
                                              IntPtr _ioData)
        {
            // getting audiounit instance
            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst    = (AUGraph)handler.Target;
            HashSet <RenderDelegate> renderers = inst.graphUserCallbacks;

            if (renderers.Count != 0)
            {
                using (var buffers = new AudioBuffers(_ioData)) {
                    foreach (RenderDelegate renderer in renderers)
                    {
                        renderer(_ioActionFlags, _inTimeStamp, _inBusNumber, _inNumberFrames, buffers);
                    }
                    return(AudioUnitStatus.OK);
                }
            }

            return(AudioUnitStatus.InvalidParameter);
        }
        static int renderCallback(IntPtr inRefCon,
                                  ref AudioUnitRenderActionFlags _ioActionFlags,
                                  ref AudioTimeStamp _inTimeStamp,
                                  uint _inBusNumber,
                                  uint _inNumberFrames,
                                  AudioBufferList _ioData)
        {
            //System.Diagnostics.Debug.WriteLine(_ioActionFlags);
            // getting audiounit instance
            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst    = (AudioUnit)handler.Target;

            // invoke event handler with an argument
            if (inst._renderEvent != null)
            {
                var args = new AudioUnitEventArgs(
                    _ioActionFlags,
                    _inTimeStamp,
                    _inBusNumber,
                    _inNumberFrames,
                    _ioData);
                inst._renderEvent(inst, args);
            }

            return(0); // noerror
        }
Example #5
        static AudioUnitStatus RenderCallbackImpl(IntPtr clientData, ref AudioUnitRenderActionFlags actionFlags, ref AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, IntPtr data)
        {
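            // Static trampoline: recover the managed AudioUnit from the GCHandle and forward the call to its render delegate.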
            GCHandle gch = GCHandle.FromIntPtr(clientData);
            var      au  = (AudioUnit)gch.Target;

            return(au.render(actionFlags, timeStamp, busNumber, numberFrames, new AudioBuffers(data)));
        }
Example #6
        static int oldRenderCallback(IntPtr inRefCon,
                                     ref AudioUnitRenderActionFlags _ioActionFlags,
                                     ref AudioTimeStamp _inTimeStamp,
                                     int _inBusNumber,
                                     int _inNumberFrames,
                                     AudioBufferList _ioData)
        {
            // getting audiounit instance
            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst    = (AUGraph)handler.Target;

            // invoke event handler with an argument
            if (inst.RenderCallback != null)
            {
                var args = new AudioGraphEventArgs(
                    _ioActionFlags,
                    _inTimeStamp,
                    _inBusNumber,
                    _inNumberFrames,
                    _ioData);
                inst.RenderCallback(inst, args);
            }

            return(0);            // noerror
        }
        uint TapProc(AudioQueueProcessingTap audioQueueTap, uint numberOfFrames, ref AudioTimeStamp timeStamp, ref AudioQueueProcessingTapFlags flags, AudioBuffers data)
        {
            AudioQueueProcessingTapFlags source_flags;
            uint source_frames;

            if (audioQueueTap.GetSourceAudio(numberOfFrames, ref timeStamp, out source_flags, out source_frames, data) != AudioQueueStatus.Ok)
            {
                throw new ApplicationException();
            }

            preRenderData = data [0].Data;
            data.SetData(0, IntPtr.Zero);

            var renderTimeStamp = new AudioTimeStamp();

            renderTimeStamp.Flags = AudioTimeStamp.AtsFlags.SampleTimeValid;
            AudioUnitRenderActionFlags action_flags = 0;

            var res = genericOutputUnit.Render(ref action_flags, renderTimeStamp, 0, numberOfFrames, data);

            if (res != AudioUnitStatus.NoError)
            {
                throw new ApplicationException();
            }

            return(source_frames);
        }
Example #8
        static int renderCallback(IntPtr inRefCon,
            ref AudioUnitRenderActionFlags _ioActionFlags,
            ref AudioTimeStamp _inTimeStamp,
            uint _inBusNumber,
            uint _inNumberFrames,
            AudioBufferList _ioData)
        {
            //System.Diagnostics.Debug.WriteLine(_ioActionFlags);
            // getting audiounit instance
            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst = (AudioUnit)handler.Target;
            
            // invoke event handler with an argument
            if (inst._renderEvent != null) 
            {
                var args = new AudioUnitEventArgs(
                    _ioActionFlags,
                    _inTimeStamp,
                    _inBusNumber,
                    _inNumberFrames,
                    _ioData);
                inst._renderEvent(inst, args);
            }

            return 0; // noerror
        }
	    public AudioGraphEventArgs(AudioUnitRenderActionFlags actionFlags,
				       MonoMac.AudioToolbox.AudioTimeStamp timeStamp,
				       int busNumber,
				       int numberFrames,
				       AudioBufferList data)
		    : base(actionFlags, timeStamp, busNumber, numberFrames, data)
	    {
	    }
Example #10
 public AudioGraphEventArgs(AudioUnitRenderActionFlags actionFlags,
                            MonoMac.AudioToolbox.AudioTimeStamp timeStamp,
                            int busNumber,
                            int numberFrames,
                            AudioBufferList data)
     : base(actionFlags, timeStamp, busNumber, numberFrames, data)
 {
 }
Example #11
		public AudioUnitStatus PullInput (ref AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timestamp, uint frameCount, int inputBusNumber, AURenderPullInputBlock pullInputBlock)
		{
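			// Prepare this bus's input buffer list, then ask the host's pull-input block to fill it.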
			if (pullInputBlock == null)
				return AudioUnitStatus.NoConnection;

			PrepareInputBufferList ();
			AudioUnitStatus s = pullInputBlock (ref actionFlags, ref timestamp, frameCount, inputBusNumber, MutableAudioBufferList);
			return s;
		}
Example #12
 AudioUnitStatus ConvertInputRenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
 {
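     // Advance the render clock and hand back the channel buffers captured earlier in TapProc.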
     renderTimeStamp.SampleTime += numberFrames;
     for (int channel = 0; channel < data.Count; channel++)
     {
         data.SetData(channel, preRenderData [channel]);
     }
     return(AudioUnitStatus.NoError);
 }
        AudioUnitStatus RenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            // getting microphone input signal
            audioUnit.Render(ref actionFlags, timeStamp, 1, numberFrames, data);

            // Getting a pointer to a buffer to be filled
            IntPtr outL = data [0].Data;
            IntPtr outR = data [1].Data;

            // Getting signal level
            // https://en.wikipedia.org/wiki/Root_mean_square
            float sqrSum = 0;

            for (int j = 0; j < numberFrames; j++)
            {
                float v = Marshal.ReadInt16(outL, j * sizeof(Int16));
                sqrSum += (v * v);
            }
            SignalLevel = (float)Math.Sqrt(sqrSum / numberFrames);

            if (triggered <= 0 && SignalLevel > Threshold)
            {
                triggered = FramesToPlay;
            }

            // playing sound
            unsafe {
                var outLPtr = (int *)outL.ToPointer();
                var outRPtr = (int *)outR.ToPointer();

                for (int i = 0; i < numberFrames; i++)
                {
                    triggered = Math.Max(0, triggered - 1);

                    if (triggered > 0)
                    {
                        var buf0 = (int *)buffer [0].Data;
                        var buf1 = (int *)buffer [numberOfChannels - 1].Data;

                        ++CurrentFrame;
                        *outLPtr++ = buf0 [currentFrame];
                        *outRPtr++ = buf1 [currentFrame];
                    }
                    else
                    {
                        // 0-filling
                        *outLPtr++ = 0;
                        *outRPtr++ = 0;
                    }
                }
            }

            return(AudioUnitStatus.NoError);
        }
Example #14
 public AudioUnitStatus TryRender(AudioUnitRenderActionFlags flags,
                                  AudioTimeStamp timeStamp,
                                  int outputBusnumber,
                                  int numberFrames, AudioBufferList data)
 {
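     // Same native render call as Render, but any error status is returned to the caller instead of raising an exception.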
     return((AudioUnitStatus)AudioUnitRender(handle,
                                             ref flags,
                                             ref timeStamp,
                                             outputBusnumber,
                                             numberFrames,
                                             data));
 }
Example #15
 public AudioUnitEventArgs(AudioUnitRenderActionFlags actionFlags,
                           AudioTimeStamp timestamp,
                           int busNumber,
                           int frames,
                           AudioBufferList data)
 {
     ActionFlags    = actionFlags;
     this.TimeStamp = timestamp;
     BusNumber      = busNumber;
     NumberFrames   = frames;
     Data           = data;
 }
Example #16
        public AudioUnitEventArgs(AudioUnitRenderActionFlags actionFlags,
				  MonoMac.AudioToolbox.AudioTimeStamp timestamp,
				  int busNumber,
				  int frames,
				  AudioBufferList data)
        {
            ActionFlags = actionFlags;
            this.TimeStamp = timestamp;
            BusNumber = busNumber;
            NumberFrames = frames;
            Data = data;
        }
Example #17
        public AudioUnitStatus PullInput(ref AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timestamp, uint frameCount, int inputBusNumber, AURenderPullInputBlock pullInputBlock)
        {
            if (pullInputBlock == null)
            {
                return(AudioUnitStatus.NoConnection);
            }

            PrepareInputBufferList();
            AudioUnitStatus s = pullInputBlock(ref actionFlags, ref timestamp, frameCount, inputBusNumber, MutableAudioBufferList);

            return(s);
        }
        public void Render(AudioUnitRenderActionFlags flags, AudioTimeStamp timeStamp, UInt32 outputBusnumber, UInt32 numberFrames, AudioBufferList data)
        {
            int err = AudioUnitRender(_audioUnit,
                                      ref flags,
                                      ref timeStamp,
                                      outputBusnumber,
                                      numberFrames,
                                      data);

            if (err != 0)
            {
                throw new InvalidOperationException(String.Format("Error code:{0}", err));
            }
        }
Example #19
        AudioUnitStatus Render(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            // Just return audio buffers from MTAudioProcessingTap.
            MTAudioProcessingTapFlags flags;
            CMTimeRange range;
            nint        n;
            var         error = (AudioUnitStatus)(int)audioProcessingTap.GetSourceAudio((nint)numberFrames, data, out flags, out range, out n);

            if (error != AudioUnitStatus.NoError)
            {
                Console.WriteLine("{0} audioProcessingTap.GetSourceAudio failed", error);
            }
            return(error);
        }
Example #20
        /// <summary>
        /// Renders the mixer node. Orchestrates dynamic changes to tempo and beatcode.
        /// </summary>
        /// <returns>The render status.</returns>
        /// <param name="actionFlags">Action flags.</param>
        /// <param name="timeStamp">Time stamp.</param>
        /// <param name="busNumber">Bus number.</param>
        /// <param name="numberFrames">Number frames.</param>
        /// <param name="data">Data.</param>
        unsafe AudioUnitStatus MixerRenderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            if (busNumber >= Streams.Count)
            {
                // this prevents the buffer from doubling up with unused buses
                return(AudioUnitStatus.InvalidElement);
            }

            var outLeft  = (float *)data[0].Data;
            var outRight = (float *)data[1].Data;

            // if there's a count-off, we read from the count-off source
            if (CountOffSampleDuration > 0)
            {
                // skip all inputs but the last one so that the non-count-off cycle starts with bus 0
                if (busNumber != Streams.Count - 1)
                {
                    return(AudioUnitStatus.InvalidElement);
                }

                var stream = Streams[(int)busNumber];

                //if (stream.IsMuted)
                //{
                //    EnableInput(stream, true);
                //}

                _countOff.Read(outLeft, outRight, numberFrames);

                CountOffSampleDuration -= numberFrames;

                // set elapsed bpm and cycles to 0
                if (CountOffSampleDuration == 0)
                {
                    Metronome.Instance.ElapsedBpm -= Metronome.Instance.ConvertSamplesToBpm(_countOffTotal);
                    cycle = -1;
                    EnableInput(stream, !stream.IsMuted);
                }

                return(AudioUnitStatus.OK);
            }

            IStreamProvider source = Streams[(int)busNumber];

            source.Read(outLeft, outRight, numberFrames);

            return(AudioUnitStatus.OK);
        }
Example #21
        static AudioUnitStatus RenderCallbackImpl(IntPtr clientData, ref AudioUnitRenderActionFlags actionFlags, ref AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, IntPtr data)
        {
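            // Look up the managed render callback registered for this bus, wrap the native buffer list, and forward the call.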
            GCHandle gch = GCHandle.FromIntPtr(clientData);
            var      au  = (AUGraph)gch.Target;

            RenderDelegate callback;

            if (!au.nodesCallbacks.TryGetValue(busNumber, out callback))
            {
                return(AudioUnitStatus.InvalidParameter);
            }

            using (var buffers = new AudioBuffers(data)) {
                return(callback(actionFlags, timeStamp, busNumber, numberFrames, buffers));
            }
        }
Example #22
        public void Render(AudioUnitRenderActionFlags flags,
                           AudioTimeStamp timeStamp,
                           int outputBusnumber,
                           int numberFrames, AudioBufferList data)
        {
            int err = AudioUnitRender(handle,
                                      ref flags,
                                      ref timeStamp,
                                      outputBusnumber,
                                      numberFrames,
                                      data);

            if (err != 0)
            {
                throw new AudioUnitException(err);
            }
        }
        AudioUnitStatus RenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			// getting microphone input signal
			audioUnit.Render (ref actionFlags, timeStamp, 1, numberFrames, data);

			// Getting a pointer to a buffer to be filled
			IntPtr outL = data [0].Data;
			IntPtr outR = data [1].Data;

			// Getting signal level
			// https://en.wikipedia.org/wiki/Root_mean_square
			float sqrSum = 0;
			for (int j = 0;  j < numberFrames; j++) {
				float v = Marshal.ReadInt16(outL, j * sizeof(Int16));
				sqrSum += (v * v);
			}
			SignalLevel = (float)Math.Sqrt (sqrSum / numberFrames);

			if (triggered <= 0 && SignalLevel > Threshold)
				triggered = FramesToPlay;

			// playing sound
			unsafe {
				var outLPtr = (int*)outL.ToPointer ();
				var outRPtr = (int*)outR.ToPointer ();

				for (int i = 0; i < numberFrames; i++) {
					triggered = Math.Max (0, triggered - 1);

					if (triggered > 0) {
						var buf0 = (int*)buffer [0].Data;
						var buf1 = (int*)buffer [numberOfChannels - 1].Data;

						++CurrentFrame;
						*outLPtr++ = buf0 [currentFrame];
						*outRPtr++ = buf1 [currentFrame];
					} else {
						// 0-filling
						*outLPtr++ = 0;
						*outRPtr++ = 0;
					}
				}
			}

			return AudioUnitStatus.NoError;
		}
Example #24
        AudioUnitStatus _audioUnit_RenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
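            // Rebuild the event args only when the channel or frame count changes, attach the native buffers, and raise the process-block event.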
            if (_processBlockEventHandler != null)
            {
                if (_processBlockArgs == null ||
                    _processBlockArgs.ChannelCount != _audioFormat.ChannelsPerFrame ||
                    _processBlockArgs.NumberOfFrames != numberFrames)
                {
                    _processBlockArgs = new NativeProcessBlockEventArgs((int)numberFrames, _audioFormat.ChannelsPerFrame);
                }

                _processBlockArgs.AudioBuffers = data;
                _processBlockEventHandler(this, _processBlockArgs);

                //_processBlockArgs.ReturnInterleavedBuffer ();
                _processBlockArgs.ReturnNonInterleavedBuffer();
            }

            return(AudioUnitStatus.NoError);
        }
Example #25
        public AudioUnitStatus InternalRenderBlockProc(ref AudioUnitRenderActionFlags actionFlags, ref AudioTimeStamp timestamp, uint frameCount, nint outputBusNumber, AudioBuffers outputData, AURenderEventEnumerator realtimeEventListHead, AURenderPullInputBlock pullInputBlock)
        {
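            // Report host transport state, pull input for bus 0, point any null output buffers at the pulled input, then let the kernel process the frames along with queued real-time events.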
            var transportStateFlags = (AUHostTransportStateFlags)0;

            double currentSamplePosition  = 0;
            double cycleStartBeatPosition = 0;
            double cycleEndBeatPosition   = 0;

            var callBack = TransportStateBlock;

            if (callBack != null)
            {
                callBack(ref transportStateFlags, ref currentSamplePosition, ref cycleStartBeatPosition, ref cycleEndBeatPosition);
            }

            var state = Kernel;
            var input = inputBus;

            var             pullFlags = (AudioUnitRenderActionFlags)0;
            AudioUnitStatus err       = input.PullInput(ref pullFlags, timestamp, frameCount, 0, pullInputBlock);

            if (err != AudioUnitStatus.NoError)
            {
                return(err);
            }

            AudioBuffers inAudioBufferList = input.MutableAudioBufferList;

            if (outputData [0].Data == IntPtr.Zero)
            {
                for (int i = 0; i < outputData.Count; i++)
                {
                    outputData.SetData(i, inAudioBufferList [i].Data);
                }
            }

            state.SetBuffers(inAudioBufferList, outputData);
            state.ProcessWithEvents(timestamp, (int)frameCount, realtimeEventListHead);

            return(AudioUnitStatus.NoError);
        }
Example #26
        unsafe AudioUnitStatus HandleRenderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            var sndbuf = soundBuffer [busNumber];

            var sample     = sndbuf.SampleNum;              // frame number to start from
            var bufSamples = sndbuf.TotalFrames;            // total number of frames in the sound buffer
            var input      = (int *)sndbuf.Data;

            var outA = (int *)data [0].Data;             // output audio buffer for L channel
            var outB = (int *)data [1].Data;             // output audio buffer for R channel

            // for demonstration purposes we've configured 2 stereo input busses for the mixer unit
            // but only provide a single channel of data from each input bus when asked and silence for the other channel
            // alternating as appropriate when asked to render bus 0 or bus 1's input
            for (var i = 0; i < numberFrames; ++i)
            {
                if (busNumber == 1)
                {
                    outA [i] = 0;
                    outB [i] = input [sample++];
                }
                else
                {
                    outA [i] = input[sample++];
                    outB [i] = 0;
                }

                if (sample > bufSamples)
                {
                    // start over from the beginning of the data, our audio simply loops
                    Debug.Print("Looping data for bus {0} after {1} source frames rendered", busNumber, sample - 1);
                    sample = 0;
                }
            }

            // keep track of where we are in the source data buffer
            sndbuf.SampleNum = sample;

            return(AudioUnitStatus.OK);
        }
Example #27
        AudioUnitStatus AudioInputCallBack(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioUnit audioUnit)
        {
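            // Render the input into a temporary 16-bit mono buffer, hand a managed copy to DataAvailable subscribers, then free the native memory.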
            var buffer = new AudioBuffer()
            {
                NumberChannels = 1,
                DataByteSize   = (int)numberFrames * 2,
                Data           = System.Runtime.InteropServices.Marshal.AllocHGlobal((int)numberFrames * 2)
            };

            var bufferList = new AudioBuffers(1);

            bufferList[0] = buffer;

            var status = audioUnit.Render(ref actionFlags, timeStamp, busNumber, numberFrames, bufferList);

            var send = new byte[buffer.DataByteSize];

            System.Runtime.InteropServices.Marshal.Copy(buffer.Data, send, 0, send.Length);

            var handler = DataAvailable;

            if (handler != null)
            {
                handler(this, send);
            }

            Console.Write("\n Buffer: ");
            foreach (byte b in send)
            {
                Console.Write("\\x" + b);
            }
            Console.Write("\n");

            System.Runtime.InteropServices.Marshal.FreeHGlobal(buffer.Data);



            return(AudioUnitStatus.OK);
        }
Example #28
 AudioUnitStatus InputCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, global::AudioUnit.AudioUnit audioUnit)
 {
     inputCallbackEvent.Set();
     return(AudioUnitStatus.NoError);
 }
Example #29
        unsafe AudioUnitStatus AudioUnit_RenderCallback(AudioUnitRenderActionFlags actionFlags,
                                                        AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
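            // Pull microphone input when available, convert the interleaved 16-bit samples to per-channel floats for the managed callback, then write the callback's float output back as interleaved 16-bit (or silence when no callback is set).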
            _clock.Restart();
            AudioUnitStatus err = AudioUnitStatus.OK;

            if (_audioInputIsAvailable && _numInputChannels > 0)
            {
                err = _audioUnit.Render(ref actionFlags, timeStamp, 1, numberFrames, data);
            }

            var dataPtr = data[0].Data;

            if (_callback != null)
            {
                if (numberFrames > _sampleBuffer.NumSamples)
                {
                    PrepareFloatBuffers((int)numberFrames);
                }

                if (_audioInputIsAvailable && _numInputChannels > 0)
                {
                    var shortData = (short *)dataPtr.ToPointer();
                    if (_numInputChannels >= 2)
                    {
                        float *leftInput  = _inputChannels[0];
                        float *rightInput = _inputChannels[1];
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *leftInput++  = *shortData++ * ShortToFloat;
                            *rightInput++ = *shortData++ * ShortToFloat;
                        }
                    }
                    else
                    {
                        float *leftInput = _inputChannels[0];
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *leftInput++ = *shortData++ * ShortToFloat;
                            ++shortData;
                        }
                    }
                }
                else
                {
                    for (var i = _numInputChannels; --i >= 0;)
                    {
                        _sampleBuffer.ClearChannel(i);
                    }
                }

                _callback.AudioDeviceIOCallback(
                    _inputChannels, _numInputChannels,
                    _outputChannels, _numOutputChannels,
                    (int)numberFrames
                    );

                if (_out != null)
                {
                    byte *bytes = (byte *)_outputChannels[0];
                    for (var i = 0; i < numberFrames * sizeof(float); ++i)
                    {
                        _out.WriteByte(*bytes++);
                    }
                }

                {
                    var    shortData   = (short *)dataPtr.ToPointer();
                    float *leftOutput  = _outputChannels[0];
                    float *rightOutput = _outputChannels[1];
                    if (_numOutputChannels >= 2)
                    {
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *shortData++ = (short)(*leftOutput++ * FloatToShort);
                            *shortData++ = (short)(*rightOutput++ * FloatToShort);
                        }
                    }
                    else if (_numOutputChannels == 1)
                    {
                        float *output = _outputChannels[0];
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            short sample = (short)(*output++ * FloatToShort);
                            *shortData++ = sample;
                            *shortData++ = sample;
                        }
                    }
                    else
                    {
                        for (var i = 0; i < numberFrames; ++i)
                        {
                            *shortData++ = 0;
                            *shortData++ = 0;
                        }
                    }
                }
            }
            else
            {
                var shortData = (short *)dataPtr.ToPointer();
                for (var i = 0; i < numberFrames; ++i)
                {
                    *shortData++ = 0;
                    *shortData++ = 0;
                }
            }

            _clock.Stop();

            return(err);
        }
Example #30
		static AudioUnitStatus renderCallback(IntPtr inRefCon,
					ref AudioUnitRenderActionFlags _ioActionFlags,
					ref AudioTimeStamp _inTimeStamp,
					uint _inBusNumber,
					uint _inNumberFrames,
					IntPtr _ioData)
		{
			// getting audiounit instance
			var handler = GCHandle.FromIntPtr (inRefCon);
			var inst = (AUGraph)handler.Target;
			HashSet<RenderDelegate> renderers = inst.graphUserCallbacks;

			if (renderers.Count != 0) {
				using (var buffers = new AudioBuffers (_ioData)) {
					foreach (RenderDelegate renderer in renderers)
						renderer (_ioActionFlags, _inTimeStamp, _inBusNumber, _inNumberFrames, buffers);
					return AudioUnitStatus.OK;
				}
			}

			return AudioUnitStatus.InvalidParameter;
		}
Example #31
        AudioUnitStatus AudioInputCallBack(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioUnit audioUnit)
        {
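            // Build a packet: 4-byte tag, big-endian session id and timestamp, then the freshly rendered 16-bit mono audio; send it asynchronously via audioCaller.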
            MemoryStream ms = new MemoryStream();

            String s = "a000";

            byte[] bufWriter = Encoding.ASCII.GetBytes(s.ToCharArray(), 0, 4);
            ms.Write(bufWriter, 0, 4);

            bufWriter = BitConverter.GetBytes(AudioSessionId);
            if (BitConverter.IsLittleEndian)
            {
                Array.Reverse(bufWriter);
            }
            ms.Write(bufWriter, 0, 4);

            long time = (long)(DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalMilliseconds;

            //Console.WriteLine ((time - lasttime) + " ms delay");
            lasttime  = time;
            bufWriter = BitConverter.GetBytes(time);
            if (BitConverter.IsLittleEndian)
            {
                Array.Reverse(bufWriter);
            }
            ms.Write(bufWriter, 0, 8);

            var buffer = new AudioBuffer()
            {
                NumberChannels = 1,
                DataByteSize   = (int)numberFrames * 2,
                Data           = System.Runtime.InteropServices.Marshal.AllocHGlobal((int)numberFrames * 2)
            };

            var bufferList = new AudioBuffers(1);

            bufferList[0] = buffer;

            var status = audioUnit.Render(ref actionFlags, timeStamp, busNumber, numberFrames, bufferList);

            var send = new byte[buffer.DataByteSize];

            System.Runtime.InteropServices.Marshal.Copy(buffer.Data, send, 0, send.Length);

            ms.Write(send, 0, send.Length);

            Console.Write("\n Buffer: ");
            foreach (byte b in send)
            {
                Console.Write("\\x" + b);
            }
            Console.Write("\n");

            System.Runtime.InteropServices.Marshal.FreeHGlobal(buffer.Data);

            byte[] sendbuf = ms.ToArray();
            if (sendbuf.Length > 4096)
            {
                throw new Exception("Packet size too large!");
            }
            Task tk = Task.Factory.StartNew(() =>
            {
                try
                {
                    var aSender = audioCaller.BeginSend(sendbuf, sendbuf.Length, null, null);
                    aSender.AsyncWaitHandle.WaitOne(TimeSpan.FromSeconds(3));
                    if (aSender.IsCompleted)
                    {
                        audioCaller.EndSend(aSender);
                    }
                }
                catch
                {
                }
            });

            return(AudioUnitStatus.OK);
        }
Example #32
		public AudioUnitStatus TryRender(AudioUnitRenderActionFlags flags,
						AudioTimeStamp timeStamp,
						int outputBusnumber,
						int numberFrames, AudioBufferList data)
		{
			return (AudioUnitStatus) AudioUnitRender(handle,
								ref flags,
								ref timeStamp,
								outputBusnumber,
								numberFrames,
								data);
		}
Example #33
        AudioUnitStatus AudioInputCallBack(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioUnit audioUnit)
        {
            MemoryStream ms = new MemoryStream();

            String s = "a000";
            byte[] bufWriter = Encoding.ASCII.GetBytes(s.ToCharArray(), 0, 4);
            ms.Write(bufWriter, 0, 4);

            bufWriter = BitConverter.GetBytes(AudioSessionId);
            if (BitConverter.IsLittleEndian) Array.Reverse(bufWriter);
            ms.Write(bufWriter, 0, 4);

            long time = (long) (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalMilliseconds;

            //Console.WriteLine ((time - lasttime) + " ms delay");
            lasttime = time;
            bufWriter = BitConverter.GetBytes(time);
            if (BitConverter.IsLittleEndian) Array.Reverse(bufWriter);
            ms.Write(bufWriter, 0, 8);

            var buffer = new AudioBuffer()
                {
                    NumberChannels = 1,
                    DataByteSize = (int)numberFrames * 2,
                    Data = System.Runtime.InteropServices.Marshal.AllocHGlobal((int)numberFrames * 2)
                };

            var bufferList = new AudioBuffers(1);
            bufferList[0] = buffer;

            var status = audioUnit.Render(ref actionFlags, timeStamp, busNumber, numberFrames, bufferList);

            var send = new byte[buffer.DataByteSize];
            System.Runtime.InteropServices.Marshal.Copy(buffer.Data, send, 0, send.Length);

            ms.Write (send, 0, send.Length);

            Console.Write("\n Buffer: ");
            foreach (byte b in send)
                Console.Write("\\x" + b);
            Console.Write("\n");

            System.Runtime.InteropServices.Marshal.FreeHGlobal(buffer.Data);

            byte[] sendbuf = ms.ToArray();
            if (sendbuf.Length > 4096) throw new Exception("Packet size too large!");
            Task tk = Task.Factory.StartNew(() =>
                {
                    try
                    {
                        var aSender = audioCaller.BeginSend(sendbuf, sendbuf.Length, null, null);
                        aSender.AsyncWaitHandle.WaitOne(TimeSpan.FromSeconds(3));
                        if (aSender.IsCompleted) audioCaller.EndSend(aSender);
                    }
                    catch
                    {

                    }
                });

            return AudioUnitStatus.OK;
        }
		AudioUnitStatus Render (AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			// Just return audio buffers from MTAudioProcessingTap.
			MTAudioProcessingTapFlags flags;
			CMTimeRange range;
			nint n;
			var error = (AudioUnitStatus)(int)audioProcessingTap.GetSourceAudio ((nint)numberFrames, data, out flags, out range, out n);
			if (error != AudioUnitStatus.NoError)
				Console.WriteLine ("{0} audioProcessingTap.GetSourceAudio failed", error);
			return error;
		}
Example #35
		AudioUnitStatus ConvertInputRenderCallback (AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			data.SetData (0, preRenderData);
			return AudioUnitStatus.NoError;
		}
Example #36
        static int renderCallback(IntPtr inRefCon,
					  ref AudioUnitRenderActionFlags _ioActionFlags,
					  ref AudioTimeStamp _inTimeStamp,
					  int _inBusNumber,
					  int _inNumberFrames,
					  AudioBufferList _ioData)
        {
            // getting audiounit instance
            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst = (AUGraph)handler.Target;

            // invoke event handler with an argument
            if (inst.RenderCallback != null){
                var args = new AudioGraphEventArgs(
                    _ioActionFlags,
                    _inTimeStamp,
                    _inBusNumber,
                    _inNumberFrames,
                    _ioData);
                inst.RenderCallback(inst, args);
            }

            return 0; // noerror
        }
Example #37
		static extern AudioUnitStatus AudioUnitRender(IntPtr inUnit, ref AudioUnitRenderActionFlags ioActionFlags, ref AudioTimeStamp inTimeStamp,
						  uint inOutputBusNumber, uint inNumberFrames, IntPtr ioData);
Example #38
		public AudioUnitStatus Render (ref AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			return AudioUnitRender (handle, ref actionFlags, ref timeStamp, busNumber, numberFrames, (IntPtr) data);
		}
Example #39
		static AudioUnitStatus RenderCallbackImpl (IntPtr clientData, ref AudioUnitRenderActionFlags actionFlags, ref AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, IntPtr data)
		{
			GCHandle gch = GCHandle.FromIntPtr (clientData);
			var au = (AudioUnit) gch.Target;

			return au.render (actionFlags, timeStamp, busNumber, numberFrames, new AudioBuffers (data));
		}
 AudioUnitStatus ConvertInputRenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
 {
     data.SetData(0, preRenderData);
     return(AudioUnitStatus.NoError);
 }
		unsafe AudioUnitStatus HandleRenderDelegate (AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			var sndbuf = soundBuffer [busNumber];

			var sample = sndbuf.SampleNum;      // frame number to start from
			var bufSamples = sndbuf.TotalFrames;  // total number of frames in the sound buffer
			var input = (int*) sndbuf.Data;

			var outA = (int*) data [0].Data; // output audio buffer for L channel
			var outB = (int*) data [1].Data; // output audio buffer for R channel

			// for demonstration purposes we've configured 2 stereo input busses for the mixer unit
			// but only provide a single channel of data from each input bus when asked and silence for the other channel
			// alternating as appropriate when asked to render bus 0 or bus 1's input
			for (var i = 0; i < numberFrames; ++i) {

				if (busNumber == 1) {
					outA [i] = 0;
					outB [i] = input [sample++];
				} else {
					outA [i] = input[sample++];
					outB [i] = 0;
				}

				if (sample > bufSamples) {
					// start over from the beginning of the data, our audio simply loops
					Debug.Print ("Looping data for bus {0} after {1} source frames rendered", busNumber, sample - 1);
					sample = 0;
				}
			}

			// keep track of where we are in the source data buffer
			sndbuf.SampleNum = sample;

			return AudioUnitStatus.OK;
		}
Example #42
        AudioUnitStatus _audioUnit_RenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
            // getting microphone input signal
            _audioUnit.Render(ref actionFlags,
                              timeStamp,
                              1, // Remote input
                              numberFrames,
                              data);

            // Getting a pointer to a buffer to be filled
            IntPtr outL = data[0].Data;
            IntPtr outR = data[1].Data;

            // Getting signal level and trigger detection
            unsafe
            {
                var outLPtr = (int *)outL.ToPointer();
                for (int i = 0; i < numberFrames; i++)
                {
                    // LPF
                    float diff = Math.Abs(*outLPtr) - _signalLevel;
                    if (diff > 0)
                    {
                        _signalLevel += diff / 1000f;
                    }
                    else
                    {
                        _signalLevel += diff / 10000f;
                    }

                    diff = Math.Abs(diff);

                    // sound trigger detection
                    if (_triggered <= 0 && diff > _threshold)
                    {
                        _triggered = _playingDuration;
                    }
                }
            }

            // playing sound
            unsafe
            {
                var outLPtr = (int *)outL.ToPointer();
                var outRPtr = (int *)outR.ToPointer();

                for (int i = 0; i < numberFrames; i++)
                {
                    _triggered = Math.Max(0, _triggered - 1);

                    if (_triggered <= 0)
                    {
                        // 0-filling
                        *outLPtr++ = 0;
                        *outRPtr++ = 0;
                    }
                    else
                    {
                        var buf0 = (int *)_buffer[0].Data;
                        var buf1 = (_numberOfChannels == 2) ? (int *)_buffer[1].Data : buf0;

                        if (_currentFrame >= _totalFrames)
                        {
                            _currentFrame = 0;
                        }

                        ++_currentFrame;
                        *outLPtr++ = buf0[_currentFrame];
                        *outRPtr++ = buf1[_currentFrame];
                    }
                }
            }

            return(AudioUnitStatus.NoError);
        }
Example #43
 public void Render(AudioUnitRenderActionFlags flags, AudioTimeStamp timeStamp, UInt32 outputBusnumber, UInt32 numberFrames, AudioBufferList data)
 {
     int err = AudioUnitRender (_audioUnit,
         ref flags,
         ref timeStamp,
         outputBusnumber,
         numberFrames,
         data);
     if (err != 0)
         throw new InvalidOperationException(String.Format("Error code:{0}", err));
 }
Example #44
		static AudioUnitStatus RenderCallbackImpl (IntPtr clientData, ref AudioUnitRenderActionFlags actionFlags, ref AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, IntPtr data)
		{
			GCHandle gch = GCHandle.FromIntPtr (clientData);
			var au = (AUGraph) gch.Target;

			RenderDelegate callback;
			if (!au.nodesCallbacks.TryGetValue (busNumber, out callback))
				return AudioUnitStatus.InvalidParameter;

			using (var buffers = new AudioBuffers (data)) {
				return callback (actionFlags, timeStamp, busNumber, numberFrames, buffers);
			}
		}
Example #45
 static extern AudioUnitStatus AudioUnitRender(IntPtr inUnit, ref AudioUnitRenderActionFlags ioActionFlags, ref AudioTimeStamp inTimeStamp,
                                               uint inOutputBusNumber, uint inNumberFrames, IntPtr ioData);
Example #46
		public void Render(AudioUnitRenderActionFlags flags,
				   AudioTimeStamp timeStamp,
				   int outputBusnumber,
				   int numberFrames, AudioBufferList data)
		{
			int err = AudioUnitRender(handle,
						  ref flags,
						  ref timeStamp,
						  outputBusnumber,
						  numberFrames,
						  data);
			if (err != 0)
				throw new AudioUnitException (err);
		}
Example #47
 public AudioUnitStatus Render(ref AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
 {
     return(AudioUnitRender(handle, ref actionFlags, ref timeStamp, busNumber, numberFrames, (IntPtr)data));
 }
Example #48
		static extern int AudioUnitRender(IntPtr inUnit,
						  ref AudioUnitRenderActionFlags ioActionFlags,
						  ref AudioTimeStamp inTimeStamp,
						  int inOutputBusNumber,
						  int inNumberFrames,
						  AudioBufferList ioData);
Example #49
        AudioUnitStatus AudioInputCallBack(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioUnit audioUnit)
        {
            var buffer = new AudioBuffer()
                {
                    NumberChannels = 1,
                    DataByteSize = (int)numberFrames * 2,
                    Data = System.Runtime.InteropServices.Marshal.AllocHGlobal((int)numberFrames * 2)
                };

            var bufferList = new AudioBuffers(1);
            bufferList[0] = buffer;

            var status = audioUnit.Render(ref actionFlags, timeStamp, busNumber, numberFrames, bufferList);

            var send = new byte[buffer.DataByteSize];
            System.Runtime.InteropServices.Marshal.Copy(buffer.Data, send, 0, send.Length);

            var handler = DataAvailable;
            if (handler != null)
                handler(this, send);

            Console.Write("\n Buffer: ");
            foreach (byte b in send)
                Console.Write("\\x" + b);
            Console.Write("\n");

            System.Runtime.InteropServices.Marshal.FreeHGlobal(buffer.Data);

            return AudioUnitStatus.OK;
        }
Example #50
 AudioUnitStatus MixerRenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
 {
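     // Only counts invocations; the buffers are left untouched.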
     mixerRenderCallbackCount++;
     return(AudioUnitStatus.NoError);
 }
		AudioUnitStatus ConvertInputRenderCallback (AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			renderTimeStamp.SampleTime += numberFrames;
			for (int channel = 0; channel < data.Count; channel++) {
				data.SetData (channel, preRenderData [channel]);
			}
			return AudioUnitStatus.NoError;
		}
		AudioUnitStatus _audioUnit_RenderCallback (AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
		{
			// getting microphone input signal
			_audioUnit.Render (ref actionFlags,
				timeStamp,
				1, // Remote input
				numberFrames,
				data);

			// Getting a pointer to a buffer to be filled
			IntPtr outL = data [0].Data;
			IntPtr outR = data [1].Data;

			// Getting signal level and trigger detection
			unsafe {
				var outLPtr = (int*)outL.ToPointer ();
				for (int i = 0; i < numberFrames; i++) {
					// LPF
					float diff = Math.Abs (*outLPtr) - _signalLevel;
					if (diff > 0)
						_signalLevel += diff / 1000f;
					else
						_signalLevel += diff / 10000f;
                    
					diff = Math.Abs (diff);
                    
					// sound trigger detection
					if (_triggered <= 0 && diff > _threshold) {
						_triggered = _playingDuration;
					}
				}
			}                        

			// playing sound
			unsafe {
				var outLPtr = (int*)outL.ToPointer ();
				var outRPtr = (int*)outR.ToPointer ();                
                
				for (int i = 0; i < numberFrames; i++) {                    
					_triggered = Math.Max (0, _triggered - 1);

					if (_triggered <= 0) {
						// 0-filling
						*outLPtr++ = 0;
						*outRPtr++ = 0;
					} else {
						var buf0 = (int*)_buffer [0].Data;
						var buf1 = (_numberOfChannels == 2) ? (int*)_buffer [1].Data : buf0;

						if (_currentFrame >= _totalFrames) {
							_currentFrame = 0;
						}
                        
						++_currentFrame;
						*outLPtr++ = buf0 [_currentFrame];
						*outRPtr++ = buf1 [_currentFrame];
					}
				}
			}

			return AudioUnitStatus.NoError;
		}
Example #53
        AudioUnitStatus renderDelegate(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
        {
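            // Render the microphone input, feed it to the FFT buffer manager when it needs new samples, then silence the output so the input is analyzed but not played back.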
            var err = rioUnit.Render (ref actionFlags, timeStamp, 1, numberFrames, data);

            if (err != AudioUnitStatus.OK) {
                return err;
            }

            if (FFTBufferManager == null)
                return AudioUnitStatus.OK;

            if (FFTBufferManager.NeedsNewAudioData) {
                FFTBufferManager.GrabAudioData (data);
            }

            Silence (data,(int)numberFrames);

            return AudioUnitStatus.OK;
        }