// Render callback for the simulator build: fills both stereo channels with a
// sine (or square) wave. On the simulator AudioUnitSampleType is float32,
// whereas a real device uses fixed-point int32.
void simulator_callback(object sender, AudioUnitEventArgs args)
{
    // Phase increment per sample for the requested frequency.
    double phaseStep = _frequency * 2.0 * Math.PI / _sampleRate;

    // Destination buffers: left = buffer 0, right = buffer 1.
    IntPtr leftBuffer = args.Data.mBuffers[0].mData;
    IntPtr rightBuffer = args.Data.mBuffers[1].mData;

    unsafe
    {
        var left = (float*)leftBuffer.ToPointer();
        var right = (float*)rightBuffer.ToPointer();
        for (int frame = 0; frame < args.NumberFrames; frame++)
        {
            // Divide by 2048 to keep the output level low.
            float sample = (float)Math.Sin(_phase) / 2048;
            if (_isSquareWave)
            {
                // Collapse the sine to a two-level square of the same amplitude.
                sample = sample > 0 ? 1.0f / 2048.0f : -1.0f / 2048.0f;
            }
            *left++ = sample;
            *right++ = sample;
            _phase += phaseStep;
        }
    }
    // Keep the accumulated phase bounded.
    _phase %= 2 * Math.PI;
}
// AudioUnit render callback. An instance method is used because the static
// native callback cannot reach instance state (_phase, _sampleRate).
// Fills both stereo channels with a sine wave in the device's fixed-point
// (8.24) int32 sample format.
// NOTE(review): frequency is hard-coded to 440 Hz here, while the simulator
// variant uses a _frequency field — confirm whether that is intentional.
void device_callback(object sender, AudioUnitEventArgs args)
{
    // Phase increment per sample for a 440 Hz tone.
    double phaseStep = 440 * 2.0 * Math.PI / _sampleRate;

    // Destination buffers: left = buffer 0, right = buffer 1.
    IntPtr leftBuffer = args.Data.Buffers[0].Data;
    IntPtr rightBuffer = args.Data.Buffers[1].Data;

    unsafe
    {
        var left = (int*)leftBuffer.ToPointer();
        var right = (int*)rightBuffer.ToPointer();
        for (int frame = 0; frame < args.NumberFrames; frame++)
        {
            // Scale to fixed-point 8.24; /128 keeps the level well below full scale.
            int sample = (int)(Math.Sin(_phase) * int.MaxValue / 128);
            *left++ = sample;
            *right++ = sample;
            _phase += phaseStep;
        }
    }
    // Keep the accumulated phase bounded.
    _phase %= 2 * Math.PI;
}
// Native render callback trampoline: recovers the managed AudioUnit instance
// from the GCHandle packed into inRefCon and forwards the render request to
// its _renderEvent delegate. Returns 0 (noErr) unconditionally.
static int renderCallback(IntPtr inRefCon, ref AudioUnitRenderActionFlags _ioActionFlags, ref AudioTimeStamp _inTimeStamp, uint _inBusNumber, uint _inNumberFrames, AudioBufferList _ioData)
{
    // inRefCon carries a GCHandle to the owning AudioUnit instance.
    var handle = GCHandle.FromIntPtr(inRefCon);
    var audioUnit = (AudioUnit)handle.Target;

    var renderEvent = audioUnit._renderEvent;
    if (renderEvent != null)
    {
        renderEvent(audioUnit,
            new AudioUnitEventArgs(_ioActionFlags, _inTimeStamp, _inBusNumber, _inNumberFrames, _ioData));
    }

    return 0; // noErr
}
// AudioUnit render callback. An instance method is used because the static
// native callback cannot reach instance state.
// Pulls microphone input from bus 0 directly into the callback's buffers.
void device_callback(object sender, AudioUnitEventArgs args)
{
    _audioUnit.Render(
        args.ActionFlags,
        args.TimeStamp,
        0, // Remote input
        args.NumberFrames,
        args.Data);
}
// Render callback that streams audio from an ExtAudioFile into the supplied
// buffers. A short read signals EOF: rewind the file and stop playback.
void _audioUnit_RenderCallback(object sender, AudioUnitEventArgs e)
{
    // Ask the file for exactly the number of frames the AudioUnit wants.
    uint framesRead = _extAudioFile.Read(e.NumberFrames, e.Data);

    if (framesRead != e.NumberFrames)
    {
        // EOF reached: rewind so a later play starts from the file head.
        _extAudioFile.Seek(0);
        Stop();
    }
}
// AudioUnit render callback. An instance method is used because the static
// native callback cannot reach instance state (_phase, _period, _waveFormType).
// Generates one chunk of the selected waveform in the device's fixed-point
// (8.24) int32 format and copies it to both stereo channels.
// (A commented-out float32 simulator variant previously preceded this method;
// removed as dead code — the live version exists elsewhere in the project.)
void device_callback(object sender, AudioUnitEventArgs args)
{
    // Destination buffers: left = buffer 0, right = buffer 1.
    IntPtr outL = args.Data.mBuffers[0].mData;
    IntPtr outR = args.Data.mBuffers[1].mData;

    var buf = new int[args.NumberFrames];

    // Generate the waveform. Amplitude is int.MaxValue / 128 to stay well
    // below full scale; _phase counts samples within one _period.
    switch (_waveFormType)
    {
        case Monotouch_AudioUnit_PlayingSinWaveform.WaveFormType.Sin:
            for (int i = 0; i < buf.Length; i++)
            {
                buf[i] = (int)(Math.Sin(2.0 * Math.PI * _phase / (double)_period) * int.MaxValue / 128.0);
                _phase = (_phase + 1) % _period;
            }
            break;

        case Monotouch_AudioUnit_PlayingSinWaveform.WaveFormType.Square:
            for (int i = 0; i < buf.Length; i++)
            {
                // Low for the first half of the period, high for the second.
                buf[i] = _phase < _period / 2 ? -1 * int.MaxValue / 128 : int.MaxValue / 128;
                _phase = (_phase + 1) % _period;
            }
            break;

        case Monotouch_AudioUnit_PlayingSinWaveform.WaveFormType.Impulse:
            for (int i = 0; i < buf.Length; i++)
            {
                // Single non-zero sample at the start of each period.
                buf[i] = _phase == 0 ? int.MaxValue / 128 : 0;
                _phase = (_phase + 1) % _period;
            }
            break;
    }

    // Copy the generated mono signal to both output channels.
    unsafe
    {
        var outLPtr = (int*)outL.ToPointer();
        var outRPtr = (int*)outR.ToPointer();
        for (int i = 0; i < buf.Length; i++)
        {
            *outLPtr++ = buf[i];
            *outRPtr++ = buf[i];
        }
    }
}
// Pulls microphone input (bus 1) into the callback buffers, then runs a
// one-pole low-pass envelope follower over the left channel: _dcLevel tracks
// the DC offset, and the rectified deviation from it is smoothed into the
// shared _sigLevel field.
void _callback(object sender, AudioUnitEventArgs args)
{
    // Fill args.Data with microphone samples.
    _audioUnit.Render(args.ActionFlags,
        args.TimeStamp,
        1, // Remote input
        args.NumberFrames,
        args.Data);

    IntPtr outL = args.Data.mBuffers[0].mData;
    IntPtr outR = args.Data.mBuffers[1].mData;

    // Work on a local copy of the level; the shared field is written once,
    // under the lock, after the loop.
    float diff;
    float level = _sigLevel;
    unsafe
    {
        var left = (Int32*)outL.ToPointer();
        var right = (Int32*)outR.ToPointer();
        for (int i = 0; i < args.NumberFrames; i++)
        {
            float sample = *left;
            // Update the DC estimate first, then measure deviation from it.
            _dcLevel += (sample - _dcLevel) * _lpf_constant;
            diff = Math.Abs(sample - _dcLevel);
            level += (diff - level) * _lpf_constant;
            left++;
        }
    }

    // NOTE(review): lock(this) is discouraged (a private gate object is
    // preferred), but the lock target must match readers elsewhere in the
    // class, so it is kept as-is here.
    lock (this)
    {
        _sigLevel = level;
    }
}
// Unmanaged-side render callback. inRefCon carries a GCHandle to the owning
// AudioUnit; the render request is dispatched to that instance's
// _renderEvent delegate. Always returns 0 (no error).
static int renderCallback(IntPtr inRefCon, ref AudioUnitRenderActionFlags _ioActionFlags, ref AudioTimeStamp _inTimeStamp, uint _inBusNumber, uint _inNumberFrames, AudioBufferList _ioData)
{
    var gch = GCHandle.FromIntPtr(inRefCon);
    var owner = (AudioUnit)gch.Target;

    if (owner._renderEvent != null)
    {
        var eventArgs = new AudioUnitEventArgs(
            _ioActionFlags,
            _inTimeStamp,
            _inBusNumber,
            _inNumberFrames,
            _ioData);
        owner._renderEvent(owner, eventArgs);
    }

    return 0; // no error
}
// Render callback that plays the PCM data held in _buffer, forward or in
// reverse, optionally looping. Mono sources (one channel) feed both output
// channels from buffer 0. Once playback completes, the remaining frames are
// zero-filled and Stop() is called.
void _audioUnit_RenderCallback(object sender, AudioUnitEventArgs args)
{
    IntPtr outL = args.Data.Buffers[0].Data;
    IntPtr outR = args.Data.Buffers[1].Data;

    unsafe
    {
        var outLPtr = (int*)outL.ToPointer();
        var outRPtr = (int*)outR.ToPointer();
        var buf0 = (int*)_buffer.Buffers[0].Data;
        // Mono sources feed both channels from buffer 0.
        int* buf1 = (_numberOfChannels == 2) ? (int*)_buffer.Buffers[1].Data : buf0;

        for (int i = 0; i < args.NumberFrames; i++)
        {
            if (_isDone)
            {
                // Playback finished: pad the rest of the request with silence.
                *outLPtr++ = 0;
                *outRPtr++ = 0;
            }
            else if (!_isReverse)
            {
                // Forward play: read the current frame, then advance.
                // BUGFIX: the original used buf0[++_currentFrame], which
                // skipped frame 0 and read buf0[_totalFrames] — one past
                // the end of the buffer.
                *outLPtr++ = buf0[_currentFrame];
                *outRPtr++ = buf1[_currentFrame];
                _currentFrame++;
                if (_currentFrame >= _totalFrames)
                {
                    _currentFrame = 0;
                    if (!_isLoop)
                    {
                        _isDone = true;
                    }
                }
            }
            else
            {
                // Reverse play: read the current frame, then step backwards.
                // BUGFIX: pre-decrement skipped the last frame; and the loop
                // test was inverted (the original set _isDone when _isLoop
                // was TRUE, stopping exactly when looping was requested).
                *outLPtr++ = buf0[_currentFrame];
                *outRPtr++ = buf1[_currentFrame];
                if (_currentFrame == 0)
                {
                    // Wrapped past the first frame (uint-safe: never negative).
                    _currentFrame = (uint)(_totalFrames - 1);
                    if (!_isLoop)
                    {
                        _isDone = true;
                    }
                }
                else
                {
                    _currentFrame--;
                }
            }
        }
    }

    if (_isDone)
    {
        Stop();
    }
}
// Render callback: pulls microphone input (bus 1) directly into the buffers
// handed to us by the AudioUnit.
void _audioUnit_RenderCallback(object sender, AudioUnitEventArgs args)
{
    // Callback heartbeat trace.
    System.Diagnostics.Debug.WriteLine("i");

    _audioUnit.Render(args.ActionFlags,
        args.TimeStamp,
        1, // Remote input
        args.NumberFrames,
        args.Data);
}
// Render callback: captures microphone input, runs an envelope follower with
// trigger detection on the left channel, and — while triggered — overwrites
// the output with the pre-loaded sound held in _buffer (silence otherwise).
void _audioUnit_RenderCallback(object sender, AudioUnitEventArgs args)
{
    Console.WriteLine ("Invoked");

    // Pull microphone input (bus 1) into args.Data.
    _audioUnit.Render(args.ActionFlags,
        args.TimeStamp,
        1, // Remote input
        args.NumberFrames,
        args.Data);

    IntPtr outL = args.Data.Buffers[0].Data;
    IntPtr outR = args.Data.Buffers[1].Data;

    // Signal-level tracking and trigger detection on the left channel.
    unsafe
    {
        var outLPtr = (int*)outL.ToPointer();
        for (int i = 0; i < args.NumberFrames; i++)
        {
            // Asymmetric one-pole LPF: fast attack (/1000), slow decay (/10000).
            float diff = Math.Abs(*outLPtr) - _signalLevel;
            if (diff > 0)
                _signalLevel += diff / 1000f;
            else
                _signalLevel += diff / 10000f;
            diff = Math.Abs(diff);

            // Arm playback when the deviation exceeds the threshold.
            if (_triggered <= 0 && diff > _threshold)
            {
                _triggered = _playingDuration;
            }

            // BUGFIX: advance to the next input sample. The original never
            // incremented the pointer, so the same (first) sample was
            // examined NumberFrames times.
            outLPtr++;
        }
    }

    // Playback of the triggered sound (or silence while idle).
    unsafe
    {
        var outLPtr = (int*)outL.ToPointer();
        var outRPtr = (int*)outR.ToPointer();
        var buf0 = (int*)_buffer.Buffers[0].Data;
        // Mono sources feed both channels from buffer 0.
        int* buf1 = (_numberOfChannels == 2) ? (int*)_buffer.Buffers[1].Data : buf0;
        for (int i = 0; i < args.NumberFrames; i++)
        {
            _triggered = Math.Max(0, _triggered - 1);
            if (_triggered <= 0)
            {
                // Idle: emit silence.
                *outLPtr++ = 0;
                *outRPtr++ = 0;
            }
            else
            {
                if (_currentFrame >= _totalFrames)
                {
                    _currentFrame = 0;
                }
                // BUGFIX: read-then-advance. The original pre-increment
                // (buf0[++_currentFrame]) skipped frame 0 and could read
                // one past the end of the buffer.
                *outLPtr++ = buf0[_currentFrame];
                *outRPtr++ = buf1[_currentFrame];
                _currentFrame++;
            }
        }
    }
}