// Render callback: pulls the microphone signal into the callback's buffer list.
// Bus 1 is the remote (hardware) input on the Remote IO unit.
void _audioUnit_RenderCallback(object sender, AudioUnitEventArgs args)
{
    // BUG FIX: removed Debug.WriteLine("i") — logging inside the realtime
    // render callback allocates and can block, causing audio glitches.
    _audioUnit.Render(args.ActionFlags,
        args.TimeStamp,
        1, // Remote input
        args.NumberFrames,
        args.Data);
}
// AudioUnit render callback for 2-channel, 16-bit source data.
// Copies up to NumberFrames stereo frames from sound.data into the two output
// buffers, scaling each sample by 'volume', and handles end-of-data/looping.
private void render2Channel16BitCallback(object sender, AudioUnitEventArgs e)
{
    int dataFrameSize = e.NumberFrames * 2;   // total 16-bit samples requested (L+R interleaved)
    int readSize = dataFrameSize;
    int nextOffset = readSize + dataOffset;
    int dataSize = sound.data.Length >> 1;    // source length in 16-bit samples
    if (nextOffset > dataSize)
    {
        // Clamp the read to the samples remaining in the source buffer.
        readSize -= nextOffset - dataSize;
    }
    unsafe
    {
        int *channel1 = (int *)e.Data.Buffers[0].Data.ToPointer();
        int *channel2 = (int *)e.Data.Buffers[1].Data.ToPointer();
        fixed(byte *data = sound.data)
        {
            short *dataS = (short *)data;     // reinterpret the byte[] as 16-bit samples
            int i2 = dataOffset, readSizeLoop = readSize / 2;   // frames to emit per channel
            // NOTE(review): each buffer receives readSizeLoop 32-bit samples
            // (readSize * 2 bytes), but DataByteSize is set to readSize here,
            // while the 8-bit stereo sibling sets readSizeLoop — the units look
            // inconsistent. TODO confirm against the unit's stream format.
            e.Data.Buffers[0].DataByteSize = readSize;
            e.Data.Buffers[1].DataByteSize = readSize;
            for (int i = 0; i != readSizeLoop; ++i)
            {
                // Place the scaled 16-bit sample into the high 16 bits of a
                // 32-bit word via pointer aliasing (index 1 is the high half
                // on little-endian — TODO confirm this is the intended format).
                int valueL = 0, valueR = 0;
                short *valueDataL = (short *)&valueL;
                short *valueDataR = (short *)&valueR;
                valueDataL[1] = (short)(dataS[i2] * volume);
                valueDataR[1] = (short)(dataS[i2 + 1] * volume);
#if iOS
                channel1[i] = valueL; // / 128;
                channel2[i] = valueR; // / 128;
#else
                channel1[i] = valueL;
                channel2[i] = valueR;
#endif
                i2 += 2;
            }
        }
    }
    dataOffset += readSize;
    if (readSize != dataFrameSize)
    {
        // Reached the end of the source: rewind, and stop unless looping.
        dataOffset = 0;
        if (!looped)
        {
            Stop();
        }
    }
}
// Render callback: streams the next chunk of samples from the ExtAudioFile
// straight into the output buffers; on end-of-file, rewinds and stops.
void _audioUnit_RenderCallback(object sender, AudioUnitEventArgs e)
{
    uint requested = e.NumberFrames;
    uint delivered = _extAudioFile.Read(requested, e.Data);

    // A short read means we hit the end of the file: seek back to the head
    // so the next playback starts from the beginning, then stop rendering.
    if (delivered != e.NumberFrames)
    {
        _extAudioFile.Seek(0);
        Stop();
    }
}
// NOTE: a commented-out float32 variant (simulator_callback) used to live
// here; AudioUnitSampleType is float32 on the iPhone simulator and
// fixed-point 8.24 (int32) on a real device.

// AudioUnit callback. An instance method is used (rather than the static
// callback) so that instance state such as _phase and _period is accessible.
void device_callback(object sender, AudioUnitEventArgs args)
{
    // Destination buffers supplied by the audio unit (left / right).
    IntPtr outL = args.Data.mBuffers[0].mData;
    IntPtr outR = args.Data.mBuffers[1].mData;

    var samples = new int[args.NumberFrames];

    // Synthesize one buffer of the selected waveform, attenuated by 128.
    switch (_waveFormType)
    {
        case Monotouch_AudioUnit_PlayingSinWaveform.WaveFormType.Sin:
            for (int i = 0; i < samples.Length; i++)
            {
                samples[i] = (int)(Math.Sin(2.0 * Math.PI * _phase / (double)_period) * int.MaxValue / 128.0);
                _phase = (_phase + 1) % _period;
            }
            break;

        case Monotouch_AudioUnit_PlayingSinWaveform.WaveFormType.Square:
            for (int i = 0; i < samples.Length; i++)
            {
                // Low for the first half period, high for the second.
                samples[i] = _phase < _period / 2 ? -1 * int.MaxValue / 128 : int.MaxValue / 128;
                _phase = (_phase + 1) % _period;
            }
            break;

        case Monotouch_AudioUnit_PlayingSinWaveform.WaveFormType.Impulse:
            for (int i = 0; i < samples.Length; i++)
            {
                // Single spike at phase 0, silence elsewhere.
                samples[i] = _phase == 0 ? int.MaxValue / 128 : 0;
                _phase = (_phase + 1) % _period;
            }
            break;
    }

    // Copy the generated signal to both channels.
    unsafe
    {
        var left = (int *)outL.ToPointer();
        var right = (int *)outR.ToPointer();
        for (int i = 0; i < samples.Length; i++)
        {
            *left++ = samples[i];
            *right++ = samples[i];
        }
    }
}
// AudioUnit render callback for 1-channel, 16-bit source data.
// Copies up to NumberFrames samples from sound.data into the single output
// buffer, scaling by 'volume', and handles end-of-data/looping.
private void render1Channel16BitCallback(object sender, AudioUnitEventArgs e)
{
    int dataFrameSize = e.NumberFrames;    // samples requested
    int readSize = dataFrameSize;
    int nextOffset = readSize + dataOffset;
    int dataSize = sound.data.Length >> 1; // source length in 16-bit samples
    if (nextOffset > dataSize)
    {
        // Clamp the read to the samples remaining in the source.
        readSize -= nextOffset - dataSize;
    }
    unsafe
    {
        // NOTE(review): the buffer is treated as float* here, but the stereo
        // siblings treat the same buffers as int*; writing a 16-bit sample
        // into the upper half of a float's bit pattern yields an unrelated
        // float value. Looks inconsistent — confirm the unit's stream format.
        float *channel1 = (float *)e.Data.Buffers[0].Data.ToPointer();
        fixed(byte *data = sound.data)
        {
            short *dataS = (short *)data;  // reinterpret the byte[] as 16-bit samples
            int i2 = dataOffset;
            // NOTE(review): readSize 32-bit output samples occupy readSize * 4
            // bytes, but readSize * 2 is reported here — TODO confirm.
            e.Data.Buffers[0].DataByteSize = readSize * 2;
            for (int i = 0; i != readSize; ++i)
            {
                // Place the scaled sample in the high 16 bits of the 32-bit
                // word via pointer aliasing (assumes little-endian).
                float value = 0;
                short *valueData = (short *)&value;
                valueData[1] = (short)(dataS[i2] * volume);
#if iOS
                channel1[i] = value; // / 128;
#else
                channel1[i] = value;
#endif
                ++i2;
            }
        }
    }
    dataOffset += readSize;
    if (readSize != dataFrameSize)
    {
        // End of source reached: rewind; stop unless looping.
        dataOffset = 0;
        if (!looped)
        {
            Stop();
        }
    }
}
// AudioUnit render callback for 1-channel, 8-bit source data.
// Converts unsigned 8-bit samples to scaled 16-bit values, writes them into
// the single output buffer, and handles end-of-data/looping.
private void render1Channel8BitCallback(object sender, AudioUnitEventArgs e)
{
    int dataFrameSize = e.NumberFrames;   // samples requested
    int readSize = dataFrameSize;
    int nextOffset = readSize + dataOffset;
    if (nextOffset > sound.data.Length)
    {
        // Clamp the read to the bytes remaining in the source.
        readSize -= nextOffset - sound.data.Length;
    }
    unsafe
    {
        int *channel1 = (int *)e.Data.Buffers[0].Data.ToPointer();
        fixed(byte *data = sound.data)
        {
            int i2 = dataOffset;
            // NOTE(review): readSize counts 32-bit output samples, so the
            // bytes actually written are readSize * 4, not readSize — TODO
            // confirm against the unit's stream format.
            e.Data.Buffers[0].DataByteSize = readSize;
            for (int i = 0; i != readSize; ++i)
            {
                // Normalize 0..255 to 0..1, rescale to 16-bit, apply volume,
                // and place the result in the high 16 bits of the 32-bit word
                // via pointer aliasing (assumes little-endian).
                int value = 0;
                short *valueData = (short *)&value;
                valueData[1] = (short)((data[i2] * 0.00392156862745f) * short.MaxValue * volume); // '* 0.00392156862745f' = '/ 255f'
#if iOS
                channel1[i] = value; // / 128;
#else
                channel1[i] = value;
#endif
                ++i2;
            }
        }
    }
    dataOffset += readSize;
    if (readSize != dataFrameSize)
    {
        // End of source reached: rewind; stop unless looping.
        dataOffset = 0;
        if (!looped)
        {
            Stop();
        }
    }
}
// Render callback: pulls the microphone signal (bus 1 = remote input) into
// the buffers, then tracks the DC offset and the smoothed signal envelope of
// the left channel. The result is published to _sigLevel under a lock.
void _callback(object sender, AudioUnitEventArgs args)
{
    // getting microphone
    _audioUnit.Render(args.ActionFlags,
        args.TimeStamp,
        1, // Remote input
        args.NumberFrames,
        args.Data);

    // Only the left channel is analyzed. (The original also fetched the
    // right-channel pointer but never used it — removed as dead code.)
    IntPtr outL = args.Data.mBuffers[0].mData;

    // level monitor: accumulate into locals, publish once at the end.
    float diff;
    float sig_level = _sigLevel;
    unsafe
    {
        var outLPtr = (Int32 *)outL.ToPointer();
        for (int i = 0; i < args.NumberFrames; i++)
        {
            float val = *outLPtr;
            // One-pole low-pass tracks the DC offset...
            _dcLevel += (val - _dcLevel) * _lpf_constant;
            // ...and the same constant smooths the absolute deviation
            // (the signal envelope).
            diff = Math.Abs(val - _dcLevel);
            sig_level += (diff - sig_level) * _lpf_constant;
            outLPtr++;
        }
    }
    //System.Diagnostics.Debug.WriteLine(String.Format("AD{0}: DC:{1}", sig_level, _dcLevel));
    // NOTE(review): lock (this) is discouraged (external code could lock the
    // same instance); a private lock object would be safer, but that field
    // lives outside this method.
    lock (this)
    {
        _sigLevel = sig_level;
    }
}
// AudioUnit render callback. An instance method is used (rather than the
// static callback) so the generator can read instance state such as
// _frequency, _phase and _isSquareWave.
void device_callback(object sender, AudioUnitEventArgs args)
{
    // Phase advance per sample for the requested frequency.
    double phaseStep = _frequency * 2.0 * Math.PI / _sampleRate;

    // Destination buffers supplied by the audio unit (left / right).
    IntPtr outL = args.Data.mBuffers[0].mData;
    IntPtr outR = args.Data.mBuffers[1].mData;

    // AudioUnitSampleType differs between the simulator (float32) and a real
    // device (int32); this path writes the device's fixed-point (8.24) form.
    unsafe
    {
        var left = (int *)outL.ToPointer();
        var right = (int *)outR.ToPointer();
        for (int i = 0; i < args.NumberFrames; i++)
        {
            // Sine sample, attenuated by 128 to a safe level.
            int sample = (int)(Math.Sin(_phase) * int.MaxValue / 128);
            if (_isSquareWave)
            {
                // Clip to a square wave of the same amplitude.
                sample = sample > 0 ? int.MaxValue / 128 : -1 * int.MaxValue / 128;
            }
            *left++ = sample;
            *right++ = sample;
            _phase += phaseStep;
        }
    }
    // Keep the phase bounded so precision does not degrade over time.
    _phase %= 2 * Math.PI;
}
// Render callback: plays the preloaded buffer forward or reversed, with
// optional looping; outputs silence once playback has finished, then stops.
void _audioUnit_RenderCallback(object sender, AudioUnitEventArgs args)
{
    // Getting a pointer to a buffer to be filled
    IntPtr outL = args.Data.mBuffers[0].mData;
    IntPtr outR = args.Data.mBuffers[1].mData;
    unsafe
    {
        var outLPtr = (int *)outL.ToPointer();
        var outRPtr = (int *)outR.ToPointer();
        var buf0 = (int *)_buffer.mBuffers[0].mData;
        // Mono sources feed both output channels from buffer 0.
        int *buf1 = (_numberOfChannels == 2) ? (int *)_buffer.mBuffers[1].mData : buf0;
        for (int i = 0; i < args.NumberFrames; i++)
        {
            if (_isDone)
            {
                // 0-filling
                *outLPtr++ = 0;
                *outRPtr++ = 0;
            }
            else if (!_isReverse)
            {
                // normal play
                if (_currentFrame >= _totalFrames)
                {
                    _currentFrame = 0;
                    if (!_isLoop)
                    {
                        _isDone = true;
                    }
                }
                // BUG FIX: read the current frame, then advance. The original
                // pre-incremented (buf0[++_currentFrame]), which skipped frame
                // 0 and read one frame past the end of the buffer.
                *outLPtr++ = buf0[_currentFrame];
                *outRPtr++ = buf1[_currentFrame];
                _currentFrame++;
            }
            else
            {
                // reverse play
                if (_currentFrame <= 0)
                {
                    // BUG FIX: the original set _isDone when _isLoop was TRUE
                    // (inverted condition), so looping stopped after one pass
                    // and non-looping playback never stopped.
                    _currentFrame = (uint)_totalFrames;
                    if (!_isLoop)
                    {
                        _isDone = true;
                    }
                }
                // Decrement first so frames N-1 .. 0 are all played.
                *outLPtr++ = buf0[--_currentFrame];
                *outRPtr++ = buf1[_currentFrame];
            }
        }
    }
    if (_isDone)
    {
        Stop();
    }
}
// Static render-callback trampoline handed to the native AudioUnit API.
// inRefCon carries a GCHandle to the managed AudioUnit instance, which lets
// this static method reach instance state and raise the managed event.
static int renderCallback(IntPtr inRefCon,
    ref AudioUnitRenderActionFlags _ioActionFlags,
    ref AudioTimeStamp _inTimeStamp,
    int _inBusNumber,
    int _inNumberFrames,
    AudioBufferList _ioData)
{
    // Recover the managed AudioUnit instance from the opaque context pointer.
    var instance = (AudioUnit)GCHandle.FromIntPtr(inRefCon).Target;

    // Forward to the managed event handler, if anyone subscribed.
    if (instance.RenderCallback != null)
    {
        instance.RenderCallback(instance,
            new AudioUnitEventArgs(_ioActionFlags, _inTimeStamp, _inBusNumber, _inNumberFrames, _ioData));
    }

    return 0; // noerror
}
// Render callback: captures the microphone (bus 1 = remote input), runs a
// level/trigger detector on the input, and — while triggered — replaces the
// output with the preloaded sound buffer (silence otherwise).
void _audioUnit_RenderCallback(object sender, AudioUnitEventArgs args)
{
    // getting microphone input signal
    _audioUnit.Render(args.ActionFlags,
        args.TimeStamp,
        1, // Remote input
        args.NumberFrames,
        args.Data);

    // Getting a pointer to a buffer to be filled
    IntPtr outL = args.Data.mBuffers[0].mData;
    IntPtr outR = args.Data.mBuffers[1].mData;

    // Pass 1: envelope-follow the input and arm the trigger on a level spike.
    unsafe
    {
        var inPtr = (int *)outL.ToPointer();
        for (int i = 0; i < args.NumberFrames; i++)
        {
            // Asymmetric low-pass: fast attack (1/1000), slow release (1/10000).
            float diff = Math.Abs(*inPtr) - _signalLevel;
            if (diff > 0)
            {
                _signalLevel += diff / 1000f;
            }
            else
            {
                _signalLevel += diff / 10000f;
            }
            diff = Math.Abs(diff);

            // sound trigger detection
            if (_triggered <= 0 && diff > _threshold)
            {
                _triggered = _playingDuration;
            }

            // BUG FIX: the original never advanced the pointer, so only the
            // first input sample of each buffer was ever examined.
            inPtr++;
        }
    }

    // Pass 2: while triggered, overwrite the buffers with the preloaded sound.
    unsafe
    {
        var outLPtr = (int *)outL.ToPointer();
        var outRPtr = (int *)outR.ToPointer();
        var buf0 = (int *)_buffer.mBuffers[0].mData;
        // Mono sources feed both output channels from buffer 0.
        int *buf1 = (_numberOfChannels == 2) ? (int *)_buffer.mBuffers[1].mData : buf0;
        for (int i = 0; i < args.NumberFrames; i++)
        {
            _triggered = Math.Max(0, _triggered - 1);
            if (_triggered <= 0)
            {
                // 0-filling
                *outLPtr++ = 0;
                *outRPtr++ = 0;
            }
            else
            {
                if (_currentFrame >= _totalFrames)
                {
                    _currentFrame = 0; // wrap around (the sound loops while triggered)
                }
                // BUG FIX: read the current frame, then advance. The original
                // pre-incremented (buf0[++_currentFrame]), skipping frame 0
                // and reading one frame past the end of the buffer.
                *outLPtr++ = buf0[_currentFrame];
                *outRPtr++ = buf1[_currentFrame];
                _currentFrame++;
            }
        }
    }
}
// AudioUnit render callback for 2-channel, 8-bit source data.
// Converts interleaved unsigned 8-bit samples to scaled 16-bit values, writes
// them into the two output buffers, and handles end-of-data/looping.
private void render2Channel8BitCallback(object sender, AudioUnitEventArgs e)
{
    int dataFrameSize = e.NumberFrames * 2;   // total 8-bit samples requested (L+R interleaved)
    int readSize = dataFrameSize;
    int nextOffset = readSize + dataOffset;
    if (nextOffset > sound.data.Length)
        // Clamp the read to the bytes remaining in the source.
        readSize -= nextOffset - sound.data.Length;
    unsafe
    {
        int* channel1 = (int*)e.Data.Buffers[0].Data.ToPointer();
        int* channel2 = (int*)e.Data.Buffers[1].Data.ToPointer();
        fixed (byte* data = sound.data)
        {
            int i2 = dataOffset, readSizeLoop = readSize / 2;   // frames to emit per channel
            // NOTE(review): each buffer receives readSizeLoop 32-bit samples
            // (readSizeLoop * 4 bytes), but DataByteSize is set to
            // readSizeLoop here, while the 16-bit stereo sibling sets
            // readSize — units look inconsistent. TODO confirm against the
            // unit's stream format.
            e.Data.Buffers[0].DataByteSize = readSizeLoop;
            e.Data.Buffers[1].DataByteSize = readSizeLoop;
            for (int i = 0; i != readSizeLoop; ++i)
            {
                // Normalize 0..255 to 0..1, rescale to 16-bit, apply volume,
                // and place the result in the high 16 bits of the 32-bit word
                // via pointer aliasing (assumes little-endian).
                int valueL = 0, valueR = 0;
                short* valueDataL = (short*)&valueL;
                short* valueDataR = (short*)&valueR;
                valueDataL[1] = (short)((data[i2] * 0.00392156862745f) * short.MaxValue * volume);// '* 0.00392156862745f' = '/ 255f'
                valueDataR[1] = (short)((data[i2+1] * 0.00392156862745f) * short.MaxValue * volume);
#if iOS
                channel1[i] = valueL;// / 128;
                channel2[i] = valueR;// / 128;
#else
                channel1[i] = valueL;
                channel2[i] = valueR;
#endif
                i2 += 2;
            }
        }
    }
    dataOffset += readSize;
    if (readSize != dataFrameSize)
    {
        // End of source reached: rewind; stop unless looping.
        dataOffset = 0;
        if (!looped)
            Stop();
    }
}
// AudioUnit render callback for 1-channel, 16-bit source data.
// Copies up to NumberFrames samples from sound.data into the single output
// buffer, scaling by 'volume', and handles end-of-data/looping.
// (This method is a duplicate of the other render1Channel16BitCallback in
// this chunk.)
private void render1Channel16BitCallback(object sender, AudioUnitEventArgs e)
{
    int dataFrameSize = e.NumberFrames;    // samples requested
    int readSize = dataFrameSize;
    int nextOffset = readSize + dataOffset;
    int dataSize = sound.data.Length >> 1; // source length in 16-bit samples
    if (nextOffset > dataSize)
        // Clamp the read to the samples remaining in the source.
        readSize -= nextOffset - dataSize;
    unsafe
    {
        // NOTE(review): the buffer is treated as float* here, but the stereo
        // siblings treat the same buffers as int*; writing a 16-bit sample
        // into the upper half of a float's bit pattern yields an unrelated
        // float value. Looks inconsistent — confirm the unit's stream format.
        float* channel1 = (float*)e.Data.Buffers[0].Data.ToPointer();
        fixed (byte* data = sound.data)
        {
            short* dataS = (short*)data;   // reinterpret the byte[] as 16-bit samples
            int i2 = dataOffset;
            // NOTE(review): readSize 32-bit output samples occupy readSize * 4
            // bytes, but readSize * 2 is reported here — TODO confirm.
            e.Data.Buffers[0].DataByteSize = readSize * 2;
            for (int i = 0; i != readSize; ++i)
            {
                // Place the scaled sample in the high 16 bits of the 32-bit
                // word via pointer aliasing (assumes little-endian).
                float value = 0;
                short* valueData = (short*)&value;
                valueData[1] = (short)(dataS[i2] * volume);
#if iOS
                channel1[i] = value;// / 128;
#else
                channel1[i] = value;
#endif
                ++i2;
            }
        }
    }
    dataOffset += readSize;
    if (readSize != dataFrameSize)
    {
        // End of source reached: rewind; stop unless looping.
        dataOffset = 0;
        if (!looped)
            Stop();
    }
}