// Audio-queue processing-tap callback.
// Pulls source audio out of the queue tap, then drives an offline render
// through the generic-output AudioUnit over the same buffer list, and
// returns the number of frames actually obtained from the source.
// NOTE(review): throwing from a realtime tap callback will unwind through
// native code — consider returning 0 frames instead; confirm with callers.
uint TapProc(AudioQueueProcessingTap audioQueueTap, uint numberOfFrames, ref AudioTimeStamp timeStamp, ref AudioQueueProcessingTapFlags flags, AudioBuffers data)
{
    AudioQueueProcessingTapFlags source_flags;
    uint source_frames;
    // Fill `data` with up to numberOfFrames of source audio from the tap.
    if (audioQueueTap.GetSourceAudio(numberOfFrames, ref timeStamp, out source_flags, out source_frames, data) != AudioQueueStatus.Ok)
    {
        throw new ApplicationException();
    }
    // Stash the raw source pointer and detach it from the buffer list so the
    // Render call below must fill the list itself.
    // NOTE(review): assumes the generic-output unit's input callback consumes
    // preRenderData — confirm against the unit setup code elsewhere in file.
    preRenderData = data [0].Data;
    data.SetData(0, IntPtr.Zero);

    // Offline render: only SampleTimeValid is set; sample time itself is 0.
    var renderTimeStamp = new AudioTimeStamp();
    renderTimeStamp.Flags = AudioTimeStamp.AtsFlags.SampleTimeValid;
    AudioUnitRenderActionFlags action_flags = 0;
    // Bus 0 of the generic-output unit; fills `data` with the processed audio.
    var res = genericOutputUnit.Render(ref action_flags, renderTimeStamp, 0, numberOfFrames, data);
    if (res != AudioUnitStatus.NoError)
    {
        throw new ApplicationException();
    }
    return(source_frames);
}
/// <summary>
/// Render callback: pulls the microphone signal from the remote-IO unit's
/// input bus (bus 1) into the callback's buffer list.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="args">Carries the action flags, timestamp, frame count and
/// the buffer list to fill.</param>
void _audioUnit_RenderCallback(object sender, AudioUnitEventArgs args)
{
    // Removed leftover Debug.WriteLine("i") — per-callback logging inside a
    // realtime render path causes audible glitches in debug builds.
    _audioUnit.Render(args.ActionFlags,
        args.TimeStamp,
        1, // Remote input (microphone) bus
        args.NumberFrames,
        args.Data);
}
/// <summary>
/// Input callback for the audio unit: renders one buffer of 16-bit mono
/// microphone samples into an unmanaged scratch buffer, copies it to a
/// managed array and raises <c>DataAvailable</c> with it.
/// </summary>
/// <returns>The render status (<c>OK</c> on success).</returns>
AudioUnitStatus AudioInputCallBack(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioUnit audioUnit)
{
    // 2 bytes per frame: 16-bit mono samples.
    int byteSize = (int)numberFrames * 2;
    var buffer = new AudioBuffer()
    {
        NumberChannels = 1,
        DataByteSize = byteSize,
        Data = System.Runtime.InteropServices.Marshal.AllocHGlobal(byteSize)
    };
    try
    {
        var bufferList = new AudioBuffers(1);
        bufferList[0] = buffer;

        var status = audioUnit.Render(ref actionFlags, timeStamp, busNumber, numberFrames, bufferList);
        // Bail out instead of publishing garbage when the render fails
        // (the original ignored the status entirely).
        if (status != AudioUnitStatus.OK)
        {
            return status;
        }

        var send = new byte[buffer.DataByteSize];
        System.Runtime.InteropServices.Marshal.Copy(buffer.Data, send, 0, send.Length);

        var handler = DataAvailable;
        if (handler != null)
        {
            handler(this, send);
        }
        // Removed the per-byte Console.Write dump: blocking console I/O for
        // every sample inside a realtime audio callback causes dropouts.
        return AudioUnitStatus.OK;
    }
    finally
    {
        // Always release the unmanaged buffer, even on early return or throw
        // (the original leaked it if anything before FreeHGlobal threw).
        System.Runtime.InteropServices.Marshal.FreeHGlobal(buffer.Data);
    }
}
// Render callback used as a microphone level monitor: pulls the input
// signal, tracks its DC offset with a one-pole low-pass filter, and keeps a
// smoothed absolute signal level in _sigLevel.
void _callback(object sender, AudioUnitEventArgs args)
{
    // getting microphone input into the callback's buffer list
    _audioUnit.Render(args.ActionFlags,
        args.TimeStamp,
        1, // Remote input (microphone) bus
        args.NumberFrames,
        args.Data);

    // Getting a pointer to a buffer to be filled
    IntPtr outL = args.Data.mBuffers[0].mData;
    IntPtr outR = args.Data.mBuffers[1].mData;

    // level monitor: work on a local copy, publish once at the end
    float diff;
    float sig_level = _sigLevel;
    unsafe
    {
        var outLPtr = (Int32 *)outL.ToPointer();
        // NOTE(review): outRPtr is declared but never used — the monitor only
        // reads the left channel. Candidate for removal.
        var outRPtr = (Int32 *)outR.ToPointer();
        for (int i = 0; i < args.NumberFrames; i++)
        {
            float val = *outLPtr;
            // One-pole LPF tracking the DC offset of the signal.
            _dcLevel += (val - _dcLevel) * _lpf_constant;
            // Smoothed magnitude of the DC-corrected signal.
            diff = Math.Abs(val - _dcLevel);
            sig_level += (diff - sig_level) * _lpf_constant;
            outLPtr++;
        }
    }
    //System.Diagnostics.Debug.WriteLine(String.Format("AD{0}: DC:{1}", sig_level, _dcLevel));

    // NOTE(review): lock(this) is an anti-pattern (callers could lock the
    // same object); prefer a private readonly gate field — needs a change
    // outside this method.
    lock (this)
    {
        _sigLevel = sig_level;
    }
}
/// <summary>
/// Input callback: renders one buffer of 16-bit mono microphone samples and
/// ships it to the remote peer as a datagram.
/// Packet layout: 4-byte ASCII tag "a000", 4-byte big-endian session id,
/// 8-byte big-endian unix-epoch milliseconds, then the raw PCM payload.
/// </summary>
/// <returns>The render status (<c>OK</c> on success).</returns>
AudioUnitStatus AudioInputCallBack(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioUnit audioUnit)
{
    MemoryStream ms = new MemoryStream();
    String s = "a000";
    byte[] bufWriter = Encoding.ASCII.GetBytes(s.ToCharArray(), 0, 4);
    ms.Write(bufWriter, 0, 4);

    bufWriter = BitConverter.GetBytes(AudioSessionId);
    if (BitConverter.IsLittleEndian)
    {
        Array.Reverse(bufWriter); // network (big-endian) byte order
    }
    ms.Write(bufWriter, 0, 4);

    // Milliseconds since the unix epoch; used by the receiver for ordering.
    long time = (long)(DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalMilliseconds;
    //Console.WriteLine ((time - lasttime) + " ms delay");
    lasttime = time;
    bufWriter = BitConverter.GetBytes(time);
    if (BitConverter.IsLittleEndian)
    {
        Array.Reverse(bufWriter);
    }
    ms.Write(bufWriter, 0, 8);

    // 2 bytes per frame: 16-bit mono samples.
    int byteSize = (int)numberFrames * 2;
    var buffer = new AudioBuffer()
    {
        NumberChannels = 1,
        DataByteSize = byteSize,
        Data = System.Runtime.InteropServices.Marshal.AllocHGlobal(byteSize)
    };
    try
    {
        var bufferList = new AudioBuffers(1);
        bufferList[0] = buffer;

        var status = audioUnit.Render(ref actionFlags, timeStamp, busNumber, numberFrames, bufferList);
        // Bail out instead of streaming garbage when the render fails
        // (the original ignored the status entirely).
        if (status != AudioUnitStatus.OK)
        {
            return status;
        }

        var send = new byte[buffer.DataByteSize];
        System.Runtime.InteropServices.Marshal.Copy(buffer.Data, send, 0, send.Length);
        ms.Write(send, 0, send.Length);
        // Removed the per-byte Console.Write dump: blocking console I/O for
        // every sample inside a realtime audio callback causes dropouts.
    }
    finally
    {
        // Always release the unmanaged buffer, even on early return or throw
        // (the original leaked it if anything before FreeHGlobal threw).
        System.Runtime.InteropServices.Marshal.FreeHGlobal(buffer.Data);
    }

    byte[] sendbuf = ms.ToArray();
    if (sendbuf.Length > 4096)
    {
        throw new Exception("Packet size too large!");
    }

    // Fire-and-forget send off the audio thread; bounded 3 s wait, failures
    // are deliberately swallowed (best-effort streaming — drop the packet).
    Task.Factory.StartNew(() =>
    {
        try
        {
            var aSender = audioCaller.BeginSend(sendbuf, sendbuf.Length, null, null);
            aSender.AsyncWaitHandle.WaitOne(TimeSpan.FromSeconds(3));
            if (aSender.IsCompleted)
            {
                audioCaller.EndSend(aSender);
            }
        }
        catch
        {
            // best-effort: ignore network errors and drop this packet
        }
    });
    return AudioUnitStatus.OK;
}
/// <summary>
/// Render callback: monitors the microphone for a transient above
/// <c>_threshold</c> and, while triggered, replaces the output with the
/// pre-loaded sound in <c>_buffer</c>; otherwise outputs silence.
/// </summary>
void _audioUnit_RenderCallback(object sender, AudioUnitEventArgs args)
{
    // getting microphone input signal (bus 1 = hardware input)
    _audioUnit.Render(args.ActionFlags,
        args.TimeStamp,
        1, // Remote input
        args.NumberFrames,
        args.Data);

    // Getting a pointer to a buffer to be filled
    IntPtr outL = args.Data.mBuffers[0].mData;
    IntPtr outR = args.Data.mBuffers[1].mData;

    // --- Signal level tracking and trigger detection ---
    unsafe
    {
        var outLPtr = (int *)outL.ToPointer();
        for (int i = 0; i < args.NumberFrames; i++)
        {
            // Asymmetric one-pole LPF: fast attack (/1000), slow decay (/10000).
            float diff = Math.Abs(*outLPtr) - _signalLevel;
            if (diff > 0)
            {
                _signalLevel += diff / 1000f;
            }
            else
            {
                _signalLevel += diff / 10000f;
            }
            diff = Math.Abs(diff);
            // sound trigger detection: re-arm playback on a transient
            if (_triggered <= 0 && diff > _threshold)
            {
                _triggered = _playingDuration;
            }
            // BUG FIX: the original never advanced the pointer, so it
            // examined the first sample NumberFrames times.
            outLPtr++;
        }
    }

    // --- Playback: overwrite the output while triggered ---
    unsafe
    {
        var outLPtr = (int *)outL.ToPointer();
        var outRPtr = (int *)outR.ToPointer();
        var buf0 = (int *)_buffer.mBuffers[0].mData;
        // Mono source: mirror the left channel into the right.
        int *buf1 = (_numberOfChannels == 2) ? (int *)_buffer.mBuffers[1].mData : buf0;
        for (int i = 0; i < args.NumberFrames; i++)
        {
            _triggered = Math.Max(0, _triggered - 1);
            if (_triggered <= 0)
            {
                // 0-filling: silence when not triggered
                *outLPtr++ = 0;
                *outRPtr++ = 0;
            }
            else
            {
                // wrap around at the end of the stored sound
                if (_currentFrame >= _totalFrames)
                {
                    _currentFrame = 0;
                }
                // BUG FIX: the original used buf0[++_currentFrame], which
                // skipped frame 0 and read one element past the end of the
                // buffer when _currentFrame was _totalFrames - 1.
                *outLPtr++ = buf0[_currentFrame];
                *outRPtr++ = buf1[_currentFrame];
                _currentFrame++;
            }
        }
    }
}
/// <summary>
/// Render callback: monitors the microphone for a transient above
/// <c>_threshold</c> and, while triggered, replaces the output with the
/// pre-loaded sound in <c>_buffer</c>; otherwise outputs silence.
/// </summary>
/// <returns><c>AudioUnitStatus.NoError</c>.</returns>
AudioUnitStatus _audioUnit_RenderCallback(AudioUnitRenderActionFlags actionFlags, AudioTimeStamp timeStamp, uint busNumber, uint numberFrames, AudioBuffers data)
{
    // getting microphone input signal (bus 1 = hardware input)
    _audioUnit.Render(ref actionFlags,
        timeStamp,
        1, // Remote input
        numberFrames,
        data);

    // Getting a pointer to a buffer to be filled
    IntPtr outL = data[0].Data;
    IntPtr outR = data[1].Data;

    // --- Signal level tracking and trigger detection ---
    unsafe
    {
        var outLPtr = (int *)outL.ToPointer();
        for (int i = 0; i < numberFrames; i++)
        {
            // Asymmetric one-pole LPF: fast attack (/1000), slow decay (/10000).
            float diff = Math.Abs(*outLPtr) - _signalLevel;
            if (diff > 0)
            {
                _signalLevel += diff / 1000f;
            }
            else
            {
                _signalLevel += diff / 10000f;
            }
            diff = Math.Abs(diff);
            // sound trigger detection: re-arm playback on a transient
            if (_triggered <= 0 && diff > _threshold)
            {
                _triggered = _playingDuration;
            }
            // BUG FIX: the original never advanced the pointer, so it
            // examined the first sample numberFrames times.
            outLPtr++;
        }
    }

    // --- Playback: overwrite the output while triggered ---
    unsafe
    {
        var outLPtr = (int *)outL.ToPointer();
        var outRPtr = (int *)outR.ToPointer();
        // Hoisted out of the loop — these are loop-invariant (the original
        // recomputed them on every triggered frame).
        var buf0 = (int *)_buffer[0].Data;
        // Mono source: mirror the left channel into the right.
        var buf1 = (_numberOfChannels == 2) ? (int *)_buffer[1].Data : buf0;
        for (int i = 0; i < numberFrames; i++)
        {
            _triggered = Math.Max(0, _triggered - 1);
            if (_triggered <= 0)
            {
                // 0-filling: silence when not triggered
                *outLPtr++ = 0;
                *outRPtr++ = 0;
            }
            else
            {
                // wrap around at the end of the stored sound
                if (_currentFrame >= _totalFrames)
                {
                    _currentFrame = 0;
                }
                // BUG FIX: the original pre-incremented before indexing,
                // which skipped frame 0 and read one element past the end of
                // the buffer when _currentFrame was _totalFrames - 1.
                *outLPtr++ = buf0[_currentFrame];
                *outRPtr++ = buf1[_currentFrame];
                _currentFrame++;
            }
        }
    }
    return(AudioUnitStatus.NoError);
}