Example No. 1
        static int renderCallback(IntPtr inRefCon,
            ref AudioUnit.AudioUnitRenderActionFlags _ioActionFlags,
            ref AudioTimeStamp _inTimeStamp,
            uint _inBusNumber,
            uint _inNumberFrames,
            AudioBufferList _ioData)
        {
            // recover the AUGraph instance from the GCHandle passed as inRefCon
            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst = (AUGraph)handler.Target;

            // invoke the render event handler with the callback arguments
            if (inst.RenderCallback != null)
            {
                var args = new AudioGraphEventArgs(
                    _ioActionFlags,
                    _inTimeStamp,
                    _inBusNumber,
                    _inNumberFrames,
                    _ioData);
                inst.RenderCallback(inst, args);
            }

            return 0; // noerror
        }
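
Every render callback on this page recovers the managed wrapper from the inRefCon pointer with GCHandle.FromIntPtr. The registration half of that pattern falls outside these snippets; below is a minimal sketch of it using only standard .NET interop (the class and method names are illustrative, not part of the original wrapper).

        // Hypothetical helper: wrap the managed instance in a GCHandle and hand the
        // handle's IntPtr to the native side as the render callback's user data (inRefCon).
        using System;
        using System.Runtime.InteropServices;

        class CallbackUserData
        {
            GCHandle _handle;

            public IntPtr Attach(object instance)
            {
                // A normal handle is enough: we only need a stable IntPtr that
                // GCHandle.FromIntPtr can turn back into the managed object.
                _handle = GCHandle.Alloc(instance);
                return GCHandle.ToIntPtr(_handle);
            }

            public void Detach()
            {
                if (_handle.IsAllocated)
                    _handle.Free(); // release only once the callback can no longer fire
            }
        }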
Example No. 2

        static int renderCallback(IntPtr inRefCon,
                                  ref AudioUnitRenderActionFlags _ioActionFlags,
                                  ref AudioTimeStamp _inTimeStamp,
                                  uint _inBusNumber,
                                  uint _inNumberFrames,
                                  AudioBufferList _ioData)
        {
            //System.Diagnostics.Debug.WriteLine(_ioActionFlags);
            // recover the AudioUnit instance from the GCHandle passed as inRefCon
            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst    = (AudioUnit)handler.Target;

            // invoke the render event handler with the callback arguments
            if (inst._renderEvent != null)
            {
                var args = new AudioUnitEventArgs(
                    _ioActionFlags,
                    _inTimeStamp,
                    _inBusNumber,
                    _inNumberFrames,
                    _ioData);
                inst._renderEvent(inst, args);
            }

            return 0; // noerror
        }
Example No. 3
        static int renderCallback(IntPtr inRefCon,
            ref AudioUnitRenderActionFlags _ioActionFlags,
            ref AudioTimeStamp _inTimeStamp,
            uint _inBusNumber,
            uint _inNumberFrames,
            AudioBufferList _ioData)
        {
            //System.Diagnostics.Debug.WriteLine(_ioActionFlags);
            // recover the AudioUnit instance from the GCHandle passed as inRefCon
            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst = (AudioUnit)handler.Target;
            
            // invoke the render event handler with the callback arguments
            if (inst._renderEvent != null) 
            {
                var args = new AudioUnitEventArgs(
                    _ioActionFlags,
                    _inTimeStamp,
                    _inBusNumber,
                    _inNumberFrames,
                    _ioData);
                inst._renderEvent(inst, args);
            }

            return 0; // noerror
        }
Example No. 4

 static extern int AudioUnitRender(IntPtr inUnit,
                                   ref AudioUnitRenderActionFlags ioActionFlags,
                                   ref AudioTimeStamp inTimeStamp,
                                   UInt32 inOutputBusNumber,
                                   UInt32 inNumberFrames,
                                   AudioBufferList ioData
                                   );
Example No. 5

 public AudioGraphEventArgs(AudioUnit.AudioUnitRenderActionFlags _ioActionFlags,
     MonoTouch.AudioToolbox.AudioTimeStamp _inTimeStamp,
     uint _inBusNumber,
     uint _inNumberFrames,
     AudioBufferList _ioData)
     : base(_ioActionFlags, _inTimeStamp, _inBusNumber, _inNumberFrames, _ioData)
 {
 }
Example No. 6
 public AudioGraphEventArgs(AudioUnit.AudioUnitRenderActionFlags _ioActionFlags,
                            MonoTouch.AudioToolbox.AudioTimeStamp _inTimeStamp,
                            uint _inBusNumber,
                            uint _inNumberFrames,
                            AudioBufferList _ioData)
     : base(_ioActionFlags, _inTimeStamp, _inBusNumber, _inNumberFrames, _ioData)
 {
 }
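
The two AudioGraphEventArgs constructors above simply forward the native render parameters to the AudioUnitEventArgs base. On the consuming side, handler code receives those parameters through the wrapper's render event; a hedged sketch, assuming RenderCallback (see Example No. 1) is publicly subscribable and that the properties assigned in the event-args constructor are publicly readable:

 static void SubscribeToRender(AUGraph graph)
 {
     graph.RenderCallback += (sender, e) =>
     {
         // e.Data is the AudioBufferList supplied by Core Audio; the handler is expected
         // to fill e.NumberFrames sample frames into its buffers before returning,
         // and it runs on the real-time render thread, so it should not block.
     };
 }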
Example No. 7
        public void Write(uint numberFrames, AudioBufferList data)
        {
            int err = ExtAudioFileWrite(_extAudioFile, numberFrames, data);

            if (err != 0)
            {
                throw new ArgumentException(String.Format("Error code:{0}", err));
            }
        }
Example No. 8
        public uint Read(uint numberFrames, AudioBufferList data)
        {
            int err = ExtAudioFileRead(_extAudioFile, ref numberFrames, data);

            if (err != 0)
            {
                throw new ArgumentException(String.Format("Error code:{0}", err));
            }

            return numberFrames;
        }
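
ExtAudioFileRead updates the frame count with the number of frames it actually delivered, so this Read wrapper returns 0 at end of file. A hedged usage sketch that drains a file in fixed-size chunks, reusing the wrapper types that appear elsewhere on this page (the MutableAudioBufferList sizing follows the prepareExtAudioFile example further down):

        static void DrainFile(ExtAudioFile file, int channels)
        {
            const uint framesPerRead = 4096;
            // canonical client format: 4-byte samples, one buffer per channel
            var buffer = new MutableAudioBufferList(channels, (int)(sizeof(uint) * framesPerRead));

            while (true)
            {
                uint got = file.Read(framesPerRead, buffer);
                if (got == 0)
                    break;          // no frames returned: end of file
                // ... consume 'got' frames from buffer here ...
            }
        }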
Example No. 9
 public AudioUnitEventArgs(AudioUnit.AudioUnitRenderActionFlags _ioActionFlags,
                           MonoTouch.AudioToolbox.AudioTimeStamp _inTimeStamp,
                           uint _inBusNumber,
                           uint _inNumberFrames,
                           AudioBufferList _ioData)
 {
     ActionFlags    = _ioActionFlags;
     this.TimeStamp = _inTimeStamp;
     BusNumber      = _inBusNumber;
     NumberFrames   = _inNumberFrames;
     Data           = _ioData;
 }
Example No. 10

 public AudioUnitEventArgs(AudioUnit.AudioUnitRenderActionFlags _ioActionFlags,
     MonoTouch.AudioToolbox.AudioTimeStamp _inTimeStamp,
     uint _inBusNumber,
     uint _inNumberFrames,
     AudioBufferList _ioData)
 {
     ActionFlags = _ioActionFlags;
     this.TimeStamp = _inTimeStamp;
     BusNumber = _inBusNumber;
     NumberFrames = _inNumberFrames;
     Data = _ioData;
 }
Example No. 11

 public void FillBuffer(AudioBufferList data, uint numberFrames, AudioStreamPacketDescription[] packetDescs)
 {
     uint numPackets = numberFrames;
      int err = AudioConverterFillComplexBuffer(
         _audioConverter,
         complexInputDataProc,
         GCHandle.ToIntPtr(_handle),
         ref numPackets,
         data,
         packetDescs);
     if(err != 0 || numPackets == 0) {
         throw new InvalidOperationException(String.Format("Error code:{0}", err));
     }            
 }
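
The packet-count argument of AudioConverterFillComplexBuffer is in/out: on entry it is the capacity of the output buffer expressed in packets, on return it is the number of packets the converter actually produced, with 0 meaning the input proc reported end of data. A hedged variant of the method above that surfaces that count instead of discarding it (it assumes the same fields of the surrounding wrapper class):

 public uint FillBufferCounted(AudioBufferList data, uint numberFrames, AudioStreamPacketDescription[] packetDescs)
 {
     uint numPackets = numberFrames;
     int err = AudioConverterFillComplexBuffer(
         _audioConverter,
         complexInputDataProc,
         GCHandle.ToIntPtr(_handle),
         ref numPackets,
         data,
         packetDescs);
     if (err != 0)
         throw new InvalidOperationException(String.Format("Error code:{0}", err));
     return numPackets; // packets actually converted; 0 signals end of input
 }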
Example No. 12
        static int device_renderCallback(IntPtr inRefCon,
            ref AudioUnit.AudioUnitRenderActionFlags _ioActionFlags,
            ref AudioTimeStamp _inTimeStamp,
            uint _inBusNumber,
            uint _inNumberFrames,
            AudioBufferList _ioData)
        {
            System.Diagnostics.Debug.WriteLine("o");

            var handler = GCHandle.FromIntPtr(inRefCon);
            var inst = (RemoteOutput)handler.Target;
            var waveDef = inst._waveDef[_inBusNumber];

            double dphai = 2 * Math.PI * waveDef.frequency / waveDef.sampleRate;
            double phase = waveDef.phase;

            // Get pointers to the left and right channel buffers to be filled
            IntPtr outL = _ioData.mBuffers[0].mData;
            IntPtr outR = _ioData.mBuffers[1].mData;

            // Fill in a sine waveform.
            // AudioUnitSampleType differs between the simulator (Float32) and a real device (SInt32, 8.24 fixed point).
            unsafe
            {
                var outLPtr = (int*)outL.ToPointer();
                var outRPtr = (int*)outR.ToPointer();
                for (int i = 0; i < _inNumberFrames; i++)
                {
                    int sample = (int)(Math.Sin(phase) * int.MaxValue / 128); // signal waveform format is fixed-point (8.24)
                    *outLPtr++ = sample;
                    *outRPtr++ = sample;
                    phase += dphai;
                }
            }
            waveDef.phase = phase % (2 * Math.PI);
            return 0;
        }
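
A worked note on the scaling above: in the 8.24 fixed-point layout of the device-side canonical sample type, 1.0 corresponds to 1 << 24 (16,777,216). int.MaxValue / 128 is (2^31 - 1) / 2^7, one count short of that, so the loop writes a full-scale sine wave. The same conversion written out explicitly (illustrative helper, not part of the original):

        // Convert a floating-point sample in [-1.0, +1.0] to an 8.24 fixed-point value.
        static int ToFixed824(double sample)
        {
            return (int)(sample * (1 << 24));
        }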
Example No. 13

        static int complexInputDataProc(
            IntPtr inAudioConverter,
            ref uint ioNumberDataPackets,
            AudioBufferList ioData,
            ref AudioStreamPacketDescription[] outDataPacketDescription, //AudioStreamPacketDescription**
            IntPtr inUserData
            )
        {
            // recover the _AudioConverter instance from the GCHandle passed as inUserData
            var handler = GCHandle.FromIntPtr(inUserData);
            var inst = (_AudioConverter)handler.Target;

            // invoke the encoder callback with the converter arguments
            if (inst.EncoderCallback != null)
            {
                var args = new _AudioConverterEventArgs(
                    ioNumberDataPackets,
                    ioData,
                    outDataPacketDescription);
                inst.EncoderCallback(inst, args);
            }

            return 0; // noerror
        }
Example No. 14

 static extern int AudioConverterFillComplexBuffer(
     IntPtr inAudioConverter,
     AudioConverterComplexInputDataProc inInputDataProc,
     IntPtr inInputDataProcUserData,
     ref uint ioOutputDataPacketSize,
     AudioBufferList outOutputData,
     AudioStreamPacketDescription[] outPacketDescription);
Example No. 15
        public uint Read(uint numberFrames, AudioBufferList data)
        {            
            int err = ExtAudioFileRead(_extAudioFile, ref numberFrames, data);
            if (err != 0)
            {
                throw new ArgumentException(String.Format("Error code:{0}", err));
            }

            return numberFrames;
        }
Example No. 16
 static extern int AudioUnitRender(IntPtr inUnit,
     ref AudioUnitRenderActionFlags ioActionFlags,
     ref AudioTimeStamp inTimeStamp,
     UInt32 inOutputBusNumber,
     UInt32 inNumberFrames,
     AudioBufferList ioData
     );
Example No. 17
 public void Render(AudioUnitRenderActionFlags flags, AudioTimeStamp timeStamp, UInt32 outputBusnumber, UInt32 numberFrames, AudioBufferList data)
 {
     int err = AudioUnitRender (_audioUnit,
         ref flags,
         ref timeStamp,
         outputBusnumber,
         numberFrames,
         data);
     if (err != 0)
         throw new InvalidOperationException(String.Format("Error code:{0}", err));
 }
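
Render wraps AudioUnitRender, which pulls audio out of a unit. A common iOS use is to call it from inside the output unit's render callback to fetch microphone samples from the Remote IO input element (bus 1) directly into the callback's buffer list. A hedged sketch, assuming the wrapper types from the other examples on this page and publicly readable event-args properties:

 static void PullMicrophoneSamples(AudioUnit audioUnit, AudioUnitEventArgs e)
 {
     audioUnit.Render(e.ActionFlags,
                      e.TimeStamp,
                      1,              // Remote IO: element 1 is the input (microphone) side
                      e.NumberFrames,
                      e.Data);        // captured samples land in e.Data's buffers
 }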
Example No. 18
 static extern int ExtAudioFileWrite(IntPtr inExtAudioFile, uint inNumberFrames, AudioBufferList ioData);
Example No. 19
 static extern int ExtAudioFileRead(IntPtr inExtAudioFile, ref uint ioNumberFrames, AudioBufferList ioData);
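
In the original sources each static extern declaration on this page carries a DllImport attribute that falls outside the snippet. The ExtAudioFile symbols live in the AudioToolbox framework, so the binding presumably looks like the following (MonoTouch.Constants.AudioToolboxLibrary is the framework's library path in classic MonoTouch; this decoration is an assumption, not shown in the snippets above):

 // Assumed P/Invoke decoration; the attribute line is not part of the snippets above.
 [DllImport(MonoTouch.Constants.AudioToolboxLibrary)]
 static extern int ExtAudioFileRead(IntPtr inExtAudioFile, ref uint ioNumberFrames, AudioBufferList ioData);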
Example No. 20
 public void Write(uint numberFrames, AudioBufferList data)
 {
     int err = ExtAudioFileWrite(_extAudioFile, numberFrames, data);
     if (err != 0)
         throw new ArgumentException(String.Format("Error code:{0}", err));
 }
Example No. 21

		unsafe static void RenderAudio (CFUrl sourceUrl, CFUrl destinationUrl)
		{
			AudioStreamBasicDescription dataFormat;
			AudioQueueBuffer *buffer = null;
			long currentPacket = 0;
			int packetsToRead = 0;
			AudioStreamPacketDescription [] packetDescs = null;
			bool flushed = false;
			bool done = false;
			int bufferSize;
			
			using (var audioFile = AudioFile.Open (sourceUrl, AudioFilePermission.Read, (AudioFileType) 0)) {
				dataFormat = audioFile.StreamBasicDescription;
				
				using (var queue = new OutputAudioQueue (dataFormat, CFRunLoop.Current, CFRunLoop.CFRunLoopCommonModes)) {
					queue.OutputCompleted += (sender, e) => 
					{
						HandleOutput (audioFile, queue, buffer, ref packetsToRead, ref currentPacket, ref done, ref flushed, ref packetDescs);
					};
					
					// we need to calculate how many packets we read at a time and how big a buffer we need
					// we base this on the size of the packets in the file and an approximate duration for each buffer
					bool isVBR = dataFormat.BytesPerPacket == 0 || dataFormat.FramesPerPacket == 0;
					
					// first check to see what the max size of a packet is - if it is bigger
					// than our allocation default size, that needs to become larger
					// adjust buffer size to represent about a second of audio based on this format 
					CalculateBytesForTime (dataFormat, audioFile.MaximumPacketSize, 1.0, out bufferSize, out packetsToRead);
				
					if (isVBR) {
						packetDescs = new AudioStreamPacketDescription [packetsToRead];
					} else {
						packetDescs = null; // we don't provide packet descriptions for constant bit rate formats (like linear PCM)
					}
				
					if (audioFile.MagicCookie.Length != 0)
						queue.MagicCookie = audioFile.MagicCookie;
		
					// allocate the input read buffer
					queue.AllocateBuffer (bufferSize, out buffer);
					
					// prepare the capture format
					AudioStreamBasicDescription captureFormat;
					captureFormat.SampleRate = dataFormat.SampleRate;
					captureFormat.Format = AudioFormatType.LinearPCM;
					captureFormat.FormatFlags = AudioFormatFlags.IsSignedInteger | AudioFormatFlags.IsPacked | 
							(AudioFormatFlags) (24 << (int) AudioFormatFlags.LinearPCMSampleFractionShift);
					captureFormat.ChannelsPerFrame = dataFormat.ChannelsPerFrame;
					captureFormat.FramesPerPacket = 1;
					captureFormat.BitsPerChannel = 32;
					captureFormat.BytesPerPacket = dataFormat.ChannelsPerFrame * 4;
					captureFormat.BytesPerFrame = captureFormat.BytesPerPacket;
					
					queue.SetOfflineRenderFormat (captureFormat, audioFile.ChannelLayout);
					
					// prepare the target format
					AudioStreamBasicDescription dstFormat;
					dstFormat.SampleRate = dataFormat.SampleRate;
					dstFormat.ChannelsPerFrame = dataFormat.ChannelsPerFrame;
					dstFormat.Format = AudioFormatType.LinearPCM;
					dstFormat.FormatFlags = AudioFormatFlags.IsPacked | AudioFormatFlags.LinearPCMIsSignedInteger;
					dstFormat.BitsPerChannel = 16;
					dstFormat.BytesPerPacket = 2 * dstFormat.ChannelsPerFrame;
					dstFormat.BytesPerFrame = dstFormat.BytesPerPacket;
					dstFormat.FramesPerPacket = 1;
					
					using (var captureFile = ExtAudioFile.CreateWithUrl (destinationUrl, AudioFileType.CAF, dstFormat, AudioFileFlags.EraseFlags)) {
						captureFile.ClientDataFormat = captureFormat;
						
						int captureBufferSize = bufferSize / 2;
						var captureABL = new AudioBufferList (1);
						
						AudioQueueBuffer *captureBuffer;
						queue.AllocateBuffer (captureBufferSize, out captureBuffer);
						
						captureABL.Buffers [0].Data = captureBuffer->AudioData;
						captureABL.Buffers [0].NumberChannels = captureFormat.ChannelsPerFrame;
						
						queue.Start ();
						
						double ts = 0;
						queue.RenderOffline (ts, captureBuffer, 0);
						
						HandleOutput (audioFile, queue, buffer, ref packetsToRead, ref currentPacket, ref done, ref flushed, ref packetDescs);
						
						while (true) {
							int reqFrames = captureBufferSize / captureFormat.BytesPerFrame;
							
							queue.RenderOffline (ts, captureBuffer, reqFrames);
							
							captureABL.Buffers [0].Data = captureBuffer->AudioData;
							captureABL.Buffers [0].DataByteSize = (int) captureBuffer->AudioDataByteSize;
							int writeFrames = captureABL.Buffers [0].DataByteSize / captureFormat.BytesPerFrame;
							
							// Console.WriteLine ("ts: {0} AudioQueueOfflineRender: req {1} frames / {2} bytes, got {3} frames / {4} bytes", 
							//	ts, reqFrames, captureBufferSize, writeFrames, captureABL.Buffers [0].DataByteSize);
							
							captureFile.WriteAsync (writeFrames, captureABL);
							
							if (flushed)
								break;
							
							ts += writeFrames;
						}
					
						CFRunLoop.Current.RunInMode (CFRunLoop.CFDefaultRunLoopMode, 1, false);
					}
				}
			}
		}
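
The capture format in the example above declares a 32-bit signed sample with 24 fractional bits: LinearPCMSampleFractionShift is the bit offset of the fraction-bits field inside the format flags, so shifting 24 into it yields the 8.24 fixed-point layout of the canonical sample type. A hedged helper that builds the same flag combination:

		static AudioFormatFlags Canonical824Flags()
		{
			// signed, packed, 24 fractional bits -> 8.24 fixed point in a 32-bit sample
			return AudioFormatFlags.IsSignedInteger
			     | AudioFormatFlags.IsPacked
			     | (AudioFormatFlags)(24 << (int)AudioFormatFlags.LinearPCMSampleFractionShift);
		}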
Example No. 22

        void prepareExtAudioFile()
        {
            // Opening Audio File
            _extAudioFile = ExtAudioFile.OpenUrl(_url);

            // Getting file data format
            _srcFormat = _extAudioFile.FileDataFormat;

            // Set the output format's channel count to match the input format
            _dstFormat = AudioUnitUtils.AUCanonicalASBD(_sampleRate, _srcFormat.ChannelsPerFrame);

            // Set the client (read) format to the AudioUnit canonical format
            _extAudioFile.ClientDataFormat = _dstFormat;

            // Get the total number of frames in the file
            _totalFrames = _extAudioFile.FileLengthFrames;

            // Allocate the AudioBufferList
            _buffer = new MutableAudioBufferList(_srcFormat.ChannelsPerFrame, (int) (sizeof(uint) * _totalFrames));
            _numberOfChannels = _srcFormat.ChannelsPerFrame;

            // Read all frames into the buffer
            _extAudioFile.Read((int) _totalFrames, _buffer);
        }
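
A sizing note on the buffer allocation above, under the hedged reading that the second MutableAudioBufferList argument is the byte size of each channel buffer: the AU canonical client format uses 4-byte samples (Float32 on the simulator, SInt32 8.24 on a device), so every non-interleaved channel buffer needs 4 bytes per frame.

        // Illustrative helper: bytes needed per channel buffer for the canonical client format.
        static int CanonicalBytesPerChannel(long totalFrames)
        {
            const int bytesPerSample = sizeof(uint); // 4-byte canonical samples
            return checked((int)(bytesPerSample * totalFrames));
        }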
Example No. 23

        public void Render(AudioUnitRenderActionFlags flags, AudioTimeStamp timeStamp, UInt32 outputBusnumber, UInt32 numberFrames, AudioBufferList data)
        {
            int err = AudioUnitRender(_audioUnit,
                                      ref flags,
                                      ref timeStamp,
                                      outputBusnumber,
                                      numberFrames,
                                      data);

            if (err != 0)
            {
                throw new InvalidOperationException(String.Format("Error code:{0}", err));
            }
        }
Example No. 24
 static extern int ExtAudioFileWrite(IntPtr inExtAudioFile, uint inNumberFrames, AudioBufferList ioData);
Example No. 25
 static extern int ExtAudioFileRead(IntPtr  inExtAudioFile, ref uint ioNumberFrames, AudioBufferList ioData);