// Reads up to `nPackets` packets starting at `inStartingPacket` into the
// caller-supplied byte buffer (at `offset`, `count` bytes available) and
// returns the per-packet descriptions marshaled out of a temporary native
// allocation, or null if the read fails.
//
// NOTE(review): the call passes `&b` — the address of the local IntPtr — as
// the packet-description argument instead of `b`, the AllocHGlobal block that
// the loop below reads the descriptions back from. If the extern overload
// takes a raw pointer, the native side would write nPackets * 16 bytes over
// the stack slot of `b` rather than into the allocated buffer. Confirm
// against the AudioFileReadPacketData overload's signature; this looks wrong.
unsafe AudioStreamPacketDescription [] RealReadFixedPackets(bool useCache, long inStartingPacket, int nPackets, byte [] buffer, int offset, int count)
{
    // 16 == sizeof (AudioStreamPacketDescription)
    // (long StartOffset + int VariableFramesInPacket + int DataByteSize,
    // matching the field offsets 0/8/12 read back below.)
    var b = Marshal.AllocHGlobal(16 * nPackets);
    try {
        fixed(byte *bop = &buffer[offset]) {
            var r = AudioFileReadPacketData(handle, useCache, ref count, &b, inStartingPacket, ref nPackets, (IntPtr)bop);
            if (r != 0) {
                // Any nonzero status is treated as failure (no EOF special case here,
                // unlike the array-based overload of this method).
                return(null);
            }
        }
        // Marshal the native packet descriptions into managed structs.
        // nPackets may have been reduced by the read (it is passed by ref).
        var ret = new AudioStreamPacketDescription [nPackets];
        int p = 0;
        for (int i = 0; i < nPackets; i++) {
            ret [i].StartOffset = Marshal.ReadInt64(b, p);
            ret [i].VariableFramesInPacket = Marshal.ReadInt32(b, p + 8);
            ret [i].DataByteSize = Marshal.ReadInt32(b, p + 12);
            p += 16;
        }
        return(ret);
    } finally {
        // Always release the temporary native allocation, even on early return.
        Marshal.FreeHGlobal(b);
    }
}
// P/Invoke binding for CoreMedia's CMAudioSampleBufferCreateWithPacketDescriptions:
// creates a CMSampleBuffer that wraps audio data in `dataBuffer`, described by
// `formatDescription`, containing `numSamples` samples starting at `sbufPTS`,
// with optional per-packet descriptions (may be null for CBR audio).
// The created buffer handle is returned through `sBufOut`.
//
// NOTE(review): the [DllImport] attribute is presumably on a preceding line
// outside this view — verify it targets the CoreMedia library. Also, `bool`
// marshals as a 4-byte value by default while the native parameter is a C
// Boolean — TODO confirm whether an explicit [MarshalAs] is needed here.
extern static CMSampleBufferError CMAudioSampleBufferCreateWithPacketDescriptions ( IntPtr allocator, IntPtr dataBuffer, bool dataReady, IntPtr makeDataReadyCallback, IntPtr makeDataReadyRefcon, IntPtr formatDescription, int numSamples, CMTime sbufPTS, AudioStreamPacketDescription[] packetDescriptions, out IntPtr sBufOut);
// Reads packet data starting at `inStartingPacket` into the native buffer
// `buffer` and returns the packet descriptions filled in by the read.
// Returns null on a hard error, or on end-of-file when nothing was read;
// an end-of-file status with a nonzero byte count still yields descriptions.
unsafe AudioStreamPacketDescription [] RealReadPacketData(bool useCache, long inStartingPacket, ref int nPackets, IntPtr buffer, ref int count)
{
    var packetDescs = new AudioStreamPacketDescription [nPackets];
    var status = AudioFileReadPacketData(handle, useCache, ref count, packetDescs, inStartingPacket, ref nPackets, buffer);

    if (status == (int)AudioFileError.EndOfFile)
        // EOF with zero bytes read means there was nothing left to return.
        return count == 0 ? null : packetDescs;

    return status == 0 ? packetDescs : null;
}
// Marshals `nPackets` native AudioStreamPacketDescription records out of the
// unmanaged buffer `b` into a managed array. A null pointer yields an empty
// array rather than null.
static internal AudioStreamPacketDescription [] PacketDescriptionFrom(int nPackets, IntPtr b)
{
    if (b == IntPtr.Zero)
        return(new AudioStreamPacketDescription [0]);

    var descriptions = new AudioStreamPacketDescription [nPackets];
    for (int i = 0; i < nPackets; i++) {
        // Each native record is 16 bytes: long at 0, int at 8, int at 12.
        int baseOffset = 16 * i;
        descriptions [i].StartOffset = Marshal.ReadInt64(b, baseOffset);
        descriptions [i].VariableFramesInPacket = Marshal.ReadInt32(b, baseOffset + 8);
        descriptions [i].DataByteSize = Marshal.ReadInt32(b, baseOffset + 12);
    }
    return(descriptions);
}
// Creates a CMSampleBuffer wrapping the audio data in `dataBuffer` (which may
// be null) with the given format, sample count, timestamp, and optional
// per-packet descriptions. On failure, `error` carries the CoreMedia status
// and null is returned; on success the new buffer owns its native handle.
public static CMSampleBuffer CreateWithPacketDescriptions (CMBlockBuffer dataBuffer, CMFormatDescription formatDescription, int samplesCount, CMTime sampleTimestamp, AudioStreamPacketDescription[] packetDescriptions, out CMSampleBufferError error)
{
    if (formatDescription == null)
        throw new ArgumentNullException ("formatDescription");
    if (samplesCount <= 0)
        throw new ArgumentOutOfRangeException ("samplesCount");

    var dataBufferHandle = dataBuffer == null ? IntPtr.Zero : dataBuffer.Handle;

    IntPtr sampleBufferHandle;
    error = CMAudioSampleBufferCreateWithPacketDescriptions (IntPtr.Zero, dataBufferHandle, true, IntPtr.Zero, IntPtr.Zero,
        formatDescription.Handle, samplesCount, sampleTimestamp, packetDescriptions, out sampleBufferHandle);

    if (error != CMSampleBufferError.None)
        return null;

    // `true`: the managed wrapper takes ownership of the native handle.
    return new CMSampleBuffer (sampleBufferHandle, true);
}
// Reads `nPackets` packets starting at `inStartingPacket` into the managed
// byte buffer at `offset` and returns the packet descriptions the read filled
// in. Returns null on a hard error, or on end-of-file when nothing was read.
unsafe AudioStreamPacketDescription [] RealReadFixedPackets(bool useCache, long inStartingPacket, int nPackets, byte [] buffer, int offset, int count)
{
    var packetDescs = new AudioStreamPacketDescription [nPackets];

    // Pin the managed buffer so the native reader can write directly into it.
    fixed(byte *dataPtr = &buffer[offset]) {
        var status = AudioFileReadPacketData(handle, useCache, ref count, packetDescs, inStartingPacket, ref nPackets, (IntPtr)dataPtr);

        if (status == (int)AudioFileError.EndOfFile) {
            // EOF with zero bytes read: nothing to hand back.
            if (count == 0)
                return(null);
        } else if (status != 0) {
            return(null);
        }
    }

    return(packetDescs);
}
// Native AudioConverter input callback: recovers the managed _AudioConverter
// instance from the GCHandle stashed in `inUserData` and forwards the request
// for input packets to its EncoderCallback event handler.
static int complexInputDataProc(
    IntPtr inAudioConverrter,
    ref uint ioNumberDataPackets,
    AudioBufferList ioData,
    ref AudioStreamPacketDescription[] outDataPacketDescription, //AudioStreamPacketDescription**
    IntPtr inUserData
)
{
    // The user-data pointer is a GCHandle wrapping our converter instance.
    var gcHandle = GCHandle.FromIntPtr(inUserData);
    var converter = (_AudioConverter)gcHandle.Target;

    // Invoke the managed handler, if one is registered, with the request details.
    if (converter.EncoderCallback != null) {
        var eventArgs = new _AudioConverterEventArgs(ioNumberDataPackets, ioData, outDataPacketDescription);
        converter.EncoderCallback(converter, eventArgs);
    }

    return 0; // noerror
}
// P/Invoke binding for AudioToolbox's AudioConverterFillComplexBuffer: pulls
// converted audio through `inInputDataProc` (which supplies source packets)
// and fills `outOutputData`. `ioOutputDataPacketSize` is the requested packet
// count on input and the produced count on output; `outPacketDescription`
// receives per-packet layout for VBR output (may be null for CBR).
//
// NOTE(review): the [DllImport] attribute is presumably on a preceding line
// outside this view — verify it targets the AudioToolbox library.
static extern int AudioConverterFillComplexBuffer( IntPtr inAudioConverter, AudioConverterComplexInputDataProc inInputDataProc, IntPtr inInputDataProcUserData, ref uint ioOutputDataPacketSize, AudioBufferList outOutputData, AudioStreamPacketDescription[] outPacketDescription);
// Drives one fill of the converter: asks for `numberFrames` packets of
// converted output into `data`, routing input requests through
// complexInputDataProc. Throws if the converter reports an error or
// produces no packets at all.
public void FillBuffer(AudioBufferList data, uint numberFrames, AudioStreamPacketDescription[] packetDescs)
{
    uint ioPackets = numberFrames;
    int status = AudioConverterFillComplexBuffer(
        _audioConverter,
        complexInputDataProc,
        GCHandle.ToIntPtr(_handle),
        ref ioPackets,
        data,
        packetDescs);

    // Zero packets produced is treated the same as a nonzero status: a failed fill.
    if (status != 0 || ioPackets == 0) {
        throw new InvalidOperationException(String.Format("Error code:{0}", status));
    }
}
// AudioConverter input callback: reads the next batch of source packets from
// the file tracked in `afio` into its staging buffer, advances the file
// position, and hands the data (plus packet descriptions, when requested)
// back to the converter.
static AudioConverterError HandleInputData (ref int numberDataPackets, AudioBuffers data, ref AudioStreamPacketDescription[] dataPacketDescription)
{
    // Never hand the converter more packets than the staging buffer can hold.
    int maxPackets = afio.SrcBufferSize / afio.SrcSizePerPacket;
    if (numberDataPackets > maxPackets)
        numberDataPackets = maxPackets;

    // Read the next run of packets from the source file.
    int outNumBytes;
    var res = afio.SourceFile.ReadPackets (false, out outNumBytes, afio.PacketDescriptions, afio.SrcFilePos, ref numberDataPackets, afio.SrcBuffer);
    if (res != 0) {
        // NOTE(review): this throw propagates out of a converter callback,
        // potentially across a native frame — confirm the binding tolerates it.
        throw new ApplicationException (res.ToString ());
    }

    // Advance the input file packet position by what was actually read.
    afio.SrcFilePos += numberDataPackets;

    // Point the converter's buffer list at our staging buffer.
    data.SetData (0, afio.SrcBuffer, outNumBytes);

    // Supply packet descriptions only when the converter asked for them.
    // afio.PacketDescriptions is whatever was allocated for this source
    // (the original branch assigned it in both the null and non-null cases).
    if (dataPacketDescription != null)
        dataPacketDescription = afio.PacketDescriptions;

    return AudioConverterError.None;
}
// Converts the audio file at `input` to `targetFormat` inside a `containerType`
// container at `output`, down-sampling according to `quality`. Returns true on
// success, false when format negotiation or the conversion loop fails.
//
// Fixes over the previous version:
//  - previously returned true even when the conversion loop hit an error;
//    now returns !error so callers see the failure.
//  - `sourceFile` was leaked on the early GetFormatInfo failure path.
//  - the AllocHGlobal output buffer was leaked if anything between its
//    allocation and FreeHGlobal threw; it is now freed in a finally block.
public static bool Convert(string input, string output, AudioFormatType targetFormat, AudioFileType containerType, Microsoft.Xna.Framework.Content.Pipeline.Audio.ConversionQuality quality)
{
    CFUrl source = CFUrl.FromFile (input);
    CFUrl dest = CFUrl.FromFile (output);
    var dstFormat = new AudioStreamBasicDescription ();
    var sourceFile = AudioFile.Open (source, AudioFilePermission.Read);
    AudioFormatType outputFormat = targetFormat;

    // Get the source data format.
    var srcFormat = (AudioStreamBasicDescription)sourceFile.DataFormat;

    // Pick an output sample rate: Low quality halves the source rate,
    // clamped to an 8 kHz floor either way.
    var outputSampleRate = 0;
    switch (quality) {
    case Microsoft.Xna.Framework.Content.Pipeline.Audio.ConversionQuality.Low:
        outputSampleRate = (int)Math.Max (8000, srcFormat.SampleRate / 2);
        break;
    default:
        outputSampleRate = (int)Math.Max (8000, srcFormat.SampleRate);
        break;
    }

    dstFormat.SampleRate = (outputSampleRate == 0 ? srcFormat.SampleRate : outputSampleRate); // set sample rate

    if (outputFormat == AudioFormatType.LinearPCM) {
        // Output is PCM: build a 16-bit signed integer, packed PCM description.
        dstFormat.Format = outputFormat;
        dstFormat.ChannelsPerFrame = srcFormat.ChannelsPerFrame;
        dstFormat.BitsPerChannel = 16;
        dstFormat.BytesPerPacket = dstFormat.BytesPerFrame = 2 * dstFormat.ChannelsPerFrame;
        dstFormat.FramesPerPacket = 1;
        dstFormat.FormatFlags = AudioFormatFlags.LinearPCMIsPacked | AudioFormatFlags.LinearPCMIsSignedInteger;
    } else {
        // Compressed format: set at least format, sample rate and channel count,
        // then let the AudioFormat API fill out the rest of the description.
        dstFormat.Format = outputFormat;
        dstFormat.ChannelsPerFrame = (outputFormat == AudioFormatType.iLBC ? 1 : srcFormat.ChannelsPerFrame); // for iLBC num channels must be 1

        var fie = AudioStreamBasicDescription.GetFormatInfo (ref dstFormat);
        if (fie != AudioFormatError.None) {
            sourceFile.Dispose ();  // was leaked here before
            return false;
        }
    }

    var converter = AudioConverter.Create (srcFormat, dstFormat);
    converter.InputData += HandleInputData;

    // If the source has a magic cookie, hand it to the converter.
    ReadCookie (sourceFile, converter);

    // Read back the formats the converter actually negotiated.
    srcFormat = converter.CurrentInputStreamDescription;
    dstFormat = converter.CurrentOutputStreamDescription;

    // For AAC, aim for 192 kbit/s; setting may be rejected for some
    // sample-rate/format combinations, which we deliberately ignore.
    if (dstFormat.Format == AudioFormatType.MPEG4AAC) {
        uint outputBitRate = 192000; // 192k
        try {
            converter.EncodeBitRate = outputBitRate;
        } catch {
        }
        outputBitRate = converter.EncodeBitRate;
    }

    // Create the destination file.
    var destinationFile = AudioFile.Create (dest, containerType, dstFormat, AudioFileFlags.EraseFlags);

    // Set up the source buffers and read-state struct shared with HandleInputData.
    afio = new AudioFileIO (32768);
    afio.SourceFile = sourceFile;
    afio.SrcFormat = srcFormat;
    if (srcFormat.BytesPerPacket == 0) {
        // VBR source: use PacketSizeUpperBound, the theoretical maximum packet
        // size, which avoids scanning the whole file the way
        // kAudioFilePropertyMaximumPacketSize might.
        afio.SrcSizePerPacket = sourceFile.PacketSizeUpperBound;
    } else {
        // CBR source: every packet is the same size.
        afio.SrcSizePerPacket = srcFormat.BytesPerPacket;
    }
    afio.NumPacketsPerRead = afio.SrcBufferSize / afio.SrcSizePerPacket;
    afio.PacketDescriptions = new AudioStreamPacketDescription [afio.NumPacketsPerRead];

    // Set up output buffers.
    int outputSizePerPacket = dstFormat.BytesPerPacket; // non-zero iff the output format is CBR
    const int theOutputBufSize = 32768;
    var outputBuffer = Marshal.AllocHGlobal (theOutputBufSize);

    bool error = false;
    try {
        if (outputSizePerPacket == 0) {
            // VBR destination: size packets by the converter's declared maximum.
            outputSizePerPacket = (int)converter.MaximumOutputPacketSize;
        }
        AudioStreamPacketDescription[] outputPacketDescriptions =
            new AudioStreamPacketDescription [theOutputBufSize / outputSizePerPacket];
        int numOutputPackets = theOutputBufSize / outputSizePerPacket;

        // If the destination format has a cookie, write it to the output file.
        WriteCookie (converter, destinationFile);

        // Write the destination channel layout for >2-channel audio.
        if (srcFormat.ChannelsPerFrame > 2) {
            WriteDestinationChannelLayout (converter, sourceFile, destinationFile);
        }

        long totalOutputFrames = 0; // used for debugging
        long outputFilePos = 0;
        AudioBuffers fillBufList = new AudioBuffers (1);

        // Conversion loop: pull converted packets and append them to the output file.
        while (true) {
            fillBufList [0] = new AudioBuffer () {
                NumberChannels = dstFormat.ChannelsPerFrame,
                DataByteSize = theOutputBufSize,
                Data = outputBuffer
            };

            int ioOutputDataPackets = numOutputPackets;
            var fe = converter.FillComplexBuffer (ref ioOutputDataPackets, fillBufList, outputPacketDescriptions);
            // If interrupted mid-conversion, bail out and report failure.
            if (fe != AudioConverterError.None) {
                error = true;
                break;
            }

            if (ioOutputDataPackets == 0) {
                // This is the EOF condition.
                break;
            }

            var inNumBytes = fillBufList [0].DataByteSize;
            var we = destinationFile.WritePackets (false, inNumBytes, outputPacketDescriptions, outputFilePos, ref ioOutputDataPackets, outputBuffer);
            if (we != 0) {
                error = true;
                break;
            }

            // Advance the output file packet position.
            outputFilePos += ioOutputDataPackets;

            if (dstFormat.FramesPerPacket != 0) {
                // Constant frames per packet.
                totalOutputFrames += (ioOutputDataPackets * dstFormat.FramesPerPacket);
            } else {
                // Variable frames per packet: sum the per-packet frame counts.
                for (var i = 0; i < ioOutputDataPackets; ++i)
                    totalOutputFrames += outputPacketDescriptions [i].VariableFramesInPacket;
            }
        }
    } finally {
        // Free the native output buffer even if a helper above threw.
        Marshal.FreeHGlobal (outputBuffer);
    }

    if (!error) {
        // For compressed formats only, write leading/trailing frame info.
        if (dstFormat.BitsPerChannel == 0) {
            WritePacketTableInfo (converter, destinationFile);
        }
        // Write the cookie again — some codecs update it at the end of a conversion.
        WriteCookie (converter, destinationFile);
    }

    converter.Dispose ();
    destinationFile.Dispose ();
    sourceFile.Dispose ();

    // Report failure if the conversion loop errored (previously always returned true).
    return !error;
}