Example #1
        public static unsafe AudioStreamBasicDescription[] GetAvailableStreamDescriptions(AudioFileType fileType, AudioFormatType formatType)
        {
            AudioFileTypeAndFormatID input;

            input.FileType   = fileType;
            input.FormatType = formatType;

            uint size;

            if (AudioFileGetGlobalInfoSize(AudioFileGlobalProperty.AvailableStreamDescriptionsForFormat, (uint)sizeof(AudioFileTypeAndFormatID), ref input, out size) != 0)
            {
                return(null);
            }

            var data = new AudioStreamBasicDescription[size / sizeof(AudioStreamBasicDescription)];

            fixed(AudioStreamBasicDescription *ptr = data)
            {
                var res = AudioFileGetGlobalInfo(AudioFileGlobalProperty.AvailableStreamDescriptionsForFormat, (uint)sizeof(AudioFileTypeAndFormatID), ref input, ref size, ptr);

                if (res != 0)
                {
                    return(null);
                }

                return(data);
            }
        }
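A minimal calling sketch for the method above. It assumes the static lives on AudioFile, as in the MonoMac bindings, and the file type/format pair is only illustrative:

        // Hypothetical caller: list every ASBD variant an AAC .m4a file supports.
        var descs = AudioFile.GetAvailableStreamDescriptions(AudioFileType.M4A, AudioFormatType.MPEG4AAC);

        if (descs != null)
        {
            foreach (var d in descs)
            {
                Console.WriteLine("{0} Hz, {1} ch, flags: {2}", d.SampleRate, d.ChannelsPerFrame, d.FormatFlags);
            }
        }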
Example #2
 public static AudioFormatError GetFormatInfo(ref AudioStreamBasicDescription format)
 {
     unsafe {
         var size = sizeof(AudioStreamBasicDescription);
         return(AudioFormatPropertyNative.AudioFormatGetProperty(AudioFormatProperty.FormatInfo, 0, IntPtr.Zero, ref size, ref format));
     }
 }
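GetFormatInfo is the managed face of kAudioFormatProperty_FormatInfo: fill in the fields you know and let Core Audio complete the rest, as Example #50 below does for its destination format. A sketch with illustrative AAC values:

 var aac = new AudioStreamBasicDescription(AudioFormatType.MPEG4AAC);
 aac.SampleRate = 44100;
 aac.ChannelsPerFrame = 2;

 // Core Audio fills in the remaining codec-dependent fields (FramesPerPacket, etc.)
 if (AudioStreamBasicDescription.GetFormatInfo(ref aac) != AudioFormatError.None)
 {
     Console.WriteLine("format not supported");
 }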
Example #3
        protected override void init(DisposableI parent, Stream stream, int instanceCount, bool looped, Loader.LoadedCallbackMethod loadedCallback)
        {
            base.init(parent, stream, instanceCount, looped, loadedCallback);

            try
            {
                audio = parent.FindParentOrSelfWithException<Audio>();
                audio.UpdateCallback += Update;
                this.data = base.data;
                this.channels = base.channels;
                this.bitDepth = base.bitDepth;

                desc = AudioUnitUtils.AUCanonicalASBD(sampleRate, channels);
                desc.FormatFlags = (AudioFormatFlags)((int)AudioFormatFlags.IsSignedInteger | (int)AudioFormatFlags.IsPacked | (int)AudioFormatFlags.IsNonInterleaved);

                for (int i = 0; i != instanceCount; ++i)
                {
                    inactiveInstances.AddLast(new SoundWAVInstance(this, looped));
                }
            }
            catch (Exception e)
            {
                FailedToLoad = true;
                Loader.AddLoadableException(e);
                Dispose();
                if (loadedCallback != null) loadedCallback(this, false);
                return;
            }

            Loaded = true;
            if (loadedCallback != null) loadedCallback(this, true);
        }
Example #4
 [DllImport (Constants.AudioToolboxLibrary)]
 extern static OSStatus AudioQueueNewInput(
     ref AudioStreamBasicDescription format,
     AudioQueueInputCallback callback,
     IntPtr inUserData,
     IntPtr cfrunLoop_inCallbackRunLoop,
     IntPtr cfstringref_inCallbackRunLoopMode,
     UInt32 inFlags,
     out IntPtr audioQueue);
Example #5
        public void SetOfflineRenderFormat(AudioStreamBasicDescription desc, AudioChannelLayout layout)
        {
            int size;
            var h = AudioFile.AudioChannelLayoutToBlock(layout, out size);

            AudioQueueSetOfflineRenderFormat(handle, ref desc, h);
            Marshal.FreeHGlobal(h);
        }
Example #6
		public static ExtAudioFile GetExtAudioFile (NSUrl url, out AudioStreamBasicDescription audioDescription)
		{
			// Note that we cannot pass an NSUrl here; ExtAudioFile.OpenUrl expects a CFUrl:
			//ExtAudioFile ext = ExtAudioFile.OpenUrl(url);

			// Basic Descriptions
			AudioStreamBasicDescription fileFormat;
			AudioStreamBasicDescription outputFormat;

			// So now we create a CFUrl
			CFUrl curl = CFUrl.FromFile (url.Path);

			// Open the file
			ExtAudioFile ext = ExtAudioFile.OpenUrl (curl);

			// Get the audio format
			fileFormat = ext.FileDataFormat;

			// Don't know how to handle sounds with more than 2 channels (i.e. more than stereo).
			// Remember that OpenAL sound effects must be mono to be spatialized anyway.
			if (fileFormat.ChannelsPerFrame > 2) {
#if DEBUG				
				Console.WriteLine ("Unsupported Format: Channel count [0] is greater than stereo.", fileFormat.ChannelsPerFrame);
#endif
                audioDescription = new AudioStreamBasicDescription();
				return null;
			}

			// The output format must be linear PCM because that's the only type OpenAL knows how to deal with.
			// Set the client format to 16 bit signed integer (native-endian) data because that is the most
			// optimal format on iPhone/iPod Touch hardware.
			// Maintain the channel count and sample rate of the original source format.
			outputFormat = new AudioStreamBasicDescription ();	// Create our output format description to be converted to
			outputFormat.SampleRate = fileFormat.SampleRate;	// Preserve the original sample rate
			outputFormat.ChannelsPerFrame = fileFormat.ChannelsPerFrame;	// Preserve the original number of channels
			outputFormat.Format = AudioFormatType.LinearPCM;	// We want Linear PCM

			// IsBigEndian is causing some problems with distorted sounds on MacOSX
//			outputFormat.FormatFlags = AudioFormatFlags.IsBigEndian
//							| AudioFormatFlags.IsPacked
//							| AudioFormatFlags.IsSignedInteger;
			
			outputFormat.FormatFlags = AudioFormatFlags.IsPacked
							| AudioFormatFlags.IsSignedInteger;
			outputFormat.FramesPerPacket = 1;	// We know for linear PCM, the definition is 1 frame per packet
			outputFormat.BitsPerChannel = 16;	// We know we want 16-bit
			outputFormat.BytesPerPacket = 2 * outputFormat.ChannelsPerFrame;	// We know we are using 16-bit, so 2-bytes per channel per frame
			outputFormat.BytesPerFrame = 2 * outputFormat.ChannelsPerFrame;		// For PCM, since 1 frame is 1 packet, it is the same as mBytesPerPacket

			// Set the desired client (output) data format
			ext.ClientDataFormat = outputFormat;

			// Copy the output format to the audio description that was passed in so the
			// info will be returned to the user.
			audioDescription = outputFormat;

			return ext;
		}
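A sketch of calling the helper above; the file name is illustrative and NSUrl.FromFilename stands in for however the caller obtains its URL:

		AudioStreamBasicDescription clientFormat;
		ExtAudioFile ext = GetExtAudioFile (NSUrl.FromFilename ("sound.wav"), out clientFormat);
		if (ext != null) {
			Console.WriteLine ("client format: {0} Hz, {1} channel(s)", clientFormat.SampleRate, clientFormat.ChannelsPerFrame);
			ext.Dispose ();
		}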
Example #7
        public static AudioFile Create(string url, AudioFileType fileType, AudioStreamBasicDescription format, AudioFileFlags inFlags)
        {
            if (url == null)
            {
                throw new ArgumentNullException("url");
            }

            using (CFUrl cfurl = CFUrl.FromUrlString(url, null))
                return(Create(cfurl, fileType, format, inFlags));
        }
Example #8
        public AudioQueueStatus SetOfflineRenderFormat(AudioStreamBasicDescription desc, AudioChannelLayout layout)
        {
            int size;
            var h = layout == null ? IntPtr.Zero : AudioChannelLayout.ToBlock(layout, out size);

            try {
                return(AudioQueueSetOfflineRenderFormat(handle, ref desc, h));
            } finally {
                Marshal.FreeHGlobal(h);
            }
        }
Example #9
        public static AudioConverter Create(AudioStreamBasicDescription sourceFormat, AudioStreamBasicDescription destinationFormat, out AudioConverterError error)
        {
            IntPtr ptr = new IntPtr();

            error = AudioConverterNew(ref sourceFormat, ref destinationFormat, ref ptr);
            if (error != AudioConverterError.None)
            {
                return(null);
            }

            return(new AudioConverter(ptr, true));
        }
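A usage sketch for Create, pairing it with CreateLinearPCM and GetFormatInfo from the neighboring examples; the AAC destination values are illustrative:

        var src = AudioStreamBasicDescription.CreateLinearPCM(44100, 2, 16);

        var dst = new AudioStreamBasicDescription(AudioFormatType.MPEG4AAC);
        dst.SampleRate = 44100;
        dst.ChannelsPerFrame = 2;
        AudioStreamBasicDescription.GetFormatInfo(ref dst); // let Core Audio complete the AAC fields

        AudioConverterError err;
        using (var converter = AudioConverter.Create(src, dst, out err))
        {
            if (converter == null)
            {
                Console.WriteLine("converter creation failed: {0}", err);
            }
        }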
Example #10
        public static AudioStreamBasicDescription CreateLinearPCM(double sampleRate = 44100, uint channelsPerFrame = 2, uint bitsPerChannel = 16)
        {
            var desc = new AudioStreamBasicDescription(AudioFormatType.LinearPCM);

            desc.SampleRate       = sampleRate;
            desc.ChannelsPerFrame = (int)channelsPerFrame;
            desc.BitsPerChannel   = (int)bitsPerChannel;
            desc.BytesPerPacket   = desc.BytesPerFrame = (int)channelsPerFrame * sizeof(Int16);
            desc.FramesPerPacket  = 1;
            desc.FormatFlags      = AudioFormatFlags.IsBigEndian | AudioFormatFlags.IsSignedInteger | AudioFormatFlags.IsPacked;

            return(desc);
        }
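A sketch feeding the description into an output queue (the convenience constructor appears in Example #35); buffer handling is only indicated by the comment:

        var pcm = AudioStreamBasicDescription.CreateLinearPCM(44100, 2, 16);

        using (var queue = new OutputAudioQueue(pcm))
        {
            // AllocateBuffer/EnqueueBuffer, then queue.Start(), would follow here
        }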
Example #11
        protected void Initialize(AudioFileType inFileType, AudioStreamBasicDescription format)
        {
            IntPtr h;

            gch = GCHandle.Alloc(this);
            var code = AudioFileInitializeWithCallbacks(GCHandle.ToIntPtr(gch), dRead, dWrite, dGetSize, dSetSize, inFileType, ref format, 0, out h);

            if (code == 0)
            {
                handle = h;
                return;
            }
            throw new Exception(String.Format("Unable to create AudioSource, code: 0x{0:x}", code));
        }
Example #12
        public void SetAudioFormat(MonoMac.AudioToolbox.AudioStreamBasicDescription audioFormat, AudioUnitScopeType scope, uint audioUnitElement = 0)
        {
            int err = AudioUnitSetProperty(handle,
                                           AudioUnitPropertyIDType.StreamFormat,
                                           scope,
                                           audioUnitElement,
                                           ref audioFormat,
                                           (uint)Marshal.SizeOf(audioFormat));

            if (err != 0)
            {
                throw new AudioUnitException(err);
            }
        }
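A sketch that pushes a canonical stream format into a unit and reads it back with GetAudioFormat (Example #33); audioUnit is a hypothetical, already-constructed instance:

        var fmt = AudioUnitUtils.AUCanonicalASBD(44100, 2);

        audioUnit.SetAudioFormat(fmt, AudioUnitScopeType.Input);
        var confirmed = audioUnit.GetAudioFormat(AudioUnitScopeType.Input, 0);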
Example #13
        public static AudioFile Create(NSUrl url, AudioFileType fileType, AudioStreamBasicDescription format, AudioFileFlags inFlags)
        {
            if (url == null)
            {
                throw new ArgumentNullException("url");
            }

            IntPtr h;

            if (AudioFileCreateWithURL(url.Handle, fileType, ref format, inFlags, out h) == 0)
            {
                return(new AudioFile(h));
            }
            return(null);
        }
Example #14
        public static AudioConverter Create(AudioStreamBasicDescription sourceFormat, AudioStreamBasicDescription destinationFormat, AudioClassDescription[] descriptions)
        {
            if (descriptions == null)
            {
                throw new ArgumentNullException("descriptions");
            }

            IntPtr ptr = new IntPtr();
            var    res = AudioConverterNewSpecific(ref sourceFormat, ref destinationFormat, descriptions.Length, ref descriptions, ref ptr);

            if (res != AudioConverterError.None)
            {
                return(null);
            }

            return(new AudioConverter(ptr, true));
        }
Example #15
        public OutputAudioQueue(AudioStreamBasicDescription desc, CFRunLoop runLoop, CFString runMode)
        {
            IntPtr   h;
            GCHandle gch = GCHandle.Alloc(this);

            var code = AudioQueueNewOutput(ref desc, dOutputCallback, GCHandle.ToIntPtr(gch),
                                           runLoop == null ? IntPtr.Zero : runLoop.Handle,
                                           runMode == null ? IntPtr.Zero : runMode.Handle, 0, out h);

            if (code != 0)
            {
                gch.Free();
                throw new AudioQueueException(code);
            }

            this.gch = gch;
            handle   = h;
        }
Example #16
        public unsafe static int[] GetAvailableEncodeNumberChannels(AudioStreamBasicDescription format)
        {
            uint size;

            if (AudioFormatPropertyNative.AudioFormatGetPropertyInfo(AudioFormatProperty.AvailableEncodeNumberChannels, sizeof(AudioStreamBasicDescription), ref format, out size) != 0)
            {
                return(null);
            }

            var data = new int[size / sizeof(int)];

            fixed(int *ptr = data)
            {
                var res = AudioFormatPropertyNative.AudioFormatGetProperty(AudioFormatProperty.AvailableEncodeNumberChannels, sizeof(AudioStreamBasicDescription), ref format, ref size, ptr);

                if (res != 0)
                {
                    return(null);
                }

                return(data);
            }
        }
Example #17
        public InputAudioQueue(AudioStreamBasicDescription desc, CFRunLoop runLoop, string runMode)
        {
            IntPtr   h;
            GCHandle mygch = GCHandle.Alloc(this);
            CFString s     = runMode == null ? null : new CFString(runMode);

            var code = AudioQueueNewInput(ref desc, dInputCallback, GCHandle.ToIntPtr(mygch),
                                          runLoop == null ? IntPtr.Zero : runLoop.Handle,
                                          s == null ? IntPtr.Zero : s.Handle, 0, out h);

            if (s != null)
            {
                s.Dispose();
            }

            if (code == 0)
            {
                handle = h;
                gch    = mygch;
                return;
            }
            gch.Free();
            throw new AudioQueueException(code);
        }
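A recording-side sketch using the convenience constructor from Example #41; callback wiring and buffer allocation are only indicated in the comment:

        var mono = AudioStreamBasicDescription.CreateLinearPCM(44100, 1, 16);

        using (var input = new InputAudioQueue(mono))
        {
            // subscribe InputCompleted, allocate and enqueue buffers, then input.Start()
        }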
Example #18
 public static AudioStreamBasicDescription AUCanonicalASBD(double sampleRate, int channel)
 {
     // setting AudioStreamBasicDescription
     int AudioUnitSampleTypeSize =
     #if !MONOMAC
     (MonoTouch.ObjCRuntime.Runtime.Arch == MonoTouch.ObjCRuntime.Arch.SIMULATOR) ? sizeof(float) : sizeof(int);
     #else
     sizeof (float);
     #endif
     AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription()
     {
         SampleRate = sampleRate,
         Format = AudioFormatType.LinearPCM,
         //kAudioFormatFlagsAudioUnitCanonical = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved | (SampleFractionBits << kLinearPCMFormatFlagsSampleFractionShift),
         FormatFlags      = (AudioFormatFlags)((int)AudioFormatFlags.IsSignedInteger | (int)AudioFormatFlags.IsPacked | (int)AudioFormatFlags.IsNonInterleaved | (int)(SampleFractionBits << (int)AudioFormatFlags.LinearPCMSampleFractionShift)),
         ChannelsPerFrame = channel,
         BytesPerPacket   = AudioUnitSampleTypeSize,
         BytesPerFrame    = AudioUnitSampleTypeSize,
         FramesPerPacket  = 1,
         BitsPerChannel   = 8 * AudioUnitSampleTypeSize,
         Reserved = 0
     };
     return audioFormat;
 }
Example #19
0
 public AudioSource(AudioFileType inFileType, AudioStreamBasicDescription format) : base(true)
 {
     Initialize(inFileType, format);
 }
Example #20
 [DllImport (Constants.AudioToolboxLibrary)]
 extern static OSStatus AudioFileInitializeWithCallbacks(
     IntPtr inClientData, ReadProc inReadFunc, WriteProc inWriteFunc, GetSizeProc inGetSizeFunc, SetSizeProc inSetSizeFunc,
     AudioFileType inFileType, ref AudioStreamBasicDescription format, uint flags, out IntPtr id);
Example #21
		public static unsafe AudioStreamBasicDescription[] GetAvailableStreamDescriptions (AudioFileType fileType, AudioFormatType formatType)
		{
			AudioFileTypeAndFormatID input;
			input.FileType = fileType;
			input.FormatType = formatType;

			uint size;
			if (AudioFileGetGlobalInfoSize (AudioFileGlobalProperty.AvailableStreamDescriptionsForFormat, (uint)sizeof (AudioFileTypeAndFormatID), ref input, out size) != 0)
				return null;

			var data = new AudioStreamBasicDescription[size / sizeof (AudioStreamBasicDescription)];
			fixed (AudioStreamBasicDescription* ptr = data) {
				var res = AudioFileGetGlobalInfo (AudioFileGlobalProperty.AvailableStreamDescriptionsForFormat, (uint)sizeof (AudioFileTypeAndFormatID), ref input, ref size, ptr);
				if (res != 0)
					return null;

				return data;
			}
		}
Example #22
		[DllImport (Constants.AudioToolboxLibrary)]
		unsafe extern static int AudioFileGetGlobalInfo (AudioFileGlobalProperty propertyID, uint size, ref AudioFileTypeAndFormatID inSpecifier, ref uint ioDataSize, AudioStreamBasicDescription* outPropertyData);
Example #23
		public static AudioFormatError GetFormatInfo (ref AudioStreamBasicDescription format)
		{
			unsafe {
				var size = sizeof (AudioStreamBasicDescription);
				return AudioFormatPropertyNative.AudioFormatGetProperty (AudioFormatProperty.FormatInfo, 0, IntPtr.Zero, ref size, ref format);
			}
		}
Example #24
        public static AudioConverter Create(AudioStreamBasicDescription sourceFormat, AudioStreamBasicDescription destinationFormat)
        {
            AudioConverterError res;

            return(Create(sourceFormat, destinationFormat, out res));
        }
Example #25
		public static AudioStreamBasicDescription CreateLinearPCM (double sampleRate = 44100, uint channelsPerFrame = 2, uint bitsPerChannel = 16, bool bigEndian = false)
		{
			var desc = new AudioStreamBasicDescription (AudioFormatType.LinearPCM);
			desc.SampleRate = sampleRate;
			desc.ChannelsPerFrame = (int) channelsPerFrame;
			desc.BitsPerChannel = (int) bitsPerChannel;
			desc.BytesPerPacket = desc.BytesPerFrame = (int) channelsPerFrame * sizeof (Int16);
			desc.FramesPerPacket = 1;
			desc.FormatFlags = AudioFormatFlags.IsSignedInteger | AudioFormatFlags.IsPacked;
			if (bigEndian)
				desc.FormatFlags |= AudioFormatFlags.IsBigEndian;

			return desc;
		}
Example #26
		public static bool GetDataFromExtAudioFile (ExtAudioFile ext, AudioStreamBasicDescription outputFormat, int maxBufferSize,
		                                       byte[] dataBuffer, out int dataBufferSize, out ALFormat format, out double sampleRate)
		{
			int errorStatus = 0;
			int bufferSizeInFrames = 0;
			dataBufferSize = 0;
			format = ALFormat.Mono16;
			sampleRate = 0;
			/* Compute how many frames will fit into our max buffer size */
			bufferSizeInFrames = maxBufferSize / outputFormat.BytesPerFrame;

			if (dataBuffer != null) {
				MutableAudioBufferList audioBufferList = new MutableAudioBufferList (1, maxBufferSize);

				audioBufferList.Buffers [0].DataByteSize = maxBufferSize;
				audioBufferList.Buffers [0].NumberChannels = outputFormat.ChannelsPerFrame;



				// This is a hack so if there is a problem speak to kjpou1 -Kenneth
				// the cleanest way is to copy the buffer to the pointer already allocated
				// but what we are going to do is replace the pointer with our own and restore it later
				//
				GCHandle meBePinned = GCHandle.Alloc (dataBuffer, GCHandleType.Pinned);
				IntPtr meBePointer = meBePinned.AddrOfPinnedObject ();

				// Let's not use copy for right now while we test this.  For very large files this
				//  might show some stutter in the sound loading
				//Marshal.Copy(dataBuffer, 0, audioBufferList.Buffers[0].Data, maxBufferSize);
				IntPtr savedDataPtr = audioBufferList.Buffers [0].Data;
				audioBufferList.Buffers [0].Data = meBePointer;


				try {
					// Read the data into an AudioBufferList
					// errorStatus here returns back the amount of information read
					errorStatus = ext.Read (bufferSizeInFrames, audioBufferList);
					if (errorStatus >= 0) {
						/* Success */
						/* Note: 0 frames read is a legitimate value meaning we are at EOF. */

						/* ExtAudioFile.Read returns the number of frames actually read.
						 * Need to convert back to bytes.
						 */
						dataBufferSize = errorStatus * outputFormat.BytesPerFrame;

						// Now we set our format
						format = outputFormat.ChannelsPerFrame > 1 ? ALFormat.Stereo16 : ALFormat.Mono16;

						sampleRate = outputFormat.SampleRate;
					} else {
#if DEBUG						
						Console.WriteLine ("ExtAudioFile.Read failed, Error = " + errorStatus);
#endif
						return false;
					}
				} catch (Exception exc) {
#if DEBUG
					Console.WriteLine ("ExtAudioFile.Read failed: " + exc.Message);
#endif
					return false;
				} finally {
					// Don't forget to free our dataBuffer memory pointer that was pinned above
					meBePinned.Free ();
					// and restore what was allocated to begin with
					audioBufferList.Buffers[0].Data = savedDataPtr;
				}


			}
			return true;
		}
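Tying Examples #6 and #26 together, a sketch that pulls a whole file into an OpenAL buffer. It assumes OpenTK's AL bindings are in scope; the file name and sizing are illustrative:

		AudioStreamBasicDescription fmt;
		ExtAudioFile ext = GetExtAudioFile (NSUrl.FromFilename ("effect.wav"), out fmt);

		int maxBytes = (int)ext.FileLengthFrames * fmt.BytesPerFrame;
		byte[] buffer = new byte[maxBytes];

		int actualBytes;
		ALFormat alFormat;
		double rate;
		if (GetDataFromExtAudioFile (ext, fmt, maxBytes, buffer, out actualBytes, out alFormat, out rate)) {
			int bufferId = AL.GenBuffer ();
			AL.BufferData (bufferId, alFormat, buffer, actualBytes, (int)rate);
		}
		ext.Dispose ();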
Example #27
 [DllImport (Constants.AudioToolboxLibrary)]
 static extern AudioConverterError AudioConverterNewSpecific(ref AudioStreamBasicDescription inSourceFormat, ref AudioStreamBasicDescription inDestinationFormat,
                                                             int inNumberClassDescriptions, ref AudioClassDescription[] inClassDescriptions, ref IntPtr outAudioConverter);
Example #28
 [DllImport (Constants.AudioToolboxLibrary)]
 static extern AudioConverterError AudioConverterNew(ref AudioStreamBasicDescription inSourceFormat, ref AudioStreamBasicDescription inDestinationFormat, ref IntPtr outAudioConverter);
Example #29
 [DllImport (Constants.AudioUnitLibrary)]
 static extern int AudioUnitGetProperty(IntPtr inUnit,
                                        [MarshalAs(UnmanagedType.U4)] AudioUnitPropertyIDType inID,
                                        [MarshalAs(UnmanagedType.U4)] AudioUnitScopeType inScope,
                                        [MarshalAs(UnmanagedType.U4)] uint inElement,
                                        ref MonoMac.AudioToolbox.AudioStreamBasicDescription outData,
                                        ref uint ioDataSize);
Example #30
 public OutputAudioQueue(AudioStreamBasicDescription desc, CFRunLoop runLoop, string runMode)
     : this(desc, runLoop, runMode == null ? null : new CFString(runMode))
 {
 }
Example #31
 [DllImport (Constants.AudioToolboxLibrary)]
 extern static OSStatus AudioFileCreateWithURL(IntPtr cfurlref_infile, AudioFileType inFileType, ref AudioStreamBasicDescription inFormat, AudioFileFlags inFlags, out AudioFileID file_id);
Example #32
 [DllImport (Constants.AudioToolboxLibrary)]
 extern static AudioQueueStatus AudioQueueSetOfflineRenderFormat(IntPtr aq, ref AudioStreamBasicDescription format, IntPtr layout);
Example #33
		public MonoMac.AudioToolbox.AudioStreamBasicDescription GetAudioFormat(AudioUnitScopeType scope, uint audioUnitElement)
		{
			MonoMac.AudioToolbox.AudioStreamBasicDescription audioFormat = new AudioStreamBasicDescription();
			uint size = (uint)Marshal.SizeOf(audioFormat);
			
			int err = AudioUnitGetProperty(handle,
						       AudioUnitPropertyIDType.StreamFormat,
						       scope,
						       audioUnitElement,
						       ref audioFormat,
						       ref size);
			if (err != 0)
				throw new AudioUnitException (err);
			
			return audioFormat;
		}
Example #34
		public unsafe static AudioChannelLayoutTag[] GetAvailableEncodeChannelLayoutTags (AudioStreamBasicDescription format)
		{
			var type_size = sizeof (AudioStreamBasicDescription);
			uint size;
			if (AudioFormatPropertyNative.AudioFormatGetPropertyInfo (AudioFormatProperty.AvailableEncodeChannelLayoutTags, type_size, ref format, out size) != 0)
				return null;

			var data = new AudioChannelLayoutTag[size / sizeof (AudioChannelLayoutTag)];
			fixed (AudioChannelLayoutTag* ptr = data) {
				var res = AudioFormatPropertyNative.AudioFormatGetProperty (AudioFormatProperty.AvailableEncodeChannelLayoutTags, type_size, ref format, ref size, (int*)ptr);
				if (res != 0)
					return null;

				return data;
			}
		}
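A query sketch for the method above; the containing class is assumed to be in scope, and the AAC description is illustrative:

		var fmt = new AudioStreamBasicDescription (AudioFormatType.MPEG4AAC);
		fmt.SampleRate = 44100;
		fmt.ChannelsPerFrame = 2;

		AudioChannelLayoutTag[] tags = GetAvailableEncodeChannelLayoutTags (fmt);
		if (tags != null) {
			foreach (var tag in tags)
				Console.WriteLine (tag);
		}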
Example #35
 public OutputAudioQueue(AudioStreamBasicDescription desc) : this(desc, null, (CFString)null)
 {
 }
Example #36
        public unsafe static AudioChannelLayoutTag[] GetAvailableEncodeChannelLayoutTags(AudioStreamBasicDescription format)
        {
            var  type_size = sizeof(AudioStreamBasicDescription);
            uint size;

            if (AudioFormatPropertyNative.AudioFormatGetPropertyInfo(AudioFormatProperty.AvailableEncodeChannelLayoutTags, type_size, ref format, out size) != 0)
            {
                return(null);
            }

            var data = new AudioChannelLayoutTag[size / sizeof(AudioChannelLayoutTag)];

            fixed(AudioChannelLayoutTag *ptr = data)
            {
                var res = AudioFormatPropertyNative.AudioFormatGetProperty(AudioFormatProperty.AvailableEncodeChannelLayoutTags, type_size, ref format, ref size, (int *)ptr);

                if (res != 0)
                {
                    return(null);
                }

                return(data);
            }
        }
Example #37
 [DllImport (Constants.AudioToolboxLibrary)]
 static extern int AudioConverterNew(
     ref MonoMac.AudioToolbox.AudioStreamBasicDescription inSourceFormat,
     ref MonoMac.AudioToolbox.AudioStreamBasicDescription inDestinationFormat,
     IntPtr outAudioConverter);
Example #38
        public static AudioFormatError GetFormatInfo(ref AudioStreamBasicDescription format)
        {
            var size = Marshal.SizeOf(format);

            return(AudioFormatPropertyNative.AudioFormatGetProperty(AudioFormatProperty.FormatInfo, 0, IntPtr.Zero, ref size, ref format));
        }
Example #39
		public static bool GetDataFromExtAudioFile (ExtAudioFile ext, AudioStreamBasicDescription outputFormat, int maxBufferSize,
		                                       byte[] dataBuffer, out int dataBufferSize, out ALFormat format, out double sampleRate)
		{
			uint errorStatus = 0;
			uint bufferSizeInFrames = 0;
			dataBufferSize = 0;
			format = ALFormat.Mono16;
			sampleRate = 0;
			/* Compute how many frames will fit into our max buffer size */
			bufferSizeInFrames = (uint)(maxBufferSize / outputFormat.BytesPerFrame);

			if (dataBuffer != null) {
				var audioBufferList = new AudioBuffers(maxBufferSize);

				// This is a hack so if there is a problem speak to kjpou1 -Kenneth
				// the cleanest way is to copy the buffer to the pointer already allocated
				// but what we are going to do is replace the pointer with our own and restore it later
				//
				GCHandle meBePinned = GCHandle.Alloc (dataBuffer, GCHandleType.Pinned);
				IntPtr meBePointer = meBePinned.AddrOfPinnedObject ();

				audioBufferList.SetData (0, meBePointer);

				try {
					// Read the data into an AudioBufferList
					// errorStatus returns the number of frames actually read
					ExtAudioFileError extAudioFileError = ExtAudioFileError.OK;
					errorStatus = ext.Read (bufferSizeInFrames, audioBufferList, out extAudioFileError);
					if (extAudioFileError == ExtAudioFileError.OK) {
						/* Success */
						/* Note: 0 frames read is a legitimate value meaning we are at EOF. */

						/* ExtAudioFile.Read returns the number of frames actually read.
						 * Need to convert back to bytes.
						 */
						dataBufferSize = (int)errorStatus * outputFormat.BytesPerFrame;

						// Now we set our format
						format = outputFormat.ChannelsPerFrame > 1 ? ALFormat.Stereo16 : ALFormat.Mono16;

						sampleRate = outputFormat.SampleRate;
					} else {
#if DEBUG						
						Console.WriteLine ("ExtAudioFile.Read failed, Error = " + errorStatus);
#endif
						return false;
					}
				} catch (Exception exc) {
#if DEBUG
					Console.WriteLine ("ExtAudioFile.Read failed: " + exc.Message);
#endif
					return false;
				} finally {
					// Don't forget to free our dataBuffer memory pointer that was pinned above
					meBePinned.Free ();
					// and restore what was allocated to begin with
					audioBufferList.SetData (0, IntPtr.Zero);
				}


			}
			return true;
		}
Example #40
		public unsafe static int[] GetAvailableEncodeNumberChannels (AudioStreamBasicDescription format)
		{
			uint size;
			if (AudioFormatPropertyNative.AudioFormatGetPropertyInfo (AudioFormatProperty.AvailableEncodeNumberChannels, sizeof (AudioStreamBasicDescription), ref format, out size) != 0)
				return null;

			var data = new int[size / sizeof (int)];
			fixed (int* ptr = data) {
				var res = AudioFormatPropertyNative.AudioFormatGetProperty (AudioFormatProperty.AvailableEncodeNumberChannels, sizeof (AudioStreamBasicDescription), ref format, ref size, ptr);
				if (res != 0)
					return null;

				return data;
			}
		}
Example #41
 public InputAudioQueue(AudioStreamBasicDescription desc) : this(desc, null, null)
 {
 }
Example #42
 [DllImport (Constants.AudioToolboxLibrary)]
 static extern int ExtAudioFileCreateWithUrl(IntPtr inURL,
     [MarshalAs(UnmanagedType.U4)] AudioFileType inFileType,
     ref AudioStreamBasicDescription inStreamDesc,
     IntPtr inChannelLayout, //AudioChannelLayout inChannelLayout, AudioChannelLayout results in compilation error (error code 134.)
     UInt32 flags,
     IntPtr outExtAudioFile);
Example #43
 [DllImport (Constants.AudioToolboxLibrary)]
 extern static OSStatus AudioQueueNewOutput(ref AudioStreamBasicDescription format, AudioQueueOutputCallback callback,
                                            IntPtr userData, IntPtr cfrunLoop_callbackRunloop, IntPtr cfstr_runMode,
                                            uint flags, out IntPtr audioQueue);
Example #44
 public static _AudioConverter CreateInstance(AudioStreamBasicDescription srcFormat, AudioStreamBasicDescription destFormat)            
 {
     _AudioConverter inst = new _AudioConverter();
     int err_code;
     unsafe {
         IntPtr ptr = inst._audioConverter;
         IntPtr pptr = (IntPtr)(&ptr);
         err_code = AudioConverterNew(ref srcFormat, ref destFormat, pptr);
         inst._audioConverter = ptr; // keep the handle the native call wrote through pptr
     }
     if (err_code != 0)
     {
         throw new ArgumentException(String.Format("Error code:{0}", err_code));
     }
     return inst;
 }
Example #45
        [DllImport (Constants.AudioToolboxLibrary)]
        public static unsafe extern AudioFormatError AudioFormatGetProperty(AudioFormatProperty propertyID, int inSpecifierSize, ref AudioStreamBasicDescription inSpecifier,
			ref uint ioDataSize, int* outPropertyData);
Example #46
        [DllImport (Constants.AudioToolboxLibrary)]
        public static extern AudioFormatError AudioFormatGetProperty(AudioFormatProperty inPropertyID, int inSpecifierSize, IntPtr inSpecifier, ref int ioPropertyDataSize,
			ref AudioStreamBasicDescription outPropertyData);
Example #47
        public static ExtAudioFile CreateWithUrl(MonoMac.CoreFoundation.CFUrl url,
            AudioFileType fileType, 
            AudioStreamBasicDescription inStreamDesc, 
            //AudioChannelLayout channelLayout, 
            AudioFileFlags flag)
        {
            int err;
            IntPtr ptr = new IntPtr();
            unsafe {
                err = ExtAudioFileCreateWithUrl(url.Handle, fileType, ref inStreamDesc, IntPtr.Zero, (uint)flag,
                    (IntPtr)(&ptr));
            }
            if (err != 0)
            {
                throw new ArgumentException(String.Format("Error code:{0}", err));
            }
            if (ptr == IntPtr.Zero)
            {
                throw new InvalidOperationException("Can not get object instance");
            }

            return new ExtAudioFile(ptr);
        }
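A sketch of the wrapper above creating a writable 16-bit PCM .caf; the path is illustrative:

            var pcm = AudioStreamBasicDescription.CreateLinearPCM(44100, 2, 16);
            var url = MonoMac.CoreFoundation.CFUrl.FromFile("out.caf");
            var eaf = ExtAudioFile.CreateWithUrl(url, AudioFileType.CAF, pcm, AudioFileFlags.EraseFlags);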
Example #48
        [DllImport (Constants.AudioToolboxLibrary)]
        public static extern AudioFormatError AudioFormatGetPropertyInfo(AudioFormatProperty propertyID, int inSpecifierSize, ref AudioStreamBasicDescription inSpecifier,
			out uint outPropertyDataSize);
Example #49
 [DllImport (Constants.AudioToolboxLibrary)]
 static extern int ExtAudioFileSetProperty(
     IntPtr inExtAudioFile,
     ExtAudioFilePropertyIDType inPropertyID,
     uint ioPropertyDataSize,
     ref AudioStreamBasicDescription outPropertyData);
Example #50
		public static bool Convert(string input, string output, AudioFormatType targetFormat, AudioFileType containerType, Microsoft.Xna.Framework.Content.Pipeline.Audio.ConversionQuality quality) {
			CFUrl source = CFUrl.FromFile (input);
			CFUrl dest = CFUrl.FromFile (output);
			var dstFormat = new AudioStreamBasicDescription ();
			var sourceFile = AudioFile.Open (source, AudioFilePermission.Read);
			AudioFormatType outputFormat = targetFormat;
			// get the source data format
			var srcFormat = (AudioStreamBasicDescription)sourceFile.DataFormat;
			var outputSampleRate = 0;
			switch (quality)
			{
			case Microsoft.Xna.Framework.Content.Pipeline.Audio.ConversionQuality.Low:
				outputSampleRate = (int)Math.Max (8000, srcFormat.SampleRate / 2);
				break;
			default:
				outputSampleRate = (int)Math.Max (8000, srcFormat.SampleRate);
				break;
			}

			dstFormat.SampleRate = (outputSampleRate == 0 ? srcFormat.SampleRate : outputSampleRate); // set sample rate
			if (outputFormat == AudioFormatType.LinearPCM) {
				// if the output format is PCM, create a 16-bit int PCM file format description as an example
				dstFormat.Format = outputFormat;
				dstFormat.ChannelsPerFrame = srcFormat.ChannelsPerFrame;
				dstFormat.BitsPerChannel = 16;
				dstFormat.BytesPerPacket = dstFormat.BytesPerFrame = 2 * dstFormat.ChannelsPerFrame;
				dstFormat.FramesPerPacket = 1;
				dstFormat.FormatFlags = AudioFormatFlags.LinearPCMIsPacked | AudioFormatFlags.LinearPCMIsSignedInteger;
			} else {
				// compressed format - need to set at least format, sample rate and channel fields for kAudioFormatProperty_FormatInfo
				dstFormat.Format = outputFormat;
				dstFormat.ChannelsPerFrame = (outputFormat == AudioFormatType.iLBC ? 1 : srcFormat.ChannelsPerFrame); // for iLBC num channels must be 1

				// use AudioFormat API to fill out the rest of the description
				var fie = AudioStreamBasicDescription.GetFormatInfo (ref dstFormat);
				if (fie != AudioFormatError.None) {
					return false;
				}
			}

			var converter = AudioConverter.Create (srcFormat, dstFormat);
			converter.InputData += HandleInputData;

			// if the source has a cookie, get it and set it on the Audio Converter
			ReadCookie (sourceFile, converter);

			// get the actual formats back from the Audio Converter
			srcFormat = converter.CurrentInputStreamDescription;
			dstFormat = converter.CurrentOutputStreamDescription;

			// if encoding to AAC set the bitrate to 192k which is a nice value for this demo
			// kAudioConverterEncodeBitRate is a UInt32 value containing the number of bits per second to aim for when encoding data
			if (dstFormat.Format == AudioFormatType.MPEG4AAC) {
				uint outputBitRate = 192000; // 192k

				// ignore errors as setting may be invalid depending on format specifics such as samplerate
				try {
					converter.EncodeBitRate = outputBitRate;
				} catch {
				}

				// get it back and print it out
				outputBitRate = converter.EncodeBitRate;
			}

			// create the destination file 
			var destinationFile = AudioFile.Create (dest, containerType, dstFormat, AudioFileFlags.EraseFlags);

			// set up source buffers and data proc info struct
			afio = new AudioFileIO (32768);
			afio.SourceFile = sourceFile;
			afio.SrcFormat = srcFormat;

			if (srcFormat.BytesPerPacket == 0) {
				// if the source format is VBR, we need to get the maximum packet size
				// use kAudioFilePropertyPacketSizeUpperBound which returns the theoretical maximum packet size
				// in the file (without actually scanning the whole file to find the largest packet,
				// as may happen with kAudioFilePropertyMaximumPacketSize)
				afio.SrcSizePerPacket = sourceFile.PacketSizeUpperBound;

				// how many packets can we read for our buffer size?
				afio.NumPacketsPerRead = afio.SrcBufferSize / afio.SrcSizePerPacket;

				// allocate memory for the PacketDescription structures describing the layout of each packet
				afio.PacketDescriptions = new AudioStreamPacketDescription [afio.NumPacketsPerRead];
			} else {
				// CBR source format
				afio.SrcSizePerPacket = srcFormat.BytesPerPacket;
				afio.NumPacketsPerRead = afio.SrcBufferSize / afio.SrcSizePerPacket;
				// allocate memory for the PacketDescription structures describing the layout of each packet
				afio.PacketDescriptions = new AudioStreamPacketDescription [afio.NumPacketsPerRead];
			}

			// set up output buffers
			int outputSizePerPacket = dstFormat.BytesPerPacket; // this will be non-zero if the format is CBR
			const int theOutputBufSize = 32768;
			var outputBuffer = Marshal.AllocHGlobal (theOutputBufSize);
			AudioStreamPacketDescription[] outputPacketDescriptions = null;

			if (outputSizePerPacket == 0) {
				// if the destination format is VBR, we need to get max size per packet from the converter
				outputSizePerPacket = (int)converter.MaximumOutputPacketSize;

			}
			// allocate memory for the PacketDescription structures describing the layout of each packet
			outputPacketDescriptions = new AudioStreamPacketDescription [theOutputBufSize / outputSizePerPacket];
			int numOutputPackets = theOutputBufSize / outputSizePerPacket;

			// if the destination format has a cookie, get it and set it on the output file
			WriteCookie (converter, destinationFile);

			// write destination channel layout
			if (srcFormat.ChannelsPerFrame > 2) {
				WriteDestinationChannelLayout (converter, sourceFile, destinationFile);
			}

			long totalOutputFrames = 0; // used for debugging
			long outputFilePos = 0;
			AudioBuffers fillBufList = new AudioBuffers (1);
			bool error = false;

			// loop to convert data
			while (true) {
				// set up output buffer list
				fillBufList [0] = new AudioBuffer () {
					NumberChannels = dstFormat.ChannelsPerFrame,
					DataByteSize = theOutputBufSize,
					Data = outputBuffer
				};

				// convert data
				int ioOutputDataPackets = numOutputPackets;
				var fe = converter.FillComplexBuffer (ref ioOutputDataPackets, fillBufList, outputPacketDescriptions);
				// if interrupted in the process of the conversion call, we must handle the error appropriately
				if (fe != AudioConverterError.None) {
					error = true;
					break;
				}

				if (ioOutputDataPackets == 0) {
					// this is the EOF condition
					break;
				}

				// write to output file
				var inNumBytes = fillBufList [0].DataByteSize;

				var we = destinationFile.WritePackets (false, inNumBytes, outputPacketDescriptions, outputFilePos, ref ioOutputDataPackets, outputBuffer);
				if (we != 0) {
					error = true;
					break;
				}

				// advance output file packet position
				outputFilePos += ioOutputDataPackets;

				if (dstFormat.FramesPerPacket != 0) { 
					// the format has constant frames per packet
					totalOutputFrames += (ioOutputDataPackets * dstFormat.FramesPerPacket);
				} else {
					// variable frames per packet require doing this for each packet (adding up the number of sample frames of data in each packet)
					for (var i = 0; i < ioOutputDataPackets; ++i)
						totalOutputFrames += outputPacketDescriptions [i].VariableFramesInPacket;
				}

			}

			Marshal.FreeHGlobal (outputBuffer);

			if (!error) {
				// write out any of the leading and trailing frames for compressed formats only
				if (dstFormat.BitsPerChannel == 0) {
					// our output frame count should jive with
					WritePacketTableInfo (converter, destinationFile);
				}

				// write the cookie again - sometimes codecs will update cookies at the end of a conversion
				WriteCookie (converter, destinationFile);
			}

			converter.Dispose ();
			destinationFile.Dispose ();
			sourceFile.Dispose ();

			return true;
		}