Code example #1
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();

            UIImage greenImage = new UIImage("green_button.png").StretchableImage(12, 0);
            UIImage redImage   = new UIImage("red_button.png").StretchableImage(12, 0);

            startButton.SetBackgroundImage(greenImage, UIControlState.Normal);
            startButton.SetBackgroundImage(redImage, UIControlState.Disabled);

            // default output format
            // sample rate of 0 indicates source file sample rate
            outputFormat = AudioFormatType.AppleLossless;
            sampleRate   = 0;

            // can we encode to AAC?
            if (IsAACHardwareEncoderAvailable())
            {
                outputFormatSelector.SetEnabled(true, 0);
            }
            else
            {
                // even though not enabled in IB, this segment will still be enabled
                // if not specifically turned off here which we'll assume is a bug
                outputFormatSelector.SetEnabled(false, 0);
            }

            sourceURL = CFUrl.FromFile("sourcePCM.aif");
            var paths = NSSearchPath.GetDirectories(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomain.User);

            destinationFilePath = paths[0] + "/output.caf";
            destinationURL      = NSUrl.FromFilename(destinationFilePath);

            UpdateFormatInfo(fileInfo, sourceURL);
        }
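A hedged companion sketch: the fields prepared above (sourceURL, destinationURL, outputFormat, sampleRate) line up with the DoConvertFile helper shown further down this page. The handler name and the thread-pool hop below are assumptions, not part of the original sample.

        // Hypothetical handler wiring the ViewDidLoad fields to DoConvertFile
        // (shown later in this listing); DoConvertFile asserts that it is not
        // running on the main thread, hence the thread-pool hop.
        void OnConvertTapped()
        {
            System.Threading.ThreadPool.QueueUserWorkItem(_ =>
            {
                bool ok = DoConvertFile(sourceURL, destinationURL, outputFormat, sampleRate);
                InvokeOnMainThread(() => startButton.Enabled = ok);
            });
        }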
Code example #2
        public static CFHTTPMessage CreateRequest(Uri uri, string method, Version version)
        {
            if (uri == null)
            {
                throw new ArgumentNullException("uri");
            }

            CFUrl    urlRef    = null;
            NSString methodRef = null;

            var escaped = Uri.EscapeUriString(uri.ToString());

            try {
                urlRef = CFUrl.FromUrlString(escaped, null);
                if (urlRef == null)
                {
                    throw new ArgumentException("Invalid URL.");
                }
                methodRef = new NSString(method);

                return(CreateRequest(urlRef, methodRef, version));
            } finally {
                if (urlRef != null)
                {
                    urlRef.Dispose();
                }
                if (methodRef != null)
                {
                    methodRef.Dispose();
                }
            }
        }
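A minimal usage sketch for the wrapper above, relying only on the signature it exposes; the URL is illustrative.

            // Usage sketch: build a GET request for an illustrative URL.
            var uri     = new Uri("http://example.com/index.html");
            var request = CFHTTPMessage.CreateRequest(uri, "GET", new Version(1, 1));
            // 'request' now wraps the native CFHTTPMessageRef and can be handed
            // on to the rest of the CFNetwork-based HTTP stack.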
Code example #3
        public ExtAudioBufferPlayer(CFUrl url)
        {
            _url = url;

            prepareExtAudioFile();
            prepareAudioUnit();
        }
Code example #4
File: ExtAudioFile.cs  Project: ye-man/xamarin-macios
        public static ExtAudioFile CreateWithUrl(CFUrl url,
                                                 AudioFileType fileType,
                                                 AudioStreamBasicDescription inStreamDesc,
                                                 //AudioChannelLayout channelLayout,
                                                 AudioFileFlags flag)
        {
            if (url == null)
            {
                throw new ArgumentNullException("url");
            }

            ExtAudioFileError err;
            var audioFile = CreateWithUrl(url.Handle, fileType, inStreamDesc, flag, out err);

            if (err != ExtAudioFileError.OK)             // if (err != 0)  <- to keep old implementation
            {
                throw new ArgumentException(String.Format("Error code:{0}", err));
            }
            if (audioFile == null)             // if (ptr == IntPtr.Zero)  <- to keep old implementation
            {
                throw new InvalidOperationException("Can not get object instance");
            }

            return(audioFile);
        }
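For context, a hedged usage sketch of the factory above: it writes a CAF file whose stream description is built with AudioStreamBasicDescription.CreateLinearPCM (used elsewhere in this listing). The output path is illustrative.

            // Usage sketch: 16-bit stereo PCM at 44.1 kHz to an illustrative path.
            var pcm = AudioStreamBasicDescription.CreateLinearPCM(44100, 2, 16);
            var url = CFUrl.FromFile("/tmp/output.caf");
            using (var file = ExtAudioFile.CreateWithUrl(url, AudioFileType.CAF, pcm, AudioFileFlags.EraseFlags))
            {
                file.ClientDataFormat = pcm;
                // write AudioBuffers via file.Write(...) / file.WriteAsync(...)
            }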
Code example #5
		public ExtAudioBufferPlayer (CFUrl url)
		{
			_url = url;

			prepareExtAudioFile ();
			prepareAudioUnit ();
		}
Code example #6
File: ExtAudioFile.cs  Project: cwensley/monomac
        public static ExtAudioFile CreateWithUrl(CFUrl url,
                                                 AudioFileType fileType,
                                                 AudioStreamBasicDescription inStreamDesc,
                                                 //AudioChannelLayout channelLayout,
                                                 AudioFileFlags flag)
        {
            if (url == null)
            {
                throw new ArgumentNullException("url");
            }

            int    err;
            IntPtr ptr = new IntPtr();

            unsafe {
                err = ExtAudioFileCreateWithUrl(url.Handle, fileType, ref inStreamDesc, IntPtr.Zero, (uint)flag,
                                                (IntPtr)(&ptr));
            }
            if (err != 0)
            {
                throw new ArgumentException(String.Format("Error code:{0}", err));
            }
            if (ptr == IntPtr.Zero)
            {
                throw new InvalidOperationException("Can not get object instance");
            }

            return(new ExtAudioFile(ptr));
        }
Code example #7
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();

            var url = CFUrl.FromFile("loop_stereo.aif");

            _player = new ExtAudioBufferPlayer(url);

            // setting audio session
            _slider.ValueChanged += new EventHandler(_slider_ValueChanged);

            _slider.MaxValue = _player.TotalFrames;

            _isTimerAvailable = true;
            _timer            = NSTimer.CreateRepeatingTimer(TimeSpan.FromMilliseconds(100),
                                                             delegate
            {
                if (_isTimerAvailable)
                {
                    long pos               = _player.CurrentPosition;
                    _slider.Value          = pos;
                    _signalLevelLabel.Text = _player.SignalLevel.ToString("0.00E0");
                }
            }
                                                             );

            NSRunLoop.Current.AddTimer(_timer, NSRunLoopMode.Default);
        }
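One point worth calling out: NSTimer.CreateRepeatingTimer only creates the timer and the explicit AddTimer call schedules it, so it keeps firing until invalidated. A matching teardown is sketched below as a hypothetical ViewWillDisappear override; it is not part of the original sample.

        // Hypothetical teardown matching the timer set up in ViewDidLoad above.
        public override void ViewWillDisappear(bool animated)
        {
            base.ViewWillDisappear(animated);

            _isTimerAvailable = false;
            _timer.Invalidate();
        }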
Code example #8
File: OpenALSupport.cs  Project: SongArc/MonoGame
        public static ExtAudioFile GetExtAudioFile(NSUrl url, out AudioStreamBasicDescription audioDescription)
        {
            // Note: we cannot pass an NSUrl where a CFUrl is expected, hence the next call is commented out
            //ExtAudioFile ext = ExtAudioFile.OpenUrl(url);

            // Basic Descriptions
            AudioStreamBasicDescription fileFormat;
            AudioStreamBasicDescription outputFormat;

            // So now we create a CFUrl
            CFUrl curl = CFUrl.FromFile(url.Path);

            // Open the file
            ExtAudioFile ext = ExtAudioFile.OpenUrl(curl);

            // Get the audio format
            fileFormat = ext.FileDataFormat;

            // We don't know how to handle sounds with more than 2 channels (i.e. anything beyond stereo).
            // Remember that OpenAL sound effects must be mono to be spatialized anyway.
            if (fileFormat.ChannelsPerFrame > 2)
            {
#if DEBUG
                Console.WriteLine("Unsupported Format: Channel count [0] is greater than stereo.", fileFormat.ChannelsPerFrame);
#endif
                audioDescription = new AudioStreamBasicDescription();
                return(null);
            }

            // The output format must be linear PCM because that's the only type OpenAL knows how to deal with.
            // Set the client format to 16 bit signed integer (native-endian) data because that is the most
            // optimal format on iPhone/iPod Touch hardware.
            // Maintain the channel count and sample rate of the original source format.
            outputFormat                  = new AudioStreamBasicDescription();  // Create our output format description to be converted to
            outputFormat.SampleRate       = fileFormat.SampleRate;              // Preserve the original sample rate
            outputFormat.ChannelsPerFrame = fileFormat.ChannelsPerFrame;        // Preserve the original number of channels
            outputFormat.Format           = AudioFormatType.LinearPCM;          // We want Linear PCM

            // IsBigEndian is causing some problems with distorted sounds on MacOSX
//			outputFormat.FormatFlags = AudioFormatFlags.IsBigEndian
//							| AudioFormatFlags.IsPacked
//							| AudioFormatFlags.IsSignedInteger;

            outputFormat.FormatFlags = AudioFormatFlags.IsPacked
                                       | AudioFormatFlags.IsSignedInteger;
            outputFormat.FramesPerPacket = 1;                                 // We know for linear PCM, the definition is 1 frame per packet
            outputFormat.BitsPerChannel  = 16;                                // We know we want 16-bit
            outputFormat.BytesPerPacket  = 2 * outputFormat.ChannelsPerFrame; // We know we are using 16-bit, so 2-bytes per channel per frame
            outputFormat.BytesPerFrame   = 2 * outputFormat.ChannelsPerFrame; // For PCM, since 1 frame is 1 packet, it is the same as mBytesPerPacket

            // Set the desired client (output) data format
            ext.ClientDataFormat = outputFormat;

            // Copy the output format to the audio description that was passed in so the
            // info will be returned to the user.
            audioDescription = outputFormat;

            return(ext);
        }
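A short usage sketch for the helper above; the class name OpenALSupport is inferred from the file name and the bundled resource name is illustrative.

            // Usage sketch: open a bundled effect and size a PCM read buffer
            // from the converted (client) format returned via the out parameter.
            AudioStreamBasicDescription desc;
            var soundUrl = NSUrl.FromFilename(NSBundle.MainBundle.PathForResource("shot", "wav"));
            var ext = OpenALSupport.GetExtAudioFile(soundUrl, out desc);
            if (ext != null)
            {
                var byteCount = (int)(ext.FileLengthFrames * desc.BytesPerFrame);
                // allocate byteCount bytes and fill them with ext.Read(...)
                ext.Dispose();
            }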
Code example #9
        public void ClientDataFormat()
        {
            var path = NSBundle.MainBundle.PathForResource("1", "caf", "AudioToolbox");

            using (var file = ExtAudioFile.OpenUrl(CFUrl.FromFile(path))) {
                var fmt = file.ClientDataFormat;
            }
        }
Code example #10
        public ExtAudioFilePlayer(CFUrl url)
        {
            _sampleRate = 44100;
            _url = url;

            prepareExtAudioFile();
            prepareAudioUnit();
        }
Code example #11
        public void RetainCountFromFile()
        {
            var path = typeof(int).Assembly.Location;

            using (var url = CFUrl.FromFile(path)) {
                Assert.That(TestRuntime.CFGetRetainCount(url.Handle), Is.EqualTo((nint)1), "RetainCount");
            }
        }
Code example #12
        public void ClientDataFormat()
        {
            var path = Path.GetFullPath(Path.Combine("AudioToolbox", "1.caf"));

            using (var file = ExtAudioFile.OpenUrl(CFUrl.FromFile(path))) {
                var fmt = file.ClientDataFormat;
            }
        }
Code example #13
        public ExtAudioFilePlayer(CFUrl url)
        {
            _sampleRate = 44100;
            _url        = url;

            prepareExtAudioFile();
            prepareAudioUnit();
        }
Code example #14
        protected void LoadAudioFile(StreamInfoProvider info)
        {
            // get the path to the file
            string path;

            if (info.IsInternal)
            {
                path = NSBundle.MainBundle.PathForSoundResource(info.Uri);
            }
            else
            {
                // file path is the Uri for user sources
                path = info.Uri;
            }

            using (var url = CFUrl.FromFile(path))
            {
                using (var file = ExtAudioFile.OpenUrl(url))
                {
                    var clientFormat = file.FileDataFormat;
                    clientFormat.FormatFlags       = AudioStreamBasicDescription.AudioFormatFlagsNativeFloat;
                    clientFormat.ChannelsPerFrame  = 1;
                    clientFormat.FramesPerPacket   = 1;
                    clientFormat.BitsPerChannel    = 8 * sizeof(float);
                    clientFormat.BytesPerPacket    =
                        clientFormat.BytesPerFrame = clientFormat.ChannelsPerFrame * sizeof(float);

                    file.ClientDataFormat = clientFormat;

                    double rateRatio = Metronome.SampleRate / clientFormat.SampleRate;

                    var numFrames = file.FileLengthFrames;
                    numFrames = (uint)(numFrames * rateRatio);

                    TotalFrames = numFrames;

                    UInt32 samples  = (uint)(numFrames * clientFormat.ChannelsPerFrame);
                    var    dataSize = (int)(sizeof(uint) * samples);
                    Data = Marshal.AllocHGlobal(dataSize);

                    // set up a AudioBufferList to read data into
                    var bufList = new AudioBuffers(1);
                    bufList[0] = new AudioBuffer
                    {
                        NumberChannels = 1,
                        Data           = Data,
                        DataByteSize   = dataSize
                    };

                    ExtAudioFileError error;
                    file.Read((uint)numFrames, bufList, out error);
                    if (error != ExtAudioFileError.OK)
                    {
                        throw new ApplicationException();
                    }
                }
            }
        }
Code example #15
File: ExtAudioFile.cs  Project: ye-man/xamarin-macios
        public static ExtAudioFile OpenUrl(CFUrl url, out ExtAudioFileError error)
        {
            if (url == null)
            {
                throw new ArgumentNullException("url");
            }

            return(OpenUrl(url.Handle, out error));
        }
Code example #16
File: ExtAudioFile.cs  Project: ye-man/xamarin-macios
        public static ExtAudioFile CreateWithUrl(CFUrl url, AudioFileType fileType, AudioStreamBasicDescription inStreamDesc, AudioFileFlags flag, out ExtAudioFileError error)
        {
            if (url == null)
            {
                throw new ArgumentNullException("url");
            }

            return(CreateWithUrl(url.Handle, fileType, inStreamDesc, flag, out error));
        }
Code example #17
        public static ExtAudioFile? OpenUrl(CFUrl url, out ExtAudioFileError error)
        {
            if (url is null)
            {
                ObjCRuntime.ThrowHelper.ThrowArgumentNullException(nameof(url));
            }

            return(OpenUrl(url.Handle, out error));
        }
Code example #18
        public static ExtAudioFile? CreateWithUrl(CFUrl url, AudioFileType fileType, AudioStreamBasicDescription inStreamDesc, AudioFileFlags flag, out ExtAudioFileError error)
        {
            if (url is null)
            {
                ObjCRuntime.ThrowHelper.ThrowArgumentNullException(nameof(url));
            }

            return(CreateWithUrl(url.Handle, fileType, inStreamDesc, flag, out error));
        }
Code example #19
        public void OpenCFUrlTest()
        {
            var path = NSBundle.MainBundle.PathForResource("1", "caf", "AudioToolbox");
            ExtAudioFileError err;

            using (var file = ExtAudioFile.OpenUrl(CFUrl.FromFile(path), out err)) {
                Assert.IsTrue(err == ExtAudioFileError.OK, "OpenCFUrlTest");
                Assert.IsNotNull(file.AudioFile, "OpenCFUrlTest");
            }
        }
Code example #20
        public ExtAudioBufferPlayer(CFUrl url)
        {
            _sampleRate = 44100;
            _url = url;

            _currentFrame = 0;

            prepareExtAudioFile();
            prepareAudioUnit();
        }
Code example #21
        public void OpenCFUrlTest()
        {
            var path = Path.GetFullPath(Path.Combine("AudioToolbox", "1.caf"));
            ExtAudioFileError err;

            using (var file = ExtAudioFile.OpenUrl(CFUrl.FromFile(path), out err)) {
                Assert.IsTrue(err == ExtAudioFileError.OK, "OpenCFUrlTest");
                Assert.IsNotNull(file.AudioFile, "OpenCFUrlTest");
            }
        }
Code example #22
        public SamplerInstrumentData(CFUrl fileUrl, InstrumentType instrumentType)
        {
            if (fileUrl == null)
            {
                throw new ArgumentNullException("fileUrl");
            }

            this.FileUrl        = fileUrl;
            this.InstrumentType = instrumentType;
        }
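Example #38 further down this page shows how instances of this type are consumed; condensed here into a usage sketch in which the sound-font name and the samplerUnit instance are assumptions.

            // Condensed from example #38 below: load an SF2 preset into a
            // sampler AudioUnit (samplerUnit and the .sf2 name are assumptions).
            var bankUrl = CFUrl.FromFile(NSBundle.MainBundle.PathForResource("GeneralUser", "sf2"));
            var preset  = new SamplerInstrumentData(bankUrl, InstrumentType.SF2Preset)
            {
                PresetID = 0,
                BankMSB  = SamplerInstrumentData.DefaultMelodicBankMSB,
                BankLSB  = SamplerInstrumentData.DefaultBankLSB
            };
            samplerUnit.LoadInstrument(preset, AudioUnitScopeType.Global, 0);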
Code example #23
File: AudioFile.cs  Project: zezba9000/maccore
        public static AudioFile Open(string url, AudioFilePermission permissions, AudioFileType fileTypeHint)
        {
            if (url == null)
            {
                throw new ArgumentNullException("url");
            }

            using (CFUrl cfurl = CFUrl.FromUrlString(url, null))
                return(Open(cfurl, permissions, fileTypeHint));
        }
Code example #24
File: AudioFile.cs  Project: zezba9000/maccore
        public static AudioFile Create(string url, AudioFileType fileType, AudioStreamBasicDescription format, AudioFileFlags inFlags)
        {
            if (url == null)
            {
                throw new ArgumentNullException("url");
            }

            using (CFUrl cfurl = CFUrl.FromUrlString(url, null))
                return(Create(cfurl, fileType, format, inFlags));
        }
Code example #25
        public ExtAudioBufferPlayer(CFUrl url)
        {
            _sampleRate = 44100;
            _url        = url;

            _currentFrame = 0;

            prepareExtAudioFile();
            prepareAudioUnit();
        }
Code example #26
File: UrlTest.cs  Project: modulexcite/xamarin-macios
        public void ToString_()
        {
            using (CFUrl url = CFUrl.FromFile("/")) {
                string value = TestRuntime.CheckSystemAndSDKVersion(7, 0) ? "file:///" : "file://localhost/";
                Assert.That(url.ToString(), Is.EqualTo(value), "FromFile");
            }
            using (CFUrl url = CFUrl.FromUrlString("/", null)) {
                Assert.That(url.ToString(), Is.EqualTo("/"), "FromUrlString");
            }
        }
Code example #27
        public void WrapAudioFileID()
        {
            var path = Path.GetFullPath(Path.Combine("AudioToolbox", "1.caf"));

            using (var file = ExtAudioFile.OpenUrl(CFUrl.FromFile(path))) {
                Assert.IsNotNull(file.AudioFile, "#1");

                ExtAudioFile f2;
                Assert.AreEqual(ExtAudioFileError.OK, ExtAudioFile.WrapAudioFileID(file.AudioFile.Value, true, out f2));
            }
        }
Code example #28
        public void ClientDataFormat()
        {
#if MONOMAC
            var path = NSBundle.MainBundle.PathForResource("1", "caf", "AudioToolbox");
#else
            var path = Path.GetFullPath(Path.Combine("AudioToolbox", "1.caf"));
#endif
            using (var file = ExtAudioFile.OpenUrl(CFUrl.FromFile(path))) {
                var fmt = file.ClientDataFormat;
            }
        }
Code example #29
        public void WrapAudioFileID()
        {
            var path = NSBundle.MainBundle.PathForResource("1", "caf", "AudioToolbox");

            using (var file = ExtAudioFile.OpenUrl(CFUrl.FromFile(path))) {
                Assert.IsNotNull(file.AudioFile, "#1");

                ExtAudioFile f2;
                Assert.AreEqual(ExtAudioFileError.OK, ExtAudioFile.WrapAudioFileID(file.AudioFile.Value, true, out f2));
            }
        }
Code example #30
        public static CFHTTPMessage CreateRequest(CFUrl url, NSString method, Version version)
        {
            var handle = CFHTTPMessageCreateRequest(
                IntPtr.Zero, method.Handle, url.Handle, GetVersion(version));

            if (handle == IntPtr.Zero)
            {
                return(null);
            }

            return(new CFHTTPMessage(handle));
        }
Code example #31
        static PlatformHardDriveMediaType GetMediaType(string path)
        {
            IntPtr diskHandle    = IntPtr.Zero;
            IntPtr sessionHandle = IntPtr.Zero;
            IntPtr charDictRef   = IntPtr.Zero;
            uint   service       = 0;

            try {
                sessionHandle = DASessionCreate(IntPtr.Zero);

                // This seems to only work for '/'
                var url = CFUrl.FromFile(path);
                diskHandle = DADiskCreateFromVolumePath(IntPtr.Zero, sessionHandle, url.Handle);
                if (diskHandle == IntPtr.Zero)
                {
                    return(PlatformHardDriveMediaType.Unknown);
                }

                service = DADiskCopyIOMedia(diskHandle);

                var cfStr = new CFString("Device Characteristics");
                charDictRef = IORegistryEntrySearchCFProperty(service, "IOService", cfStr.Handle, IntPtr.Zero, 3);

                // CFDictionary owns the object pointed to by resultHandle, so no need to release it
                var resultHandle = CFDictionaryGetValue(charDictRef, new CFString("Medium Type").Handle);
                if (resultHandle == IntPtr.Zero)
                {
                    return(PlatformHardDriveMediaType.Unknown);
                }
                var resultString = (string)NSString.FromHandle(resultHandle);

                if (resultString == "Solid State")
                {
                    return(PlatformHardDriveMediaType.SolidState);
                }
                else if (resultString == "Rotational")
                {
                    return(PlatformHardDriveMediaType.Rotational);
                }
                else
                {
                    return(PlatformHardDriveMediaType.Unknown);
                }
            } finally {
                if (service != 0)
                {
                    IOObjectRelease(service);
                }
                CFRelease(sessionHandle);
                CFRelease(charDictRef);
            }
        }
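A one-line usage sketch; as the in-code comment notes, the only volume path known to work reliably here is the root volume.

            // Usage sketch: query the boot volume and branch on the result.
            if (GetMediaType("/") == PlatformHardDriveMediaType.SolidState)
            {
                // e.g. skip rotational-disk work-arounds on SSDs
            }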
Code example #32
        public ExtAudioBufferPlayer(CFUrl url)
        {
            _sampleRate = 44100;
            _url = url;

            _isDone = false;
            _isReverse = false;
            _currentFrame = 0;
            _isLoop = false;

            prepareExtAudioFile();
            prepareAudioUnit();
        }
Code example #33
        public ExtAudioBufferPlayer(CFUrl url)
        {
            _sampleRate = 44100;
            _url        = url;

            _isDone       = false;
            _isReverse    = false;
            _currentFrame = 0;
            _isLoop       = false;

            prepareExtAudioFile();
            prepareAudioUnit();
        }
Code example #34
        public void OpenCFUrlTest()
        {
#if MONOMAC
            var path = NSBundle.MainBundle.PathForResource("1", "caf", "AudioToolbox");
#else
            var path = Path.GetFullPath(Path.Combine("AudioToolbox", "1.caf"));
#endif
            ExtAudioFileError err;
            using (var file = ExtAudioFile.OpenUrl(CFUrl.FromFile(path), out err)) {
                Assert.IsTrue(err == ExtAudioFileError.OK, "OpenCFUrlTest");
                Assert.IsNotNull(file.AudioFile, "OpenCFUrlTest");
            }
        }
Code example #35
		public ExtAudioBufferPlayer(CFUrl url)
		{
			this.url = url;

			AudioSession.Initialize ();
			AudioSession.Category = AudioSessionCategory.PlayAndRecord; // TODO: we need to play only here
			AudioSession.Resumed += OnAudioSessionResumed;

			PrepareAudioUnit ();
			PrepareExtAudioFile();

			audioUnit.Initialize ();
			audioUnit.Start ();
		}
Code example #36
		// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

            // setting button state
            _recordingButton.Enabled = true;
            _playBackButton.Enabled = false;

            // binding event handlers
            _recordingButton.TouchUpInside += new EventHandler(_recordingButton_TouchCancel);
            _playBackButton.TouchUpInside  += new EventHandler(_playBackButton_TouchDown);

            // getting local sound file path
            var path = Environment.GetFolderPath(Environment.SpecialFolder.Personal);
            path = System.IO.Path.Combine(path, "recording.aiff");
			_url = CFUrl.FromFile(path);
			
            // setting audio session
            AudioSession.Initialize();
            AudioSession.SetActive(true);

            
        }
Code example #37
		unsafe static void RenderAudio (CFUrl sourceUrl, CFUrl destinationUrl)
		{
			AudioStreamBasicDescription dataFormat;
			AudioQueueBuffer *buffer = null;
			long currentPacket = 0;
			int packetsToRead = 0;
			AudioStreamPacketDescription [] packetDescs = null;
			bool flushed = false;
			bool done = false;
			int bufferSize;
			
			using (var audioFile = AudioFile.Open (sourceUrl, AudioFilePermission.Read, (AudioFileType) 0)) {
				dataFormat = audioFile.StreamBasicDescription;
				
				using (var queue = new OutputAudioQueue (dataFormat, CFRunLoop.Current, CFRunLoop.CFRunLoopCommonModes)) {
					queue.OutputCompleted += (sender, e) => 
					{
						HandleOutput (audioFile, queue, buffer, ref packetsToRead, ref currentPacket, ref done, ref flushed, ref packetDescs);
					};
					
					// we need to calculate how many packets we read at a time and how big a buffer we need
					// we base this on the size of the packets in the file and an approximate duration for each buffer
					bool isVBR = dataFormat.BytesPerPacket == 0 || dataFormat.FramesPerPacket == 0;
					
					// first check to see what the max size of a packet is - if it is bigger
					// than our allocation default size, that needs to become larger
					// adjust buffer size to represent about a second of audio based on this format 
					CalculateBytesForTime (dataFormat, audioFile.MaximumPacketSize, 1.0, out bufferSize, out packetsToRead);
				
					if (isVBR) {
						packetDescs = new AudioStreamPacketDescription [packetsToRead];
					} else {
						packetDescs = null; // we don't provide packet descriptions for constant bit rate formats (like linear PCM)
					}
				
					if (audioFile.MagicCookie.Length != 0)
						queue.MagicCookie = audioFile.MagicCookie;
		
					// allocate the input read buffer
					queue.AllocateBuffer (bufferSize, out buffer);
					
					// prepare the capture format
					var captureFormat = AudioStreamBasicDescription.CreateLinearPCM (dataFormat.SampleRate, (uint) dataFormat.ChannelsPerFrame, 32);
					captureFormat.BytesPerFrame = captureFormat.BytesPerPacket = dataFormat.ChannelsPerFrame * 4;

					queue.SetOfflineRenderFormat (captureFormat, audioFile.ChannelLayout);
					
					// prepare the target format
					var dstFormat = AudioStreamBasicDescription.CreateLinearPCM (dataFormat.SampleRate, (uint) dataFormat.ChannelsPerFrame);

					using (var captureFile = ExtAudioFile.CreateWithUrl (destinationUrl, AudioFileType.CAF, dstFormat, AudioFileFlags.EraseFlags)) {
						captureFile.ClientDataFormat = captureFormat;
						
						int captureBufferSize = bufferSize / 2;
						AudioBuffers captureABL = new AudioBuffers (1);
						
						AudioQueueBuffer *captureBuffer;
						queue.AllocateBuffer (captureBufferSize, out captureBuffer);
						
						captureABL[0] = new AudioBuffer () {
							Data = captureBuffer->AudioData,
							NumberChannels = captureFormat.ChannelsPerFrame
						};

						queue.Start ();

						double ts = 0;
						queue.RenderOffline (ts, captureBuffer, 0);
						
						HandleOutput (audioFile, queue, buffer, ref packetsToRead, ref currentPacket, ref done, ref flushed, ref packetDescs);
						
						while (true) {
							int reqFrames = captureBufferSize / captureFormat.BytesPerFrame;
							
							queue.RenderOffline (ts, captureBuffer, reqFrames);

							captureABL.SetData (0, captureBuffer->AudioData, (int) captureBuffer->AudioDataByteSize);
							var writeFrames = captureABL[0].DataByteSize / captureFormat.BytesPerFrame;
							
							// Console.WriteLine ("ts: {0} AudioQueueOfflineRender: req {1} frames / {2} bytes, got {3} frames / {4} bytes", 
							//	ts, reqFrames, captureBufferSize, writeFrames, captureABL.Buffers [0].DataByteSize);
							
							captureFile.WriteAsync ((uint) writeFrames, captureABL);
							
							if (flushed)
								break;
							
							ts += writeFrames;
						}
					
						CFRunLoop.Current.RunInMode (CFRunLoop.CFDefaultRunLoopMode, 1, false);
					}
				}
			}
		}
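A hedged caller sketch for the offline render above; the source file name and destination path are illustrative and would normally come from the app bundle and the documents directory respectively.

			// Usage sketch for RenderAudio: render a bundled AIFF into a CAF file.
			var source      = CFUrl.FromFile(NSBundle.MainBundle.PathForResource("loop_stereo", "aif"));
			var destination = CFUrl.FromFile(Path.Combine(
				Environment.GetFolderPath(Environment.SpecialFolder.Personal), "rendered.caf"));
			RenderAudio(source, destination);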
Code example #38
		AudioUnitStatus LoadFromDLSOrSoundFont (CFUrl bankUrl, int presetNumber)
		{
			var instrumentData = new SamplerInstrumentData (bankUrl, InstrumentType.SF2Preset) {
				PresetID = (byte)presetNumber,
				BankMSB = SamplerInstrumentData.DefaultMelodicBankMSB,
				BankLSB = SamplerInstrumentData.DefaultBankLSB
			};

			var result = samplerUnit.LoadInstrument (instrumentData, AudioUnitScopeType.Global, 0);
			if (result != AudioUnitStatus.NoError)
				Console.WriteLine (result.ToString ());

			return result;
		}
Code example #39
File: MacProxy.cs  Project: yangjunhua/mono
		static CFArray CopyProxiesForAutoConfigurationScript (IntPtr proxyAutoConfigurationScript, CFUrl targetURL)
		{
			IntPtr err = IntPtr.Zero;
			IntPtr native = CFNetworkCopyProxiesForAutoConfigurationScript (proxyAutoConfigurationScript, targetURL.Handle, out err);
			
			if (native == IntPtr.Zero)
				return null;
			
			return new CFArray (native, true);
		}
Code example #40
File: MacProxy.cs  Project: yangjunhua/mono
		public static CFProxy[] GetProxiesForAutoConfigurationScript (IntPtr proxyAutoConfigurationScript, CFUrl targetURL)
		{
			if (proxyAutoConfigurationScript == IntPtr.Zero)
				throw new ArgumentNullException ("proxyAutoConfigurationScript");
			
			if (targetURL == null)
				throw new ArgumentNullException ("targetURL");
			
			CFArray array = CopyProxiesForAutoConfigurationScript (proxyAutoConfigurationScript, targetURL);
			
			if (array == null)
				return null;
			
			CFProxy[] proxies = new CFProxy [array.Count];
			for (int i = 0; i < proxies.Length; i++) {
				CFDictionary dict = new CFDictionary (array[i], false);
				proxies[i] = new CFProxy (dict);
			}

			array.Dispose ();
			
			return proxies;
		}
Code example #41
        public void StartRecording(CFUrl url)
        {
            /*
            //  conversion audio format (AIFF)
            AudioStreamBasicDescription outputFormat = new AudioStreamBasicDescription()
            {
                SampleRate = 44100,
                Format = AudioFormatType.LinearPCM,
                FormatFlags = AudioFormatFlags.IsBigEndian | AudioFormatFlags.LinearPCMIsSignedInteger | AudioFormatFlags.LinearPCMIsPacked,
                FramesPerPacket = 1,
                ChannelsPerFrame = 1,
                BitsPerChannel = 16,
                BytesPerPacket = 2,
                BytesPerFrame = 2,
                Reserved = 0
            };

            _extAudioFile = ExtAudioFile.CreateWithURL(url, AudioFileType.AIFF, outputFormat, AudioFileFlags.EraseFlags);                             
            _extAudioFile.ClientDataFormat = _audioUnitOutputFormat;
            _extAudioFile.Seek(0);

            _isRecording = true;
            */
        }
Code example #42
File: MacProxy.cs  Project: yangjunhua/mono
		public static CFProxy[] GetProxiesForURL (CFUrl url, CFProxySettings proxySettings)
		{
			if (url == null || url.Handle == IntPtr.Zero)
				throw new ArgumentNullException ("url");
			
			if (proxySettings == null)
				proxySettings = GetSystemProxySettings ();
			
			CFArray array = CopyProxiesForURL (url, proxySettings.Dictionary);
			
			if (array == null)
				return null;

			CFProxy[] proxies = new CFProxy [array.Count];
			for (int i = 0; i < proxies.Length; i++) {
				CFDictionary dict = new CFDictionary (array[i], false);
				proxies[i] = new CFProxy (dict);
			}

			array.Dispose ();
			
			return proxies;
		}
Code example #43
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

			UIImage greenImage = new UIImage ("green_button.png").StretchableImage (12, 0);
			UIImage redImage = new UIImage ("red_button.png").StretchableImage (12, 0);
			
			startButton.SetBackgroundImage (greenImage, UIControlState.Normal);
			startButton.SetBackgroundImage (redImage, UIControlState.Disabled);

			// default output format
			// sample rate of 0 indicates source file sample rate
			outputFormat = AudioFormatType.AppleLossless;
			sampleRate = 0;

			// can we encode to AAC?
			if (IsAACHardwareEncoderAvailable ()) {
				outputFormatSelector.SetEnabled (true, 0);
			} else {
				// even though not enabled in IB, this segment will still be enabled
				// if not specifically turned off here which we'll assume is a bug
				outputFormatSelector.SetEnabled (false, 0);
			}

			sourceURL = CFUrl.FromFile ("sourcePCM.aif");
			var paths = NSSearchPath.GetDirectories (NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomain.User);
			destinationFilePath = paths[0] +  "/output.caf";
			destinationURL = NSUrl.FromFilename (destinationFilePath);

			UpdateFormatInfo (fileInfo, sourceURL);
		}
Code example #44
		bool DoConvertFile (CFUrl sourceURL, NSUrl destinationURL, AudioFormatType outputFormat, double outputSampleRate)
		{
			AudioStreamBasicDescription dstFormat = new AudioStreamBasicDescription ();

			// in this sample we should never be on the main thread here
			Debug.Assert (!NSThread.IsMain);

			// transition thread state to State::Running before continuing
			AppDelegate.ThreadStateSetRunning ();
			
			Debug.WriteLine ("DoConvertFile");

			// get the source file
			var sourceFile = AudioFile.Open (sourceURL, AudioFilePermission.Read);
			
			// get the source data format
			var srcFormat = (AudioStreamBasicDescription)sourceFile.DataFormat;

			// setup the output file format
			dstFormat.SampleRate = (outputSampleRate == 0 ? srcFormat.SampleRate : outputSampleRate); // set sample rate
			if (outputFormat == AudioFormatType.LinearPCM) {
				// if the output format is PCM, create a 16-bit int PCM file format description as an example
				dstFormat.Format = outputFormat;
				dstFormat.ChannelsPerFrame = srcFormat.ChannelsPerFrame;
				dstFormat.BitsPerChannel = 16;
				dstFormat.BytesPerPacket = dstFormat.BytesPerFrame = 2 * dstFormat.ChannelsPerFrame;
				dstFormat.FramesPerPacket = 1;
				dstFormat.FormatFlags = AudioFormatFlags.LinearPCMIsPacked | AudioFormatFlags.LinearPCMIsSignedInteger;
			} else {
				// compressed format - need to set at least format, sample rate and channel fields for kAudioFormatProperty_FormatInfo
				dstFormat.Format = outputFormat;
				dstFormat.ChannelsPerFrame = (outputFormat == AudioFormatType.iLBC ? 1 : srcFormat.ChannelsPerFrame); // for iLBC num channels must be 1
				
				// use AudioFormat API to fill out the rest of the description
				var fie = AudioStreamBasicDescription.GetFormatInfo (ref dstFormat);
				if (fie != AudioFormatError.None) {
					Debug.Print ("Cannot create destination format {0:x}", fie);

					AppDelegate.ThreadStateSetDone ();
					return false;
				}
			}

			// create the AudioConverter
			AudioConverterError ce;
			var converter = AudioConverter.Create (srcFormat, dstFormat, out ce);
			Debug.Assert (ce == AudioConverterError.None);

			converter.InputData += EncoderDataProc;

			// if the source has a cookie, get it and set it on the Audio Converter
			ReadCookie (sourceFile, converter);

			// get the actual formats back from the Audio Converter
			srcFormat = converter.CurrentInputStreamDescription;
			dstFormat = converter.CurrentOutputStreamDescription;

			// if encoding to AAC set the bitrate to 192k which is a nice value for this demo
			// kAudioConverterEncodeBitRate is a UInt32 value containing the number of bits per second to aim for when encoding data
			if (dstFormat.Format == AudioFormatType.MPEG4AAC) {
				uint outputBitRate = 192000; // 192k

				// ignore errors as setting may be invalid depending on format specifics such as samplerate
				try {
					converter.EncodeBitRate = outputBitRate;
				} catch {
				}

				// get it back and print it out
				outputBitRate = converter.EncodeBitRate;
				Debug.Print ("AAC Encode Bitrate: {0}", outputBitRate);
			}

			// can the Audio Converter resume conversion after an interruption?
			// this property may be queried at any time after construction of the Audio Converter after setting its output format
			// there's no clear reason to prefer construction time, interruption time, or potential resumption time but we prefer
			// construction time since it means less code to execute during or after interruption time
			bool canResumeFromInterruption;
			try {
				canResumeFromInterruption = converter.CanResumeFromInterruption;
				Debug.Print ("Audio Converter {0} continue after interruption!", canResumeFromInterruption ? "CAN" : "CANNOT");
			} catch (Exception e) {
				// if the property is unimplemented (kAudioConverterErr_PropertyNotSupported, or paramErr returned in the case of PCM),
				// then the codec being used is not a hardware codec so we're not concerned about codec state
				// we are always going to be able to resume conversion after an interruption

				canResumeFromInterruption = false;
				Debug.Print ("CanResumeFromInterruption: {0}", e.Message);
			}
			
			// create the destination file 
			var destinationFile = AudioFile.Create (destinationURL, AudioFileType.CAF, dstFormat, AudioFileFlags.EraseFlags);

			// set up source buffers and data proc info struct
			afio = new AudioFileIO (32768);
			afio.SourceFile = sourceFile;
			afio.SrcFormat = srcFormat;

			if (srcFormat.BytesPerPacket == 0) {
				// if the source format is VBR, we need to get the maximum packet size
				// use kAudioFilePropertyPacketSizeUpperBound which returns the theoretical maximum packet size
				// in the file (without actually scanning the whole file to find the largest packet,
				// as may happen with kAudioFilePropertyMaximumPacketSize)
				afio.SrcSizePerPacket = sourceFile.PacketSizeUpperBound;

				// how many packets can we read for our buffer size?
				afio.NumPacketsPerRead = afio.SrcBufferSize / afio.SrcSizePerPacket;
				
				// allocate memory for the PacketDescription structures describing the layout of each packet
				afio.PacketDescriptions = new AudioStreamPacketDescription [afio.NumPacketsPerRead];
			} else {
				// CBR source format
				afio.SrcSizePerPacket = srcFormat.BytesPerPacket;
				afio.NumPacketsPerRead = afio.SrcBufferSize / afio.SrcSizePerPacket;
			}

			// set up output buffers
			int outputSizePerPacket = dstFormat.BytesPerPacket; // this will be non-zero if the format is CBR
			const int theOutputBufSize = 32768;
			var outputBuffer = Marshal.AllocHGlobal (theOutputBufSize);
			AudioStreamPacketDescription[] outputPacketDescriptions = null;

			if (outputSizePerPacket == 0) {
				// if the destination format is VBR, we need to get max size per packet from the converter
				outputSizePerPacket = (int)converter.MaximumOutputPacketSize;

				// allocate memory for the PacketDescription structures describing the layout of each packet
				outputPacketDescriptions = new AudioStreamPacketDescription [theOutputBufSize / outputSizePerPacket];
			}
			int numOutputPackets = theOutputBufSize / outputSizePerPacket;
			
			// if the destination format has a cookie, get it and set it on the output file
			WriteCookie (converter, destinationFile);
			
			// write destination channel layout
			if (srcFormat.ChannelsPerFrame > 2) {
				WriteDestinationChannelLayout (converter, sourceFile, destinationFile);
			}

			long totalOutputFrames = 0; // used for debugging
			long outputFilePos = 0;
			AudioBuffers fillBufList = new AudioBuffers (1);
			bool error = false;

			// loop to convert data
			Debug.WriteLine ("Converting...");
			while (true) {
				// set up output buffer list
				fillBufList [0] = new AudioBuffer () {
					NumberChannels = dstFormat.ChannelsPerFrame,
					DataByteSize = theOutputBufSize,
					Data = outputBuffer
				};

				// this will block if we're interrupted
				var wasInterrupted = AppDelegate.ThreadStatePausedCheck();
				
				if (wasInterrupted && !canResumeFromInterruption) {
					// this is our interruption termination condition
					// an interruption has occurred but the Audio Converter cannot continue
					Debug.WriteLine ("Cannot resume from interruption");
					error = true;
					break;
				}

				// convert data
				int ioOutputDataPackets = numOutputPackets;
				var fe = converter.FillComplexBuffer (ref ioOutputDataPackets, fillBufList, outputPacketDescriptions);
				// if interrupted in the process of the conversion call, we must handle the error appropriately
				if (fe != AudioConverterError.None) {
					Debug.Print ("FillComplexBuffer: {0}", fe);
					error = true;
					break;
				}

				if (ioOutputDataPackets == 0) {
					// this is the EOF condition
					break;
				}

				// write to output file
				var inNumBytes = fillBufList [0].DataByteSize;

				var we = destinationFile.WritePackets (false, inNumBytes, outputPacketDescriptions, outputFilePos, ref ioOutputDataPackets, outputBuffer);
				if (we != 0) {
					Debug.Print ("WritePackets: {0}", we);
					error = true;
					break;
				}

				// advance output file packet position
				outputFilePos += ioOutputDataPackets;
					
				if (dstFormat.FramesPerPacket != 0) { 
					// the format has constant frames per packet
					totalOutputFrames += (ioOutputDataPackets * dstFormat.FramesPerPacket);
				} else {
					// variable frames per packet require doing this for each packet (adding up the number of sample frames of data in each packet)
					for (var i = 0; i < ioOutputDataPackets; ++i)
						totalOutputFrames += outputPacketDescriptions [i].VariableFramesInPacket;
				}

			}

			Marshal.FreeHGlobal (outputBuffer);

			if (!error) {
				// write out any of the leading and trailing frames for compressed formats only
				if (dstFormat.BitsPerChannel == 0) {
					// our output frame count should jive with
					Debug.Print ("Total number of output frames counted: {0}", totalOutputFrames); 
					WritePacketTableInfo (converter, destinationFile);
				}
					
				// write the cookie again - sometimes codecs will update cookies at the end of a conversion
				WriteCookie (converter, destinationFile);
			}

			converter.Dispose ();
			destinationFile.Dispose ();
			sourceFile.Dispose ();

			// transition thread state to State.Done before continuing
			AppDelegate.ThreadStateSetDone ();

			return !error;
		}
Code example #45
		static void UpdateFormatInfo (UILabel label, CFUrl fileURL)
		{
			UpdateFormatInfo (label, AudioFile.Open (fileURL, AudioFilePermission.Read), fileURL.FileSystemPath);
		}
Code example #46
		public static CFHTTPMessage CreateRequest (CFUrl url, NSString method, Version version)
		{
			if (url == null)
				throw new ArgumentNullException ("url");
			if (method == null)
				throw new ArgumentNullException ("method");

			var handle = CFHTTPMessageCreateRequest (
				IntPtr.Zero, method.Handle, url.Handle, GetVersion (version));
			return new CFHTTPMessage (handle);
		}
Code example #47
File: MacProxy.cs  Project: yangjunhua/mono
		static CFArray CopyProxiesForURL (CFUrl url, CFDictionary proxySettings)
		{
			IntPtr native = CFNetworkCopyProxiesForURL (url.Handle, proxySettings != null ? proxySettings.Handle : IntPtr.Zero);
			
			if (native == IntPtr.Zero)
				return null;
			
			return new CFArray (native, true);
		}
Code example #48
        public AudioQueuePlayer(CFUrl url)
        {
            _url        = url;
            _isPlaying  = false;
            _isPrepared = false;
        }
Code example #49
		public static CFHTTPMessage CreateRequest (CFUrl url, NSString method, Version version)
		{
			var handle = CFHTTPMessageCreateRequest (
				IntPtr.Zero, method.Handle, url.Handle, GetVersion (version));
			if (handle == IntPtr.Zero)
				return null;

			return new CFHTTPMessage (handle);
		}