/// <summary>
/// Reports any non-Ok audio queue status via ReportError, then passes the
/// status back unchanged so the call can be chained inline.
/// </summary>
/// <param name="status">The status code returned by an audio queue call.</param>
/// <returns>The same <paramref name="status"/> that was passed in.</returns>
public static AudioQueueStatus CheckError(AudioQueueStatus status)
{
    if (status == AudioQueueStatus.Ok)
    {
        return status;
    }

    ReportError(status);
    return status;
}
/// <summary>
/// App-delegate launch hook: builds a single dark-gray view controller inside a
/// navigation controller as the root UI, then exercises OutputAudioQueue buffer
/// allocation in a loop (appears to be debugging/experimental code).
/// </summary>
/// <param name="application">The launching UIApplication instance (unused here).</param>
/// <param name="launchOptions">Launch options dictionary (unused here).</param>
/// <returns>Always true, signaling a successful launch.</returns>
public override bool FinishedLaunching(UIApplication application, NSDictionary launchOptions)
{
    // create a new window instance based on the screen size
    Window = new UIWindow(UIScreen.MainScreen.Bounds);
    UIViewController controller = new UIViewController();
    controller.View.BackgroundColor = UIColor.DarkGray;
    controller.Title = "Xamarin iOS Debugger";
    UINavigationController navController = new UINavigationController(controller);
    Window.RootViewController = navController;
    // make the window visible
    Window.MakeKeyAndVisible();
    //var documents = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
    //var filename = Path.Combine(documents, "Write.txt");
    //File.WriteAllText(filename, "Write this text into a file!");

    // Describe a compressed HE-AAC mono stream at 16 kHz. Per-packet/per-frame
    // byte counts are 0 because packet sizes vary for compressed formats.
    OutputAudioQueue audioQueue = new OutputAudioQueue(new AudioStreamBasicDescription()
    {
        Format = AudioFormatType.MPEG4AAC_HE,
        BytesPerPacket = 0,
        BitsPerChannel = 0,
        Reserved = 0,
        FormatFlags = 0,
        BytesPerFrame = 0, //Set this field to 0 for compressed formats.
        SampleRate = 16000,
        ChannelsPerFrame = 1,
        FramesPerPacket = 1024 //for AAC.
    });

    const int BufferCountMax = 1000;
    const int AudioBufferSize = 1024 * 8;

    // Allocate up to 1000 8 KiB queue buffers, writing 512 zero bytes into each.
    // NOTE(review): ipBuffer points at the AudioQueueBuffer struct itself, not at
    // its AudioData region — Marshal.Copy here looks like it overwrites the buffer
    // header rather than the audio payload; confirm the intended target.
    // NOTE(review): the allocated buffers are never enqueued or freed (the Enqueue
    // calls are commented out), so this loop leaks queue buffers — presumably
    // intentional scaffolding for debugging; verify before shipping.
    for (int i = 0; i < BufferCountMax; i++)
    {
        AudioQueueStatus aqs = audioQueue.AllocateBufferWithPacketDescriptors(
            AudioBufferSize,
            1,
            out IntPtr ipBuffer
            );

        if (aqs == AudioQueueStatus.Ok)
        {
            //_queueAudioOutputBuffers.Enqueue(ipBuffer);
            //_qFreeAudioOutputBuffers.Enqueue(ipBuffer);
            byte[] abData = new byte[512];
            Marshal.Copy(abData, 0, ipBuffer, abData.Length);
        }
        else
        {
            // Allocation failed; log the status and keep trying the next slot.
            Debug.WriteLine("AudioQueueStatus: " + aqs);
        }
    }

    return(true);
}
/// <summary>
/// Restarts the underlying audio queue. If the hardware is currently in use
/// ('hwiu'), schedules a retry on the sound-effect worker thread instead of
/// failing outright.
/// </summary>
void Restart()
{
    // FourCC 'hwiu' ("hardware in use"): 0x68776975 == 1752656245. Named here
    // instead of the raw decimal literal so the intent is self-evident.
    const AudioQueueStatus HardwareInUse = (AudioQueueStatus)0x68776975;

    Debug.WriteLine("InternalSoundEffectInstance.Restart()");

    AudioQueueStatus status = queue.Start();
    Debug.WriteLine("queue status == " + status.ToString());

    if (status == HardwareInUse)
    {
        Debug.WriteLine("(hardware in use)");
        // Another client owns the audio hardware right now; queue a retry.
        SoundEffectThread.Enqueue(new SoundEffectInstance.WorkItem(this, DoRetryRestart));
    }
}
/// <summary>
/// Records the latest audio queue status (if one was supplied) and, when the
/// stored status is Ok, propagates the given player state.
/// </summary>
/// <param name="audioQueueStatus">The most recent queue status, or null if none is available.</param>
/// <param name="audioPlayerState">The player state to apply when the queue status is Ok.</param>
private void CheckAudioQueueStatus(AudioQueueStatus? audioQueueStatus, AudioPlayerState audioPlayerState)
{
    // Nothing to do without a concrete status value.
    if (!audioQueueStatus.HasValue)
    {
        return;
    }

    AudioQueueStatus latest = audioQueueStatus.Value;
    if (_audioQueueStatus != latest)
    {
        _audioQueueStatus = latest;
    }

    if (_audioQueueStatus == AudioQueueStatus.Ok)
    {
        SetAudioPlayerStatus(audioPlayerState);
    }
}
// THIS HAPPENS ON ANOTHER THREAD
/// <summary>
/// Fills one queue buffer from the source file and enqueues it for playback.
/// Handles looping (rewind to packet 0 when the file is exhausted) and stops
/// the queue when no audio remains and we are not priming.
/// </summary>
/// <param name="buffer">The audio queue buffer to fill and enqueue.</param>
/// <param name="priming">True while pre-filling buffers before the queue starts.</param>
/// <returns>True if more audio is available; false on error or end of audio.</returns>
unsafe bool HandleOutputBuffer(AudioQueueBuffer *buffer, bool priming)
{
    uint numBytesReadFromFile;
    uint numPackets;

    if (!ReadFileIntoBuffer(buffer, out numBytesReadFromFile, out numPackets))
    {
        return(false); // ERROR
    }

    if (loop && numPackets == 0)
    {
        currentPacket = 0; // Restart from beginning
        if (!ReadFileIntoBuffer(buffer, out numBytesReadFromFile, out numPackets))
        {
            return(false); // ERROR
        }
    }

    if (numPackets > 0) // have we received data?
    {
        buffer->AudioDataByteSize = numBytesReadFromFile;
        // Packet descriptions are only passed for VBR/compressed formats
        // (packetDescriptionArray is IntPtr.Zero for CBR audio).
        AudioQueueStatus status = AudioQueueEnqueueBuffer(queue.Handle,
                                                          new IntPtr(buffer),
                                                          (packetDescriptionArray != IntPtr.Zero ? numPackets : 0),
                                                          packetDescriptionArray);
        // FIX: the enqueue result was previously assigned but never checked.
        // A failed enqueue means this buffer never reached the queue, so
        // report it as an error instead of silently claiming success.
        if (status != AudioQueueStatus.Ok)
        {
            Debug.WriteLine("AudioQueueEnqueueBuffer failed: " + status);
            return(false); // ERROR
        }
        currentPacket += numPackets;
    }
    else
    {
        if (!priming) // Stop the queue (if priming - queue isn't running anyway)
        {
            queue.Stop(false);
        }
        return(false); // No audio remains
    }
    return(true); // More audio available
}
/// <summary>
/// Creates an exception describing the given audio queue status code, using
/// Lookup to resolve a human-readable message for the base Exception.
/// </summary>
/// <param name="k">The failing audio queue status code.</param>
internal AudioQueueException(AudioQueueStatus k) : base(Lookup((int)k)) => ErrorCode = k;