/// <summary>Decode and enqueue the very first audio frame, priming ALSA's buffer before playback starts.</summary>
/// <param name="decoder">Decoder producing one frame of PCM samples plus its presentation timestamp.</param>
/// <exception cref="ApplicationException">Thrown when ALSA does not yet have room for a complete frame.</exception>
void iRenderer.decodeInitial( iAudioThread decoder )
{
	int avail = handle.availableFrames;
	if( avail < samplesPerFrame )
	{
		// Was a bare `throw new ApplicationException()`: include the counts so the failure is diagnosable,
		// matching the message style used by handlePollResult. Type kept so existing catch clauses still work.
		throw new ApplicationException( $"ALSA only has room for { avail } samples, a complete frame needs { samplesPerFrame }" );
	}

	// Map exactly one frame worth of samples; memoryMap may adjust `samples` through the ref parameter.
	int samples = samplesPerFrame;
	var span = handle.memoryMap( out int offset, ref samples );
	// NOTE(review): presumably memoryMap never maps fewer samples than requested once availableFrames
	// was verified above — confirm, otherwise decodeFrame could write past the mapped region.
	TimeSpan ts = decoder.decodeFrame( span );
	handle.memoryCommit( offset, samples );

	// Remember the timestamp so it can be reported back once this frame has actually been played.
	queue.enqueue( ts );
}
/// <summary>Poll-loop callback: feeds decoded (or silent, when paused) audio frames into ALSA's mapped
/// buffer, propagates playback timestamps back to the decoder, and starts the device once the queue fills.</summary>
/// <param name="decoder">Source of encoded frames; also receives timestamp updates for played frames.</param>
/// <param name="pollHandles">Poll results for the ALSA descriptors, forwarded to handle.getPollResult.</param>
void iRenderer.handlePollResult( iAudioThread decoder, ReadOnlySpan<pollfd> pollHandles )
{
	// In mpeg4 sample = what's in the sample rate, frame is much larger unit in the container, after decoding 1 frame can be 1024 samples = 4kb data = 21.33ms of audio.
	// In ALSA, frame = what's in sample rate = 4 bytes for stereo, sample = 16 bit in out case, and what mpeg4 calls "frame" ALSA developers call "period".
	// This code integrate them two together :-(
	ePollResult pollResult = handle.getPollResult( pollHandles, pollHandlesCount );
	int encodedFrames = decoder.encodedFrames;
	if( pollResult == ePollResult.None || 0 == encodedFrames )
	{
		// Nothing to do here: either no encoded frames, or no space for decoded ones
		return;
	}

	// Convert ALSA's available sample count into whole MP4-sized frames; bail if not even one fits.
	int alsaAvailableFrames = handle.availableFrames / samplesPerFrame;
	if( alsaAvailableFrames <= 0 )
	{
		return;
	}

	// Frames that became available were consumed by the hardware; report the newest played timestamp.
	TimeSpan? playedFrame = queue.update( alsaAvailableFrames );
	if( playedFrame.HasValue )
	{
		decoder.updateTimestamp( playedFrame.Value );
	}

	if( state == eState.Paused )
	{
		// While paused, keep the device fed with silence so it doesn't underrun; timestamps are
		// tracked via enqueueSilence so position accounting stays consistent.
		int samples = alsaAvailableFrames * samplesPerFrame;
		var span = handle.memoryMap( out int offset, ref samples );
		if( 0 != samples % samplesPerFrame )
		{
			throw new ApplicationException( $"ALSA mapped { samples } samples, incomplete count of frames" );
		}
		int framesToCommit = samples / samplesPerFrame;
		for( int p = 0; p < framesToCommit; p++ )
		{
			// The `* 2` is the channel count: the span is interleaved stereo samples.
			// NOTE(review): assumes 2 channels — confirm against the format negotiated with ALSA.
			span.Slice( p * samplesPerFrame * 2, samplesPerFrame * 2 ).Fill( 0 );
			queue.enqueueSilence();
		}
		handle.memoryCommit( offset, samples );
		return;
	}

	// Normal playback: decode as many complete frames as both the decoder and ALSA can supply/accept.
	int enqueuedFrames = 0;
	while( true )
	{
		int framesToCommit = Math.Min( encodedFrames, alsaAvailableFrames );
		if( framesToCommit <= 0 )
		{
			break;
		}
		int samples = framesToCommit * samplesPerFrame;
		var span = handle.memoryMap( out int offset, ref samples );
		if( 0 != samples % samplesPerFrame )
		{
			throw new ApplicationException( $"ALSA mapped { samples } samples, incomplete count of frames" );
		}
		// memoryMap may have shrunk the mapping; recompute how many whole frames actually fit.
		// NOTE(review): if memoryMap ever returns 0 samples here, framesToCommit becomes 0 and
		// neither counter changes, so this loop would spin forever — confirm memoryMap's contract.
		framesToCommit = samples / samplesPerFrame;
		for( int p = 0; p < framesToCommit; p++ )
		{
			// Decode directly into the mapped buffer; slice start is in stereo samples (hence `* 2`).
			TimeSpan ts = decoder.decodeFrame( span.Slice( p * samplesPerFrame * 2 ) );
			queue.enqueue( ts );
		}
		handle.memoryCommit( offset, samples );
		alsaAvailableFrames -= framesToCommit;
		encodedFrames -= framesToCommit;
		enqueuedFrames += framesToCommit;
	}

	if( 0 == enqueuedFrames )
	{
		Logger.logVerbose( "Alsa player is idle: encodedFrames {0}, alsaAvailableFrames {1}, state {2}", encodedFrames, alsaAvailableFrames, handle.state );
		return;
	}

	// First time the queue fills up after preparing, actually start the hardware.
	if( state == eState.Prepared && queue.isFull )
	{
		handle.start();
		state = eState.Playing;
		Logger.logInfo( "Filled Alsa’s buffer and started audio playback; state {0}", handle.state );
	}
	/* else if( state == eState.Seek && queue.isFull )
	 * {
	 *     decoder.seekCompleted();
	 * } */
	// else
	Logger.logVerbose( "Enqueued {0} to Alsa; state {1}", enqueuedFrames.pluralString( "frame" ), handle.state );
}