/**
 * Checks the video data by decoding each frame to the output Surface and inspecting it.
 *
 * @return the number of bad frames
 */
private int checkVideoData(VideoChunks inputData, MediaCodec decoder, OutputSurface surface)
{
    const int TIMEOUT_USEC = 1000;
    ByteBuffer[] decoderInputBuffers = decoder.GetInputBuffers();
    ByteBuffer[] decoderOutputBuffers = decoder.GetOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int checkIndex = 0;
    int badFrames = 0;
    bool outputDone = false;
    bool inputDone = false;

    while (!outputDone)
    {
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "check loop");
        }

        // Feed more data to the decoder.
        if (!inputDone)
        {
            int inputBufIndex = decoder.DequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0)
            {
                if (inputChunk == inputData.NumChunks)
                {
                    // End of stream -- send an empty frame with the EOS flag set.
                    decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodecBufferFlags.EndOfStream);
                    inputDone = true;
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "sent input EOS");
                    }
                }
                else
                {
                    // Copy a chunk of input to the decoder. The first chunk should have
                    // the BUFFER_FLAG_CODEC_CONFIG flag set.
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    inputBuf.Clear();
                    inputData.getChunkData(inputChunk, inputBuf);
                    int flags = inputData.getChunkFlags(inputChunk);
                    long time = inputData.getChunkTime(inputChunk);
                    decoder.QueueInputBuffer(inputBufIndex, 0, inputBuf.Position(), time, (MediaCodecBufferFlags)flags);
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "submitted frame " + inputChunk + " to dec, size=" + inputBuf.Position() + " flags=" + flags);
                    }
                    inputChunk++;
                }
            }
            else
            {
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "input buffer not available");
                }
            }
        }

        if (!outputDone)
        {
            int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
            if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
            {
                // No output available yet.
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "no output from decoder available");
                }
            }
            else if (decoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
            {
                decoderOutputBuffers = decoder.GetOutputBuffers();
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "decoder output buffers changed");
                }
            }
            else if (decoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
            {
                MediaFormat newFormat = decoder.OutputFormat;
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "decoder output format changed: " + newFormat);
                }
            }
            else if (decoderStatus < 0)
            {
                fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
            }
            else // decoderStatus >= 0
            {
                ByteBuffer decodedData = decoderOutputBuffers[decoderStatus];
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "surface decoder given buffer " + decoderStatus + " (size=" + info.Size + ")");
                }
                if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                {
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "output EOS");
                    }
                    outputDone = true;
                }

                bool doRender = (info.Size != 0);

                // As soon as we call ReleaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture. The API doesn't guarantee
                // that the texture will be available before the call returns, so we
                // need to wait for the onFrameAvailable callback to fire.
                decoder.ReleaseOutputBuffer(decoderStatus, doRender);
                if (doRender)
                {
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "awaiting frame " + checkIndex);
                    }
                    assertEquals("Wrong time stamp", computePresentationTime(checkIndex), info.PresentationTimeUs);
                    surface.AwaitNewImage();
                    surface.DrawImage();
                    if (!checkSurfaceFrame(checkIndex++))
                    {
                        badFrames++;
                    }
                }
            }
        }
    }
    return badFrames;
}
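// ---------------------------------------------------------------------------
// checkVideoData compares each rendered frame's timestamp against
// computePresentationTime(checkIndex), a helper referenced above but not shown
// in this section. The version below is only an illustrative sketch: it assumes
// a fixed FRAME_RATE field and that the original frame generator derived its
// timestamps with the same formula (the generator may also add a small constant
// offset; whatever it does, this helper must match it exactly, or the
// assertEquals check above will fail).
// ---------------------------------------------------------------------------
private static long computePresentationTime(int frameIndex)
{
    // One frame every 1/FRAME_RATE seconds, expressed in microseconds.
    return frameIndex * 1000000L / FRAME_RATE;
}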
/**
 * Edits a stream of video data, decoding to a Surface and re-encoding the rendered frames.
 */
private void editVideoData(VideoChunks inputData, MediaCodec decoder, OutputSurface outputSurface,
                           InputSurface inputSurface, MediaCodec encoder, VideoChunks outputData)
{
    const int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.GetInputBuffers();
    ByteBuffer[] encoderOutputBuffers = encoder.GetOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int outputCount = 0;
    bool outputDone = false;
    bool inputDone = false;
    bool decoderDone = false;

    while (!outputDone)
    {
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "edit loop");
        }

        // Feed more data to the decoder.
        if (!inputDone)
        {
            int inputBufIndex = decoder.DequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0)
            {
                if (inputChunk == inputData.NumChunks)
                {
                    // End of stream -- send an empty frame with the EOS flag set.
                    decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodecBufferFlags.EndOfStream);
                    inputDone = true;
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "sent input EOS (with zero-length frame)");
                    }
                }
                else
                {
                    // Copy a chunk of input to the decoder. The first chunk should have
                    // the BUFFER_FLAG_CODEC_CONFIG flag set.
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    inputBuf.Clear();
                    inputData.getChunkData(inputChunk, inputBuf);
                    int flags = inputData.getChunkFlags(inputChunk);
                    long time = inputData.getChunkTime(inputChunk);
                    decoder.QueueInputBuffer(inputBufIndex, 0, inputBuf.Position(), time, (MediaCodecBufferFlags)flags);
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "submitted frame " + inputChunk + " to dec, size=" + inputBuf.Position() + " flags=" + flags);
                    }
                    inputChunk++;
                }
            }
            else
            {
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "input buffer not available");
                }
            }
        }

        // Assume output is available. Loop until both assumptions are false.
        bool decoderOutputAvailable = !decoderDone;
        bool encoderOutputAvailable = true;
        while (decoderOutputAvailable || encoderOutputAvailable)
        {
            // Start by draining any pending output from the encoder. It's important to
            // do this before we try to stuff any more data in.
            int encoderStatus = encoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
            if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
            {
                // No output available yet.
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "no output from encoder available");
                }
                encoderOutputAvailable = false;
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
            {
                encoderOutputBuffers = encoder.GetOutputBuffers();
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "encoder output buffers changed");
                }
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
            {
                MediaFormat newFormat = encoder.OutputFormat;
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "encoder output format changed: " + newFormat);
                }
            }
            else if (encoderStatus < 0)
            {
                fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            }
            else // encoderStatus >= 0
            {
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null)
                {
                    fail("encoderOutputBuffer " + encoderStatus + " was null");
                }

                // Write the data to the output "file".
                if (info.Size != 0)
                {
                    encodedData.Position(info.Offset);
                    encodedData.Limit(info.Offset + info.Size);
                    outputData.addChunk(encodedData, (int)info.Flags, info.PresentationTimeUs);
                    outputCount++;
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "encoder output " + info.Size + " bytes");
                    }
                }
                outputDone = (info.Flags & MediaCodecBufferFlags.EndOfStream) != 0;
                encoder.ReleaseOutputBuffer(encoderStatus, false);
            }

            if (encoderStatus != (int)MediaCodecInfoState.TryAgainLater)
            {
                // Continue attempts to drain output.
                continue;
            }

            // Encoder is drained, check to see if we've got a new frame of output from
            // the decoder. (The output is going to a Surface, rather than a ByteBuffer,
            // but we still get information through BufferInfo.)
            if (!decoderDone)
            {
                int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                {
                    // No output available yet.
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "no output from decoder available");
                    }
                    decoderOutputAvailable = false;
                }
                else if (decoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                {
                    //decoderOutputBuffers = decoder.GetOutputBuffers();
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "decoder output buffers changed (we don't care)");
                    }
                }
                else if (decoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                {
                    // Expected before the first buffer of data.
                    MediaFormat newFormat = decoder.OutputFormat;
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "decoder output format changed: " + newFormat);
                    }
                }
                else if (decoderStatus < 0)
                {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                }
                else // decoderStatus >= 0
                {
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "surface decoder given buffer " + decoderStatus + " (size=" + info.Size + ")");
                    }

                    // The ByteBuffers are null references, but we still get a nonzero
                    // size for the decoded data.
                    bool doRender = (info.Size != 0);

                    // As soon as we call ReleaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture. The API doesn't
                    // guarantee that the texture will be available before the call
                    // returns, so we need to wait for the onFrameAvailable callback to
                    // fire. If we don't wait, we risk rendering from the previous frame.
                    decoder.ReleaseOutputBuffer(decoderStatus, doRender);
                    if (doRender)
                    {
                        // This waits for the image and renders it after it arrives.
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "awaiting frame");
                        }
                        outputSurface.AwaitNewImage();
                        outputSurface.DrawImage();

                        // Send it to the encoder.
                        inputSurface.SetPresentationTime(info.PresentationTimeUs * 1000);
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "swapBuffers");
                        }
                        inputSurface.SwapBuffers();
                    }
                    if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                    {
                        // Forward the decoder EOS to the encoder.
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "signaling input EOS");
                        }
                        if (WORK_AROUND_BUGS)
                        {
                            // Bail early, possibly dropping a frame.
                            return;
                        }
                        else
                        {
                            encoder.SignalEndOfInputStream();
                        }
                    }
                }
            }
        }
    }

    if (inputChunk != outputCount)
    {
        throw new RuntimeException("frame lost: " + inputChunk + " in, " + outputCount + " out");
    }
}
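// ---------------------------------------------------------------------------
// For context, a caller might verify the edited stream produced by
// editVideoData by decoding it again through checkVideoData. The sketch below
// is an assumption about how that wiring could look, not code taken from this
// project: the method name checkVideoFile, the OutputSurface members
// (constructor, Surface, Release) and the VideoChunks accessors
// (GetMediaFormat, Width, Height) are hypothetical names used for illustration.
// ---------------------------------------------------------------------------
private void checkVideoFile(VideoChunks inputData)
{
    OutputSurface surface = null;
    MediaCodec decoder = null;
    try
    {
        MediaFormat format = inputData.GetMediaFormat();                 // assumed accessor
        string mime = format.GetString(MediaFormat.KeyMime);
        surface = new OutputSurface(inputData.Width, inputData.Height);  // assumed ctor/props
        decoder = MediaCodec.CreateDecoderByType(mime);
        decoder.Configure(format, surface.Surface, null, 0);             // assumed Surface property
        decoder.Start();

        int badFrames = checkVideoData(inputData, decoder, surface);
        if (badFrames != 0)
        {
            fail("Found " + badFrames + " bad frames");
        }
    }
    finally
    {
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
        }
        if (surface != null)
        {
            surface.Release();
        }
    }
}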