/**
 * Checks the video file to see if the contents match our expectations. We decode the
 * video to a Surface and check the pixels with GL.
 */
private void checkVideoFile(VideoChunks inputData)
{
    OutputSurface surface = null;
    MediaCodec decoder = null;
    mLargestColorDelta = -1;
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "checkVideoFile");
    }
    try
    {
        surface = new OutputSurface(mWidth, mHeight);
        MediaFormat format = inputData.getMediaFormat();
        decoder = MediaCodec.CreateDecoderByType(MIME_TYPE);
        decoder.Configure(format, surface.Surface, null, 0);
        decoder.Start();
        int badFrames = checkVideoData(inputData, decoder, surface);
        if (badFrames != 0)
        {
            fail("Found " + badFrames + " bad frames");
        }
    }
    finally
    {
        if (surface != null)
        {
            surface.Release();
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
        }
        Log.Info(TAG, "Largest color delta: " + mLargestColorDelta);
    }
}
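// A minimal sketch of the drain loop that checkVideoData implements (illustrative
// only; the real method lives elsewhere in this class). It assumes OutputSurface
// exposes AwaitNewImage()/DrawImage() wrappers and that a checkSurfaceFrame() helper
// performs the GL pixel comparison; those names, and TIMEOUT_USEC, are assumptions
// here, not part of this section. Feeding encoded chunks from inputData into the
// decoder's input buffers is elided; real code interleaves it with the drain.
private int checkVideoDataSketch(VideoChunks inputData, MediaCodec decoder, OutputSurface surface)
{
    const long TIMEOUT_USEC = 10000;
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int badFrames = 0;
    int checkIndex = 0;
    bool outputDone = false;
    while (!outputDone)
    {
        int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
        if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
        {
            continue;   // no output available yet
        }
        if (decoderStatus < 0)
        {
            continue;   // format/buffer-change notifications can be ignored here
        }
        // Render frames with a non-zero payload to the Surface, then inspect them.
        bool doRender = info.Size != 0;
        decoder.ReleaseOutputBuffer(decoderStatus, doRender);
        if (doRender)
        {
            surface.AwaitNewImage();
            surface.DrawImage();
            if (!checkSurfaceFrame(checkIndex++))
            {
                badFrames++;
            }
        }
        if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
        {
            outputDone = true;
        }
    }
    return badFrames;
}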
/**
 * Edits a video file, saving the contents to a new file. This involves decoding and
 * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
 * <p>
 * If we recognize the decoded format we can do this in managed code using the
 * ByteBuffer[] output, but it's not practical to support all OEM formats. By using a
 * SurfaceTexture for output and a Surface for input, we can avoid issues with obscure
 * formats and can use a fragment shader to do transformations.
 */
private VideoChunks editVideoFile(VideoChunks inputData)
{
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "editVideoFile " + mWidth + "x" + mHeight);
    }
    VideoChunks outputData = new VideoChunks();
    MediaCodec decoder = null;
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;
    try
    {
        MediaFormat inputFormat = inputData.getMediaFormat();

        // Create an encoder format that matches the input format. (Might be able to just
        // re-use the format used to generate the video, since we want it to be the same.)
        MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
        outputFormat.SetInteger(MediaFormat.KeyColorFormat,
            (int)MediaCodecInfo.CodecCapabilities.COLORFormatSurface);
        outputFormat.SetInteger(MediaFormat.KeyBitRate,
            inputFormat.GetInteger(MediaFormat.KeyBitRate));
        outputFormat.SetInteger(MediaFormat.KeyFrameRate,
            inputFormat.GetInteger(MediaFormat.KeyFrameRate));
        outputFormat.SetInteger(MediaFormat.KeyIFrameInterval,
            inputFormat.GetInteger(MediaFormat.KeyIFrameInterval));
        outputData.setMediaFormat(outputFormat);

        encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
        encoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);
        // CreateInputSurface() must be called after Configure() and before Start().
        inputSurface = new InputSurface(encoder.CreateInputSurface());
        inputSurface.MakeCurrent();
        encoder.Start();

        // OutputSurface uses the EGL context created by InputSurface.
        decoder = MediaCodec.CreateDecoderByType(MIME_TYPE);
        outputSurface = new OutputSurface();
        outputSurface.ChangeFragmentShader(FRAGMENT_SHADER);
        decoder.Configure(inputFormat, outputSurface.Surface, null, 0);
        decoder.Start();

        editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
    }
    finally
    {
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "shutting down encoder, decoder");
        }
        if (outputSurface != null)
        {
            outputSurface.Release();
        }
        if (inputSurface != null)
        {
            inputSurface.Release();
        }
        if (encoder != null)
        {
            encoder.Stop();
            encoder.Release();
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
        }
    }
    return outputData;
}
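// The FRAGMENT_SHADER constant referenced above is defined elsewhere in this class.
// As an illustration of the kind of per-frame edit ChangeFragmentShader enables, a
// shader along these lines swizzles the color channels of each decoded frame as it
// is redrawn into the encoder's input surface (a sketch, not the exact constant;
// the samplerExternalOES binding is required for SurfaceTexture input).
private const string EXAMPLE_FRAGMENT_SHADER =
    "#extension GL_OES_EGL_image_external : require\n" +
    "precision mediump float;\n" +
    "varying vec2 vTextureCoord;\n" +
    "uniform samplerExternalOES sTexture;\n" +
    "void main() {\n" +
    "  // Swap the red and green channels as the 'edit'.\n" +
    "  gl_FragColor = texture2D(sTexture, vTextureCoord).grba;\n" +
    "}\n";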
/**
 * Releases the SurfaceTexture.
 */
private void releaseSurfaceTexture()
{
    if (_outputSurface != null)
    {
        _outputSurface.Release();
        _outputSurface = null;
    }
}