/**
 * Instantiates and starts the decoder, then pushes the SPS and PPS NAL units
 * into it so it is primed before real frames arrive.
 *
 * The codec-specific data (csd-0) is built as: start-code + SPS + start-code + PPS,
 * using the Annex-B 4-byte start code {0x00, 0x00, 0x00, 0x01}.
 *
 * @throws IOException The decoder cannot be configured
 */
private void configureDecoder() {
    // Annex-B NAL start code; reused both for csd-0 and for the queued units.
    byte[] startCode = new byte[] { 0x00, 0x00, 0x00, 0x01 };

    // Build csd-0 = startCode + SPS + startCode + PPS.
    ByteBuffer csd0 = ByteBuffer.Allocate(4 + mSPS.Length + 4 + mPPS.Length);
    csd0.Put(startCode);
    csd0.Put(mSPS);
    csd0.Put(startCode);
    csd0.Put(mPPS);

    mDecoder = MediaCodec.CreateByCodecName(mDecoderName);
    MediaFormat mediaFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
    mediaFormat.SetByteBuffer("csd-0", csd0);
    mediaFormat.SetInteger(MediaFormat.KeyColorFormat, mDecoderColorFormat);
    mDecoder.Configure(mediaFormat, null, null, 0);
    mDecoder.Start();

    ByteBuffer[] decInputBuffers = mDecoder.GetInputBuffers();

    // Feed SPS and PPS as the first two input buffers.
    queueParameterSet(decInputBuffers, startCode, mSPS);
    queueParameterSet(decInputBuffers, startCode, mPPS);
}

/**
 * Dequeues one decoder input buffer (waiting up to one frame period) and
 * queues startCode + parameterSet into it. Logs when no buffer is available.
 *
 * @param inputBuffers the decoder's input buffer array
 * @param startCode the 4-byte Annex-B start code
 * @param parameterSet the SPS or PPS payload
 */
private void queueParameterSet(ByteBuffer[] inputBuffers, byte[] startCode, byte[] parameterSet) {
    int decInputIndex = mDecoder.DequeueInputBuffer(1000000 / FRAMERATE);
    if (decInputIndex >= 0) {
        inputBuffers[decInputIndex].Clear();
        inputBuffers[decInputIndex].Put(startCode);
        inputBuffers[decInputIndex].Put(parameterSet);
        mDecoder.QueueInputBuffer(decInputIndex, 0, inputBuffers[decInputIndex].Position(), timestamp(), 0);
    } else {
        if (VERBOSE) {
            // Was `Log.e(...)` (Java-style) in one branch and `Log.Error(...)` in the
            // other; Xamarin's Android.Util.Log only exposes Log.Error.
            Log.Error(TAG, "No buffer available !");
        }
    }
}
/**
 * Instantiates and starts the encoder.
 *
 * Configures a video format with the test bitrate, frame rate, the encoder's
 * preferred color format, and an I-frame every second, then starts the codec.
 *
 * @throws IOException The encoder cannot be configured
 */
private void configureEncoder() {
    // Describe the stream the encoder must produce before creating the codec.
    MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
    format.SetInteger(MediaFormat.KeyBitRate, BITRATE);
    format.SetInteger(MediaFormat.KeyFrameRate, FRAMERATE);
    format.SetInteger(MediaFormat.KeyColorFormat, mEncoderColorFormat);
    format.SetInteger(MediaFormat.KeyIFrameInterval, 1);

    mEncoder = MediaCodec.CreateByCodecName(mEncoderName);
    mEncoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
    mEncoder.Start();
}
// NOTE(review): this method appears TRUNCATED — it ends abruptly at `@Override`
// and mixes Java anonymous-class syntax (`new Camera.PreviewCallback() { ... }`,
// `@Override`, `System.nanoTime()`, `mMediaCodec.getInputBuffers()`) with a C#
// lambda (`Action InnerMethod = () => { ... }`). It looks like a partial port of
// Java (libstreaming-style) code to C#; confirm the rest of the body exists
// elsewhere before relying on it. Code left byte-identical below.
//
// Intended flow, as far as the visible code shows:
//   1. create/update the camera and measure its frame rate;
//   2. start the camera preview if not already started (destroying the camera
//      and rethrowing on failure);
//   3. use EncoderDebugger to pick an encoder name, color format and an
//      NV21 converter for this device/quality;
//   4. configure and start an AVC MediaCodec with the quality's resolution,
//      bitrate and frame rate, and a 1-second I-frame interval;
//   5. (truncated) install a preview callback that feeds converted NV21
//      frames into the codec's input buffers — presumably; verify once the
//      full body is available.
/** * Video encoding is done by a MediaCodec. */ protected void encodeWithMediaCodecMethod1() { Log.Debug(TAG,"Video encoded using the MediaCodec API with a buffer"); // Updates the parameters of the camera if needed createCamera(); updateCamera(); // Estimates the frame rate of the camera measureFramerate(); // Starts the preview if needed if (!mPreviewStarted) { try { mCamera.startPreview(); mPreviewStarted = true; } catch (RuntimeException e) { destroyCamera(); throw e; } } EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY); const NV21Convertor convertor = debugger.getNV21Convertor(); mMediaCodec = MediaCodec.CreateByCodecName(debugger.getEncoderName()); MediaFormat mediaFormat = MediaFormat.CreateVideoFormat("video/avc", mQuality.resX, mQuality.resY); mediaFormat.SetInteger(MediaFormat.KeyBitRate, mQuality.bitrate); mediaFormat.SetInteger(MediaFormat.KeyFrameRate, mQuality.framerate); mediaFormat.SetInteger(MediaFormat.KeyColorFormat,debugger.getEncoderColorFormat()); mediaFormat.SetInteger(MediaFormat.KeyIFrameInterval, 1); mMediaCodec.Configure(mediaFormat, null, null, MediaCodecConfigFlags.Encode); mMediaCodec.Start(); Camera.PreviewCallback callback = new Camera.PreviewCallback() Action InnerMethod = () =>{ long now = System.nanoTime()/1000, oldnow = now, i=0; ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers(); @Override
/**
 * Generates a test video file, saving it as VideoChunks. We generate frames with GL to
 * avoid having to deal with multiple YUV formats.
 *
 * @param output receives the encoder's MediaFormat and the encoded frame data
 * @return true on success, false on "soft" failure (no suitable AVC codec found)
 */
private bool generateVideoFile(VideoChunks output) {
    if (AppSettings.Logging.SendToConsole) {
        Log.Debug(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
    }

    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    // Tracks whether Start() succeeded: calling Stop() on a codec that was never
    // started throws IllegalStateException inside the finally block, which would
    // mask the original exception from Configure()/Start().
    bool encoderStarted = false;

    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.Error(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return(false);
        }
        if (AppSettings.Logging.SendToConsole) {
            Log.Debug(TAG, "found codec: " + codecInfo.Name);
        }

        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
        format.SetInteger(MediaFormat.KeyBitRate, mBitRate);
        format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
        format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
        if (AppSettings.Logging.SendToConsole) {
            Log.Debug(TAG, "format: " + format);
        }
        output.setMediaFormat(format);

        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.
        encoder = MediaCodec.CreateByCodecName(codecInfo.Name);
        encoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
        // The input surface must exist before Start() and be current for GL rendering.
        inputSurface = new InputSurface(encoder.CreateInputSurface());
        inputSurface.MakeCurrent();
        encoder.Start();
        encoderStarted = true;

        generateVideoData(encoder, inputSurface, output);
    } finally {
        if (encoder != null) {
            if (AppSettings.Logging.SendToConsole) {
                Log.Debug(TAG, "releasing encoder");
            }
            if (encoderStarted) {
                encoder.Stop();
            }
            encoder.Release();
            if (AppSettings.Logging.SendToConsole) {
                Log.Debug(TAG, "released encoder");
            }
        }
        if (inputSurface != null) {
            inputSurface.Release();
        }
    }
    return(true);
}