/**
 * Edits a video file, saving the contents to a new file. This involves decoding and
 * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
 * <p>
 * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
 * output, but it's not practical to support all OEM formats. By using a SurfaceTexture
 * for output and a Surface for input, we can avoid issues with obscure formats and can
 * use a fragment shader to do transformations.
 */
private VideoChunks editVideoFile(VideoChunks inputData)
{
    if (AppSettings.Logging.SendToConsole)
        Log.Debug(TAG, "editVideoFile " + mWidth + "x" + mHeight);

    VideoChunks outputData = new VideoChunks();
    MediaCodec decoder = null;
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;
    try
    {
        MediaFormat inputFormat = inputData.getMediaFormat();

        // Create an encoder format that matches the input format. (Might be able to just
        // re-use the format used to generate the video, since we want it to be the same.)
        MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
        outputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
        outputFormat.SetInteger(MediaFormat.KeyBitRate, inputFormat.GetInteger(MediaFormat.KeyBitRate));
        outputFormat.SetInteger(MediaFormat.KeyFrameRate, inputFormat.GetInteger(MediaFormat.KeyFrameRate));
        outputFormat.SetInteger(MediaFormat.KeyIFrameInterval, inputFormat.GetInteger(MediaFormat.KeyIFrameInterval));
        outputData.setMediaFormat(outputFormat);

        encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
        encoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);
        inputSurface = new InputSurface(encoder.CreateInputSurface());
        inputSurface.MakeCurrent();
        encoder.Start();

        // OutputSurface uses the EGL context created by InputSurface.
        decoder = MediaCodec.CreateDecoderByType(MIME_TYPE);
        outputSurface = new OutputSurface();
        outputSurface.ChangeFragmentShader(FRAGMENT_SHADER);
        decoder.Configure(inputFormat, outputSurface.Surface, null, MediaCodecConfigFlags.None);
        decoder.Start();

        editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
    }
    finally
    {
        if (AppSettings.Logging.SendToConsole)
            Log.Debug(TAG, "shutting down encoder, decoder");
        if (outputSurface != null)
            outputSurface.Release();
        if (inputSurface != null)
            inputSurface.Release();
        if (encoder != null)
        {
            encoder.Stop();
            encoder.Release();
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
        }
    }
    return outputData;
}
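// The FRAGMENT_SHADER passed to ChangeFragmentShader above is defined elsewhere in the
// class and is not shown here. As a hedged illustration only (not necessarily the exact
// shader this sample uses), a minimal GLSL fragment shader for SurfaceTexture input that
// makes a visible edit by swapping the red and green channels could look like this; note
// the samplerExternalOES type that SurfaceTexture-backed textures require:
private const string EXAMPLE_FRAGMENT_SHADER =
    "#extension GL_OES_EGL_image_external : require\n" +
    "precision mediump float;\n" +
    "varying vec2 vTextureCoord;\n" +
    "uniform samplerExternalOES sTexture;\n" +
    "void main() {\n" +
    "    vec4 color = texture2D(sTexture, vTextureCoord);\n" +
    "    gl_FragColor = color.grba;\n" +   // swizzle: swap red and green
    "}\n";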
/**
 * Configures encoder and muxer state, and prepares the input Surface. Initializes
 * mEncoder, mMuxer, _inputSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
 */
private void prepareEncoder()
{
    mBufferInfo = new MediaCodec.BufferInfo();

    MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, _width, _height);

    // Set some properties. Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an unhelpful exception.
    format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    format.SetInteger(MediaFormat.KeyBitRate, _bitRate);
    format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
    format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
    if (VERBOSE)
        Log.Debug(TAG, "format: " + format);

    // Create a MediaCodec encoder, and configure it with our format. Get a Surface
    // we can use for input and wrap it with a class that handles the EGL work.
    //
    // If you want to have two EGL contexts -- one for display, one for recording --
    // you will likely want to defer instantiation of CodecInputSurface until after the
    // "display" EGL context is created, then modify the eglCreateContext call to
    // take eglGetCurrentContext() as the share_context argument.
    mEncoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
    mEncoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
    _inputSurface = new InputSurface(mEncoder.CreateInputSurface());
    mEncoder.Start();

    // Output filename. Ideally this would use Context.getFilesDir() rather than a
    // hard-coded output directory.
    string outputPath = System.IO.Path.Combine(_workingDirectory,
        "test." + _width + "x" + _height + ".mp4");
    Log.Info(TAG, "Output file is " + outputPath);

    // Create a MediaMuxer. We can't add the video track and start() the muxer here,
    // because our MediaFormat doesn't have the Magic Goodies. These can only be
    // obtained from the encoder after it has started processing data.
    //
    // We're not actually interested in multiplexing audio. We just want to convert
    // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
    try
    {
        mMuxer = new MediaMuxer(outputPath, MuxerOutputType.Mpeg4);
    }
    catch (Java.IO.IOException e)
    {
        // Wrap rather than rethrowing only the message, so the original cause survives.
        throw new System.InvalidOperationException("MediaMuxer creation failed", e);
    }

    mTrackIndex = -1;
    mMuxerStarted = false;
}
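// For reference, a minimal sketch of the drain loop that supplies the "Magic Goodies"
// mentioned above: the muxer can only be started once the encoder reports its actual
// output format (which carries csd-0/csd-1). This is an assumption-laden sketch, not
// code from the sample; TIMEOUT_USEC is invented here, and ByteBuffer is Java.Nio.ByteBuffer.
private void drainEncoderSketch(bool endOfStream)
{
    const long TIMEOUT_USEC = 10000;
    ByteBuffer[] encoderOutputBuffers = mEncoder.GetOutputBuffers();
    while (true)
    {
        int encoderStatus = mEncoder.DequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
        {
            if (!endOfStream)
                break;          // no output available yet; only keep spinning at EOS
        }
        else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
        {
            encoderOutputBuffers = mEncoder.GetOutputBuffers();
        }
        else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
        {
            // The encoder's real output format is now known; add the track and start.
            mTrackIndex = mMuxer.AddTrack(mEncoder.OutputFormat);
            mMuxer.Start();
            mMuxerStarted = true;
        }
        else if (encoderStatus >= 0)
        {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (mBufferInfo.Size != 0 && mMuxerStarted)
                mMuxer.WriteSampleData(mTrackIndex, encodedData, mBufferInfo);
            mEncoder.ReleaseOutputBuffer(encoderStatus, false);
            if ((mBufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                break;          // encoder has flushed everything
        }
    }
}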
private void SetupEncoder()
{
    MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MediaHelper.MIME_TYPE_AVC, mWidth, mHeight);
    outputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    outputFormat.SetInteger(MediaFormat.KeyBitRate, mBitRate);
    outputFormat.SetInteger(MediaFormat.KeyFrameRate, mFrameRate);
    outputFormat.SetInteger(MediaFormat.KeyIFrameInterval, mIFrameInterval);

    mEncoder = MediaCodec.CreateEncoderByType(MediaHelper.MIME_TYPE_AVC);
    mEncoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);
    mInputSurface = new InputSurface(mEncoder.CreateInputSurface());
    mInputSurface.MakeCurrent();
    mEncoder.Start();
}
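// A matching teardown for SetupEncoder(), following the Stop()/Release() pattern the
// other snippets here use in their finally blocks. This is a sketch, not part of the
// original class:
private void ReleaseEncoder()
{
    if (mEncoder != null)
    {
        mEncoder.Stop();
        mEncoder.Release();
        mEncoder = null;
    }
    if (mInputSurface != null)
    {
        mInputSurface.Release();
        mInputSurface = null;
    }
}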
/**
 * Configures encoder and muxer state, and prepares the input Surface. Initializes
 * mEncoder, _muxer, _inputSurface, _bfi, mTrackIndex, and MuxerStarted.
 */
private void prepareEncoder(string outputPath)
{
    _bfi = new MediaCodec.BufferInfo();
    LatestOutputPath = outputPath;

    MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, _width, _height);

    // Set some properties. Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an unhelpful exception.
    format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    format.SetInteger(MediaFormat.KeyBitRate, _bitRate);
    format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
    format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
    if (AppSettings.Logging.SendToConsole)
        Log.Debug(TAG, "format: " + format);

    // Create a MediaCodec encoder, and configure it with our format. Get a Surface
    // we can use for input and wrap it with a class that handles the EGL work.
    //
    // If you want to have two EGL contexts -- one for display, one for recording --
    // you will likely want to defer instantiation of CodecInputSurface until after the
    // "display" EGL context is created, then modify the eglCreateContext call to
    // take eglGetCurrentContext() as the share_context argument.
    mEncoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
    mEncoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
    _inputSurface = new InputSurface(mEncoder.CreateInputSurface());
    mEncoder.Start();

    Log.Info(TAG, "Output file is " + outputPath);

    try
    {
        _muxer = new MediaMuxer(outputPath, MuxerOutputType.Mpeg4);
    }
    catch (Java.IO.IOException e)
    {
        // Wrap rather than rethrowing only the message, so the original cause survives.
        throw new System.InvalidOperationException("MediaMuxer creation failed", e);
    }

    mTrackIndex = -1;
    MuxerStarted = false;
}
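// When recording through an input Surface, the session is normally ended by signaling
// end-of-stream on the encoder rather than queuing an EOS input buffer. A hedged sketch
// of the shutdown sequence that would pair with prepareEncoder(); drainEncoder is an
// assumed helper, i.e. a drain loop like the drainEncoderSketch shown earlier:
private void finishRecordingSketch()
{
    mEncoder.SignalEndOfInputStream();   // no more frames will arrive on the Surface
    drainEncoder(true);                  // drain until the EndOfStream flag is seen

    mEncoder.Stop();
    mEncoder.Release();
    _muxer.Stop();                       // finalizes the .mp4; throws if never started
    _muxer.Release();
}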
// https://github.com/lanhq147/SampleMediaFrame/blob/e2f20ff9eef73318e5a9b4de15458c5c2eb0fd46/app/src/main/java/com/google/android/exoplayer2/video/av/HWRecorder.java
public bool BeginEncoding(int resX, int resY, int rateNumer, int rateDenom, int videoBitRate,
                          int audioBitRate, string audioFile, string outputFile)
{
    videoBufferInfo = new MediaCodec.BufferInfo();
    audioBufferInfo = new MediaCodec.BufferInfo();
    frameRateNumer = rateNumer;
    frameRateDenom = rateDenom;

    MediaFormat videoFormat = MediaFormat.CreateVideoFormat(VideoMimeType, resX, resY);
    videoFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    videoFormat.SetInteger(MediaFormat.KeyBitRate, videoBitRate * 1000);
    videoFormat.SetFloat(MediaFormat.KeyFrameRate, rateNumer / (float)rateDenom);
    videoFormat.SetInteger(MediaFormat.KeyIFrameInterval, 4);
    videoFormat.SetInteger(MediaFormat.KeyProfile, (int)MediaCodecProfileType.Avcprofilehigh);
    videoFormat.SetInteger(MediaFormat.KeyLevel, (int)MediaCodecProfileLevel.Avclevel31);

    videoEncoder = MediaCodec.CreateEncoderByType(VideoMimeType);
    videoEncoder.Configure(videoFormat, null, null, MediaCodecConfigFlags.Encode);
    surface = videoEncoder.CreateInputSurface();
    videoEncoder.Start();

    MediaFormat audioFormat = MediaFormat.CreateAudioFormat(AudioMimeType, 44100, 1);
    audioFormat.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecProfileType.Aacobjectlc);
    audioFormat.SetInteger(MediaFormat.KeyBitRate, audioBitRate * 1000);

    audioEncoder = MediaCodec.CreateEncoderByType(AudioMimeType);
    audioEncoder.Configure(audioFormat, null, null, MediaCodecConfigFlags.Encode);
    audioEncoder.Start();

    try
    {
        muxer = new MediaMuxer(outputFile, MuxerOutputType.Mpeg4);
    }
    catch
    {
        return false;
    }

    videoTrackIndex = -1;
    audioTrackIndex = -1;
    muxerStarted = false;

    if (!ElgInitialize())
        return false;

    // File.ReadAllBytes never returns null; it throws on failure, so catch I/O errors
    // instead of testing for null.
    try
    {
        audioData = File.ReadAllBytes(audioFile);
    }
    catch (IOException)
    {
        return false;
    }

    DrainEncoder(videoEncoder, videoBufferInfo, videoTrackIndex, false);
    DrainEncoder(audioEncoder, audioBufferInfo, audioTrackIndex, false);
    audioEncodingTask = Task.Factory.StartNew(AudioEncodeThread, TaskCreationOptions.LongRunning);

    return true;
}
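// With two encoders feeding one MediaMuxer, the muxer must not be started until BOTH
// tracks have been added, i.e. until each encoder has reported OutputFormatChanged.
// A hedged sketch of that gate; the method name and muxerLock are assumptions, not
// part of the snippet above:
private void OnEncoderFormatChanged(MediaCodec codec, bool isVideo)
{
    lock (muxerLock)   // hypothetical lock object; audio drains on its own task
    {
        if (isVideo)
            videoTrackIndex = muxer.AddTrack(codec.OutputFormat);
        else
            audioTrackIndex = muxer.AddTrack(codec.OutputFormat);

        // Start only once both tracks exist; WriteSampleData before Start() throws.
        if (videoTrackIndex >= 0 && audioTrackIndex >= 0 && !muxerStarted)
        {
            muxer.Start();
            muxerStarted = true;
        }
    }
}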
/**
 * Generates a test video file, saving it as VideoChunks. We generate frames with GL to
 * avoid having to deal with multiple YUV formats.
 *
 * @return true on success, false on "soft" failure
 */
private bool generateVideoFile(VideoChunks output)
{
    if (AppSettings.Logging.SendToConsole)
        Log.Debug(TAG, "generateVideoFile " + mWidth + "x" + mHeight);

    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    try
    {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null)
        {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.Error(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return false;
        }
        if (AppSettings.Logging.SendToConsole)
            Log.Debug(TAG, "found codec: " + codecInfo.Name);

        // We avoid the device-specific limitations on width and height by using values
        // that are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
        format.SetInteger(MediaFormat.KeyBitRate, mBitRate);
        format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
        format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
        if (AppSettings.Logging.SendToConsole)
            Log.Debug(TAG, "format: " + format);
        output.setMediaFormat(format);

        // Create a MediaCodec for the desired codec, then configure it as an encoder
        // with our desired properties.
        encoder = MediaCodec.CreateByCodecName(codecInfo.Name);
        encoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
        inputSurface = new InputSurface(encoder.CreateInputSurface());
        inputSurface.MakeCurrent();
        encoder.Start();

        generateVideoData(encoder, inputSurface, output);
    }
    finally
    {
        if (encoder != null)
        {
            if (AppSettings.Logging.SendToConsole)
                Log.Debug(TAG, "releasing encoder");
            encoder.Stop();
            encoder.Release();
            if (AppSettings.Logging.SendToConsole)
                Log.Debug(TAG, "released encoder");
        }
        if (inputSurface != null)
            inputSurface.Release();
    }
    return true;
}
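// selectCodec is referenced above but not shown. A sketch of the usual CTS-style helper
// it likely resembles, using the pre-API-21 MediaCodecList enumeration, which returns
// the first encoder that advertises support for the requested MIME type:
private static MediaCodecInfo selectCodec(string mimeType)
{
    int numCodecs = MediaCodecList.CodecCount;
    for (int i = 0; i < numCodecs; i++)
    {
        MediaCodecInfo codecInfo = MediaCodecList.GetCodecInfoAt(i);
        if (!codecInfo.IsEncoder)
            continue;   // we only want encoders
        foreach (string type in codecInfo.GetSupportedTypes())
        {
            if (string.Equals(type, mimeType, System.StringComparison.OrdinalIgnoreCase))
                return codecInfo;
        }
    }
    return null;   // caller treats this as a "soft" failure
}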