public bool setup(DecoderCallback callback_obj, int width, int height) //format_hint is aviFileContent
{
    callbackThread = new HandlerThread("H264DecoderHandler");
    callbackThread.Start();
    handler = new Handler(callbackThread.Looper);

    mDecoder = MediaCodec.CreateDecoderByType(MIME);
    mCallbackObj = callback_obj;
    myCallback = new MyCallback(mDecoder, mCallbackObj);
    mDecoder.SetCallback(myCallback, handler);
    //mOutputFormat = mDecoder.GetOutputFormat(); // option B

    inputFormat = MediaFormat.CreateVideoFormat(MIME, width, height);
    inputFormat.SetInteger(MediaFormat.KeyMaxInputSize, width * height);
    inputFormat.SetInteger("durationUs", 63446722);
    //inputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatyuv420semiplanar);
    //inputFormat.SetInteger(MediaFormat.KeyIFrameInterval, 60);

    try
    {
        mDecoder.Configure(inputFormat, null, null, 0 /* Decoder */);
    }
    catch (Exception)
    {
        throw;  // rethrow without resetting the stack trace ("throw ex;" would lose it)
    }

    Console.WriteLine("before mDecoder.Start()");
    mDecoder.Start();
    Console.WriteLine("after mDecoder.Start()");

    return true;
}
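The MyCallback class referenced above is not shown in the snippet. Below is a minimal sketch of what it could look like, assuming input arrives through an external queue of H.264 access units and that DecoderCallback exposes a hypothetical onFrameDecoded(byte[]) method. Since Configure() was called with a null surface, output buffers must be copied out rather than rendered.

class MyCallback : MediaCodec.Callback
{
    private readonly MediaCodec mDecoder;
    private readonly DecoderCallback mCallbackObj;
    private readonly System.Collections.Concurrent.ConcurrentQueue<byte[]> mSamples =
        new System.Collections.Concurrent.ConcurrentQueue<byte[]>();
    private long mPtsUs;

    public MyCallback(MediaCodec decoder, DecoderCallback callbackObj)
    {
        mDecoder = decoder;
        mCallbackObj = callbackObj;
    }

    // Producer side: something (a network reader, a file parser) enqueues access units here.
    public void Enqueue(byte[] accessUnit) => mSamples.Enqueue(accessUnit);

    public override void OnInputBufferAvailable(MediaCodec codec, int index)
    {
        if (!mSamples.TryDequeue(out byte[] sample))
        {
            // For brevity, return the buffer empty; a real implementation would
            // park the index until data arrives instead.
            codec.QueueInputBuffer(index, 0, 0, 0, MediaCodecBufferFlags.None);
            return;
        }
        Java.Nio.ByteBuffer buffer = codec.GetInputBuffer(index);
        buffer.Clear();
        buffer.Put(sample);
        // Synthetic ~30 fps timestamps; real code would carry PTS from the container.
        codec.QueueInputBuffer(index, 0, sample.Length, mPtsUs += 33333, MediaCodecBufferFlags.None);
    }

    public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
    {
        Java.Nio.ByteBuffer buffer = codec.GetOutputBuffer(index);
        byte[] yuv = new byte[info.Size];
        buffer.Position(info.Offset);
        buffer.Get(yuv, 0, info.Size);
        mCallbackObj.onFrameDecoded(yuv);           // hypothetical callback method
        codec.ReleaseOutputBuffer(index, false);    // no surface configured, so don't render
    }

    public override void OnOutputFormatChanged(MediaCodec codec, MediaFormat format) { }

    public override void OnError(MediaCodec codec, MediaCodec.CodecException e) { }
}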
/**
 * Edits a video file, saving the contents to a new file. This involves decoding and
 * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
 * <p>
 * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
 * output, but it's not practical to support all OEM formats. By using a SurfaceTexture
 * for output and a Surface for input, we can avoid issues with obscure formats and can
 * use a fragment shader to do transformations.
 */
private VideoChunks editVideoFile(VideoChunks inputData)
{
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "editVideoFile " + mWidth + "x" + mHeight);
    }

    VideoChunks outputData = new VideoChunks();
    MediaCodec decoder = null;
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;

    try
    {
        MediaFormat inputFormat = inputData.getMediaFormat();

        // Create an encoder format that matches the input format. (Might be able to just
        // re-use the format used to generate the video, since we want it to be the same.)
        MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
        // MediaCodecInfo.CodecCapabilities.COLORFormatSurface is obsolete in the Xamarin
        // bindings; MediaCodecCapabilities.Formatsurface is the current form.
        outputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
        outputFormat.SetInteger(MediaFormat.KeyBitRate, inputFormat.GetInteger(MediaFormat.KeyBitRate));
        outputFormat.SetInteger(MediaFormat.KeyFrameRate, inputFormat.GetInteger(MediaFormat.KeyFrameRate));
        outputFormat.SetInteger(MediaFormat.KeyIFrameInterval, inputFormat.GetInteger(MediaFormat.KeyIFrameInterval));
        outputData.setMediaFormat(outputFormat);

        encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
        encoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);
        inputSurface = new InputSurface(encoder.CreateInputSurface());
        inputSurface.MakeCurrent();
        encoder.Start();

        // OutputSurface uses the EGL context created by InputSurface.
        decoder = MediaCodec.CreateDecoderByType(MIME_TYPE);
        outputSurface = new OutputSurface();
        outputSurface.ChangeFragmentShader(FRAGMENT_SHADER);
        decoder.Configure(inputFormat, outputSurface.Surface, null, 0);
        decoder.Start();

        editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
    }
    finally
    {
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "shutting down encoder, decoder");
        }
        if (outputSurface != null)
        {
            outputSurface.Release();
        }
        if (inputSurface != null)
        {
            inputSurface.Release();
        }
        if (encoder != null)
        {
            encoder.Stop();
            encoder.Release();
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
        }
    }

    return outputData;
}
//@Override
//@SuppressLint({ "InlinedApi", "NewApi" })
protected override void encodeWithMediaCodec()
{
    int bufferSize = AudioRecord.GetMinBufferSize(mQuality.samplingRate, ChannelIn.Mono, Encoding.Pcm16bit) * 2;

    ((AACLATMPacketizer)mPacketizer).setSamplingRate(mQuality.samplingRate);

    mAudioRecord = new AudioRecord(Android.Media.AudioSource.Mic, mQuality.samplingRate,
        Android.Media.ChannelIn.Mono, Android.Media.Encoding.Pcm16bit, bufferSize);

    mMediaCodec = MediaCodec.CreateEncoderByType("audio/mp4a-latm");
    MediaFormat format = new MediaFormat();
    format.SetString(MediaFormat.KeyMime, "audio/mp4a-latm");
    format.SetInteger(MediaFormat.KeyBitRate, mQuality.bitRate);
    format.SetInteger(MediaFormat.KeyChannelCount, 1);
    format.SetInteger(MediaFormat.KeySampleRate, mQuality.samplingRate);
    format.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.SetInteger(MediaFormat.KeyMaxInputSize, bufferSize);
    mMediaCodec.Configure(format, null, null, MediaCodecConfigFlags.Encode);

    mAudioRecord.StartRecording();
    mMediaCodec.Start();

    MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    Java.Nio.ByteBuffer[] inputBuffers = mMediaCodec.GetInputBuffers();

    mThread = new Thread(this);
    mThread.Start();

    // The packetizer encapsulates this stream in an RTP stream and sends it over the network
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();

    mStreaming = true;
}
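The snippet starts a Java.Lang.Thread with new Thread(this), but the Run() method it implies is not shown. A plausible sketch follows, assuming its job is to pump raw PCM from mAudioRecord into the encoder while MediaCodecInputStream drains the encoded side; the 10 ms dequeue timeout, clock-based timestamps, and the mStreaming loop condition are assumptions.

public void Run()
{
    byte[] pcm = new byte[4096];
    Java.Nio.ByteBuffer[] inputBuffers = mMediaCodec.GetInputBuffers();

    while (mStreaming)
    {
        int bufferIndex = mMediaCodec.DequeueInputBuffer(10000);
        if (bufferIndex < 0)
        {
            continue;   // no input buffer free yet
        }

        Java.Nio.ByteBuffer buffer = inputBuffers[bufferIndex];
        buffer.Clear();

        int read = mAudioRecord.Read(pcm, 0, System.Math.Min(pcm.Length, buffer.Remaining()));
        if (read <= 0)
        {
            // Recorder stopped or errored: signal end of stream to the codec.
            mMediaCodec.QueueInputBuffer(bufferIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
            break;
        }

        buffer.Put(pcm, 0, read);
        long ptsUs = Java.Lang.JavaSystem.NanoTime() / 1000;
        mMediaCodec.QueueInputBuffer(bufferIndex, 0, read, ptsUs, MediaCodecBufferFlags.None);
    }
}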
/// <summary>
/// Initialize this instance.
/// First, stop decoder and check if surface exists.
/// Then configure MediaFormat and MediaCodec and start it.
/// </summary>
/// <returns>The initialize.</returns>
public bool Initialize()
{
    Initialized = false;

    if (!StopDecoder() || _surface == null)
    {
        return Initialized;
    }

    _mediaFormat = GetMediaFormat(_mimeType, _videoWidth, _videoHeight);
    _mediaFormat.SetInteger(MediaFormat.KeyMaxWidth, _videoWidth);
    _mediaFormat.SetInteger(MediaFormat.KeyMaxHeight, _videoHeight);

    if (Build.VERSION.SdkInt >= BuildVersionCodes.M)
    {
        _mediaFormat.SetInteger(MediaFormat.KeyOperatingRate, Short.MaxValue);
    }

    _mediaCodec.Configure(
        format: _mediaFormat,
        surface: _surface,
        crypto: null,
        flags: MediaCodecConfigFlags.None);
    _mediaCodec.SetVideoScalingMode(VideoScalingMode.ScaleToFit);
    _mediaCodec.SetCallback(this);
    _mediaCodec.Start();

    Initialized = true;
    return Initialized;
}
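GetMediaFormat is a private helper not shown in this snippet. A minimal sketch, assuming it only builds the base video format; a real implementation would typically also attach the stream's codec-specific data (csd-0/csd-1).

private MediaFormat GetMediaFormat(string mimeType, int videoWidth, int videoHeight)
{
    // Base format only; KeyMaxWidth/KeyMaxHeight and the operating rate
    // are layered on by Initialize() above.
    return MediaFormat.CreateVideoFormat(mimeType, videoWidth, videoHeight);
}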
/**
 * Instantiates and starts the encoder.
 * @throws IOException The encoder cannot be configured
 */
private void configureEncoder()
{
    mEncoder = MediaCodec.CreateByCodecName(mEncoderName);
    MediaFormat mediaFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
    mediaFormat.SetInteger(MediaFormat.KeyBitRate, BITRATE);
    mediaFormat.SetInteger(MediaFormat.KeyFrameRate, FRAMERATE);
    mediaFormat.SetInteger(MediaFormat.KeyColorFormat, mEncoderColorFormat);
    mediaFormat.SetInteger(MediaFormat.KeyIFrameInterval, 1);
    mEncoder.Configure(mediaFormat, null, null, MediaCodecConfigFlags.Encode);
    mEncoder.Start();
}
public void SetupAudio(int sampleRate, int channels, byte[] esdsData)
{
    // Note: the AudioTrack is hardcoded to 44100 Hz stereo regardless of the
    // sampleRate/channels parameters used to configure the decoder below.
    _audioTrack = new AudioTrack(
        new AudioAttributes.Builder()
            .SetUsage(AudioUsageKind.Media)
            .SetContentType(AudioContentType.Music)
            .SetFlags(AudioFlags.LowLatency)
            .Build(),
        new Android.Media.AudioFormat.Builder()
            .SetEncoding(Encoding.Pcm16bit)
            .SetSampleRate(44100)
            .SetChannelMask(ChannelOut.Stereo)
            .Build(),
        4096,
        AudioTrackMode.Stream,
        AudioManager.AudioSessionIdGenerate);

    MediaFormat audioFormat = MediaFormat.CreateAudioFormat(
        mime: MediaFormat.MimetypeAudioAac,
        sampleRate: sampleRate,
        channelCount: channels);
    audioFormat.SetInteger(MediaFormat.KeyIsAdts, 0);
    audioFormat.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecProfileType.Aacobjectlc);

    _audioCodec = MediaCodec.CreateDecoderByType(MediaFormat.MimetypeAudioAac);

    // TODO: Remove hardcoding
    // Build the AudioSpecificConfig (csd-0) by hand: 5 bits audio object type,
    // 4 bits sampling-frequency index, 4 bits channel configuration.
    byte profile = (byte)MediaCodecProfileType.Aacobjectlc;
    byte sampleIndex = AacAdtsAssembler.GetSamplingFrequencyIndex(sampleRate);
    byte[] csd0 = new byte[2];
    csd0[0] = (byte)(((byte)profile << 3) | (sampleIndex >> 1));
    csd0[1] = (byte)((byte)((sampleIndex << 7) & 0x80) | (channels << 3));
    esdsData = csd0;
    audioFormat.SetByteBuffer("csd-0", Java.Nio.ByteBuffer.Wrap(esdsData));

    _audioCodec.SetCallback(this);
    _audioCodec.Configure(
        format: audioFormat,
        surface: null,
        crypto: null,
        flags: MediaCodecConfigFlags.None);
    _audioCodec.Start();

    _audioTrack.Play();
}
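Since the class registers itself via SetCallback(this), it must derive from MediaCodec.Callback. A sketch of the decoder's output path feeding the AudioTrack created above; if the same class also owns the video codec (see SetupVideo below), a check on the codec instance would be needed first.

public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
{
    Java.Nio.ByteBuffer buffer = codec.GetOutputBuffer(index);
    byte[] pcm = new byte[info.Size];
    buffer.Position(info.Offset);
    buffer.Get(pcm, 0, info.Size);
    _audioTrack.Write(pcm, 0, pcm.Length);      // blocking write of decoded PCM
    codec.ReleaseOutputBuffer(index, false);
}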
/**
 * Configures encoder and muxer state, and prepares the input Surface. Initializes
 * mEncoder, mMuxer, mInputSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
 */
private void prepareEncoder()
{
    mBufferInfo = new MediaCodec.BufferInfo();

    MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, _width, _height);

    // Set some properties. Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an unhelpful exception.
    format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    format.SetInteger(MediaFormat.KeyBitRate, _bitRate);
    format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
    format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
    if (VERBOSE)
    {
        Log.Debug(TAG, "format: " + format);
    }

    // Create a MediaCodec encoder, and configure it with our format. Get a Surface
    // we can use for input and wrap it with a class that handles the EGL work.
    //
    // If you want to have two EGL contexts -- one for display, one for recording --
    // you will likely want to defer instantiation of CodecInputSurface until after the
    // "display" EGL context is created, then modify the eglCreateContext call to
    // take eglGetCurrentContext() as the share_context argument.
    mEncoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
    mEncoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
    _inputSurface = new InputSurface(mEncoder.CreateInputSurface());
    mEncoder.Start();

    // Output filename. Ideally this would use Context.getFilesDir() rather than a
    // hard-coded output directory.
    string outputPath = System.IO.Path.Combine(_workingDirectory, "test." + _width + "x" + _height + ".mp4");
    Log.Info(TAG, "Output file is " + outputPath);

    // Create a MediaMuxer. We can't add the video track and start() the muxer here,
    // because our MediaFormat doesn't have the Magic Goodies. These can only be
    // obtained from the encoder after it has started processing data.
    //
    // We're not actually interested in multiplexing audio. We just want to convert
    // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
    try
    {
        mMuxer = new MediaMuxer(outputPath, MuxerOutputType.Mpeg4);
    }
    catch (System.Exception e)
    {
        throw new System.Exception(e.Message, e);   // keep the original exception as inner
    }

    mTrackIndex = -1;
    mMuxerStarted = false;
}
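prepareEncoder() deliberately leaves the muxer un-started: the track can only be added once the encoder reports INFO_OUTPUT_FORMAT_CHANGED. The companion drain loop is not shown in this snippet; here is a sketch in the spirit of the EncodeAndMux sample it derives from, using the fields initialized above. The 10 ms timeout is arbitrary, and MediaCodecInfoState mirrors the MediaCodec.INFO_* constants in the Xamarin bindings.

private void drainEncoder(bool endOfStream)
{
    if (endOfStream)
    {
        mEncoder.SignalEndOfInputStream();  // valid because input comes from a Surface
    }

    while (true)
    {
        int status = mEncoder.DequeueOutputBuffer(mBufferInfo, 10000);
        if (status == (int)MediaCodecInfoState.TryAgainLater)
        {
            if (!endOfStream)
            {
                break;  // no output available yet; try again later
            }
        }
        else if (status == (int)MediaCodecInfoState.OutputFormatChanged)
        {
            // The "Magic Goodies" are available now: add the track and start muxing.
            mTrackIndex = mMuxer.AddTrack(mEncoder.OutputFormat);
            mMuxer.Start();
            mMuxerStarted = true;
        }
        else if (status >= 0)
        {
            if ((mBufferInfo.Flags & MediaCodecBufferFlags.CodecConfig) != 0)
            {
                mBufferInfo.Size = 0;   // csd was already consumed via the format change
            }
            Java.Nio.ByteBuffer data = mEncoder.GetOutputBuffer(status);
            if (mBufferInfo.Size > 0 && mMuxerStarted)
            {
                data.Position(mBufferInfo.Offset);
                data.Limit(mBufferInfo.Offset + mBufferInfo.Size);
                mMuxer.WriteSampleData(mTrackIndex, data, mBufferInfo);
            }
            mEncoder.ReleaseOutputBuffer(status, false);
            if ((mBufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
            {
                break;
            }
        }
    }
}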
private void initHwDecoder(string codec)
{
    MediaFormat format = new MediaFormat();
    string mime_type = null;

    switch (codec)
    {
        case "amrnb":
            mime_type = MediaFormat.MimetypeAudioAmrNb;
            format.SetInteger(MediaFormat.KeySampleRate, 8000);
            format.SetInteger(MediaFormat.KeyBitRate, 7950);
            break;
        case "amrwb":
            mime_type = MediaFormat.MimetypeAudioAmrWb;
            format.SetInteger(MediaFormat.KeySampleRate, 16000);
            format.SetInteger(MediaFormat.KeyBitRate, 18250);
            break;
    }

    if (mime_type != null)
    {
        format.SetString(MediaFormat.KeyMime, mime_type);
        format.SetInteger(MediaFormat.KeyChannelCount, 1);
        format.SetInteger(MediaFormat.KeyMaxInputSize, bufferSize);
        format.SetInteger(MediaFormat.KeyLatency, 1);
        format.SetInteger(MediaFormat.KeyPriority, 0);
        audioDecoder = new HwDecoder(mime_type, format, this);
        audioDecoder.start();
    }
}
private void SetupEncoder()
{
    MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MediaHelper.MIME_TYPE_AVC, mWidth, mHeight);
    outputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    outputFormat.SetInteger(MediaFormat.KeyBitRate, mBitRate);
    outputFormat.SetInteger(MediaFormat.KeyFrameRate, mFrameRate);
    outputFormat.SetInteger(MediaFormat.KeyIFrameInterval, mIFrameInterval);

    mEncoder = MediaCodec.CreateEncoderByType(MediaHelper.MIME_TYPE_AVC);
    mEncoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);
    mInputSurface = new InputSurface(mEncoder.CreateInputSurface());
    mInputSurface.MakeCurrent();
    mEncoder.Start();
}
public void SetupVideo(int width, int height, byte[] spsData, byte[] ppsData)
{
    MediaFormat videoFormat = MediaFormat.CreateVideoFormat(
        mime: MediaFormat.MimetypeVideoAvc,
        width: width,
        height: height);

    /*
     * TODO: Use SPS / PPS
     * videoFormat.SetByteBuffer("csd-0", Java.Nio.ByteBuffer.Wrap(spsData));
     * videoFormat.SetByteBuffer("csd-1", Java.Nio.ByteBuffer.Wrap(ppsData));
     */

    videoFormat.SetInteger(MediaFormat.KeyMaxInputSize, 100000);

    _videoCodec = MediaCodec.CreateDecoderByType(MediaFormat.MimetypeVideoAvc);
    _videoCodec.SetCallback(this);
    _videoCodec.Configure(
        format: videoFormat,
        surface: new Surface(_surface),
        crypto: null,
        flags: MediaCodecConfigFlags.None);
    _videoCodec.Start();
}
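For reference, the commented-out TODO above would expand to something like the following once SPS/PPS are available. Whether the decoder expects a leading Annex-B start code inside csd-0/csd-1 varies between devices, so treat this as a sketch rather than a drop-in fix.

byte[] startCode = { 0x00, 0x00, 0x00, 0x01 };

// csd-0 carries the SPS, csd-1 the PPS, each as an Annex-B NAL unit.
Java.Nio.ByteBuffer csd0 = Java.Nio.ByteBuffer.Allocate(startCode.Length + spsData.Length);
csd0.Put(startCode);
csd0.Put(spsData);
csd0.Flip();
videoFormat.SetByteBuffer("csd-0", csd0);

Java.Nio.ByteBuffer csd1 = Java.Nio.ByteBuffer.Allocate(startCode.Length + ppsData.Length);
csd1.Put(startCode);
csd1.Put(ppsData);
csd1.Flip();
videoFormat.SetByteBuffer("csd-1", csd1);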
public MediaFormat CreateAudioOutputFormat(MediaFormat inputFormat)
{
    if (mAudioBitrate == AUDIO_BITRATE_AS_IS || mAudioChannels == AUDIO_CHANNELS_AS_IS)
    {
        return null;
    }

    // Use original sample rate, as resampling is not supported yet.
    MediaFormat format = MediaFormat.CreateAudioFormat(
        MediaFormatExtraConstants.MimetypeAudioAac,
        inputFormat.GetInteger(MediaFormat.KeySampleRate),
        mAudioChannels);
    // MediaCodecInfo.CodecProfileLevel.AACObjectLC is obsolete in the Xamarin bindings,
    // so use MediaCodecProfileType.Aacobjectlc instead.
    format.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecProfileType.Aacobjectlc);
    format.SetInteger(MediaFormat.KeyBitRate, mAudioBitrate);
    return format;
}
public MediaFormat CreateVideoOutputFormat(MediaFormat inputFormat)
{
    int width = inputFormat.GetInteger(MediaFormat.KeyWidth);
    int height = inputFormat.GetInteger(MediaFormat.KeyHeight);
    int longer, shorter, outWidth, outHeight;

    if (width >= height)
    {
        longer = width;
        shorter = height;
        outWidth = LONGER_LENGTH;
        outHeight = SHORTER_LENGTH;
    }
    else
    {
        shorter = width;
        longer = height;
        outWidth = SHORTER_LENGTH;
        outHeight = LONGER_LENGTH;
    }

    //if (longer * 9 != shorter * 16)
    //{
    //    throw new OutputFormatUnavailableException("This video is not 16:9, and is not able to transcode. (" + width + "x" + height + ")");
    //}

    if (shorter <= SHORTER_LENGTH)
    {
#if DEBUG
        System.Console.WriteLine("This video is 720p or smaller; passing through. (" + width + "x" + height + ")");
#endif
        return null;
    }

    MediaFormat format = MediaFormat.CreateVideoFormat("video/avc", outWidth, outHeight);
    format.SetInteger(MediaFormat.KeyBitRate, mVideoBitrate);
    format.SetInteger(MediaFormat.KeyFrameRate, 30);
    format.SetInteger(MediaFormat.KeyIFrameInterval, 3);
    // MediaCodecInfo.CodecCapabilities.COLORFormatSurface is obsolete in the Xamarin bindings,
    // so use MediaCodecCapabilities.Formatsurface instead.
    format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    return format;
}
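Both factory methods above follow the convention that returning null means "pass the track through unchanged", so callers must branch on the result. A hedged usage sketch, where strategy, extractedVideoFormat, and extractedAudioFormat are assumed names:

MediaFormat videoTarget = strategy.CreateVideoOutputFormat(extractedVideoFormat);
MediaFormat audioTarget = strategy.CreateAudioOutputFormat(extractedAudioFormat);

if (videoTarget == null)
{
    // Source is already 720p or smaller: copy the video samples without re-encoding.
}
if (audioTarget == null)
{
    // Bitrate/channels configured as *_AS_IS: copy the audio samples without re-encoding.
}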
/**
 * Instantiates and starts the decoder.
 * @throws IOException The decoder cannot be configured
 */
private void configureDecoder()
{
    byte[] prefix = new byte[] { 0x00, 0x00, 0x00, 0x01 };

    // csd-0 holds the SPS and PPS, each preceded by an Annex-B start code.
    ByteBuffer csd0 = ByteBuffer.Allocate(4 + mSPS.Length + 4 + mPPS.Length);
    csd0.Put(new byte[] { 0x00, 0x00, 0x00, 0x01 });
    csd0.Put(mSPS);
    csd0.Put(new byte[] { 0x00, 0x00, 0x00, 0x01 });
    csd0.Put(mPPS);

    mDecoder = MediaCodec.CreateByCodecName(mDecoderName);
    MediaFormat mediaFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
    mediaFormat.SetByteBuffer("csd-0", csd0);
    mediaFormat.SetInteger(MediaFormat.KeyColorFormat, mDecoderColorFormat);
    mDecoder.Configure(mediaFormat, null, null, 0);
    mDecoder.Start();

    ByteBuffer[] decInputBuffers = mDecoder.GetInputBuffers();

    int decInputIndex = mDecoder.DequeueInputBuffer(1000000 / FRAMERATE);
    if (decInputIndex >= 0)
    {
        decInputBuffers[decInputIndex].Clear();
        decInputBuffers[decInputIndex].Put(prefix);
        decInputBuffers[decInputIndex].Put(mSPS);
        mDecoder.QueueInputBuffer(decInputIndex, 0, decInputBuffers[decInputIndex].Position(), timestamp(), 0);
    }
    else
    {
        if (VERBOSE)
        {
            Log.Error(TAG, "No buffer available!");
        }
    }

    decInputIndex = mDecoder.DequeueInputBuffer(1000000 / FRAMERATE);
    if (decInputIndex >= 0)
    {
        decInputBuffers[decInputIndex].Clear();
        decInputBuffers[decInputIndex].Put(prefix);
        decInputBuffers[decInputIndex].Put(mPPS);
        mDecoder.QueueInputBuffer(decInputIndex, 0, decInputBuffers[decInputIndex].Position(), timestamp(), 0);
    }
    else
    {
        if (VERBOSE)
        {
            Log.Error(TAG, "No buffer available!");
        }
    }
}
private void initEncoder(string codec)
{
    MediaFormat format = new MediaFormat();

    switch (codec)
    {
        case "amrnb":
            audioEncoder = MediaCodec.CreateEncoderByType(MediaFormat.MimetypeAudioAmrNb);
            format.SetString(MediaFormat.KeyMime, MediaFormat.MimetypeAudioAmrNb);
            format.SetInteger(MediaFormat.KeySampleRate, 8000);
            format.SetInteger(MediaFormat.KeyBitRate, 7950);
            break;
        case "amrwb":
            audioEncoder = MediaCodec.CreateEncoderByType(MediaFormat.MimetypeAudioAmrWb);
            format.SetString(MediaFormat.KeyMime, MediaFormat.MimetypeAudioAmrWb);
            format.SetInteger(MediaFormat.KeySampleRate, 16000);
            format.SetInteger(MediaFormat.KeyBitRate, 18250);
            break;
        default:
            throw new Exception("Unknown recorder codec selected " + codec);
    }

    format.SetInteger(MediaFormat.KeyChannelCount, 1);
    format.SetInteger(MediaFormat.KeyMaxInputSize, bufferSize);

    audioEncoder.SetCallback(this);
    audioEncoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
    audioEncoder.Start();
}
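The matching input path for this async encoder is not shown in the snippet. A sketch of OnInputBufferAvailable, under the assumption that a started AudioRecord field named recorder supplies the PCM; the microsecond timestamp is taken from the system clock.

public override void OnInputBufferAvailable(MediaCodec codec, int index)
{
    Java.Nio.ByteBuffer buffer = codec.GetInputBuffer(index);
    byte[] pcm = new byte[buffer.Remaining()];
    int read = recorder.Read(pcm, 0, pcm.Length);   // hypothetical AudioRecord field
    if (read <= 0)
    {
        codec.QueueInputBuffer(index, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
        return;
    }
    buffer.Put(pcm, 0, read);
    codec.QueueInputBuffer(index, 0, read, Java.Lang.JavaSystem.NanoTime() / 1000, MediaCodecBufferFlags.None);
}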
/**
 * Configures encoder and muxer state, and prepares the input Surface. Initializes
 * mEncoder, mMuxer, mInputSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
 */
private void prepareEncoder(string outputPath)
{
    _bfi = new MediaCodec.BufferInfo();
    LatestOutputPath = outputPath;

    MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, _width, _height);

    // Set some properties. Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an unhelpful exception.
    format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    format.SetInteger(MediaFormat.KeyBitRate, _bitRate);
    format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
    format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "format: " + format);
    }

    // Create a MediaCodec encoder, and configure it with our format. Get a Surface
    // we can use for input and wrap it with a class that handles the EGL work.
    //
    // If you want to have two EGL contexts -- one for display, one for recording --
    // you will likely want to defer instantiation of CodecInputSurface until after the
    // "display" EGL context is created, then modify the eglCreateContext call to
    // take eglGetCurrentContext() as the share_context argument.
    mEncoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
    mEncoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
    _inputSurface = new InputSurface(mEncoder.CreateInputSurface());
    mEncoder.Start();

    Log.Info(TAG, "Output file is " + outputPath);

    try
    {
        _muxer = new MediaMuxer(outputPath, MuxerOutputType.Mpeg4);
    }
    catch (System.Exception e)
    {
        throw new System.Exception(e.Message, e);   // keep the original exception as inner
    }

    mTrackIndex = -1;
    MuxerStarted = false;
}
/**
 * Video encoding is done by a MediaCodec.
 */
protected void encodeWithMediaCodecMethod1()
{
    Log.Debug(TAG, "Video encoded using the MediaCodec API with a buffer");

    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();

    // Estimates the frame rate of the camera
    measureFramerate();

    // Starts the preview if needed
    if (!mPreviewStarted)
    {
        try
        {
            mCamera.StartPreview();
            mPreviewStarted = true;
        }
        catch (Java.Lang.RuntimeException e)
        {
            destroyCamera();
            throw e;
        }
    }

    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    NV21Convertor convertor = debugger.getNV21Convertor();  // "const" from the original is invalid C# here

    mMediaCodec = MediaCodec.CreateByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat = MediaFormat.CreateVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.SetInteger(MediaFormat.KeyBitRate, mQuality.bitrate);
    mediaFormat.SetInteger(MediaFormat.KeyFrameRate, mQuality.framerate);
    mediaFormat.SetInteger(MediaFormat.KeyColorFormat, debugger.getEncoderColorFormat());
    mediaFormat.SetInteger(MediaFormat.KeyIFrameInterval, 1);
    mMediaCodec.Configure(mediaFormat, null, null, MediaCodecConfigFlags.Encode);
    mMediaCodec.Start();

    // The original Java installs an anonymous Camera.PreviewCallback at this point; the
    // port is incomplete and the snippet is truncated mid-callback. The surviving fragment:
    //
    //   Camera.PreviewCallback callback = new Camera.PreviewCallback()
    //   Action InnerMethod = () => {
    //       long now = System.nanoTime() / 1000, oldnow = now, i = 0;
    //       ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    //       @Override ...
static public void config(Surface surface, int width, int height, byte[] sps, byte[] pps)
{
    if (sps == null || pps == null) // not ready.
    {
        return;
    }
    if (bConfigured)
    {
        return;
    }
    if (codec != null)
    {
        stop();
    }

    Decoder.width = width;
    Decoder.height = height;
    Decoder.sps = sps;
    Decoder.pps = pps;

    MediaFormat videoFormat = MediaFormat.CreateVideoFormat("video/avc", width, height);
    videoFormat.SetByteBuffer("csd-0", ByteBuffer.Wrap(sps));
    videoFormat.SetByteBuffer("csd-1", ByteBuffer.Wrap(pps));
    videoFormat.SetInteger("color-format", 19);  // 19 == COLOR_FormatYUV420Planar
    string str = videoFormat.GetString("mime");

    try
    {
        codec = MediaCodec.CreateDecoderByType(str);
        codec.Configure(videoFormat, surface, (MediaCrypto)null, 0);
        codec.SetVideoScalingMode(VideoScalingMode.ScaleToFit);
        codec.Start();
        bConfigured = true;
    }
    catch (Exception ex)
    {
        var errstr = ex.Message;  // swallow the error; the decoder stays unconfigured
    }
}
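config() only prepares the decoder; the companion feed step is not shown in the snippet. A sketch that pushes one H.264 NAL unit and renders any ready frame straight to the configured surface; the 10 ms timeouts are arbitrary choices.

static public void decode(byte[] nal, long ptsUs)
{
    if (!bConfigured)
    {
        return;
    }

    int inputIndex = codec.DequeueInputBuffer(10000);
    if (inputIndex >= 0)
    {
        ByteBuffer buffer = codec.GetInputBuffer(inputIndex);
        buffer.Clear();
        buffer.Put(nal);
        codec.QueueInputBuffer(inputIndex, 0, nal.Length, ptsUs, MediaCodecBufferFlags.None);
    }

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outputIndex = codec.DequeueOutputBuffer(info, 10000);
    if (outputIndex >= 0)
    {
        codec.ReleaseOutputBuffer(outputIndex, true);  // render directly to the surface
    }
}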
/**
 * Generates a test video file, saving it as VideoChunks. We generate frames with GL to
 * avoid having to deal with multiple YUV formats.
 *
 * @return true on success, false on "soft" failure
 */
private bool generateVideoFile(VideoChunks output)
{
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
    }

    MediaCodec encoder = null;
    InputSurface inputSurface = null;

    try
    {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null)
        {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.Error(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return false;
        }
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "found codec: " + codecInfo.Name);
        }

        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
        format.SetInteger(MediaFormat.KeyBitRate, mBitRate);
        format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
        format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "format: " + format);
        }
        output.setMediaFormat(format);

        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.
        encoder = MediaCodec.CreateByCodecName(codecInfo.Name);
        encoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
        inputSurface = new InputSurface(encoder.CreateInputSurface());
        inputSurface.MakeCurrent();
        encoder.Start();

        generateVideoData(encoder, inputSurface, output);
    }
    finally
    {
        if (encoder != null)
        {
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "releasing encoder");
            }
            encoder.Stop();
            encoder.Release();
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "released encoder");
            }
        }
        if (inputSurface != null)
        {
            inputSurface.Release();
        }
    }

    return true;
}
// https://github.com/lanhq147/SampleMediaFrame/blob/e2f20ff9eef73318e5a9b4de15458c5c2eb0fd46/app/src/main/java/com/google/android/exoplayer2/video/av/HWRecorder.java
public bool BeginEncoding(int resX, int resY, int rateNumer, int rateDenom, int videoBitRate, int audioBitRate, string audioFile, string outputFile)
{
    videoBufferInfo = new MediaCodec.BufferInfo();
    audioBufferInfo = new MediaCodec.BufferInfo();
    frameRateNumer = rateNumer;
    frameRateDenom = rateDenom;

    MediaFormat videoFormat = MediaFormat.CreateVideoFormat(VideoMimeType, resX, resY);
    videoFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    videoFormat.SetInteger(MediaFormat.KeyBitRate, videoBitRate * 1000);
    videoFormat.SetFloat(MediaFormat.KeyFrameRate, rateNumer / (float)rateDenom);
    videoFormat.SetInteger(MediaFormat.KeyIFrameInterval, 4);
    videoFormat.SetInteger(MediaFormat.KeyProfile, (int)MediaCodecProfileType.Avcprofilehigh);
    videoFormat.SetInteger(MediaFormat.KeyLevel, (int)MediaCodecProfileLevel.Avclevel31);

    videoEncoder = MediaCodec.CreateEncoderByType(VideoMimeType);
    videoEncoder.Configure(videoFormat, null, null, MediaCodecConfigFlags.Encode);
    surface = videoEncoder.CreateInputSurface();
    videoEncoder.Start();

    MediaFormat audioFormat = MediaFormat.CreateAudioFormat(AudioMimeType, 44100, 1);
    audioFormat.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecProfileType.Aacobjectlc);
    audioFormat.SetInteger(MediaFormat.KeyBitRate, audioBitRate * 1000);

    audioEncoder = MediaCodec.CreateEncoderByType(AudioMimeType);
    audioEncoder.Configure(audioFormat, null, null, MediaCodecConfigFlags.Encode);
    audioEncoder.Start();

    try
    {
        muxer = new MediaMuxer(outputFile, MuxerOutputType.Mpeg4);
    }
    catch
    {
        return false;
    }

    videoTrackIndex = -1;
    audioTrackIndex = -1;
    muxerStarted = false;

    if (!ElgInitialize())
    {
        return false;
    }

    audioData = File.ReadAllBytes(audioFile);
    if (audioData == null)
    {
        return false;
    }

    DrainEncoder(videoEncoder, videoBufferInfo, videoTrackIndex, false);
    DrainEncoder(audioEncoder, audioBufferInfo, audioTrackIndex, false);

    audioEncodingTask = Task.Factory.StartNew(AudioEncodeThread, TaskCreationOptions.LongRunning);

    return true;
}
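DrainEncoder is referenced above but not shown. A sketch modeled on the HWRecorder sample linked above; the two-track bookkeeping matters here, since MediaMuxer may only be started after both encoders have reported their output formats. Keying the track selection off the codec instance is an assumption, because the int trackIndex parameter cannot be updated in place.

private void DrainEncoder(MediaCodec encoder, MediaCodec.BufferInfo bufferInfo, int trackIndex, bool endOfStream)
{
    while (true)
    {
        int status = encoder.DequeueOutputBuffer(bufferInfo, 10000);
        if (status == (int)MediaCodecInfoState.TryAgainLater)   // mirrors MediaCodec.INFO_TRY_AGAIN_LATER
        {
            if (!endOfStream)
            {
                break;  // no output yet; come back later
            }
        }
        else if (status == (int)MediaCodecInfoState.OutputFormatChanged)
        {
            // Register the track; start the muxer only once both tracks exist.
            int track = muxer.AddTrack(encoder.OutputFormat);
            if (encoder == videoEncoder) videoTrackIndex = track; else audioTrackIndex = track;
            if (videoTrackIndex >= 0 && audioTrackIndex >= 0)
            {
                muxer.Start();
                muxerStarted = true;
            }
        }
        else if (status >= 0)
        {
            if ((bufferInfo.Flags & MediaCodecBufferFlags.CodecConfig) != 0)
            {
                bufferInfo.Size = 0;    // csd already delivered via the format change
            }
            Java.Nio.ByteBuffer data = encoder.GetOutputBuffer(status);
            if (bufferInfo.Size > 0 && muxerStarted)
            {
                int track = encoder == videoEncoder ? videoTrackIndex : audioTrackIndex;
                muxer.WriteSampleData(track, data, bufferInfo);
            }
            encoder.ReleaseOutputBuffer(status, false);
            if ((bufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
            {
                break;
            }
        }
    }
}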
public void TranscodeAudio(SourceMedia sourceMedia, TargetMedia targetMedia, TrimConfig trimConfig, TransformationState transformationState)
{
    if (targetMedia.targetFile.Exists())
    {
        targetMedia.targetFile.Delete();
    }

    transformationState.requestId = UUID.RandomUUID().ToString();
    /*
     * MediaTransformationListener transformationListener = new MediaTransformationListener(
     *     context, transformationState.requestId, transformationState, targetMedia);
     */

    MediaRange mediaRange = trimConfig.enabled
        ? new MediaRange(
            TimeUnit.Milliseconds.ToMicros((long)(trimConfig.range[0] * 1000)),
            TimeUnit.Milliseconds.ToMicros((long)(trimConfig.range[1] * 1000)))
        : new MediaRange(0, long.MaxValue);

    try
    {
        var targetMimeType = targetMedia.writeToWav ? "audio/raw" : "audio/mp4a-latm";
        IMediaTarget mediaTarget;
        if (targetMedia.writeToWav)
        {
            mediaTarget = new WavMediaTarget(targetMedia.targetFile.Path);
        }
        else
        {
            mediaTarget = new MediaMuxerMediaTarget(targetMedia.targetFile.Path, 1, 0, (int)MuxerOutputType.Mpeg4);
        }

        var mediaSource = new MediaExtractorMediaSource(context, sourceMedia.uri, mediaRange);
        List<TrackTransform> trackTransforms = new List<TrackTransform>(1);

        foreach (TargetTrack targetTrack in targetMedia.tracks)
        {
            if (targetTrack.format is AudioTrackFormat trackFormat)
            {
                MediaFormat mediaFormat = MediaFormat.CreateAudioFormat(
                    targetMimeType, trackFormat.samplingRate, trackFormat.channelCount);
                mediaFormat.SetInteger(MediaFormat.KeyBitRate, trackFormat.bitrate);
                mediaFormat.SetLong(MediaFormat.KeyDuration, trackFormat.duration);

                IEncoder encoder;
                if (targetMedia.writeToWav)
                {
                    encoder = new PassthroughBufferEncoder(8192);
                }
                else
                {
                    encoder = new MediaCodecEncoder();
                }

                TrackTransform trackTransform = new TrackTransform.Builder(mediaSource, targetTrack.sourceTrackIndex, mediaTarget)
                    .SetTargetTrack(0)
                    .SetDecoder(new MediaCodecDecoder())
                    .SetEncoder(encoder)
                    .SetRenderer(new AudioRenderer(encoder))
                    .SetTargetFormat(mediaFormat)
                    .Build();

                trackTransforms.Add(trackTransform);
                break;
            }
        }

        _mediaTransformer.Transform(
            transformationState.requestId,
            trackTransforms,
            this,
            MediaTransformer.GranularityDefault);
    }
    catch (System.Exception err)
    {
        System.Diagnostics.Debug.WriteLine($"Exception when trying to transcode audio: {err.Message}");
    }
}