public void SetupVideo(int width, int height, byte[] spsData, byte[] ppsData)
{
    MediaFormat videoFormat = MediaFormat.CreateVideoFormat(
        mime: MediaFormat.MimetypeVideoAvc,
        width: width,
        height: height);

    /*
     * TODO: Use SPS / PPS
     * videoFormat.SetByteBuffer("csd-0", Java.Nio.ByteBuffer.Wrap(spsData));
     * videoFormat.SetByteBuffer("csd-1", Java.Nio.ByteBuffer.Wrap(ppsData));
     */

    videoFormat.SetInteger(MediaFormat.KeyMaxInputSize, 100000);

    _videoCodec = MediaCodec.CreateDecoderByType(MediaFormat.MimetypeVideoAvc);
    _videoCodec.SetCallback(this);
    _videoCodec.Configure(
        format: videoFormat,
        surface: new Surface(_surface),
        crypto: null,
        flags: MediaCodecConfigFlags.None);
    _videoCodec.Start();
}
private void initEncoder(string codec)
{
    MediaFormat format = new MediaFormat();

    switch (codec)
    {
        case "amrnb":
            audioEncoder = MediaCodec.CreateEncoderByType(MediaFormat.MimetypeAudioAmrNb);
            format.SetString(MediaFormat.KeyMime, MediaFormat.MimetypeAudioAmrNb);
            format.SetInteger(MediaFormat.KeySampleRate, 8000);
            format.SetInteger(MediaFormat.KeyBitRate, 7950);
            break;
        case "amrwb":
            audioEncoder = MediaCodec.CreateEncoderByType(MediaFormat.MimetypeAudioAmrWb);
            format.SetString(MediaFormat.KeyMime, MediaFormat.MimetypeAudioAmrWb);
            format.SetInteger(MediaFormat.KeySampleRate, 16000);
            format.SetInteger(MediaFormat.KeyBitRate, 18250);
            break;
        default:
            throw new Exception("Unknown recorder codec selected " + codec);
    }

    format.SetInteger(MediaFormat.KeyChannelCount, 1);
    format.SetInteger(MediaFormat.KeyMaxInputSize, bufferSize);

    audioEncoder.SetCallback(this);
    audioEncoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
    audioEncoder.Start();
}
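// A minimal sketch (not part of the original samples, assumes "using Android.Media;") of the
// asynchronous MediaCodec.Callback shape that snippets such as initEncoder() above rely on
// when they call SetCallback(...). FetchPcm/HandleEncodedFrame and the zero presentation
// timestamp are hypothetical placeholders for the app's own data plumbing.
class EncoderCallbacks : MediaCodec.Callback
{
    public override void OnInputBufferAvailable(MediaCodec codec, int index)
    {
        // Copy raw input (e.g. PCM from AudioRecord) into the codec-owned buffer and queue it.
        Java.Nio.ByteBuffer buffer = codec.GetInputBuffer(index);
        byte[] data = FetchPcm();   // hypothetical source of raw samples
        buffer.Clear();
        buffer.Put(data);
        codec.QueueInputBuffer(index, 0, data.Length, 0 /* presentationTimeUs */, MediaCodecBufferFlags.None);
    }

    public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
    {
        // Drain the encoded frame, then hand the buffer back to the codec.
        Java.Nio.ByteBuffer buffer = codec.GetOutputBuffer(index);
        byte[] encoded = new byte[info.Size];
        buffer.Position(info.Offset);
        buffer.Get(encoded, 0, info.Size);
        HandleEncodedFrame(encoded, info);   // hypothetical sink (muxer, network, ...)
        codec.ReleaseOutputBuffer(index, false);
    }

    public override void OnOutputFormatChanged(MediaCodec codec, MediaFormat format)
    {
        // The actual output format (including csd-0/csd-1) is only known here.
    }

    public override void OnError(MediaCodec codec, MediaCodec.CodecException e)
    {
        Android.Util.Log.Error("EncoderCallbacks", e.Message ?? "codec error");
    }

    byte[] FetchPcm() => new byte[0];                                     // placeholder
    void HandleEncodedFrame(byte[] data, MediaCodec.BufferInfo info) { }  // placeholder
}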
/// <summary>
/// Initialize this instance.
/// First, stop the decoder and check that a surface exists.
/// Then configure the MediaFormat and the MediaCodec and start it.
/// </summary>
/// <returns>True when the decoder was initialized successfully.</returns>
public bool Initialize()
{
    Initialized = false;

    if (!StopDecoder() || _surface == null)
    {
        return Initialized;
    }

    _mediaFormat = GetMediaFormat(_mimeType, _videoWidth, _videoHeight);
    _mediaFormat.SetInteger(MediaFormat.KeyMaxWidth, _videoWidth);
    _mediaFormat.SetInteger(MediaFormat.KeyMaxHeight, _videoHeight);

    if (Build.VERSION.SdkInt >= BuildVersionCodes.M)
    {
        // Ask the decoder to run as fast as possible.
        _mediaFormat.SetInteger(MediaFormat.KeyOperatingRate, Short.MaxValue);
    }

    _mediaCodec.Configure(
        format: _mediaFormat,
        surface: _surface,
        crypto: null,
        flags: MediaCodecConfigFlags.None);
    _mediaCodec.SetVideoScalingMode(VideoScalingMode.ScaleToFit);
    _mediaCodec.SetCallback(this);
    _mediaCodec.Start();

    Initialized = true;
    return Initialized;
}
public bool Initialize()
{
    _initialized = false;

    if (!StopDecoder())
    {
        return _initialized;
    }

    _mediaFormat = GetMediaFormat(_mimeType, _sampleRate, _channels);

    _mediaCodec = MediaCodec.CreateDecoderByType(_mimeType);
    _mediaCodec.Configure(
        format: _mediaFormat,
        surface: null,
        crypto: null,
        flags: MediaCodecConfigFlags.None);

    _audioTrack = GetAudioTrack();
    _audioTrack.Play();

    _mediaCodec.Start();

    _encoderThread = GetEncoderThread();
    _encoderThread.Start();

    _decoderThread = GetDecoderThread();
    _decoderThread.Start();

    _initialized = true;
    return _initialized;
}
/**
 * Checks the video file to see if the contents match our expectations. We decode the
 * video to a Surface and check the pixels with GL.
 */
private void checkVideoFile(VideoChunks inputData)
{
    OutputSurface surface = null;
    MediaCodec decoder = null;

    mLargestColorDelta = -1;

    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "checkVideoFile");
    }

    try
    {
        surface = new OutputSurface(mWidth, mHeight);
        MediaFormat format = inputData.getMediaFormat();
        decoder = MediaCodec.CreateDecoderByType(MIME_TYPE);
        decoder.Configure(format, surface.Surface, null, 0);
        decoder.Start();

        int badFrames = checkVideoData(inputData, decoder, surface);
        if (badFrames != 0)
        {
            fail("Found " + badFrames + " bad frames");
        }
    }
    finally
    {
        if (surface != null)
        {
            surface.Release();
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
        }
        Log.Info(TAG, "Largest color delta: " + mLargestColorDelta);
    }
}
/**
 * Constructs the {@link MediaCodecWrapper} wrapper object around the video codec.
 * The codec is created using the encapsulated information in the
 * {@link MediaFormat} object.
 *
 * @param trackFormat The format of the media object to be decoded.
 * @param surface Surface to render the decoded frames.
 * @return A wrapper around the configured decoder, or null for non-video formats.
 */
public static MediaCodecWrapper fromVideoFormat(MediaFormat trackFormat, Surface surface)
{
    MediaCodecWrapper result = null;
    MediaCodec videoCodec = null;

    // BEGIN_INCLUDE(create_codec)
    string mimeType = trackFormat.GetString(MediaFormat.KeyMime);

    // Check to see if this is actually a video mime type. If it is, then create
    // a codec that can decode this mime type.
    if (mimeType.Contains("video/"))
    {
        videoCodec = MediaCodec.CreateDecoderByType(mimeType);
        videoCodec.Configure(trackFormat, surface, null, 0);
    }

    // If codec creation was successful, then create a wrapper object around the
    // newly created codec.
    if (videoCodec != null)
    {
        result = new MediaCodecWrapper(videoCodec);
    }
    // END_INCLUDE(create_codec)

    return result;
}
/**
 * Edits a video file, saving the contents to a new file. This involves decoding and
 * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
 * <p>
 * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
 * output, but it's not practical to support all OEM formats. By using a SurfaceTexture
 * for output and a Surface for input, we can avoid issues with obscure formats and can
 * use a fragment shader to do transformations.
 */
private VideoChunks editVideoFile(VideoChunks inputData)
{
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "editVideoFile " + mWidth + "x" + mHeight);
    }

    VideoChunks outputData = new VideoChunks();
    MediaCodec decoder = null;
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;

    try
    {
        MediaFormat inputFormat = inputData.getMediaFormat();

        // Create an encoder format that matches the input format. (Might be able to just
        // re-use the format used to generate the video, since we want it to be the same.)
        MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
        outputFormat.SetInteger(MediaFormat.KeyColorFormat,
            (int)MediaCodecInfo.CodecCapabilities.COLORFormatSurface);
        outputFormat.SetInteger(MediaFormat.KeyBitRate,
            inputFormat.GetInteger(MediaFormat.KeyBitRate));
        outputFormat.SetInteger(MediaFormat.KeyFrameRate,
            inputFormat.GetInteger(MediaFormat.KeyFrameRate));
        outputFormat.SetInteger(MediaFormat.KeyIFrameInterval,
            inputFormat.GetInteger(MediaFormat.KeyIFrameInterval));
        outputData.setMediaFormat(outputFormat);

        encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
        encoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);
        inputSurface = new InputSurface(encoder.CreateInputSurface());
        inputSurface.MakeCurrent();
        encoder.Start();

        // OutputSurface uses the EGL context created by InputSurface.
        decoder = MediaCodec.CreateDecoderByType(MIME_TYPE);
        outputSurface = new OutputSurface();
        outputSurface.ChangeFragmentShader(FRAGMENT_SHADER);
        decoder.Configure(inputFormat, outputSurface.Surface, null, 0);
        decoder.Start();

        editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
    }
    finally
    {
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "shutting down encoder, decoder");
        }
        if (outputSurface != null)
        {
            outputSurface.Release();
        }
        if (inputSurface != null)
        {
            inputSurface.Release();
        }
        if (encoder != null)
        {
            encoder.Stop();
            encoder.Release();
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
        }
    }

    return outputData;
}
protected override void OnDraw(Canvas canvas)
{
    if (rtspCancel == null)
    {
        rtspClientStart();
    }

    if (codec == null && (h264 || h265))
    {
        codec = MediaCodec.CreateDecoderByType(
            h265 ? MediaFormat.MimetypeVideoHevc : MediaFormat.MimetypeVideoAvc);
        callbacks = new CallBacks(this);
        codec.SetCallback(callbacks);

        var mediafmt = MediaFormat.CreateVideoFormat(
            h265 ? MediaFormat.MimetypeVideoHevc : MediaFormat.MimetypeVideoAvc, 1920, 1080);
        codec.Configure(mediafmt, videoView.Holder.Surface, null, MediaCodecConfigFlags.None);
        codec.Start();
    }

    base.OnDraw(canvas);
}
public bool setup(DecoderCallback callback_obj, int width, int height) // format_hint is aviFileContent
{
    callbackThread = new HandlerThread("H264DecoderHandler");
    callbackThread.Start();
    handler = new Handler(callbackThread.Looper);

    mDecoder = MediaCodec.CreateDecoderByType(MIME);
    mCallbackObj = callback_obj;
    myCallback = new MyCallback(mDecoder, mCallbackObj);
    mDecoder.SetCallback(myCallback, handler);
    //mOutputFormat = mDecoder.GetOutputFormat(); // option B

    inputFormat = MediaFormat.CreateVideoFormat(MIME, width, height);
    inputFormat.SetInteger(MediaFormat.KeyMaxInputSize, width * height);
    inputFormat.SetInteger("durationUs", 63446722);
    //inputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatyuv420semiplanar);
    //inputFormat.SetInteger(MediaFormat.KeyIFrameInterval, 60);

    try
    {
        mDecoder.Configure(inputFormat, null, null, 0 /* Decoder */);
    }
    catch (Exception)
    {
        // Rethrow configuration failures without resetting the stack trace.
        throw;
    }

    Console.WriteLine("before mDecoder.Start()");
    mDecoder.Start();
    Console.WriteLine("after mDecoder.Start()");

    return true;
}
protected async void InitializeDecoder()
{
    decoder.Configure(format, surface, null, MediaCodecConfigFlags.None);
    decoder.Start();

    // Legacy (pre-API 21) buffer access; newer code should use
    // GetInputBuffer(index) / GetOutputBuffer(index) instead.
    inputBuffers = decoder.GetInputBuffers();
    outputBuffers = decoder.GetOutputBuffers();
}
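// A minimal sketch (an assumption, not from the original snippet) of the synchronous
// decode loop that consumes the legacy inputBuffers/outputBuffers arrays obtained above;
// encodedSample and ptsUs stand in for data coming from a demuxer.
void DecodeOne(byte[] encodedSample, long ptsUs)
{
    int inIndex = decoder.DequeueInputBuffer(10000);
    if (inIndex >= 0)
    {
        var buffer = inputBuffers[inIndex];
        buffer.Clear();
        buffer.Put(encodedSample);
        decoder.QueueInputBuffer(inIndex, 0, encodedSample.Length, ptsUs, MediaCodecBufferFlags.None);
    }

    var info = new MediaCodec.BufferInfo();
    int outIndex = decoder.DequeueOutputBuffer(info, 10000);
    if (outIndex == (int)MediaCodecInfoState.OutputBuffersChanged)
    {
        outputBuffers = decoder.GetOutputBuffers();   // legacy API: the array must be refreshed
    }
    else if (outIndex >= 0)
    {
        // Passing true releases the decoded frame to the Surface given to Configure().
        decoder.ReleaseOutputBuffer(outIndex, true);
    }
}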
private void FeedClipToEncoder(SamplerClip clip)
{
    mLastSampleTime = 0;

    MediaCodec decoder = null;
    MediaExtractor extractor = SetupExtractorForClip(clip);
    if (extractor == null)
    {
        return;
    }

    int trackIndex = GetVideoTrackIndex(extractor);
    extractor.SelectTrack(trackIndex);

    MediaFormat clipFormat = extractor.GetTrackFormat(trackIndex);
    if (clip.getStartTime() != -1)
    {
        extractor.SeekTo(clip.getStartTime() * 1000, MediaExtractorSeekTo.PreviousSync);
        clip.setStartTime(extractor.SampleTime / 1000);
    }

    try
    {
        decoder = MediaCodec.CreateDecoderByType(MediaHelper.MIME_TYPE_AVC);
        mOutputSurface = new OutputSurface();

        decoder.Configure(clipFormat, mOutputSurface.Surface, null, 0);
        decoder.Start();

        ResampleVideo(extractor, decoder, clip);
    }
    catch (System.Exception)
    {
        // Swallow resampling errors; cleanup happens in the finally block.
    }
    finally
    {
        if (mOutputSurface != null)
        {
            mOutputSurface.Release();
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
        }
        if (extractor != null)
        {
            extractor.Release();
            extractor = null;
        }
    }
}
public void Initialize(IServiceRegistry services, string url, long startPosition, long length)
{
    if (isInitialized)
    {
        return;
    }

    try
    {
        inputFile = new Java.IO.File(url);
        if (!inputFile.CanRead())
        {
            throw new Exception(string.Format("Unable to read: {0} ", inputFile.AbsolutePath));
        }

        inputFileDescriptor = new Java.IO.FileInputStream(inputFile);

        // ===================================================================================================
        // Initialize the audio media extractor
        mediaExtractor = new MediaExtractor();
        mediaExtractor.SetDataSource(inputFileDescriptor.FD, startPosition, length);

        var videoTrackIndex = FindTrack(mediaExtractor, MediaType.Video);
        var audioTrackIndex = FindTrack(mediaExtractor, MediaType.Audio);

        HasAudioTrack = audioTrackIndex >= 0;

        mediaTrackIndex = MediaType == MediaType.Audio ? audioTrackIndex : videoTrackIndex;
        if (mediaTrackIndex < 0)
        {
            throw new Exception($"No {MediaType} track found in: {inputFile.AbsolutePath}");
        }

        mediaExtractor.SelectTrack(mediaTrackIndex);

        var trackFormat = mediaExtractor.GetTrackFormat(mediaTrackIndex);
        MediaDuration = TimeSpanExtensions.FromMicroSeconds(trackFormat.GetLong(MediaFormat.KeyDuration));

        ExtractMediaMetadata(trackFormat);

        // Create a MediaCodec decoder, and configure it with the MediaFormat from the mediaExtractor.
        // It's very important to use the format from the mediaExtractor because it contains
        // a copy of the CSD-0/CSD-1 codec-specific data chunks.
        var mime = trackFormat.GetString(MediaFormat.KeyMime);
        MediaDecoder = MediaCodec.CreateDecoderByType(mime);
        MediaDecoder.Configure(trackFormat, decoderOutputSurface, null, 0);

        isInitialized = true;

        StartWorker();
    }
    catch (Exception)
    {
        Release();
        throw;  // rethrow the original exception with its stack trace intact
    }
}
/**
 * Instantiates and starts the encoder.
 * @throws IOException The encoder cannot be configured
 */
private void configureEncoder()
{
    mEncoder = MediaCodec.CreateByCodecName(mEncoderName);

    MediaFormat mediaFormat = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);
    mediaFormat.SetInteger(MediaFormat.KeyBitRate, BITRATE);
    mediaFormat.SetInteger(MediaFormat.KeyFrameRate, FRAMERATE);
    mediaFormat.SetInteger(MediaFormat.KeyColorFormat, mEncoderColorFormat);
    mediaFormat.SetInteger(MediaFormat.KeyIFrameInterval, 1);

    mEncoder.Configure(mediaFormat, null, null, MediaCodecConfigFlags.Encode);
    mEncoder.Start();
}
public void SetupAudio(int sampleRate, int channels, byte[] esdsData)
{
    _audioTrack = new AudioTrack(
        new AudioAttributes.Builder()
            .SetUsage(AudioUsageKind.Media)
            .SetContentType(AudioContentType.Music)
            .SetFlags(AudioFlags.LowLatency)
            .Build(),
        new Android.Media.AudioFormat.Builder()
            .SetEncoding(Encoding.Pcm16bit)
            .SetSampleRate(44100)
            .SetChannelMask(ChannelOut.Stereo)
            .Build(),
        4096,
        AudioTrackMode.Stream,
        AudioManager.AudioSessionIdGenerate);

    MediaFormat audioFormat = MediaFormat.CreateAudioFormat(
        mime: MediaFormat.MimetypeAudioAac,
        sampleRate: sampleRate,
        channelCount: channels);
    audioFormat.SetInteger(MediaFormat.KeyIsAdts, 0);
    audioFormat.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecProfileType.Aacobjectlc);

    _audioCodec = MediaCodec.CreateDecoderByType(MediaFormat.MimetypeAudioAac);

    // TODO: Remove hardcoding
    // Build the two-byte AAC AudioSpecificConfig (csd-0):
    // 5 bits object type, 4 bits sampling-frequency index, 4 bits channel configuration.
    byte profile = (byte)MediaCodecProfileType.Aacobjectlc;
    byte sampleIndex = AacAdtsAssembler.GetSamplingFrequencyIndex(sampleRate);

    byte[] csd0 = new byte[2];
    csd0[0] = (byte)(((byte)profile << 3) | (sampleIndex >> 1));
    csd0[1] = (byte)((byte)((sampleIndex << 7) & 0x80) | (channels << 3));
    esdsData = csd0;

    audioFormat.SetByteBuffer("csd-0", Java.Nio.ByteBuffer.Wrap(esdsData));

    _audioCodec.SetCallback(this);
    _audioCodec.Configure(
        format: audioFormat,
        surface: null,
        crypto: null,
        flags: MediaCodecConfigFlags.None);
    _audioCodec.Start();

    _audioTrack.Play();
}
public VideoStreamer(int width, int height)
{
    MediaFormat videoFormat = MediaFormat.CreateVideoFormat(MediaFormat.MimetypeVideoAvc, width, height);

    MediaCodecList availableCodecs = new MediaCodecList(MediaCodecListKind.RegularCodecs);
    MediaCodec encoder = MediaCodec.CreateEncoderByType(MediaFormat.MimetypeVideoAvc);

    _backgroundThread = new HandlerThread("EncoderBackgroundWorker");
    _backgroundThread.Start();
    _backgroundHandler = new Handler(_backgroundThread.Looper);

    encoder.SetCallback(new EncoderCallback(), _backgroundHandler);
    encoder.Configure(videoFormat, null, null, MediaCodecConfigFlags.Encode);
    encoder.Start();
}
/**
 * Configures encoder and muxer state, and prepares the input Surface. Initializes
 * mEncoder, mMuxer, mInputSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
 */
private void prepareEncoder()
{
    mBufferInfo = new MediaCodec.BufferInfo();

    MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, _width, _height);

    // Set some properties. Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an unhelpful exception.
    format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    format.SetInteger(MediaFormat.KeyBitRate, _bitRate);
    format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
    format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
    if (VERBOSE)
    {
        Log.Debug(TAG, "format: " + format);
    }

    // Create a MediaCodec encoder, and configure it with our format. Get a Surface
    // we can use for input and wrap it with a class that handles the EGL work.
    //
    // If you want to have two EGL contexts -- one for display, one for recording --
    // you will likely want to defer instantiation of CodecInputSurface until after the
    // "display" EGL context is created, then modify the eglCreateContext call to
    // take eglGetCurrentContext() as the share_context argument.
    mEncoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
    mEncoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
    _inputSurface = new InputSurface(mEncoder.CreateInputSurface());
    mEncoder.Start();

    // Output filename. Ideally this would use Context.getFilesDir() rather than a
    // hard-coded output directory.
    string outputPath = System.IO.Path.Combine(_workingDirectory, "test." + _width + "x" + _height + ".mp4");
    Log.Info(TAG, "Output file is " + outputPath);

    // Create a MediaMuxer. We can't add the video track and start() the muxer here,
    // because our MediaFormat doesn't have the Magic Goodies. These can only be
    // obtained from the encoder after it has started processing data.
    //
    // We're not actually interested in multiplexing audio. We just want to convert
    // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
    try
    {
        mMuxer = new MediaMuxer(outputPath, MuxerOutputType.Mpeg4);
    }
    catch (System.Exception e)
    {
        throw new System.Exception(e.Message);
    }

    mTrackIndex = -1;
    mMuxerStarted = false;
}
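// A minimal sketch (an assumption, not part of the original sample) of the spot in the
// drain loop where the "Magic Goodies" comment above gets resolved: the muxer track can
// only be added once the encoder reports its real output format, which carries the
// csd-0/csd-1 codec-specific data.
private void DrainOnce()
{
    int outIndex = mEncoder.DequeueOutputBuffer(mBufferInfo, 10000);
    if (outIndex == (int)MediaCodecInfoState.OutputFormatChanged)
    {
        // Now the format contains the codec-specific data the muxer needs.
        MediaFormat newFormat = mEncoder.OutputFormat;
        mTrackIndex = mMuxer.AddTrack(newFormat);
        mMuxer.Start();
        mMuxerStarted = true;
    }
    else if (outIndex >= 0)
    {
        var encodedData = mEncoder.GetOutputBuffer(outIndex);
        if (mMuxerStarted && mBufferInfo.Size > 0)
        {
            encodedData.Position(mBufferInfo.Offset);
            encodedData.Limit(mBufferInfo.Offset + mBufferInfo.Size);
            mMuxer.WriteSampleData(mTrackIndex, encodedData, mBufferInfo);
        }
        mEncoder.ReleaseOutputBuffer(outIndex, false);
    }
}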
private void PrepareEncoder()
{
    MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
    if (codecInfo == null)
    {
        return;
    }

    int colorFormat;
    try
    {
        colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
    }
    catch
    {
        colorFormat = (int)MediaCodecCapabilities.Formatyuv420semiplanar;
    }

    var format = MediaFormat.CreateVideoFormat(MIME_TYPE, _Width, _Height);
    format.SetInteger(MediaFormat.KeyColorFormat, colorFormat);
    format.SetInteger(MediaFormat.KeyBitRate, _BitRate);
    format.SetInteger(MediaFormat.KeyFrameRate, _frameRate);
    format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);

    _Encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
    _Encoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
    _Encoder.Start();

    // Create a MediaMuxer. We can't add the video track and start() the muxer here,
    // because our MediaFormat doesn't have the Magic Goodies. These can only be
    // obtained from the encoder after it has started processing data.
    try
    {
        _Muxer = new MediaMuxer(_Filepath, MuxerOutputType.Mpeg4);
    }
    catch (Exception e)
    {
        Log.Error(TAG, e.Message, e);
        throw;
    }

    _TrackIndex = -1;
    _MuxerStarted = false;
}
private void SetupEncoder()
{
    MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MediaHelper.MIME_TYPE_AVC, mWidth, mHeight);
    outputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    outputFormat.SetInteger(MediaFormat.KeyBitRate, mBitRate);
    outputFormat.SetInteger(MediaFormat.KeyFrameRate, mFrameRate);
    outputFormat.SetInteger(MediaFormat.KeyIFrameInterval, mIFrameInterval);

    mEncoder = MediaCodec.CreateEncoderByType(MediaHelper.MIME_TYPE_AVC);
    mEncoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);

    mInputSurface = new InputSurface(mEncoder.CreateInputSurface());
    mInputSurface.MakeCurrent();

    mEncoder.Start();
}
partial void InitializeMediaExtractor(string mediaDataUrl, long startPosition, long length)
{
    if (mediaDataUrl == null)
    {
        throw new ArgumentNullException(nameof(mediaDataUrl));
    }

    ReleaseMediaInternal();

    InputFile = new Java.IO.File(mediaDataUrl);
    if (!InputFile.CanRead())
    {
        throw new Exception(string.Format("Unable to read: {0} ", InputFile.AbsolutePath));
    }

    InputFileStream = new Java.IO.FileInputStream(InputFile.AbsolutePath);

    audioMediaExtractor = new MediaExtractor();
    audioMediaExtractor.SetDataSource(InputFileStream.FD, startPosition, length);

    trackIndexAudio = FindAudioTrack(audioMediaExtractor);
    if (trackIndexAudio < 0)
    {
        ReleaseMediaInternal();
        Logger.Error($"The input file '{mediaDataUrl}' does not contain any audio track.");
        return;
    }

    audioMediaExtractor.SelectTrack(trackIndexAudio);

    var audioFormat = audioMediaExtractor.GetTrackFormat(trackIndexAudio);
    var mime = audioFormat.GetString(MediaFormat.KeyMime);

    audioMediaDecoder = MediaCodec.CreateDecoderByType(mime);
    audioMediaDecoder.Configure(audioFormat, null, null, 0);

    // Get the audio settings.
    // Should we override the settings (channels, sampleRate, ...) from DynamicSoundSource?
    Channels = audioFormat.GetInteger(MediaFormat.KeyChannelCount);
    SampleRate = audioFormat.GetInteger(MediaFormat.KeySampleRate);
    MediaDuration = TimeSpanExtensions.FromMicroSeconds(audioFormat.GetLong(MediaFormat.KeyDuration));

    audioMediaDecoder.Start();

    extractionOutputDone = false;
    extractionInputDone = false;
}
public void start()
{
    if (running)
    {
        return;
    }
    running = true;

    lock (availableBuffers)
    {
        availableBuffers.Clear();
    }

    audioEncoder = MediaCodec.CreateEncoderByType(encoderMimeType);
    audioEncoder.SetCallback(this);
    audioEncoder.Configure(mediaFormat, null, null, MediaCodecConfigFlags.Encode);
    audioEncoder.Start();
}
static public void config(Surface surface, int width, int height, byte[] sps, byte[] pps)
{
    if (sps == null || pps == null) // not ready
    {
        return;
    }
    if (bConfigured)
    {
        return;
    }
    if (codec != null)
    {
        stop();
    }

    Decoder.width = width;
    Decoder.height = height;
    Decoder.sps = sps;
    Decoder.pps = pps;

    MediaFormat videoFormat = MediaFormat.CreateVideoFormat("video/avc", width, height);
    videoFormat.SetByteBuffer("csd-0", ByteBuffer.Wrap(sps));
    videoFormat.SetByteBuffer("csd-1", ByteBuffer.Wrap(pps));
    videoFormat.SetInteger("color-format", 19);   // 19 == COLOR_FormatYUV420Planar

    string str = videoFormat.GetString("mime");
    try
    {
        codec = MediaCodec.CreateDecoderByType(str);
        codec.Configure(videoFormat, surface, (MediaCrypto)null, 0);
        codec.SetVideoScalingMode(VideoScalingMode.ScaleToFit);
        codec.Start();
        bConfigured = true;
    }
    catch (Exception ex)
    {
        var errstr = ex.Message.ToString();
    }
}
/**
 * Configures encoder and muxer state, and prepares the input Surface. Initializes
 * mEncoder, mMuxer, mInputSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
 */
private void prepareEncoder(string outputPath)
{
    _bfi = new MediaCodec.BufferInfo();
    LatestOutputPath = outputPath;

    MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, _width, _height);

    // Set some properties. Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an unhelpful exception.
    format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    format.SetInteger(MediaFormat.KeyBitRate, _bitRate);
    format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
    format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "format: " + format);
    }

    // Create a MediaCodec encoder, and configure it with our format. Get a Surface
    // we can use for input and wrap it with a class that handles the EGL work.
    //
    // If you want to have two EGL contexts -- one for display, one for recording --
    // you will likely want to defer instantiation of CodecInputSurface until after the
    // "display" EGL context is created, then modify the eglCreateContext call to
    // take eglGetCurrentContext() as the share_context argument.
    mEncoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
    mEncoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
    _inputSurface = new InputSurface(mEncoder.CreateInputSurface());
    mEncoder.Start();

    Log.Info(TAG, "Output file is " + outputPath);

    try
    {
        _muxer = new MediaMuxer(outputPath, MuxerOutputType.Mpeg4);
    }
    catch (System.Exception e)
    {
        throw new System.Exception(e.Message);
    }

    mTrackIndex = -1;
    MuxerStarted = false;
}
public void start()
{
    if (running)
    {
        return;
    }
    running = true;
    delay = 5;

    lock (pendingFrames)
    {
        pendingFrames.Clear();
        availableBuffers.Clear();
    }

    audioDecoder = MediaCodec.CreateDecoderByType(decoderMimeType);
    audioDecoder.SetCallback(this);
    audioDecoder.Configure(mediaFormat, null, null, MediaCodecConfigFlags.None);
    audioDecoder.Start();

    decodeThread = new Thread(decodeLoop);
    decodeThread.Start();
}
public void PrepareEncoder(string path, File _downloaddir)
{
    MediaCodec _Decoder = null;
    MediaExtractor extractor = null;
    _downloadsfilesdir = _downloaddir;

    try
    {
        //for (int i = 0; i < extractor.TrackCount; i++)
        //{
        //    MediaFormat Format = extractor.GetTrackFormat(i);
        //    //MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, 640, 360);
        //    String mime = Format.GetString(MediaFormat.KeyMime);
        //    if (mime.StartsWith("video/"))
        //    {
        //        extractor.SelectTrack(i);
        //        _Decoder = MediaCodec.CreateEncoderByType(mime);
        //        _Decoder.Configure(Format, null, null, 0);
        //        break;
        //    }
        //}

        extractor = new MediaExtractor();
        extractor.SetDataSource(path);

        int trackIndex = selectTrack(extractor);
        //if (trackIndex < 0)
        //{
        //    throw new RuntimeException("No video track found in " + inputFile);
        //}
        extractor.SelectTrack(trackIndex);

        MediaFormat format = extractor.GetTrackFormat(trackIndex);
        _Width = format.GetInteger(MediaFormat.KeyWidth);
        _Height = format.GetInteger(MediaFormat.KeyHeight);

        // Could use width/height from the MediaFormat to get full-size frames.
        //outputSurface = new CodecOutputSurface(saveWidth, saveHeight);

        // Create a MediaCodec decoder, and configure it with the MediaFormat from the
        // extractor. It's very important to use the format from the extractor because
        // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
        String mime = format.GetString(MediaFormat.KeyMime);
        _Decoder = MediaCodec.CreateDecoderByType(mime);
        _Decoder.Configure(format, null, null, 0);
        _Decoder.Start();

        Decode(_Decoder, extractor);
    }
    catch (Exception e)
    {
        Log.Error(TAG, e.Message, e);
        throw;
    }
    finally
    {
        // Release everything we grabbed.
        //if (outputSurface != null)
        //{
        //    outputSurface.release();
        //    outputSurface = null;
        //}
        if (_Decoder != null)
        {
            _Decoder.Stop();
            _Decoder.Release();
            _Decoder = null;
        }
        if (extractor != null)
        {
            extractor.Release();
            extractor = null;
        }
    }

    _TrackIndex = -1;
    //_MuxerStarted = false;
}
/**
 * Generates a test video file, saving it as VideoChunks. We generate frames with GL to
 * avoid having to deal with multiple YUV formats.
 *
 * @return true on success, false on "soft" failure
 */
private bool generateVideoFile(VideoChunks output)
{
    if (AppSettings.Logging.SendToConsole)
    {
        Log.Debug(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
    }

    MediaCodec encoder = null;
    InputSurface inputSurface = null;

    try
    {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null)
        {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.Error(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return false;
        }
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "found codec: " + codecInfo.Name);
        }

        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, mWidth, mHeight);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
        format.SetInteger(MediaFormat.KeyBitRate, mBitRate);
        format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
        format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "format: " + format);
        }
        output.setMediaFormat(format);

        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.
        encoder = MediaCodec.CreateByCodecName(codecInfo.Name);
        encoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
        inputSurface = new InputSurface(encoder.CreateInputSurface());
        inputSurface.MakeCurrent();
        encoder.Start();

        generateVideoData(encoder, inputSurface, output);
    }
    finally
    {
        if (encoder != null)
        {
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "releasing encoder");
            }
            encoder.Stop();
            encoder.Release();
            if (AppSettings.Logging.SendToConsole)
            {
                Log.Debug(TAG, "released encoder");
            }
        }
        if (inputSurface != null)
        {
            inputSurface.Release();
        }
    }

    return true;
}
/**
 * Tests extraction from an MP4 to a series of PNG files.
 * <p>
 * We scale the video to 640x480 for the PNG just to demonstrate that we can scale the
 * video with the GPU. If the input video has a different aspect ratio, we could preserve
 * it by adjusting the GL viewport to get letterboxing or pillarboxing, but generally if
 * you're extracting frames you don't want black bars.
 */
public void extractMpegFrames(int saveWidth, int saveHeight)
{
    MediaCodec decoder = null;
    CodecOutputSurface outputSurface = null;
    MediaExtractor extractor = null;

    try
    {
        // The path must be absolute. The MediaExtractor error messages aren't very useful,
        // so check whether the input file exists up front and throw a clearer exception if not.
        File inputFile = new File(_filesdir, INPUT_FILE);
        if (!inputFile.CanRead())
        {
            throw new FileNotFoundException("Unable to read " + inputFile);
        }

        extractor = new MediaExtractor();
        extractor.SetDataSource(inputFile.ToString());

        int trackIndex = selectTrack(extractor);
        if (trackIndex < 0)
        {
            throw new RuntimeException("No video track found in " + inputFile);
        }
        extractor.SelectTrack(trackIndex);

        MediaFormat format = extractor.GetTrackFormat(trackIndex);
        if (VERBOSE)
        {
            Log.Info(TAG, "Video size is " + format.GetInteger(MediaFormat.KeyWidth) + "x" +
                format.GetInteger(MediaFormat.KeyHeight));
        }

        // Could use width/height from the MediaFormat to get full-size frames.
        outputSurface = new CodecOutputSurface(saveWidth, saveHeight);

        // Create a MediaCodec decoder, and configure it with the MediaFormat from the
        // extractor. It's very important to use the format from the extractor because
        // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
        String mime = format.GetString(MediaFormat.KeyMime);
        decoder = MediaCodec.CreateDecoderByType(mime);
        decoder.Configure(format, outputSurface.getSurface(), null, 0);
        decoder.Start();

        doExtract(extractor, trackIndex, decoder, outputSurface);
    }
    finally
    {
        // Release everything we grabbed.
        if (outputSurface != null)
        {
            outputSurface.release();
            outputSurface = null;
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
            decoder = null;
        }
        if (extractor != null)
        {
            extractor.Release();
            extractor = null;
        }
    }
}
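// A minimal sketch (an assumption, not shown in the original snippets) of the selectTrack
// helper that the extractor-based examples above call: scan the container for the first
// "video/" track and return its index, or -1 if none is present.
private int selectTrack(MediaExtractor extractor)
{
    for (int i = 0; i < extractor.TrackCount; i++)
    {
        MediaFormat format = extractor.GetTrackFormat(i);
        string mime = format.GetString(MediaFormat.KeyMime);
        if (mime != null && mime.StartsWith("video/"))
        {
            return i;
        }
    }
    return -1;
}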
// https://github.com/lanhq147/SampleMediaFrame/blob/e2f20ff9eef73318e5a9b4de15458c5c2eb0fd46/app/src/main/java/com/google/android/exoplayer2/video/av/HWRecorder.java
public bool BeginEncoding(int resX, int resY, int rateNumer, int rateDenom, int videoBitRate,
    int audioBitRate, string audioFile, string outputFile)
{
    videoBufferInfo = new MediaCodec.BufferInfo();
    audioBufferInfo = new MediaCodec.BufferInfo();
    frameRateNumer = rateNumer;
    frameRateDenom = rateDenom;

    MediaFormat videoFormat = MediaFormat.CreateVideoFormat(VideoMimeType, resX, resY);
    videoFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    videoFormat.SetInteger(MediaFormat.KeyBitRate, videoBitRate * 1000);
    videoFormat.SetFloat(MediaFormat.KeyFrameRate, rateNumer / (float)rateDenom);
    videoFormat.SetInteger(MediaFormat.KeyIFrameInterval, 4);
    videoFormat.SetInteger(MediaFormat.KeyProfile, (int)MediaCodecProfileType.Avcprofilehigh);
    videoFormat.SetInteger(MediaFormat.KeyLevel, (int)MediaCodecProfileLevel.Avclevel31);

    videoEncoder = MediaCodec.CreateEncoderByType(VideoMimeType);
    videoEncoder.Configure(videoFormat, null, null, MediaCodecConfigFlags.Encode);
    surface = videoEncoder.CreateInputSurface();
    videoEncoder.Start();

    MediaFormat audioFormat = MediaFormat.CreateAudioFormat(AudioMimeType, 44100, 1);
    audioFormat.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecProfileType.Aacobjectlc);
    audioFormat.SetInteger(MediaFormat.KeyBitRate, audioBitRate * 1000);

    audioEncoder = MediaCodec.CreateEncoderByType(AudioMimeType);
    audioEncoder.Configure(audioFormat, null, null, MediaCodecConfigFlags.Encode);
    audioEncoder.Start();

    try
    {
        muxer = new MediaMuxer(outputFile, MuxerOutputType.Mpeg4);
    }
    catch
    {
        return false;
    }

    videoTrackIndex = -1;
    audioTrackIndex = -1;
    muxerStarted = false;

    if (!ElgInitialize())
    {
        return false;
    }

    audioData = File.ReadAllBytes(audioFile);
    if (audioData == null)
    {
        return false;
    }

    DrainEncoder(videoEncoder, videoBufferInfo, videoTrackIndex, false);
    DrainEncoder(audioEncoder, audioBufferInfo, audioTrackIndex, false);

    audioEncodingTask = Task.Factory.StartNew(AudioEncodeThread, TaskCreationOptions.LongRunning);

    return true;
}