public void decode(byte[] array)
{
    if (!bConfigured)
    {
        Init();
    }

    var nalType = array[4] & 0x1f;
    if (nalType == 7)
    {
        // New SPS: if it differs from the cached one, reconfigure the codec.
        // (Note: this compares lengths only; a changed SPS of identical length is missed.)
        if (array.Length != sps.Length)
        {
            stop();
            sps = array.ToArray();
            Init();
        }
        return;
    }
    if (nalType == 8)
    {
        //pps = array.ToArray();
        return;
    }
    if (!bConfigured)
    {
        return;
    }

    try
    {
        ByteBuffer[] inputBuffers = codec.GetInputBuffers();
        ByteBuffer[] outputBuffers = codec.GetOutputBuffers();

        int dequeueInputBuffer = codec.DequeueInputBuffer(-1L);
        if (dequeueInputBuffer >= 0)
        {
            // Send data to the decoder.
            ByteBuffer byteBuffer = inputBuffers[dequeueInputBuffer];
            byteBuffer.Clear();
            byteBuffer.Put(array);
            codec.QueueInputBuffer(dequeueInputBuffer, 0, array.Length, 0L, 0);
        }

        // Show decoded frames.
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int i = codec.DequeueOutputBuffer(bufferInfo, 0L);
        while (i >= 0)
        {
            /*if (picSurface == null) // Only if not using a display surface.
            {
                ByteBuffer byteBuffer2 = outputBuffers[i];
                if (buffer == null || buffer.Length != bufferInfo.Size)
                {
                    buffer = new byte[bufferInfo.Size];
                }
                byteBuffer2.Get(buffer);
                // Do something with the raw frame in buffer.
            }*/
            codec.ReleaseOutputBuffer(i, true);
            codec.SetVideoScalingMode(VideoScalingMode.ScaleToFit);
            i = codec.DequeueOutputBuffer(bufferInfo, 0L);
        }
    }
    catch (Exception)
    {
        // Attempt to recover.
        stop();
    }
}
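// The method above assumes an Init() that configures the decoder from the cached
// SPS/PPS NAL units. A minimal sketch of what that counterpart plausibly looks
// like; the field names (codec, sps, pps, surface, bConfigured) mirror the
// snippet, and the 1280x720 dimensions are placeholder assumptions.
private void Init()
{
    if (sps == null || pps == null)
        return;

    MediaFormat format = MediaFormat.CreateVideoFormat("video/avc", 1280, 720);
    format.SetByteBuffer("csd-0", ByteBuffer.Wrap(sps)); // SPS
    format.SetByteBuffer("csd-1", ByteBuffer.Wrap(pps)); // PPS

    codec = MediaCodec.CreateDecoderByType("video/avc");
    codec.Configure(format, surface, null, MediaCodecConfigFlags.None);
    codec.Start();
    bConfigured = true;
}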
public static Result DecodeAudio(FileDescriptor descriptor, long offset, long length)
{
    using (var extractor = new MediaExtractor())
    {
        extractor.SetDataSource(descriptor, offset, length);

        MediaFormat format = null;
        string mime = null;
        for (int i = 0; i < extractor.TrackCount; i++)
        {
            format = extractor.GetTrackFormat(i);
            mime = format.GetString(MediaFormat.KeyMime);
            if (!mime.StartsWith("audio/"))
            {
                continue;
            }
            extractor.SelectTrack(i);
            break; // use the first audio track; otherwise format/mime end up describing the last track
        }

        if (format == null || !mime.StartsWith("audio/"))
        {
            throw new ContentLoadException("Could not find any audio track.");
        }

        int sampleRate = format.GetInteger(MediaFormat.KeySampleRate);
        long duration = format.GetLong(MediaFormat.KeyDuration);
        int channels = format.GetInteger(MediaFormat.KeyChannelCount);
        int samples = (int)(sampleRate * duration / 1000000d);
        var output = new byte[samples * 2]; // 16-bit PCM; note this sizing assumes a single channel
        int timeoutsLeft = 1000;

        var decoder = MediaCodecPool.RentDecoder(mime);
        try
        {
            decoder.Configure(format, null, null, MediaCodecConfigFlags.None);
            decoder.Start();

            ByteBuffer[] inputBuffers = decoder.GetInputBuffers();
            ByteBuffer[] outputBuffers = decoder.GetOutputBuffers();

            var bufferInfo = new MediaCodec.BufferInfo();
            int totalOffset = 0;
            bool endOfStream = false;
            while (true)
            {
                // We don't strictly need an endOfStream local,
                // but it saves us a few calls into the decoder.
                if (!endOfStream)
                {
                    int inputBufIndex = decoder.DequeueInputBuffer(5000);
                    if (inputBufIndex >= 0)
                    {
                        int size = extractor.ReadSampleData(inputBuffers[inputBufIndex], 0);
                        if (size > 0)
                        {
                            decoder.QueueInputBuffer(
                                inputBufIndex, 0, size, extractor.SampleTime, MediaCodecBufferFlags.None);
                            extractor.Advance();
                        }
                        else
                        {
                            // No more samples: queue an empty buffer with the EOS flag.
                            // (Each dequeued input index may only be queued once, so EOS
                            // must go on its own buffer, not one already queued with data.)
                            endOfStream = true;
                            decoder.QueueInputBuffer(
                                inputBufIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                        }
                    }
                }

                int decoderStatus = decoder.DequeueOutputBuffer(bufferInfo, 5000);
                if (decoderStatus >= 0)
                {
                    IntPtr bufferPtr = outputBuffers[decoderStatus].GetDirectBufferAddress();
                    IntPtr offsetPtr = bufferPtr + bufferInfo.Offset;
                    int size = bufferInfo.Size;
                    Marshal.Copy(offsetPtr, output, totalOffset, size);

                    decoder.ReleaseOutputBuffer(decoderStatus, render: false);
                    totalOffset += size;

                    // Test the flag, not equality: EOS can arrive combined with other flags.
                    if ((bufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                    {
                        if (totalOffset != output.Length)
                        {
                            throw new ContentLoadException(
                                "Reached end of stream before reading expected amount of samples.");
                        }
                        break;
                    }
                }
                else if (decoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                {
                    outputBuffers = decoder.GetOutputBuffers();
                }
                else if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                {
                    if (timeoutsLeft-- <= 0)
                    {
                        break;
                    }
                }
            }
        }
        finally
        {
            decoder.Stop();
            MediaCodecPool.ReturnDecoder(mime, decoder);
        }

        if (timeoutsLeft <= 0)
        {
            throw new ContentLoadException("Could not load sound effect in designated time frame.");
        }
        return new Result(output, sampleRate, channels, mime);
    }
}
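// Hypothetical usage of DecodeAudio, assuming the sound lives in the app's
// assets: AssetFileDescriptor exposes exactly the descriptor/offset/length
// triple the method expects. The Result member names are illustrative; the
// type's definition is not shown in the source.
using (AssetFileDescriptor afd = context.Assets.OpenFd("explosion.wav"))
{
    Result result = DecodeAudio(afd.FileDescriptor, afd.StartOffset, afd.Length);
    // result presumably carries the PCM bytes plus sample rate / channel count.
}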
public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
{
    // Rendering straight to the codec's output surface; grab the buffer via
    // codec.GetOutputBuffer(index) here instead if raw access is needed.
    codec.ReleaseOutputBuffer(index, true);
}
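// OnOutputBufferAvailable only fires in MediaCodec's asynchronous mode. A
// minimal sketch of the wiring (names like VideoDecoderCallback, format and
// surface are illustrative): the callback must be installed before Configure,
// and Start kicks off delivery of the OnInputBufferAvailable /
// OnOutputBufferAvailable events.
var codec = MediaCodec.CreateDecoderByType("video/avc");
codec.SetCallback(new VideoDecoderCallback()); // subclass of MediaCodec.Callback
codec.Configure(format, surface, null, MediaCodecConfigFlags.None);
codec.Start();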
private void DrainEncoder(MediaCodec encoder, MediaCodec.BufferInfo bufferInfo, int trackIndex, bool endOfStream)
{
    Debug.WriteLine($"DrainEncoder({endOfStream})");
    const int TIMEOUT_USEC = 10000;

    if (endOfStream)
    {
        Debug.WriteLine("Sending EOS to encoder");
        // Note: SignalEndOfInputStream is only valid for encoders fed by an input surface.
        encoder.SignalEndOfInputStream();
    }

    ByteBuffer[] encoderOutputBuffers = encoder.GetOutputBuffers();
    while (true)
    {
        int encoderStatus = encoder.DequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
        if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
        {
            // No output available yet.
            if (!endOfStream)
            {
                break; // out of while
            }
            Debug.WriteLine("No output available, spinning to await EOS");
        }
        else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
        {
            encoderOutputBuffers = encoder.GetOutputBuffers();
        }
        else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
        {
            Debug.Assert(!muxerStarted);
            MediaFormat newFormat = encoder.OutputFormat;
            Debug.WriteLine($"Encoder output format changed: {newFormat}");

            var isVideo = encoder == videoEncoder;
            if (isVideo)
            {
                videoTrackIndex = muxer.AddTrack(newFormat);
                trackIndex = videoTrackIndex;
            }
            else
            {
                audioTrackIndex = muxer.AddTrack(newFormat);
                trackIndex = audioTrackIndex;
            }

            // Now that both tracks are registered, start the muxer.
            if (videoTrackIndex >= 0 && audioTrackIndex >= 0)
            {
                muxer.Start();
                muxerStarted = true;
                muxerStartEvent.Set();
            }
        }
        else if (encoderStatus < 0)
        {
            Debug.WriteLine($"Unexpected result from encoder.DequeueOutputBuffer: {encoderStatus}");
        }
        else
        {
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            Debug.Assert(encodedData != null);

            if ((bufferInfo.Flags & MediaCodecBufferFlags.CodecConfig) != 0)
            {
                Debug.WriteLine("Ignoring BUFFER_FLAG_CODEC_CONFIG");
                bufferInfo.Size = 0;
            }

            if (bufferInfo.Size != 0)
            {
                muxerStartEvent.WaitOne();
                encodedData.Position(bufferInfo.Offset);
                encodedData.Limit(bufferInfo.Offset + bufferInfo.Size);
                muxer.WriteSampleData(trackIndex, encodedData, bufferInfo);
                Debug.WriteLine($"Sent {bufferInfo.Size} bytes to muxer");
            }

            encoder.ReleaseOutputBuffer(encoderStatus, false);

            if ((bufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
            {
                Debug.WriteLine(endOfStream ? "End of stream reached" : "Reached end of stream unexpectedly");
                break;
            }
        }
    }
}
// Based on: https://github.com/lanhq147/SampleMediaFrame/blob/e2f20ff9eef73318e5a9b4de15458c5c2eb0fd46/app/src/main/java/com/google/android/exoplayer2/video/av/HWRecorder.java
public bool BeginEncoding(int resX, int resY, int rateNumer, int rateDenom, int videoBitRate,
                          int audioBitRate, string audioFile, string outputFile)
{
    videoBufferInfo = new MediaCodec.BufferInfo();
    audioBufferInfo = new MediaCodec.BufferInfo();
    frameRateNumer = rateNumer;
    frameRateDenom = rateDenom;

    MediaFormat videoFormat = MediaFormat.CreateVideoFormat(VideoMimeType, resX, resY);
    videoFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    videoFormat.SetInteger(MediaFormat.KeyBitRate, videoBitRate * 1000);
    videoFormat.SetFloat(MediaFormat.KeyFrameRate, rateNumer / (float)rateDenom);
    videoFormat.SetInteger(MediaFormat.KeyIFrameInterval, 4);
    videoFormat.SetInteger(MediaFormat.KeyProfile, (int)MediaCodecProfileType.Avcprofilehigh);
    videoFormat.SetInteger(MediaFormat.KeyLevel, (int)MediaCodecProfileLevel.Avclevel31);

    videoEncoder = MediaCodec.CreateEncoderByType(VideoMimeType);
    videoEncoder.Configure(videoFormat, null, null, MediaCodecConfigFlags.Encode);
    surface = videoEncoder.CreateInputSurface();
    videoEncoder.Start();

    MediaFormat audioFormat = MediaFormat.CreateAudioFormat(AudioMimeType, 44100, 1);
    audioFormat.SetInteger(MediaFormat.KeyAacProfile, (int)MediaCodecProfileType.Aacobjectlc);
    audioFormat.SetInteger(MediaFormat.KeyBitRate, audioBitRate * 1000);

    audioEncoder = MediaCodec.CreateEncoderByType(AudioMimeType);
    audioEncoder.Configure(audioFormat, null, null, MediaCodecConfigFlags.Encode);
    audioEncoder.Start();

    try
    {
        muxer = new MediaMuxer(outputFile, MuxerOutputType.Mpeg4);
    }
    catch
    {
        return false;
    }

    videoTrackIndex = -1;
    audioTrackIndex = -1;
    muxerStarted = false;

    if (!ElgInitialize())
    {
        return false;
    }

    // File.ReadAllBytes throws rather than returning null, so guard on existence instead.
    if (!File.Exists(audioFile))
    {
        return false;
    }
    audioData = File.ReadAllBytes(audioFile);

    DrainEncoder(videoEncoder, videoBufferInfo, videoTrackIndex, false);
    DrainEncoder(audioEncoder, audioBufferInfo, audioTrackIndex, false);

    audioEncodingTask = Task.Factory.StartNew(AudioEncodeThread, TaskCreationOptions.LongRunning);
    return true;
}
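// Illustrative call, under the assumption that VideoMimeType/AudioMimeType are
// "video/avc" and "audio/mp4a-latm". The recorder instance and paths are
// hypothetical; bit rates are passed in kbps because BeginEncoding multiplies
// them by 1000.
bool ok = recorder.BeginEncoding(
    1280, 720,          // resolution
    30, 1,              // frame rate as numerator/denominator
    4500, 128,          // video/audio bit rates in kbps
    audioFile: pcmPath, // audio to mix in
    outputFile: mp4Path);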
/// <summary>
/// Trims and muxes the audio track of a source video. If both the inputPath
/// string and inputUri are non-null, this method uses the Uri; otherwise set
/// one or the other. They cannot both be null.
/// </summary>
/// <param name="startMs">the start time in ms for trimming</param>
/// <param name="endMs">the end time in ms for trimming</param>
/// <param name="inputPath">optional input path string</param>
/// <param name="muxer">the muxer to use for writing bytes</param>
/// <param name="trackIndexOverride">the muxer track index to write to</param>
/// <param name="bufferInfo">an input BufferInfo to get properties from</param>
/// <param name="outputPath">the output path to check after encoding finishes</param>
/// <param name="ptOffset">the presentation-time offset for audio, used to sync audio and video</param>
/// <param name="inputUri">optional input Uri to read from</param>
/// <returns>the output path on success, otherwise null</returns>
public async Task<string> HybridMuxingTrimmer(int startMs, int endMs, string inputPath, MediaMuxer muxer,
    int trackIndexOverride = -1, BufferInfo bufferInfo = null, string outputPath = null,
    long ptOffset = 0, Android.Net.Uri inputUri = null)
{
    var tio = trackIndexOverride;
    await Task.Run(() =>
    {
        if (outputPath == null)
        {
            outputPath = FileToMp4.LatestOutputPath;
        }

        MediaExtractor ext = new MediaExtractor();
        if (inputUri != null)
        {
            ext.SetDataSource(Android.App.Application.Context, inputUri, null);
        }
        else
        {
            ext.SetDataSource(inputPath);
        }

        int trackCount = ext.TrackCount;
        Dictionary<int, int> indexDict = new Dictionary<int, int>(trackCount);
        int bufferSize = -1;
        for (int i = 0; i < trackCount; i++)
        {
            MediaFormat format = ext.GetTrackFormat(i);
            string mime = format.GetString(MediaFormat.KeyMime);
            bool selectCurrentTrack = false;
            if (mime.StartsWith("audio/"))
            {
                selectCurrentTrack = true;
            }
            else if (mime.StartsWith("video/"))
            {
                selectCurrentTrack = false; // video is rerouted to the GL video encoder
            }
            if (selectCurrentTrack)
            {
                ext.SelectTrack(i);
                if (tio != -1)
                {
                    indexDict.Add(i, i);
                }
                if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                {
                    int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                    bufferSize = newSize > bufferSize ? newSize : bufferSize;
                }
            }
        }

        MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
        if (!System.String.IsNullOrWhiteSpace(inputPath))
        {
            retrieverSrc.SetDataSource(inputPath);
        }
        else
        {
            retrieverSrc.SetDataSource(Android.App.Application.Context, inputUri);
        }

        string degreesString = retrieverSrc.ExtractMetadata(MetadataKey.VideoRotation);
        if (degreesString != null) // unused ATM but will be useful for a stabilized VideoView in streaming
        {
            int degrees = int.Parse(degreesString);
            if (degrees >= 0)
            {
                // muxer.SetOrientationHint(degrees); // the muxer won't accept this param once started
            }
        }

        if (startMs > 0)
        {
            ext.SeekTo(startMs * 1000L, MediaExtractorSeekTo.ClosestSync);
        }

        int offset = 0;
        if (bufferInfo == null)
        {
            bufferInfo = new MediaCodec.BufferInfo();
        }
        if (bufferSize < 0)
        {
            bufferSize = 1 << 20; // fallback when no selected track reports KeyMaxInputSize
        }
        ByteBuffer dstBuf = ByteBuffer.Allocate(bufferSize);
        long us = endMs * 1000L;
        long uo = us + ptOffset; // end time including the offset (currently unused)
        int cf = 0;
        try
        {
            FileToMp4.AudioEncodingInProgress = true;
            while (true)
            {
                bufferInfo.Offset = offset;
                bufferInfo.Size = ext.ReadSampleData(dstBuf, offset);
                if (bufferInfo.Size < 0)
                {
                    bufferInfo.Size = 0;
                    break;
                }

                cf++;
                bufferInfo.PresentationTimeUs = ext.SampleTime + ptOffset;
                if (ext.SampleTime >= us)
                {
                    break; // out of while
                }

                bufferInfo.Flags = MFlags2MCodecBuff(ext.SampleFlags);
                if (tio == -1)
                {
                    muxer.WriteSampleData(FileToMp4.LatestAudioTrackIndex, dstBuf, bufferInfo);
                }
                else
                {
                    muxer.WriteSampleData(tio, dstBuf, bufferInfo);
                }
                if (cf >= 240) // only raise the muxer event every x frames to reduce CPU load
                {
                    Notify(ext.SampleTime, us);
                    cf = 0;
                }
                ext.Advance();
            }
        }
        catch (Java.Lang.IllegalStateException)
        {
            this.Progress.Invoke(new MuxerEventArgs(ext.SampleTime, us, null, true, true));
            Console.WriteLine("The source video file is malformed");
        }
        catch (Java.Lang.Exception ex)
        {
            this.Progress.Invoke(new MuxerEventArgs(ext.SampleTime, us, null, true, true));
            Console.WriteLine(ex.Message);
        }
        if (AppSettings.Logging.SendToConsole)
        {
            System.Console.WriteLine($"DrainEncoder audio finished @ {bufferInfo.PresentationTimeUs}");
        }
    });

    FileToMp4.AudioEncodingInProgress = false;
    try
    {
        if (!FileToMp4.VideoEncodingInProgress)
        {
            muxer.Stop();
            muxer.Release();
            muxer = null;
        }
    }
    catch (Java.Lang.Exception ex)
    {
        Log.Debug("MuxingEncoder", ex.Message);
    }

    if (outputPath != null)
    {
        var success = System.IO.File.Exists(outputPath);
        if (success)
        {
            this.Progress.Invoke(new MuxerEventArgs(endMs * 1000L, endMs, outputPath, true));
            return outputPath;
        }
    }
    return null; // nothing to look for
}
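// Both HybridMuxingTrimmer above and TrimAsync below rely on a small helper
// that translates extractor sample flags into codec buffer flags. The original
// isn't shown; this is one plausible implementation: map the extractor's
// sync-sample flag to the codec's key-frame flag and drop everything else.
private static MediaCodecBufferFlags MFlags2MCodecBuff(MediaExtractorSampleFlags flags)
{
    var result = MediaCodecBufferFlags.None;
    if ((flags & MediaExtractorSampleFlags.Sync) != 0)
    {
        result |= MediaCodecBufferFlags.KeyFrame; // BUFFER_FLAG_KEY_FRAME
    }
    return result;
}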
private bool ExtractSomeAudioData(out bool endOfFile)
{
    endOfFile = extractionOutputDone;
    if (endOfFile)
    {
        return false;
    }

    var hasExtractedData = false;
    int TimeoutUs = 20000;
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    if (!extractionInputDone)
    {
        int inputBufIndex = audioMediaDecoder.DequeueInputBuffer(TimeoutUs);
        if (inputBufIndex >= 0)
        {
            Java.Nio.ByteBuffer inputBuffer = audioMediaDecoder.GetInputBuffer(inputBufIndex);

            // Read the sample data into the ByteBuffer. This neither respects nor
            // updates inputBuffer's position, limit, etc.
            int chunkSize = audioMediaExtractor.ReadSampleData(inputBuffer, 0);
            if (chunkSize < 0)
            {
                // End of stream: send an empty frame with the EOS flag set.
                audioMediaDecoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodecBufferFlags.EndOfStream);
                extractionInputDone = true;
                //Logger.Verbose("sent input EOS");
            }
            else
            {
                if (audioMediaExtractor.SampleTrackIndex != trackIndexAudio)
                {
                    Logger.Warning(string.Format("got audio sample from track {0}, expected {1}",
                        audioMediaExtractor.SampleTrackIndex, trackIndexAudio));
                }
                var presentationTimeMicroSeconds = audioMediaExtractor.SampleTime;
                audioMediaDecoder.QueueInputBuffer(inputBufIndex, 0, chunkSize, presentationTimeMicroSeconds, 0);
                audioMediaExtractor.Advance();
            }
        }
        else
        {
            // Do nothing: the input buffer queue is full (we need to output them first).
        }
    }

    int decoderStatus = audioMediaDecoder.DequeueOutputBuffer(info, TimeoutUs);
    switch (decoderStatus)
    {
        case (int)MediaCodecInfoState.TryAgainLater:
        {
            Logger.Verbose("no output from decoder available");
            break;
        }
        case (int)MediaCodecInfoState.OutputFormatChanged:
        {
            MediaFormat newFormat = audioMediaDecoder.OutputFormat;
            string newFormatStr = newFormat.ToString();
            Logger.Verbose("audio decoder output format changed: " + newFormatStr);
            break;
        }
        case (int)MediaCodecInfoState.OutputBuffersChanged:
        {
            // Deprecated: we just ignore it.
            break;
        }
        default:
        {
            if (decoderStatus < 0)
            {
                throw new InvalidOperationException(string.Format(
                    "unexpected result from audio decoder.DequeueOutputBuffer: {0}", decoderStatus));
            }

            if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
            {
                Logger.Verbose("audio: output EOS");
                extractionOutputDone = true;
            }

            if (info.Size > 0)
            {
                hasExtractedData = true;
                var buffer = audioMediaDecoder.GetOutputBuffer(decoderStatus);
                var presentationTime = TimeSpanExtensions.FromMicroSeconds(info.PresentationTimeUs);

                if (storageBuffer.CountDataBytes + info.Size <= storageBuffer.Data.Length)
                {
                    buffer.Get(storageBuffer.Data, storageBuffer.CountDataBytes, info.Size); // read the buffer all at once
                    buffer.Clear(); // MUST DO!!! otherwise the next time you get this same buffer bad things will happen
                    buffer.Position(0);

                    if (storageBuffer.CountDataBytes == 0)
                    {
                        storageBuffer.PresentationTime = presentationTime;
                    }
                    storageBuffer.CountDataBytes += info.Size;
                }
                else
                {
                    Logger.Error("The storage buffer has reached full capacity. Current data will be dropped");
                }
            }
            audioMediaDecoder.ReleaseOutputBuffer(decoderStatus, false);
            break;
        }
    }

    endOfFile = extractionOutputDone;
    return hasExtractedData;
}
public Task<bool> TrimAsync(int startMS, int lengthMS, string inputPath, string outputPath)
{
    return Task.Run<bool>(() =>
    {
        try
        {
            bool didOperationSucceed = false;

            // Set up MediaExtractor to read from the source.
            MediaExtractor extractor = new MediaExtractor();
            extractor.SetDataSource(inputPath);
            int trackCount = extractor.TrackCount;

            // Set up MediaMuxer for the destination.
            MediaMuxer muxer = new MediaMuxer(outputPath, MuxerOutputType.Mpeg4);

            // Set up the tracks and retrieve the max buffer size for selected tracks.
            Dictionary<int, int> indexDict = new Dictionary<int, int>(trackCount);
            int bufferSize = -1;
            for (int i = 0; i < trackCount; i++)
            {
                MediaFormat format = extractor.GetTrackFormat(i);
                string mime = format.GetString(MediaFormat.KeyMime);
                bool selectCurrentTrack = false;
                if (mime.StartsWith("audio/"))
                {
                    selectCurrentTrack = true;
                }
                else if (mime.StartsWith("video/"))
                {
                    selectCurrentTrack = true;
                }
                if (selectCurrentTrack)
                {
                    extractor.SelectTrack(i);
                    int dstIndex = muxer.AddTrack(format);
                    indexDict.Add(i, dstIndex);
                    if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                    {
                        int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                        bufferSize = newSize > bufferSize ? newSize : bufferSize;
                    }
                }
            }
            if (bufferSize < 0)
            {
                bufferSize = 1337; // TODO: fallback when no track reports KeyMaxInputSize; it will most likely be set above anyway
            }

            // Set up the orientation and starting time for the extractor.
            MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
            retrieverSrc.SetDataSource(inputPath);
            string degreesString = retrieverSrc.ExtractMetadata(MetadataKey.VideoRotation);
            if (degreesString != null)
            {
                int degrees = int.Parse(degreesString);
                if (degrees >= 0)
                {
                    muxer.SetOrientationHint(degrees);
                }
            }
            if (startMS > 0)
            {
                extractor.SeekTo(startMS * 1000L, MediaExtractorSeekTo.ClosestSync);
            }

            // Copy the samples from MediaExtractor to MediaMuxer. We loop over each
            // sample and stop when we reach the end of the source file or exceed the
            // end time of the trim.
            int offset = 0;
            int trackIndex = -1;
            ByteBuffer dstBuf = ByteBuffer.Allocate(bufferSize);
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            try
            {
                muxer.Start();
                while (true)
                {
                    bufferInfo.Offset = offset;
                    bufferInfo.Size = extractor.ReadSampleData(dstBuf, offset);
                    if (bufferInfo.Size < 0)
                    {
                        bufferInfo.Size = 0;
                        break;
                    }

                    bufferInfo.PresentationTimeUs = extractor.SampleTime;
                    if (lengthMS > 0 && bufferInfo.PresentationTimeUs > (startMS + lengthMS - 1) * 1000L)
                    {
                        Console.WriteLine("The current sample is over the trim end time.");
                        break;
                    }

                    bufferInfo.Flags = ConvertMediaExtractorSampleFlagsToMediaCodecBufferFlags(extractor.SampleFlags);
                    trackIndex = extractor.SampleTrackIndex;
                    muxer.WriteSampleData(indexDict[trackIndex], dstBuf, bufferInfo);
                    extractor.Advance();
                }
                muxer.Stop();
                didOperationSucceed = true;
                // Deleting the old file:
                //JFile file = new JFile(srcPath);
                //file.Delete();
            }
            catch (IllegalStateException)
            {
                // Swallow the exception due to a malformed source.
                Console.WriteLine("The source video file is malformed");
            }
            finally
            {
                muxer.Release();
                extractor.Release();
            }
            return didOperationSucceed;
        }
        catch (System.Exception)
        {
            return false;
        }
    });
}
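// Hypothetical usage of TrimAsync: keep 10 seconds of the source starting at
// 00:05. The paths are illustrative.
bool trimmed = await TrimAsync(
    startMS: 5000,
    lengthMS: 10000,
    inputPath: "/sdcard/DCIM/source.mp4",
    outputPath: "/sdcard/DCIM/trimmed.mp4");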
private string EncodeFileToMp4(string inputPath, string outputPath, bool encodeAudio = true, Android.Net.Uri inputUri = null)
{
    LatestInputVideoLength = AudioEncoding.GetVideoLength(inputPath, inputUri);
    LatestAudioInputFormat = AudioEncoding.GetAudioTrackFormat(inputPath, inputUri);
    EstimateTotalSize(LatestInputVideoLength, _bitRate);

    try
    {
        prepareMediaPlayer(inputPath, inputUri);
        prepareEncoder(outputPath);
        _inputSurface.MakeCurrent();
        prepareWeakSurfaceTexture();
        _mediaPlayer.Start();
        _mediaPlayer.SetAudioStreamType(Android.Media.Stream.VoiceCall);
        _mediaPlayer.SetVolume(0, 0);
        _frameCount = 0;
    }
    catch (System.Exception ex)
    {
        Log.Debug("VideoEncoder", ex.Message);
    }

    VideoEncodingInProgress = true;
    while (true)
    {
        D(false);
        _frameCount++;

        // Disable this to make it faster when not debugging.
#if DEBUG
        if (_frameCount >= 120 && AppSettings.Logging.SendToConsole)
        {
            System.Console.WriteLine($"FileToMp4 exited @ {_outputSurface.WeakSurfaceTexture.Timestamp} " +
                                     $" | encoded bits {_bitsEncodedSoFar} of estimated {_estimatedTotalSize}");
        }
#endif

        // Acquire a new frame of input, and render it to the Surface. If we had a
        // GLSurfaceView we could switch EGL contexts and call drawImage() a second
        // time to render it on screen. The texture can be shared between contexts by
        // passing the GLSurfaceView's EGLContext as eglCreateContext()'s share_context
        // argument.
        if (!_outputSurface.AwaitNewImage(true))
        {
            break;
        }
        _outputSurface.DrawImage();

        // Set the presentation time stamp from the WeakSurfaceTexture's time stamp.
        // This will be used by MediaMuxer to set the PTS in the video.
        _inputSurface.SetPresentationTime(_outputSurface.WeakSurfaceTexture.Timestamp);
        //if (AppSettings.Logging.SendToConsole) Log.Debug("MediaLoop", "Set Time " + st.Timestamp);

        // Submit it to the encoder. The eglSwapBuffers call will block if the input
        // is full, which would be bad if it stayed full until we dequeued an output
        // buffer (which we can't do, since we're stuck here). So long as we fully drain
        // the encoder before supplying additional input, the system guarantees that we
        // can supply another frame without blocking.
        //if (AppSettings.Logging.SendToConsole) Log.Debug(TAG, "sending frame to encoder:");
        _inputSurface.SwapBuffers();

        if (_bitsEncodedSoFar >= _estimatedTotalSize)
        {
            break;
        }
    }

    D(true);
    VideoEncodingInProgress = false;

#if DEBUG
    if (AppSettings.Logging.SendToConsole)
    {
        System.Console.WriteLine($"DrainEncoder started @ {_firstKnownBuffer} exited @ " +
                                 $"{_outputSurface.WeakSurfaceTexture.Timestamp} " +
                                 $"| encoded bits {_bitsEncodedSoFar} of estimated {_estimatedTotalSize}");
    }
#endif

    try
    {
        releaseMediaPlayer();
        releaseEncoder();
        releaseWeakSurfaceTexture();
    }
    catch { }

    _firstKnownBuffer = 0;
    _estimatedTotalSize = 0;
    _frameCount = 0;
    _bitsEncodedSoFar = 0;
    _bfi = new BufferInfo();

    if (!AudioEncodingInProgress)
    {
        // If audio encoding isn't still running, stop everything and return.
        _muxer.Stop();
        _muxer.Release();
        _muxer = null;
        if (File.Exists(outputPath))
        {
            this.Progress.Invoke(new EncoderMinArgs(EncodedBits(_bfi.Size), _estimatedTotalSize, true, false, outputPath));
            return outputPath;
        }
    }
    this.Progress.Invoke(new EncoderMinArgs(EncodedBits(_bfi.Size), _estimatedTotalSize, false, false, null));
    return null; // file isn't finished processing yet
}
private void ResampleVideo(MediaExtractor extractor, MediaCodec decoder, SamplerClip clip)
{
    ByteBuffer[] decoderInputBuffers = decoder.GetInputBuffers();
    ByteBuffer[] encoderOutputBuffers = mEncoder.GetOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    int inputChunk = 0;
    int outputCount = 0;

    long endTime = clip.getEndTime();
    if (endTime == -1)
    {
        endTime = clip.getVideoDuration();
    }

    bool outputDoneNextTimeWeCheck = false;
    bool outputDone = false;
    bool inputDone = false;
    bool decoderDone = false;

    while (!outputDone)
    {
        // Feed more data to the decoder.
        if (!inputDone)
        {
            int inputBufIndex = decoder.DequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0)
            {
                if (extractor.SampleTime / 1000 >= endTime)
                {
                    // End of clip -- send an empty frame with the EOS flag set.
                    decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodecBufferFlags.EndOfStream);
                    inputDone = true;
                }
                else
                {
                    // Copy a chunk of input to the decoder. The first chunk should have
                    // the BUFFER_FLAG_CODEC_CONFIG flag set.
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    inputBuf.Clear();
                    int sampleSize = extractor.ReadSampleData(inputBuf, 0);
                    if (sampleSize < 0)
                    {
                        decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                        inputDone = true; // stop feeding input; queueing again after EOS is an error
                    }
                    else
                    {
                        decoder.QueueInputBuffer(inputBufIndex, 0, sampleSize, extractor.SampleTime, 0);
                        extractor.Advance();
                    }
                    inputChunk++;
                }
            }
        }

        // Assume output is available. Loop until both assumptions are false.
        bool decoderOutputAvailable = !decoderDone;
        bool encoderOutputAvailable = true;
        while (decoderOutputAvailable || encoderOutputAvailable)
        {
            // Start by draining any pending output from the encoder. It's important to
            // do this before we try to stuff any more data in.
            int encoderStatus = mEncoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
            if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
            {
                encoderOutputAvailable = false;
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
            {
                encoderOutputBuffers = mEncoder.GetOutputBuffers();
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
            {
                MediaFormat newFormat = mEncoder.OutputFormat;
                mTrackIndex = mMuxer.AddTrack(newFormat);
                mMuxer.Start();
                mMuxerStarted = true;
            }
            else if (encoderStatus < 0)
            {
                // fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            }
            else // encoderStatus >= 0
            {
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null)
                {
                    // fail("encoderOutputBuffer " + encoderStatus + " was null");
                }
                // Write the data to the output "file".
                if (info.Size != 0)
                {
                    encodedData.Position(info.Offset);
                    encodedData.Limit(info.Offset + info.Size);
                    outputCount++;
                    mMuxer.WriteSampleData(mTrackIndex, encodedData, info);
                }
                outputDone = (info.Flags & MediaCodecBufferFlags.EndOfStream) != 0;
                mEncoder.ReleaseOutputBuffer(encoderStatus, false);
            }

            if (outputDoneNextTimeWeCheck)
            {
                outputDone = true;
            }

            if (encoderStatus != (int)MediaCodecInfoState.TryAgainLater)
            {
                // Continue attempts to drain output.
                continue;
            }

            // Encoder is drained; check to see if we've got a new frame of output from
            // the decoder. (The output is going to a Surface, rather than a ByteBuffer,
            // but we still get information through BufferInfo.)
            if (!decoderDone)
            {
                int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                {
                    decoderOutputAvailable = false;
                }
                else if (decoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                {
                    // decoderOutputBuffers = decoder.GetOutputBuffers();
                }
                else if (decoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                {
                    // Expected before the first buffer of data.
                    MediaFormat newFormat = decoder.OutputFormat;
                }
                else if (decoderStatus < 0)
                {
                    // fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                }
                else // decoderStatus >= 0
                {
                    // The ByteBuffers are null references, but we still get a nonzero
                    // size for the decoded data.
                    bool doRender = (info.Size != 0);

                    // As soon as we call ReleaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture. The API doesn't
                    // guarantee that the texture will be available before the call
                    // returns, so we need to wait for the onFrameAvailable callback to
                    // fire. If we don't wait, we risk rendering from the previous frame.
                    decoder.ReleaseOutputBuffer(decoderStatus, doRender);
                    if (doRender)
                    {
                        mOutputSurface.AwaitNewImage(true);
                        mOutputSurface.DrawImage();

                        // Send it to the encoder, rebasing the PTS against the clip start.
                        long nSecs = info.PresentationTimeUs * 1000;
                        if (clip.getStartTime() != -1)
                        {
                            nSecs = (info.PresentationTimeUs - (clip.getStartTime() * 1000)) * 1000;
                        }
                        nSecs = Java.Lang.Math.Max(0, nSecs);
                        mEncoderPresentationTimeUs += (nSecs - mLastSampleTime);
                        mLastSampleTime = nSecs;

                        mInputSurface.SetPresentationTime(mEncoderPresentationTimeUs);
                        mInputSurface.SwapBuffers();
                    }
                    if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                    {
                        // mEncoder.signalEndOfInputStream();
                        outputDoneNextTimeWeCheck = true;
                    }
                }
            }
        }
    }

    if (inputChunk != outputCount)
    {
        // throw new RuntimeException("frame lost: " + inputChunk + " in, " + outputCount + " out");
    }
}
private void EncodeMux()
{
    int TIMEOUT_USEC = 10000;

    ByteBuffer[] encoderInputBuffers = _Encoder.GetInputBuffers();
    ByteBuffer[] encoderOutputBuffers = _Encoder.GetOutputBuffers();
    var mBufferInfo = new MediaCodec.BufferInfo();

    bool inputDone = false;
    int frameIndex = 0;

    try
    {
        while (true)
        {
            if (!inputDone)
            {
                int inputBufIndex = _Encoder.DequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0)
                {
                    long ptsUsec = computePresentationTime(frameIndex);
                    if (frameIndex == _ByteBuffers.Count)
                    {
                        // Send an empty frame with the end-of-stream flag set. If we set
                        // EOS on a frame with data, that frame data will be ignored and
                        // the output will be short one frame.
                        _Encoder.QueueInputBuffer(inputBufIndex, 0, 0, ptsUsec, MediaCodecBufferFlags.EndOfStream);
                        inputDone = true;
                        Log.Info(TAG, "sent input EOS (with zero-length frame)");
                    }
                    else
                    {
                        Log.Info(TAG, string.Format("Adding _ByteBuffers image index {0} to encoder", frameIndex));
                        ByteBuffer inputBuf = encoderInputBuffers[inputBufIndex];
                        inputBuf.Clear(); // the buffer should be sized to hold one full frame
                        var imagedata = _ByteBuffers[frameIndex];
                        int chunkSize = 0;
                        if (imagedata == null)
                        {
                            Log.Warn(TAG, string.Format("_ByteBuffers image index {0} was null; queuing empty frame", frameIndex));
                        }
                        else
                        {
                            // Decode the stored image, convert ARGB to YUV420SP, then fix
                            // common color-matching issues before queueing the frame.
                            // (Earlier variants that pre-decoded bitmaps or decompressed
                            // byte arrays were removed.)
                            Bitmap b = BitmapFactory.DecodeByteArray(imagedata, 0, imagedata.Length);
                            byte[] yuv = new byte[b.Width * b.Height * 3 / 2];
                            int[] argb = new int[b.Width * b.Height];
                            b.GetPixels(argb, 0, b.Width, 0, 0, b.Width, b.Height);
                            encodeYUV420SP(yuv, argb, b.Width, b.Height);
                            var yuvimage = new YuvImage(yuv, _CameraColorFormat, _Width, _Height, null);
                            var yuvarray = yuvimage.GetYuvData();
                            colorcorrection(ref yuvarray, b.Width, b.Height);
                            inputBuf.Put(yuvarray);
                            chunkSize = yuvarray.Length;
                            b.Recycle();
                        }
                        _Encoder.QueueInputBuffer(inputBufIndex, 0, chunkSize, ptsUsec, 0);
                        frameIndex++;
                    }
                }
                else
                {
                    // Either all buffers are in use, or we timed out during initial setup.
                    Log.Warn(TAG, "input buffer not available");
                }
            }

            int encoderStatus = _Encoder.DequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
            {
                Log.Info(TAG, "no output available, spinning to await EOS");
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
            {
                // Not expected for an encoder.
                Log.Warn(TAG, "unexpected OutputBuffersChanged happened");
                encoderOutputBuffers = _Encoder.GetOutputBuffers();
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
            {
                // Should happen before receiving buffers, and should only happen once.
                if (_MuxerStarted)
                {
                    Log.Error(TAG, "format changed twice and should never happen");
                    throw new RuntimeException("format changed twice");
                }
                MediaFormat newFormat = _Encoder.OutputFormat;
                Log.Info(TAG, "format changed and starting MUX");
                _TrackIndex = _Muxer.AddTrack(newFormat);
                _Muxer.Start();
                _MuxerStarted = true;
            }
            else if (encoderStatus < 0)
            {
                Log.Warn(TAG, "unexpected encoder status, ignoring");
            }
            else
            {
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null)
                {
                    Log.Error(TAG, string.Format("encoderOutputBuffer {0} was null!!", encoderStatus));
                    throw new RuntimeException(string.Format("encoderOutputBuffer {0} was null!!", encoderStatus));
                }

                if ((mBufferInfo.Flags & MediaCodecBufferFlags.CodecConfig) != 0)
                {
                    // The codec config data was pulled out and fed to the muxer when we
                    // got the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                    mBufferInfo.Size = 0;
                }

                if (mBufferInfo.Size != 0)
                {
                    if (!_MuxerStarted)
                    {
                        Log.Error(TAG, "muxer hasn't started!!");
                        throw new RuntimeException("muxer hasn't started");
                    }
                    // Adjusting the ByteBuffer position/limit to match BufferInfo is not
                    // needed here; WriteSampleData honors the BufferInfo offsets.
                    _Muxer.WriteSampleData(_TrackIndex, encodedData, mBufferInfo);
                    Log.Info(TAG, string.Format("{0} bytes to muxer", mBufferInfo.Size));
                }

                _Encoder.ReleaseOutputBuffer(encoderStatus, false);

                if ((mBufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                {
                    Log.Info(TAG, "End of Stream Reached!!");
                    break;
                }
            }
        }
    }
    catch (Exception e)
    {
        Log.Error(TAG, "Encode or Muxer failed", e, e.Message);
        throw;
    }
}
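// The computePresentationTime helper used by EncodeMux (and the test methods
// further down) isn't shown in the source. This is the usual CTS-style version:
// derive a PTS in microseconds from the frame index, assuming a FRAME_RATE
// constant. The small 132 us bias keeps frame 0 off an exactly-zero timestamp.
private static long computePresentationTime(int frameIndex)
{
    const int FRAME_RATE = 30; // assumed frame rate
    return 132 + frameIndex * 1000000L / FRAME_RATE;
}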
public override void OnOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info)
{
    ByteBuffer outputBuffer = codec.GetOutputBuffer(index);
    byte[] outputArray = new byte[outputBuffer.Remaining()];
    outputBuffer.Get(outputArray);

    DatagramPacket packet = new DatagramPacket(outputArray, outputArray.Length,
        InetAddress.GetByAddress(new byte[] { 192, 168, 0, 31 }), 9482);
    _udpSocket.Send(packet);

    codec.ReleaseOutputBuffer(index, false);
}
/**
 * Checks the video data.
 *
 * @return the number of bad frames
 */
private int checkVideoData(VideoChunks inputData, MediaCodec decoder, OutputSurface surface)
{
    const int TIMEOUT_USEC = 1000;
    ByteBuffer[] decoderInputBuffers = decoder.GetInputBuffers();
    ByteBuffer[] decoderOutputBuffers = decoder.GetOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int checkIndex = 0;
    int badFrames = 0;
    bool outputDone = false;
    bool inputDone = false;

    while (!outputDone)
    {
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "check loop");
        }

        // Feed more data to the decoder.
        if (!inputDone)
        {
            int inputBufIndex = decoder.DequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0)
            {
                if (inputChunk == inputData.NumChunks)
                {
                    // End of stream -- send an empty frame with the EOS flag set.
                    decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BufferFlagEndOfStream);
                    inputDone = true;
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "sent input EOS");
                    }
                }
                else
                {
                    // Copy a chunk of input to the decoder. The first chunk should have
                    // the BUFFER_FLAG_CODEC_CONFIG flag set.
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    inputBuf.Clear();
                    inputData.getChunkData(inputChunk, inputBuf);
                    int flags = inputData.getChunkFlags(inputChunk);
                    long time = inputData.getChunkTime(inputChunk);
                    decoder.QueueInputBuffer(inputBufIndex, 0, inputBuf.Position(), time, (MediaCodecBufferFlags)flags);
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                  inputBuf.Position() + " flags=" + flags);
                    }
                    inputChunk++;
                }
            }
            else
            {
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "input buffer not available");
                }
            }
        }

        if (!outputDone)
        {
            int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
            if (decoderStatus == (int)MediaCodec.InfoTryAgainLater)
            {
                // No output available yet.
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "no output from decoder available");
                }
            }
            else if (decoderStatus == (int)MediaCodec.InfoOutputBuffersChanged)
            {
                decoderOutputBuffers = decoder.GetOutputBuffers();
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "decoder output buffers changed");
                }
            }
            else if (decoderStatus == (int)MediaCodec.InfoOutputFormatChanged)
            {
                MediaFormat newFormat = decoder.OutputFormat;
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "decoder output format changed: " + newFormat);
                }
            }
            else if (decoderStatus < 0)
            {
                fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
            }
            else // decoderStatus >= 0
            {
                ByteBuffer decodedData = decoderOutputBuffers[decoderStatus];
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "surface decoder given buffer " + decoderStatus + " (size=" + info.Size + ")");
                }
                if ((info.Flags & MediaCodec.BufferFlagEndOfStream) != 0)
                {
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "output EOS");
                    }
                    outputDone = true;
                }

                bool doRender = (info.Size != 0);

                // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture. The API doesn't guarantee
                // that the texture will be available before the call returns, so we
                // need to wait for the onFrameAvailable callback to fire.
                decoder.ReleaseOutputBuffer(decoderStatus, doRender);
                if (doRender)
                {
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "awaiting frame " + checkIndex);
                    }
                    assertEquals("Wrong time stamp", computePresentationTime(checkIndex), info.PresentationTimeUs);
                    surface.AwaitNewImage();
                    surface.DrawImage();
                    if (!checkSurfaceFrame(checkIndex++))
                    {
                        badFrames++;
                    }
                }
            }
        }
    }
    return badFrames;
}
/**
 * Edits a stream of video data.
 */
private void editVideoData(VideoChunks inputData, MediaCodec decoder, OutputSurface outputSurface,
                           InputSurface inputSurface, MediaCodec encoder, VideoChunks outputData)
{
    const int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.GetInputBuffers();
    ByteBuffer[] encoderOutputBuffers = encoder.GetOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int outputCount = 0;
    bool outputDone = false;
    bool inputDone = false;
    bool decoderDone = false;

    while (!outputDone)
    {
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "edit loop");
        }

        // Feed more data to the decoder.
        if (!inputDone)
        {
            int inputBufIndex = decoder.DequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0)
            {
                if (inputChunk == inputData.NumChunks)
                {
                    // End of stream -- send an empty frame with the EOS flag set.
                    decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodecBufferFlags.EndOfStream);
                    inputDone = true;
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "sent input EOS (with zero-length frame)");
                    }
                }
                else
                {
                    // Copy a chunk of input to the decoder. The first chunk should have
                    // the BUFFER_FLAG_CODEC_CONFIG flag set.
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    inputBuf.Clear();
                    inputData.getChunkData(inputChunk, inputBuf);
                    int flags = inputData.getChunkFlags(inputChunk);
                    long time = inputData.getChunkTime(inputChunk);
                    decoder.QueueInputBuffer(inputBufIndex, 0, inputBuf.Position(), time,
                        (MediaCodecBufferFlags)flags); // TODO: not sure if it's MediaCodecBufferFlags, verify
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                  inputBuf.Position() + " flags=" + flags);
                    }
                    inputChunk++;
                }
            }
            else
            {
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "input buffer not available");
                }
            }
        }

        // Assume output is available. Loop until both assumptions are false.
        bool decoderOutputAvailable = !decoderDone;
        bool encoderOutputAvailable = true;
        while (decoderOutputAvailable || encoderOutputAvailable)
        {
            // Start by draining any pending output from the encoder. It's important to
            // do this before we try to stuff any more data in.
            int encoderStatus = encoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
            if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
            {
                // No output available yet.
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "no output from encoder available");
                }
                encoderOutputAvailable = false;
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
            {
                encoderOutputBuffers = encoder.GetOutputBuffers();
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "encoder output buffers changed");
                }
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
            {
                MediaFormat newFormat = encoder.OutputFormat;
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "encoder output format changed: " + newFormat);
                }
            }
            else if (encoderStatus < 0)
            {
                fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            }
            else // encoderStatus >= 0
            {
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null)
                {
                    fail("encoderOutputBuffer " + encoderStatus + " was null");
                }
                // Write the data to the output "file".
                if (info.Size != 0)
                {
                    encodedData.Position(info.Offset);
                    encodedData.Limit(info.Offset + info.Size);
                    outputData.addChunk(encodedData, (int)info.Flags, info.PresentationTimeUs);
                    outputCount++;
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "encoder output " + info.Size + " bytes");
                    }
                }
                outputDone = (info.Flags & MediaCodec.BufferFlagEndOfStream) != 0;
                encoder.ReleaseOutputBuffer(encoderStatus, false);
            }

            if (encoderStatus != (int)MediaCodec.InfoTryAgainLater)
            {
                // Continue attempts to drain output.
                continue;
            }

            // Encoder is drained; check to see if we've got a new frame of output from
            // the decoder. (The output is going to a Surface, rather than a ByteBuffer,
            // but we still get information through BufferInfo.)
            if (!decoderDone)
            {
                int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == (int)MediaCodec.InfoTryAgainLater)
                {
                    // No output available yet.
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "no output from decoder available");
                    }
                    decoderOutputAvailable = false;
                }
                else if (decoderStatus == (int)MediaCodec.InfoOutputBuffersChanged)
                {
                    //decoderOutputBuffers = decoder.getOutputBuffers();
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "decoder output buffers changed (we don't care)");
                    }
                }
                else if (decoderStatus == (int)MediaCodec.InfoOutputFormatChanged)
                {
                    // Expected before the first buffer of data.
                    MediaFormat newFormat = decoder.OutputFormat;
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "decoder output format changed: " + newFormat);
                    }
                }
                else if (decoderStatus < 0)
                {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                }
                else // decoderStatus >= 0
                {
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "surface decoder given buffer " + decoderStatus + " (size=" + info.Size + ")");
                    }
                    // The ByteBuffers are null references, but we still get a nonzero
                    // size for the decoded data.
                    bool doRender = (info.Size != 0);

                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture. The API doesn't
                    // guarantee that the texture will be available before the call
                    // returns, so we need to wait for the onFrameAvailable callback to
                    // fire. If we don't wait, we risk rendering from the previous frame.
                    decoder.ReleaseOutputBuffer(decoderStatus, doRender);
                    if (doRender)
                    {
                        // This waits for the image and renders it after it arrives.
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "awaiting frame");
                        }
                        outputSurface.AwaitNewImage();
                        outputSurface.DrawImage();
                        // Send it to the encoder.
                        inputSurface.SetPresentationTime(info.PresentationTimeUs * 1000);
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "swapBuffers");
                        }
                        inputSurface.SwapBuffers();
                    }
                    if ((info.Flags & MediaCodec.BufferFlagEndOfStream) != 0)
                    {
                        // Forward the decoder EOS to the encoder.
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "signaling input EOS");
                        }
                        if (WORK_AROUND_BUGS)
                        {
                            // Bail early, possibly dropping a frame.
                            return;
                        }
                        else
                        {
                            encoder.SignalEndOfInputStream();
                        }
                    }
                }
            }
        }
    }

    if (inputChunk != outputCount)
    {
        throw new RuntimeException("frame lost: " + inputChunk + " in, " + outputCount + " out");
    }
}
/**
 * Generates video frames, feeds them into the encoder, and writes the output to the
 * VideoChunks instance.
 */
private void generateVideoData(MediaCodec encoder, InputSurface inputSurface, VideoChunks output)
{
    ByteBuffer[] encoderOutputBuffers = encoder.GetOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int generateIndex = 0;
    int outputCount = 0;

    // Loop until the output side is done.
    bool inputDone = false;
    bool outputDone = false;
    while (!outputDone)
    {
        if (AppSettings.Logging.SendToConsole)
        {
            Log.Debug(TAG, "gen loop");
        }

        // If we're not done submitting frames, generate a new one and submit it. The
        // eglSwapBuffers call will block if the input is full.
        if (!inputDone)
        {
            if (generateIndex == NUM_FRAMES)
            {
                // Send an empty frame with the end-of-stream flag set.
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "signaling input EOS");
                }
                if (WORK_AROUND_BUGS)
                {
                    // Might drop a frame, but at least we won't crash mediaserver.
                    try { Thread.Sleep(500); } catch (InterruptedException) { }
                    outputDone = true;
                }
                else
                {
                    encoder.SignalEndOfInputStream();
                }
                inputDone = true;
            }
            else
            {
                generateSurfaceFrame(generateIndex);
                inputSurface.SetPresentationTime(computePresentationTime(generateIndex) * 1000);
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "inputSurface swapBuffers");
                }
                inputSurface.SwapBuffers();
            }
            generateIndex++;
        }

        // Check for output from the encoder. If there's no output yet, we either need to
        // provide more input, or we need to wait for the encoder to work its magic. We
        // can't actually tell which is the case, so if we can't get an output buffer
        // right away we loop around and see if it wants more input.
        //
        // If we do find output, drain it all before supplying more input.
        while (true)
        {
            int encoderStatus = encoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
            if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
            {
                // No output available yet.
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "no output from encoder available");
                }
                break; // out of while
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
            {
                // Not expected for an encoder.
                encoderOutputBuffers = encoder.GetOutputBuffers();
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "encoder output buffers changed");
                }
            }
            else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
            {
                // Not expected for an encoder.
                MediaFormat newFormat = encoder.OutputFormat;
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "encoder output format changed: " + newFormat);
                }
            }
            else if (encoderStatus < 0)
            {
                fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            }
            else // encoderStatus >= 0
            {
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null)
                {
                    fail("encoderOutputBuffer " + encoderStatus + " was null");
                }

                // The codec config flag must be set iff this is the first chunk of
                // output. This may not hold for all codecs, but it appears to be the
                // case for video/avc.
                assertTrue((info.Flags & MediaCodec.BufferFlagCodecConfig) != 0 || outputCount != 0);

                if (info.Size != 0)
                {
                    // Adjust the ByteBuffer values to match BufferInfo.
                    encodedData.Position(info.Offset);
                    encodedData.Limit(info.Offset + info.Size);
                    output.addChunk(encodedData, (int)info.Flags, info.PresentationTimeUs);
                    outputCount++;
                }

                encoder.ReleaseOutputBuffer(encoderStatus, false);

                if ((info.Flags & MediaCodec.BufferFlagEndOfStream) != 0)
                {
                    outputDone = true;
                    break; // out of while
                }
            }
        }
    }

    assertEquals("Frame count", NUM_FRAMES + 1, outputCount);
}
public void Decode(MediaCodec _Decoder, MediaExtractor extractor)
{
    Stopwatch s = new Stopwatch();
    s.Start();

    int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = _Decoder.GetInputBuffers();
    ByteBuffer[] outputBuffers = _Decoder.GetOutputBuffers();
    var mBufferInfo = new MediaCodec.BufferInfo();
    bool inputDone = false;
    var index = 0;

    try
    {
        while (true)
        {
            if (!inputDone)
            {
                int inputBufIndex = _Decoder.DequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0)
                {
                    ByteBuffer buffer = decoderInputBuffers[inputBufIndex];
                    int sampleSize = extractor.ReadSampleData(buffer, 0);
                    if (sampleSize < 0)
                    {
                        // Send an empty frame with the end-of-stream flag set. If we set
                        // EOS on a frame with data, that frame data will be ignored and
                        // the output will be short one frame.
                        _Decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                        inputDone = true;
                        Log.Info(TAG, "sent input EOS (with zero-length frame)");
                    }
                    else
                    {
                        Log.Info(TAG, "adding encoded video to decoder input");
                        _Decoder.QueueInputBuffer(inputBufIndex, 0, sampleSize, extractor.SampleTime, 0);
                        extractor.Advance();
                    }
                }
                else
                {
                    // Either all buffers are in use, or we timed out during initial setup.
                    Log.Warn(TAG, "input buffer not available");
                }
            }

            int decoderStatus = _Decoder.DequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
            {
                Log.Info(TAG, "no output available, spinning to await EOS");
            }
            else if (decoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
            {
                Log.Warn(TAG, "unexpected OutputBuffersChanged happened");
                outputBuffers = _Decoder.GetOutputBuffers();
            }
            else if (decoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
            {
                // Expected before receiving buffers, and should only happen once. A
                // muxing variant would add its track here; this decode-only path ignores it.
            }
            else if (decoderStatus < 0)
            {
                Log.Warn(TAG, "unexpected decoder status, ignoring");
            }
            else
            {
                ByteBuffer decodedData = outputBuffers[decoderStatus];
                if (decodedData == null)
                {
                    Log.Error(TAG, string.Format("outputBuffer {0} was null!!", decoderStatus));
                    throw new RuntimeException(string.Format("outputBuffer {0} was null!!", decoderStatus));
                }

                if ((mBufferInfo.Flags & MediaCodecBufferFlags.CodecConfig) != 0)
                {
                    // Codec config data; nothing to render.
                    mBufferInfo.Size = 0;
                }

                if (mBufferInfo.Size != 0)
                {
                    try
                    {
                        // Convert the raw output to a YuvImage. (Earlier experiments that
                        // copied the buffer to arrays, decoded Bitmaps, and wrote PNGs to
                        // disk were removed.)
                        YuvImage yuv = Utils.GetYUVImage(decodedData, _CameraColorFormat, _Width, _Height);
                        // The image might still need color correction / NV21<->NV12 swapping later.
                        index++;
                    }
                    catch (Exception)
                    {
                        //Log("iDecodeActivity", "Error while creating bitmap with: ");
                    }
                }

                // Release every dequeued buffer, even empty ones, or the codec will
                // eventually run out of output buffers.
                _Decoder.ReleaseOutputBuffer(decoderStatus, false);

                if ((mBufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                {
                    Log.Info(TAG, "End of Stream Reached!!");
                    break;
                }
            }
        }
        s.Stop();
        Log.Info("inner STOPWATCH!!!!:", string.Format("numberofframes = {0}, totaltime = {1}", index, s.ElapsedMilliseconds));
    }
    catch (Exception e)
    {
        Log.Error(TAG, "Decode or Muxer failed", e, e.Message);
        throw;
    }
}
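// Utils.GetYUVImage isn't shown in the source. A minimal sketch of what it
// plausibly does: copy the codec's output ByteBuffer into a managed array and
// wrap it in a YuvImage with the camera's color format.
public static YuvImage GetYUVImage(ByteBuffer buffer, ImageFormatType colorFormat, int width, int height)
{
    byte[] data = new byte[buffer.Remaining()];
    buffer.Get(data);
    // Passing null strides lets YuvImage compute the defaults for the format.
    return new YuvImage(data, colorFormat, width, height, null);
}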