/// <summary>
/// Renders the interesting keys of an audio track's <see cref="MediaFormat"/>
/// (mime, channels, bitrate, duration, sample rate) as localized lines of text.
/// Returns a bare newline when no format is available.
/// </summary>
private static String printAudioMetadata(Context context, MediaFormat mediaFormat)
{
    if (mediaFormat == null)
    {
        return "\n";
    }

    var sb = new StringBuilder();

    // Each key is optional; only present keys produce an output line.
    if (mediaFormat.ContainsKey(MediaFormat.KeyMime))
    {
        sb.AppendLine(context.GetString(Resource.String.stats_mime_type, mediaFormat.GetString(MediaFormat.KeyMime)));
    }
    if (mediaFormat.ContainsKey(MediaFormat.KeyChannelCount))
    {
        sb.AppendLine(context.GetString(Resource.String.stats_channel_count, mediaFormat.GetInteger(MediaFormat.KeyChannelCount)));
    }
    if (mediaFormat.ContainsKey(MediaFormat.KeyBitRate))
    {
        sb.AppendLine(context.GetString(Resource.String.stats_bitrate, mediaFormat.GetInteger(MediaFormat.KeyBitRate)));
    }
    if (mediaFormat.ContainsKey(MediaFormat.KeyDuration))
    {
        // Duration is a 64-bit key, unlike the other integer keys above.
        sb.AppendLine(context.GetString(Resource.String.stats_duration, mediaFormat.GetLong(MediaFormat.KeyDuration)));
    }
    if (mediaFormat.ContainsKey(MediaFormat.KeySampleRate))
    {
        sb.AppendLine(context.GetString(Resource.String.stats_sampling_rate, mediaFormat.GetInteger(MediaFormat.KeySampleRate)));
    }

    return sb.ToString();
}
/// <summary>
/// Reads an integer property from the AVC track format of the media at <paramref name="uri"/>.
/// </summary>
/// <param name="uri">Media location handed to <c>MediaExtractor.SetDataSource</c>.</param>
/// <param name="key">MediaFormat key to look up.</param>
/// <param name="defaultValue">Returned when the source cannot be opened, no AVC track exists,
/// or the key is absent.</param>
/// <returns>The property value, or <paramref name="defaultValue"/>.</returns>
public static int GetMediaFormatPropertyInteger(Uri uri, string key, int defaultValue)
{
    int value = defaultValue;
    MediaExtractor extractor = new MediaExtractor();
    try
    {
        extractor.SetDataSource(uri.ToString());
        MediaFormat format = GetTrackFormat(extractor, MIME_TYPE_AVC);
        // BUG FIX: the original dereferenced `format` without a null check,
        // throwing NullReferenceException when no AVC track was found.
        if (format != null && format.ContainsKey(key))
        {
            value = format.GetInteger(key);
        }
    }
    catch (System.Exception)
    {
        // Source could not be opened/parsed; fall through to the default.
    }
    finally
    {
        // BUG FIX: the original leaked the extractor when SetDataSource threw
        // (it returned before reaching Release()).
        extractor.Release();
    }
    return value;
}
/// <summary>
/// Reads a long value from <paramref name="mediaFormat"/>, or -1 when the key is absent.
/// </summary>
private long getLong(MediaFormat mediaFormat, string key)
{
    return mediaFormat.ContainsKey(key) ? mediaFormat.GetLong(key) : -1;
}
/// <summary>
/// Reads an integer value from <paramref name="mediaFormat"/>, or
/// <paramref name="defaultValue"/> when the key is absent.
/// </summary>
private int getInt(MediaFormat mediaFormat, string key, int defaultValue)
{
    return mediaFormat.ContainsKey(key) ? mediaFormat.GetInteger(key) : defaultValue;
}
/// <summary>
/// Builds a localized, human-readable report of per-track transformation statistics:
/// for every track, the source/target formats (printed by media type), the codecs used,
/// and the transformation duration.
/// </summary>
/// <param name="context">Context used to resolve localized resource strings.</param>
/// <param name="stats">Per-track transformation info; may be null or empty.</param>
/// <returns>The report, or a "no stats" message when <paramref name="stats"/> is null/empty.</returns>
public static Java.Lang.String printTransformationStats(Context context, IList<TrackTransformationInfo> stats)
{
    if (stats == null || !stats.Any())
    {
        return new Java.Lang.String(context.GetString(Resource.String.no_transformation_stats));
    }

    StringBuilder sb = new StringBuilder();
    for (int track = 0; track < stats.Count; track++)
    {
        sb.AppendLine(context.GetString(Resource.String.stats_track, track));

        TrackTransformationInfo info = stats[track];
        MediaFormat sourceFormat = info.SourceFormat;

        // BUG FIX: the original called sourceFormat.ContainsKey(...) before any null
        // check and would throw NullReferenceException when SourceFormat was null.
        String mimeType = null;
        if (sourceFormat != null && sourceFormat.ContainsKey(MediaFormat.KeyMime))
        {
            mimeType = sourceFormat.GetString(MediaFormat.KeyMime);
        }

        // Pick the printer for this track's media type; unknown/missing mime
        // falls back to the generic printer (the print* helpers all accept null).
        String sourceText;
        String targetText;
        if (mimeType != null && mimeType.StartsWith("video"))
        {
            sourceText = printVideoMetadata(context, sourceFormat);
            targetText = printVideoMetadata(context, info.TargetFormat);
        }
        else if (mimeType != null && mimeType.StartsWith("audio"))
        {
            sourceText = printAudioMetadata(context, sourceFormat);
            targetText = printAudioMetadata(context, info.TargetFormat);
        }
        else if (mimeType != null && mimeType.StartsWith("image"))
        {
            sourceText = printImageMetadata(context, sourceFormat);
            targetText = printImageMetadata(context, info.TargetFormat);
        }
        else
        {
            sourceText = printGenericMetadata(context, sourceFormat);
            targetText = printGenericMetadata(context, info.TargetFormat);
        }

        sb.AppendLine(context.GetString(Resource.String.stats_source_format))
          .AppendLine(sourceText)
          .AppendLine(context.GetString(Resource.String.stats_target_format))
          .AppendLine(targetText);

        sb.AppendLine(context.GetString(Resource.String.stats_decoder, info.DecoderCodec))
          .AppendLine(context.GetString(Resource.String.stats_encoder, info.EncoderCodec))
          .AppendLine(context.GetString(Resource.String.stats_transformation_duration, info.Duration))
          .AppendLine("\n");
    }
    return new Java.Lang.String(sb.ToString());
}
/// <summary>
/// Renders the mime type of an arbitrary <see cref="MediaFormat"/> as a localized line.
/// Returns a bare newline when the format is null or carries no mime key.
/// </summary>
private static String printGenericMetadata(Context context, MediaFormat mediaFormat)
{
    if (mediaFormat != null && mediaFormat.ContainsKey(MediaFormat.KeyMime))
    {
        return context.GetString(Resource.String.stats_mime_type, mediaFormat.GetString(MediaFormat.KeyMime));
    }
    return "\n";
}
/// <summary>
/// Renders the mime type and dimensions of an image track's <see cref="MediaFormat"/>
/// as localized lines. Returns a bare newline when no format is available.
/// </summary>
private static String printImageMetadata(Context context, MediaFormat mediaFormat)
{
    if (mediaFormat == null)
    {
        return "\n";
    }

    var sb = new StringBuilder();

    // Only keys actually present in the format produce output lines.
    if (mediaFormat.ContainsKey(MediaFormat.KeyMime))
    {
        sb.AppendLine(context.GetString(Resource.String.stats_mime_type, mediaFormat.GetString(MediaFormat.KeyMime)));
    }
    if (mediaFormat.ContainsKey(MediaFormat.KeyWidth))
    {
        sb.AppendLine(context.GetString(Resource.String.stats_width, mediaFormat.GetInteger(MediaFormat.KeyWidth)));
    }
    if (mediaFormat.ContainsKey(MediaFormat.KeyHeight))
    {
        sb.AppendLine(context.GetString(Resource.String.stats_height, mediaFormat.GetInteger(MediaFormat.KeyHeight)));
    }

    return sb.ToString();
}
/// <summary>
/// Renders the interesting keys of a video track's <see cref="MediaFormat"/>
/// (mime, dimensions, bitrate, duration, frame rate, key-frame interval, rotation)
/// as localized lines. Returns a bare newline when no format is available.
/// </summary>
private static String printVideoMetadata(Context context, MediaFormat mediaFormat)
{
    if (mediaFormat == null)
    {
        return "\n";
    }

    var sb = new StringBuilder();

    // Appends one localized line for an integer-valued key, when present.
    void AppendInt(string key, int resId)
    {
        if (mediaFormat.ContainsKey(key))
        {
            sb.AppendLine(context.GetString(resId, mediaFormat.GetInteger(key)));
        }
    }

    if (mediaFormat.ContainsKey(MediaFormat.KeyMime))
    {
        sb.AppendLine(context.GetString(Resource.String.stats_mime_type, mediaFormat.GetString(MediaFormat.KeyMime)));
    }
    AppendInt(MediaFormat.KeyWidth, Resource.String.stats_width);
    AppendInt(MediaFormat.KeyHeight, Resource.String.stats_height);
    AppendInt(MediaFormat.KeyBitRate, Resource.String.stats_bitrate);
    if (mediaFormat.ContainsKey(MediaFormat.KeyDuration))
    {
        // Duration is a 64-bit key, unlike the integer keys handled by AppendInt.
        sb.AppendLine(context.GetString(Resource.String.stats_duration, mediaFormat.GetLong(MediaFormat.KeyDuration)));
    }
    if (mediaFormat.ContainsKey(MediaFormat.KeyFrameRate))
    {
        // Frame rate may be stored as int or float; MediaFormatUtils normalizes it.
        sb.AppendLine(context.GetString(Resource.String.stats_frame_rate, MediaFormatUtils.GetFrameRate(mediaFormat, new Java.Lang.Integer(0)).IntValue()));
    }
    if (mediaFormat.ContainsKey(MediaFormat.KeyIFrameInterval))
    {
        sb.AppendLine(context.GetString(Resource.String.stats_key_frame_interval, MediaFormatUtils.GetIFrameInterval(mediaFormat, new Java.Lang.Integer(0)).IntValue()));
    }
    AppendInt(KEY_ROTATION, Resource.String.stats_rotation);

    return sb.ToString();
}
/// <summary>
/// Opens the media at <paramref name="inputUri"/> (preferred) or
/// <paramref name="filepath"/> and returns the <see cref="MediaFormat"/> of the
/// first audio track, or null when no audio track exists.
/// </summary>
/// <param name="filepath">Optional file path; used when <paramref name="inputUri"/> is null.</param>
/// <param name="inputUri">Optional content Uri; takes precedence over the path.</param>
/// <returns>The first audio track's format, or null.</returns>
public static MediaFormat GetAudioTrackFormat(string filepath, Android.Net.Uri inputUri = null)
{
    MediaExtractor extractor = new MediaExtractor();
    try
    {
        if (inputUri != null)
        {
            extractor.SetDataSource(Android.App.Application.Context, inputUri, null);
        }
        else if (filepath != null)
        {
            extractor.SetDataSource(filepath);
        }

        int trackCount = extractor.TrackCount;
        for (int i = 0; i < trackCount; i++)
        {
            MediaFormat format = extractor.GetTrackFormat(i);
            string mime = format.GetString(MediaFormat.KeyMime);
            // BUG FIX: guard against a missing mime key (GetString returns null).
            if (mime != null && mime.StartsWith("audio/"))
            {
                extractor.SelectTrack(i);
                return format;
            }
        }
        return null;
    }
    finally
    {
        // BUG FIX: the original never released the extractor, leaking the
        // underlying native resources on every call. The returned MediaFormat
        // remains valid after the extractor is released.
        // (The original also computed a KeyMaxInputSize-based bufferSize that
        // was never used; that dead code is removed.)
        extractor.Release();
    }
}
/// <summary>
/// Copies the selected audio/video tracks of <paramref name="srcPath"/> into
/// <paramref name="dstPath"/> between <paramref name="startMicroSeconds"/> and
/// <paramref name="endMicroSeconds"/> by remuxing samples (no re-encode).
/// </summary>
/// <param name="srcPath">Source media file path.</param>
/// <param name="dstPath">Destination MP4 path.</param>
/// <param name="startMicroSeconds">Trim start in microseconds; 0 means from the beginning.</param>
/// <param name="endMicroSeconds">Trim end in microseconds; values &lt;= 0 mean no end limit.</param>
/// <param name="useAudio">Whether audio tracks are copied.</param>
/// <param name="useVideo">Whether video tracks are copied.</param>
/// <returns>true on success; false when the source is malformed.</returns>
/// <exception cref="InvalidParameterException">When start equals end.</exception>
private static bool genVideoUsingMuxer(String srcPath, String dstPath, long startMicroSeconds, long endMicroSeconds, bool useAudio, bool useVideo)
{
    if (startMicroSeconds == endMicroSeconds)
    {
        // FIX: replaced the original unprofessional exception message.
        throw new InvalidParameterException("End time must be greater than start time.");
    }

    // Set up MediaExtractor to read from the source.
    MediaExtractor extractor = new MediaExtractor();
    extractor.SetDataSource(srcPath);
    int trackCount = extractor.TrackCount;

    // Set up MediaMuxer for the destination.
    var muxer = new MediaMuxer(dstPath, MediaMuxer.OutputFormat.MuxerOutputMpeg4);

    // Select the requested tracks and determine the max input buffer size.
    Dictionary<int, int> indexMap = new Dictionary<int, int>(trackCount);
    int bufferSize = -1;
    for (int i = 0; i < trackCount; i++)
    {
        MediaFormat format = extractor.GetTrackFormat(i);
        String mime = format.GetString(MediaFormat.KeyMime);
        bool selectCurrentTrack = false;
        if (mime.StartsWith("audio/") && useAudio)
        {
            selectCurrentTrack = true;
        }
        else if (mime.StartsWith("video/") && useVideo)
        {
            selectCurrentTrack = true;
        }
        if (selectCurrentTrack)
        {
            extractor.SelectTrack(i);
            int dstIndex = muxer.AddTrack(format);
            indexMap.Add(i, dstIndex);
            if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
            {
                int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                bufferSize = newSize > bufferSize ? newSize : bufferSize;
            }
        }
    }
    if (bufferSize < 0)
    {
        bufferSize = DEFAULT_BUFFER_SIZE;
    }

    // Propagate the source rotation so players orient the output correctly,
    // and seek the extractor to the trim start.
    MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
    try
    {
        retrieverSrc.SetDataSource(srcPath);
        String degreesString = retrieverSrc.ExtractMetadata(MediaMetadataRetriever.MetadataKeyVideoRotation);
        if (degreesString != null)
        {
            int degrees = Integer.ParseInt(degreesString);
            if (degrees >= 0)
            {
                muxer.SetOrientationHint(degrees);
            }
        }
    }
    finally
    {
        // BUG FIX: the retriever was never released in the original.
        retrieverSrc.Release();
    }
    if (startMicroSeconds > 0)
    {
        extractor.SeekTo(startMicroSeconds, MediaExtractor.SeekToClosestSync);
    }

    // Copy samples from MediaExtractor to MediaMuxer until EOS or the trim end.
    int offset = 0;
    int trackIndex = -1;
    ByteBuffer dstBuf = ByteBuffer.Allocate(bufferSize);
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    try
    {
        muxer.Start();
        while (true)
        {
            bufferInfo.Offset = offset;
            bufferInfo.Size = extractor.ReadSampleData(dstBuf, offset);
            if (bufferInfo.Size < 0)
            {
                Log.Info(LOGTAG, "Saw input EOS.");
                bufferInfo.Size = 0;
                break;
            }
            bufferInfo.PresentationTimeUs = extractor.SampleTime;
            if (endMicroSeconds > 0 && bufferInfo.PresentationTimeUs > endMicroSeconds)
            {
                Log.Info(LOGTAG, "The current sample is over the trim end time.");
                break;
            }
            // Map extractor sample flags onto codec buffer flags for the muxer.
            bufferInfo.Flags = GetSyncsampleflags(extractor.SampleFlags);
            trackIndex = extractor.SampleTrackIndex;
            muxer.WriteSampleData(indexMap[trackIndex], dstBuf, bufferInfo);
            extractor.Advance();
        }
        muxer.Stop();
    }
    catch (IllegalStateException)
    {
        // Swallow the exception due to malformed source.
        Log.Info(LOGTAG, "The source video file is malformed");
        return false;
    }
    finally
    {
        muxer.Release();
        // BUG FIX: the extractor was never released in the original.
        extractor.Release();
    }
    return true;
}
/// <summary>
/// Trims/remuxes the audio of the input media into <paramref name="muxer"/>.
/// If both inputPath string and inputUri are not null, this method will use the
/// Uri. Else, set one or the other. They cannot both be null.
/// The muxer is stopped/released afterwards only when no video encode is still
/// in progress (shared state via FileToMp4 statics).
/// </summary>
/// <param name="startMs">the start ms for trimming</param>
/// <param name="endMs">the final ms for trimming</param>
/// <param name="inputPath">optional input path string</param>
/// <param name="muxer">the muxer to use for writing bytes</param>
/// <param name="trackIndexOverride">the track index for muxer read/write to; -1 means use FileToMp4.LatestAudioTrackIndex</param>
/// <param name="bufferInfo">an input bufferinfo to get properties from</param>
/// <param name="outputPath">the output path for method to check after finished encoding</param>
/// <param name="ptOffset">the presentation time offset for audio, used in syncing audio and video</param>
/// <param name="inputUri">optional inputUri to read from</param>
/// <returns>the output path when the output file exists afterwards; otherwise null</returns>
public async Task<string> HybridMuxingTrimmer(int startMs, int endMs, string inputPath, MediaMuxer muxer, int trackIndexOverride = -1, BufferInfo bufferInfo = null, string outputPath = null, long ptOffset = 0, Android.Net.Uri inputUri = null)
{
    var tio = trackIndexOverride;
    await Task.Run(() =>
    {
        if (outputPath == null) { outputPath = FileToMp4.LatestOutputPath; }

        // Open the source: Uri takes precedence over the path.
        MediaExtractor ext = new MediaExtractor();
        if (inputUri != null)
        {
            ext.SetDataSource(Android.App.Application.Context, inputUri, null);
        }
        else
        {
            ext.SetDataSource(inputPath);
        }
        // NOTE(review): the extractor is never released in this method — looks
        // like a native-resource leak; confirm before changing.

        // Select only audio tracks; video is handled elsewhere.
        int trackCount = ext.TrackCount;
        Dictionary<int, int> indexDict = new Dictionary<int, int>(trackCount);
        int bufferSize = -1;
        for (int i = 0; i < trackCount; i++)
        {
            MediaFormat format = ext.GetTrackFormat(i);
            string mime = format.GetString(MediaFormat.KeyMime);
            bool selectCurrentTrack = false;
            if (mime.StartsWith("audio/")) { selectCurrentTrack = true; }
            else if (mime.StartsWith("video/")) { selectCurrentTrack = false; } /*rerouted to gl video encoder*/
            if (selectCurrentTrack)
            {
                ext.SelectTrack(i);
                // Identity mapping is only recorded when a track override is given.
                if (tio != -1) { indexDict.Add(i, i); }
                // Track the largest declared input size to size the copy buffer.
                if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                {
                    int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                    bufferSize = newSize > bufferSize ? newSize : bufferSize;
                }
            }
        }
        // NOTE(review): if no track declares KeyMaxInputSize, bufferSize stays -1
        // and ByteBuffer.Allocate(-1) below would throw — confirm callers always
        // provide sources with this key.

        // Read the rotation metadata (currently informational only).
        MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
        if (!System.String.IsNullOrWhiteSpace(inputPath))
        {
            retrieverSrc.SetDataSource(inputPath);
        }
        else
        {
            retrieverSrc.SetDataSource(Android.App.Application.Context, inputUri);
        }
        string degreesString = retrieverSrc.ExtractMetadata(MetadataKey.VideoRotation);
        if (degreesString != null) // unused ATM but will be useful for stabilized videoview in streaming
        {
            int degrees = int.Parse(degreesString);
            if (degrees >= 0)
            {
                /* muxer.SetOrientationHint(degrees); */
                //muxer won't accept this param once started
            }
        }

        if (startMs > 0)
        {
            // NOTE(review): startMs * 1000 is 32-bit arithmetic; overflows past
            // ~35 minutes — confirm expected input ranges.
            ext.SeekTo(startMs * 1000, MediaExtractorSeekTo.ClosestSync);
        }

        int offset = 0;
        if (bufferInfo == null) { bufferInfo = new MediaCodec.BufferInfo(); }
        ByteBuffer dstBuf = ByteBuffer.Allocate(bufferSize);
        long us = endMs * 1000;      // trim end in microseconds (same 32-bit multiply caveat as above)
        long uo = us + ptOffset;     // NOTE(review): computed but never read below
        int cf = 0;                  // frame counter used to throttle progress events
        try
        {
            FileToMp4.AudioEncodingInProgress = true;
            while (true)
            {
                bufferInfo.Offset = offset;
                bufferInfo.Size = ext.ReadSampleData(dstBuf, offset);
                if (bufferInfo.Size < 0)
                {
                    // End of stream.
                    bufferInfo.Size = 0;
                    break;
                }
                else
                {
                    cf++;
                    // Shift audio timestamps by ptOffset to sync with video.
                    bufferInfo.PresentationTimeUs = ext.SampleTime + ptOffset;
                    if (ext.SampleTime >= us) { break; } //out of while
                    else
                    {
                        bufferInfo.Flags = MFlags2MCodecBuff(ext.SampleFlags);
                        if (tio == -1)
                        {
                            muxer.WriteSampleData(FileToMp4.LatestAudioTrackIndex, dstBuf, bufferInfo);
                        }
                        else
                        {
                            muxer.WriteSampleData(tio, dstBuf, bufferInfo);
                        }
                        if (cf >= 240) //only send the muxer eventargs once every x frames to reduce CPU load
                        {
                            Notify(ext.SampleTime, us);
                            cf = 0;
                        }
                    }
                    ext.Advance();
                }
            }
        }
        catch (Java.Lang.IllegalStateException e)
        {
            // Malformed source: report completion-with-error through the Progress event.
            this.Progress.Invoke(new MuxerEventArgs(ext.SampleTime, us, null, true, true));
            Console.WriteLine("The source video file is malformed");
        }
        catch (Java.Lang.Exception ex)
        {
            // Any other Java-side failure: same error signalling path.
            this.Progress.Invoke(new MuxerEventArgs(ext.SampleTime, us, null, true, true));
            Console.WriteLine(ex.Message);
        }
        if (AppSettings.Logging.SendToConsole)
        {
            System.Console.WriteLine($"DrainEncoder audio finished @ {bufferInfo.PresentationTimeUs}");
        }
    });
    FileToMp4.AudioEncodingInProgress = false;

    // Finalize the muxer only if the video side is already done; otherwise the
    // video encoder is expected to stop/release it.
    try
    {
        if (!FileToMp4.VideoEncodingInProgress)
        {
            muxer.Stop();
            muxer.Release();
            muxer = null;
        }
    }
    catch (Java.Lang.Exception ex)
    {
        Log.Debug("MuxingEncoder", ex.Message);
    }

    // Success is defined as the output file existing on disk.
    if (outputPath != null)
    {
        var success = System.IO.File.Exists(outputPath);
        if (success)
        {
            this.Progress.Invoke(new MuxerEventArgs(endMs * 1000, endMs, outputPath, true));
            return outputPath;
        }
    }
    return null; //nothing to look for
}
/// <summary>
/// Trims the media at <paramref name="inputPath"/> to the window
/// [startMS, startMS + lengthMS) and writes the result to
/// <paramref name="outputPath"/> by remuxing samples (no re-encode).
/// </summary>
/// <param name="startMS">Trim start in milliseconds; 0 means from the beginning.</param>
/// <param name="lengthMS">Trim length in milliseconds; values &lt;= 0 mean copy to the end.</param>
/// <param name="inputPath">Source media file path.</param>
/// <param name="outputPath">Destination MP4 path.</param>
/// <returns>A task resolving to true on success, false on any failure.</returns>
public Task<bool> TrimAsync(int startMS, int lengthMS, string inputPath, string outputPath)
{
    return Task.Run<bool>(() =>
    {
        try
        {
            bool didOperationSucceed = false;

            // Set up MediaExtractor to read from the source.
            MediaExtractor extractor = new MediaExtractor();
            extractor.SetDataSource(inputPath);
            int trackCount = extractor.TrackCount;

            // Set up MediaMuxer for the destination.
            MediaMuxer muxer = new MediaMuxer(outputPath, MuxerOutputType.Mpeg4);

            // Select all audio/video tracks and find the largest declared input size.
            Dictionary<int, int> indexDict = new Dictionary<int, int>(trackCount);
            int bufferSize = -1;
            for (int i = 0; i < trackCount; i++)
            {
                MediaFormat format = extractor.GetTrackFormat(i);
                string mime = format.GetString(MediaFormat.KeyMime);
                bool selectCurrentTrack = mime.StartsWith("audio/") || mime.StartsWith("video/");
                if (selectCurrentTrack)
                {
                    extractor.SelectTrack(i);
                    int dstIndex = muxer.AddTrack(format);
                    indexDict.Add(i, dstIndex);
                    if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                    {
                        int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                        bufferSize = newSize > bufferSize ? newSize : bufferSize;
                    }
                }
            }
            if (bufferSize < 0)
            {
                // Fallback when no track declares KeyMaxInputSize (rare in practice).
                bufferSize = 1337;
            }

            // Propagate the source rotation so players orient the output correctly.
            MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
            retrieverSrc.SetDataSource(inputPath);
            string degreesString = retrieverSrc.ExtractMetadata(MetadataKey.VideoRotation);
            if (degreesString != null)
            {
                int degrees = int.Parse(degreesString);
                if (degrees >= 0)
                {
                    muxer.SetOrientationHint(degrees);
                }
            }

            if (startMS > 0)
            {
                // BUG FIX: 1000L forces 64-bit arithmetic; the original int multiply
                // overflowed for start times beyond ~35 minutes.
                extractor.SeekTo(startMS * 1000L, MediaExtractorSeekTo.ClosestSync);
            }

            // Copy samples from MediaExtractor to MediaMuxer until EOS or the trim end.
            int offset = 0;
            int trackIndex = -1;
            ByteBuffer dstBuf = ByteBuffer.Allocate(bufferSize);
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            // BUG FIX: long arithmetic here too (see the SeekTo fix above).
            long endUs = (startMS + lengthMS - 1) * 1000L;
            try
            {
                muxer.Start();
                while (true)
                {
                    bufferInfo.Offset = offset;
                    bufferInfo.Size = extractor.ReadSampleData(dstBuf, offset);
                    if (bufferInfo.Size < 0)
                    {
                        // End of stream.
                        bufferInfo.Size = 0;
                        break;
                    }
                    bufferInfo.PresentationTimeUs = extractor.SampleTime;
                    if (lengthMS > 0 && bufferInfo.PresentationTimeUs > endUs)
                    {
                        Console.WriteLine("The current sample is over the trim end time.");
                        break;
                    }
                    bufferInfo.Flags = ConvertMediaExtractorSampleFlagsToMediaCodecBufferFlags(extractor.SampleFlags);
                    trackIndex = extractor.SampleTrackIndex;
                    muxer.WriteSampleData(indexDict[trackIndex], dstBuf, bufferInfo);
                    extractor.Advance();
                }
                muxer.Stop();
                didOperationSucceed = true;
            }
            catch (IllegalStateException)
            {
                // Swallow the exception due to malformed source.
                Console.WriteLine("The source video file is malformed");
            }
            finally
            {
                muxer.Release();
                // BUG FIX: the extractor was never released in the original.
                extractor.Release();
            }
            return didOperationSucceed;
        }
        catch (System.Exception ex)
        {
            // FIX: log instead of silently swallowing every failure.
            Console.WriteLine(ex.Message);
            return false;
        }
    });
}
/**
 * Converts the decoded frame stored in mDecodedVideo[k] to NV21
 * (full-size Y plane followed by interleaved VU), rewriting
 * mDecodedVideo[k] in place with the converted buffer.
 */
private void convertToNV21(int k)
{
    // NV21 needs 1.5 bytes per pixel: mSize luma + mSize/2 chroma.
    byte[] buffer = new byte[3 * mSize / 2];

    // Default layout assumptions; overridden below from the decoder's
    // reported output format when available.
    int stride = mWidth, sliceHeight = mHeight;
    int colorFormat = mDecoderColorFormat;
    bool planar = false;

    if (mDecOutputFormat != null)
    {
        MediaFormat format = mDecOutputFormat;
        if (format != null)
        {
            // Some decoders pad rows (stride) and planes (slice-height);
            // clamp to at least the frame dimensions.
            if (format.ContainsKey("slice-height"))
            {
                sliceHeight = format.GetInteger("slice-height");
                if (sliceHeight < mHeight)
                {
                    sliceHeight = mHeight;
                }
            }
            if (format.ContainsKey("stride"))
            {
                stride = format.GetInteger("stride");
                if (stride < mWidth)
                {
                    stride = mWidth;
                }
            }
            if (format.ContainsKey(MediaFormat.KeyColorFormat) && format.GetInteger(MediaFormat.KeyColorFormat) > 0)
            {
                colorFormat = format.GetInteger(MediaFormat.KeyColorFormat);
            }
        }
    }

    // Classify the decoder's color format: semi-planar (NV12-style, UV
    // interleaved) vs planar (I420-style, separate U and V planes).
    switch ((MediaCodecCapabilities)colorFormat)
    {
        case MediaCodecCapabilities.Formatyuv420semiplanar:
        case MediaCodecCapabilities.Formatyuv420packedsemiplanar:
        case MediaCodecCapabilities.TiFormatyuv420packedsemiplanar:
            planar = false;
            break;
        case MediaCodecCapabilities.Formatyuv420planar:
        case MediaCodecCapabilities.Formatyuv420packedplanar:
            planar = true;
            break;
    }

    // Copy the Y plane, skipping the per-row stride padding.
    // NOTE(review): indexes source and destination with the same i, so luma
    // lands at stride-based offsets in the destination too — assumes callers
    // expect that layout; confirm.
    for (int i = 0; i < mSize; i++)
    {
        if (i % mWidth == 0)
        {
            i += stride - mWidth;
        }
        buffer[i] = mDecodedVideo[k][i];
    }

    if (!planar)
    {
        // Semi-planar source: chroma is already interleaved after the Y plane;
        // swap byte order per pair to produce NV21's VU ordering.
        // NOTE(review): `i % mWidth / 2 == 0` parses as (i % mWidth) / 2 == 0,
        // i.e. true when the remainder is 0 or 1 — possibly intended as
        // i % (mWidth / 2) == 0; preserved as-is.
        for (int i = 0, j = 0; j < mSize / 4; i += 1, j += 1)
        {
            if (i % mWidth / 2 == 0)
            {
                i += (stride - mWidth) / 2;
            }
            buffer[mSize + 2 * j + 1] = mDecodedVideo[k][stride * sliceHeight + 2 * i];
            buffer[mSize + 2 * j] = mDecodedVideo[k][stride * sliceHeight + 2 * i + 1];
        }
    }
    else
    {
        // Planar source: U plane starts at stride*sliceHeight, V plane at
        // stride*sliceHeight*5/4; interleave them as VU pairs.
        // NOTE(review): same (i % mWidth) / 2 precedence caveat as above.
        for (int i = 0, j = 0; j < mSize / 4; i += 1, j += 1)
        {
            if (i % mWidth / 2 == 0)
            {
                i += (stride - mWidth) / 2;
            }
            buffer[mSize + 2 * j + 1] = mDecodedVideo[k][stride * sliceHeight + i];
            buffer[mSize + 2 * j] = mDecodedVideo[k][stride * sliceHeight * 5 / 4 + i];
        }
    }

    // Replace the raw decoder output with the NV21 conversion.
    mDecodedVideo[k] = buffer;
}