partial void InitializeImpl()
{
    // Probes the media file's audio track and copies its settings
    // (channel count, sample rate, duration) into this sound source.
    using (var inputFile = new Java.IO.File(mediaDataUrl))
    {
        if (!inputFile.CanRead())
        {
            throw new Exception(string.Format("Unable to read: {0} ", inputFile.AbsolutePath));
        }

        using (var inputFileStream = new Java.IO.FileInputStream(inputFile.AbsolutePath))
        {
            var audioMediaExtractor = new MediaExtractor();
            try
            {
                // Only [startPosition, startPosition + length) of the file is read,
                // allowing the media to live inside a larger bundle file.
                audioMediaExtractor.SetDataSource(inputFileStream.FD, startPosition, length);
                var trackIndexAudio = StreamedBufferSoundSource.FindAudioTrack(audioMediaExtractor);
                if (trackIndexAudio < 0)
                {
                    return;
                }

                audioMediaExtractor.SelectTrack(trackIndexAudio);
                var audioFormat = audioMediaExtractor.GetTrackFormat(trackIndexAudio);

                //Get the audio settings
                //should we override the settings (channels, sampleRate, ...) from DynamicSoundSource?
                Channels = audioFormat.GetInteger(MediaFormat.KeyChannelCount);
                SampleRate = audioFormat.GetInteger(MediaFormat.KeySampleRate);
                MediaDuration = TimeSpanExtensions.FromMicroSeconds(audioFormat.GetLong(MediaFormat.KeyDuration));
            }
            finally
            {
                // BUG FIX: the extractor is only needed to probe track metadata;
                // release its native resources (they were previously leaked).
                audioMediaExtractor.Release();
            }
        }
    }
}
private void FeedClipToEncoder(SamplerClip clip)
{
    // Decodes one clip through a MediaCodec H.264 decoder onto mOutputSurface
    // and resamples it into the encoder via ResampleVideo. All codec/extractor
    // resources are released in 'finally'.
    mLastSampleTime = 0;

    MediaCodec decoder = null;
    MediaExtractor extractor = SetupExtractorForClip(clip);
    if (extractor == null)
    {
        return;
    }

    // NOTE(review): GetVideoTrackIndex could return a negative index, which
    // would make SelectTrack throw — confirm callers guarantee a video track.
    int trackIndex = GetVideoTrackIndex(extractor);
    extractor.SelectTrack(trackIndex);
    MediaFormat clipFormat = extractor.GetTrackFormat(trackIndex);

    // Seek to the clip start (ms -> µs) and snap the clip's start time back
    // to the sync sample the extractor actually landed on (µs -> ms).
    if (clip.getStartTime() != -1)
    {
        extractor.SeekTo(clip.getStartTime() * 1000, MediaExtractorSeekTo.PreviousSync);
        clip.setStartTime(extractor.SampleTime / 1000);
    }

    try
    {
        decoder = MediaCodec.CreateDecoderByType(MediaHelper.MIME_TYPE_AVC);
        mOutputSurface = new OutputSurface();
        decoder.Configure(clipFormat, mOutputSurface.Surface, null, 0);
        decoder.Start();
        ResampleVideo(extractor, decoder, clip);
    }
    catch (System.Exception e)
    {
        // BUG FIX: the exception was silently swallowed, making decode
        // failures undiagnosable. Log it; cleanup still runs in 'finally'.
        System.Diagnostics.Debug.WriteLine("FeedClipToEncoder failed: " + e);
    }
    finally
    {
        if (mOutputSurface != null)
        {
            mOutputSurface.Release();
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
        }
        if (extractor != null)
        {
            extractor.Release();
            extractor = null;
        }
    }
}
public void Initialize(IServiceRegistry services, string url, long startPosition, long length)
{
    // Opens the media file, selects the requested track (audio or video),
    // extracts its metadata and creates the matching MediaCodec decoder.
    // On any failure all acquired resources are released and the exception
    // is rethrown.
    if (isInitialized)
    {
        return;
    }

    try
    {
        inputFile = new Java.IO.File(url);
        if (!inputFile.CanRead())
        {
            throw new Exception(string.Format("Unable to read: {0} ", inputFile.AbsolutePath));
        }

        inputFileDescriptor = new Java.IO.FileInputStream(inputFile);

        // ===================================================================================================
        // Initialize the media extractor over [startPosition, startPosition + length)
        mediaExtractor = new MediaExtractor();
        mediaExtractor.SetDataSource(inputFileDescriptor.FD, startPosition, length);

        var videoTrackIndex = FindTrack(mediaExtractor, MediaType.Video);
        var audioTrackIndex = FindTrack(mediaExtractor, MediaType.Audio);

        HasAudioTrack = audioTrackIndex >= 0;

        mediaTrackIndex = MediaType == MediaType.Audio ? audioTrackIndex : videoTrackIndex;
        if (mediaTrackIndex < 0)
        {
            // BUG FIX: was string.Format($"..."), which re-formats an already
            // interpolated string and throws FormatException when the path
            // contains '{' or '}'.
            throw new Exception($"No {MediaType} track found in: {inputFile.AbsolutePath}");
        }

        mediaExtractor.SelectTrack(mediaTrackIndex);

        var trackFormat = mediaExtractor.GetTrackFormat(mediaTrackIndex);
        MediaDuration = TimeSpanExtensions.FromMicroSeconds(trackFormat.GetLong(MediaFormat.KeyDuration));

        ExtractMediaMetadata(trackFormat);

        // Create a MediaCodec mediadecoder, and configure it with the MediaFormat from the mediaExtractor
        // It's very important to use the format from the mediaExtractor because it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
        var mime = trackFormat.GetString(MediaFormat.KeyMime);
        MediaDecoder = MediaCodec.CreateDecoderByType(mime);
        MediaDecoder.Configure(trackFormat, decoderOutputSurface, null, 0);

        isInitialized = true;

        StartWorker();
    }
    catch (Exception)
    {
        Release();
        // BUG FIX: rethrow with 'throw;' instead of 'throw e;' so the original
        // stack trace is preserved.
        throw;
    }
}
MediaFormat SelectFirstVideoTrack()
{
    // Scan all tracks of the extractor and select the first one whose MIME
    // type is video; returns its format, or null when no video track exists.
    var trackCount = extractor.TrackCount;
    for (var trackIndex = 0; trackIndex < trackCount; trackIndex++)
    {
        var trackFormat = extractor.GetTrackFormat(trackIndex);
        var trackMime = trackFormat.GetString(MediaFormat.KeyMime);
        if (!trackMime.StartsWith("video/"))
        {
            continue;
        }

        extractor.SelectTrack(trackIndex);
        return trackFormat;
    }

    // No video track present.
    return null;
}
/**
 * Opens the media at 'path', reads the sample rate / channel count of its
 * first audio track, builds an AAC-LC codec-specific format and starts an
 * AAC decoder feeding the playback thread.
 *
 * @param path absolute path of the media file to play
 */
public void startPlay(string path)
{
    eosReceived = false;
    mExtractor = new MediaExtractor();
    try
    {
        mExtractor.SetDataSource(path);
    }
    catch (IOException e)
    {
        e.PrintStackTrace();
    }

    int channel = 0;
    for (int i = 0; i < mExtractor.TrackCount; i++)
    {
        MediaFormat format = mExtractor.GetTrackFormat(i);
        string mime = format.GetString(MediaFormat.KeyMime);
        if (mime.StartsWith("audio/"))
        {
            mExtractor.SelectTrack(i);
            Log.Debug("TAG", "format : " + format);

            // Dump the codec-specific data (csd-0) for debugging.
            ByteBuffer csd = format.GetByteBuffer("csd-0");
            if (csd != null) // csd-0 may be absent for some containers
            {
                for (int k = 0; k < csd.Capacity(); ++k)
                {
                    Log.Error("TAG", "csd : " + csd.ToArray<Byte>()[k]);
                }
            }

            mSampleRate = format.GetInteger(MediaFormat.KeySampleRate);
            channel = format.GetInteger(MediaFormat.KeyChannelCount);
            break;
        }
    }

    MediaFormat format2 = makeAACCodecSpecificData(MediaCodecInfo.CodecProfileLevel.AACObjectLC, mSampleRate, channel);
    if (format2 == null)
    {
        return;
    }

    // BUG FIX: use the C# binding names (CreateDecoderByType/Configure/Start),
    // check the decoder for null BEFORE configuring it, and configure with the
    // AAC-specific format we just built (format2) — the old code referenced the
    // loop-scoped extractor format instead.
    mDecoder = MediaCodec.CreateDecoderByType("audio/mp4a-latm");
    if (mDecoder == null)
    {
        Log.Error("DecodeActivity", "Can't find video info!");
        return;
    }

    mDecoder.Configure(format2, null, null, 0);
    mDecoder.Start();

    new Thread(AACDecoderAndPlayRunnable).Start();
}
/// <summary>
/// Android-specific media initialization: opens the file, selects its audio
/// track and creates + starts a configured MediaCodec audio decoder.
/// Copies the track's channel count, sample rate and duration into this
/// instance. Logs an error and releases everything if no audio track exists.
/// </summary>
partial void InitializeMediaExtractor(string mediaDataUrl, long startPosition, long length)
{
    if (mediaDataUrl == null)
    {
        throw new ArgumentNullException(nameof(mediaDataUrl));
    }

    // Drop any previously opened media before re-initializing.
    ReleaseMediaInternal();

    InputFile = new Java.IO.File(mediaDataUrl);
    if (!InputFile.CanRead())
    {
        throw new Exception(string.Format("Unable to read: {0} ", InputFile.AbsolutePath));
    }

    InputFileStream = new Java.IO.FileInputStream(InputFile.AbsolutePath);

    // The extractor reads only [startPosition, startPosition + length) of the
    // file, allowing the media to live inside a larger bundle file.
    audioMediaExtractor = new MediaExtractor();
    audioMediaExtractor.SetDataSource(InputFileStream.FD, startPosition, length);
    trackIndexAudio = FindAudioTrack(audioMediaExtractor);
    if (trackIndexAudio < 0)
    {
        ReleaseMediaInternal();
        Logger.Error($"The input file '{mediaDataUrl}' does not contain any audio track.");
        return;
    }

    audioMediaExtractor.SelectTrack(trackIndexAudio);
    var audioFormat = audioMediaExtractor.GetTrackFormat(trackIndexAudio);

    // Create the decoder from the track's own format so any codec-specific
    // data carried in the format is used for configuration.
    var mime = audioFormat.GetString(MediaFormat.KeyMime);
    audioMediaDecoder = MediaCodec.CreateDecoderByType(mime);
    audioMediaDecoder.Configure(audioFormat, null, null, 0);

    //Get the audio settings
    //should we override the settings (channels, sampleRate, ...) from DynamicSoundSource?
    Channels = audioFormat.GetInteger(MediaFormat.KeyChannelCount);
    SampleRate = audioFormat.GetInteger(MediaFormat.KeySampleRate);
    MediaDuration = TimeSpanExtensions.FromMicroSeconds(audioFormat.GetLong(MediaFormat.KeyDuration));

    audioMediaDecoder.Start();

    // Reset the extraction state machine for the new media.
    extractionOutputDone = false;
    extractionInputDone = false;
}
/// <summary>
/// Returns the MediaFormat of the first audio track of the given media
/// (by Uri when supplied, otherwise by file path), or null when the media
/// has no audio track.
/// </summary>
public static MediaFormat GetAudioTrackFormat(string filepath, Android.Net.Uri inputUri = null)
{
    MediaExtractor extractor = new MediaExtractor();
    try
    {
        if (inputUri != null)
        {
            extractor.SetDataSource(Android.App.Application.Context, inputUri, null);
        }
        else if (filepath != null)
        {
            extractor.SetDataSource(filepath);
        }

        int trackCount = extractor.TrackCount;
        int bufferSize = -1;
        for (int i = 0; i < trackCount; i++)
        {
            MediaFormat format = extractor.GetTrackFormat(i);
            string mime = format.GetString(MediaFormat.KeyMime);

            // Only audio tracks are of interest here.
            if (mime.StartsWith("audio/"))
            {
                extractor.SelectTrack(i);
                // Track the declared max input size (kept for parity with the
                // sibling muxing helpers, even though it is not returned).
                if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                {
                    int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                    bufferSize = newSize > bufferSize ? newSize : bufferSize;
                }
                return format;
            }
        }

        return null;
    }
    finally
    {
        // BUG FIX: release the extractor's native resources on every path
        // (they were previously leaked on each call). The returned MediaFormat
        // is an independent object and stays valid after release.
        extractor.Release();
    }
}
/**
 * Tests extraction from an MP4 to a series of PNG files.
 * <p>
 * We scale the video to 640x480 for the PNG just to demonstrate that we can scale the
 * video with the GPU. If the input video has a different aspect ratio, we could preserve
 * it by adjusting the GL viewport to get letterboxing or pillarboxing, but generally if
 * you're extracting frames you don't want black bars.
 */
public void extractMpegFrames(int saveWidth, int saveHeight)
{
    MediaCodec decoder = null;
    CodecOutputSurface outputSurface = null;
    MediaExtractor extractor = null;

    try
    {
        // INPUT_FILE must be an absolute path. The MediaExtractor error messages
        // aren't very useful, so check that the input file is readable up front
        // and throw a clearer exception if it isn't.
        File inputFile = new File(_filesdir, INPUT_FILE);
        if (!inputFile.CanRead())
        {
            throw new FileNotFoundException("Unable to read " + inputFile);
        }

        extractor = new MediaExtractor();
        extractor.SetDataSource(inputFile.ToString());
        int trackIndex = selectTrack(extractor);
        if (trackIndex < 0)
        {
            throw new RuntimeException("No video track found in " + inputFile);
        }
        extractor.SelectTrack(trackIndex);

        MediaFormat format = extractor.GetTrackFormat(trackIndex);
        if (VERBOSE)
        {
            Log.Info(TAG, "Video size is " + format.GetInteger(MediaFormat.KeyWidth) + "x" + format.GetInteger(MediaFormat.KeyHeight));
        }

        // Could use width/height from the MediaFormat to get full-size frames.
        outputSurface = new CodecOutputSurface(saveWidth, saveHeight);

        // Create a MediaCodec decoder, and configure it with the MediaFormat from the
        // extractor. It's very important to use the format from the extractor because
        // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
        String mime = format.GetString(MediaFormat.KeyMime);
        decoder = MediaCodec.CreateDecoderByType(mime);
        decoder.Configure(format, outputSurface.getSurface(), null, 0);
        decoder.Start();

        doExtract(extractor, trackIndex, decoder, outputSurface);
    }
    finally
    {
        // release everything we grabbed
        if (outputSurface != null)
        {
            outputSurface.release();
            outputSurface = null;
        }
        if (decoder != null)
        {
            decoder.Stop();
            decoder.Release();
            decoder = null;
        }
        if (extractor != null)
        {
            extractor.Release();
            extractor = null;
        }
    }
}
/// <summary>
/// Trims [startMicroSeconds, endMicroSeconds] out of srcPath into dstPath by
/// remuxing samples (no re-encode). Returns false when the source is malformed.
/// </summary>
private static bool genVideoUsingMuxer(String srcPath, String dstPath, long startMicroSeconds, long endMicroSeconds, bool useAudio, bool useVideo)
{
    if (startMicroSeconds == endMicroSeconds)
    {
        // BUG FIX: replaced the abusive exception message with a professional one.
        throw new InvalidParameterException("endMicroSeconds must be greater than startMicroSeconds.");
    }

    // Set up MediaExtractor to read from the source.
    MediaExtractor extractor = new MediaExtractor();
    try
    {
        extractor.SetDataSource(srcPath);
        int trackCount = extractor.TrackCount;

        // Set up MediaMuxer for the destination.
        var muxer = new MediaMuxer(dstPath, MediaMuxer.OutputFormat.MuxerOutputMpeg4);

        // Select the requested tracks, map source track index -> muxer track
        // index, and remember the largest declared input size so the copy
        // buffer is big enough for any sample.
        Dictionary<int, int> indexMap = new Dictionary<int, int>(trackCount);
        int bufferSize = -1;
        for (int i = 0; i < trackCount; i++)
        {
            MediaFormat format = extractor.GetTrackFormat(i);
            String mime = format.GetString(MediaFormat.KeyMime);
            bool selectCurrentTrack = false;
            if (mime.StartsWith("audio/") && useAudio)
            {
                selectCurrentTrack = true;
            }
            else if (mime.StartsWith("video/") && useVideo)
            {
                selectCurrentTrack = true;
            }

            if (selectCurrentTrack)
            {
                extractor.SelectTrack(i);
                int dstIndex = muxer.AddTrack(format);
                indexMap.Add(i, dstIndex);
                if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                {
                    int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                    bufferSize = newSize > bufferSize ? newSize : bufferSize;
                }
            }
        }

        if (bufferSize < 0)
        {
            bufferSize = DEFAULT_BUFFER_SIZE;
        }

        // Copy the source rotation metadata so the output is not re-oriented.
        MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
        retrieverSrc.SetDataSource(srcPath);
        String degreesString = retrieverSrc.ExtractMetadata(MediaMetadataRetriever.MetadataKeyVideoRotation);
        if (degreesString != null)
        {
            int degrees = Integer.ParseInt(degreesString);
            if (degrees >= 0)
            {
                muxer.SetOrientationHint(degrees);
            }
        }

        // Seek to the requested start; SeekToClosestSync lands on a keyframe.
        if (startMicroSeconds > 0)
        {
            extractor.SeekTo(startMicroSeconds, MediaExtractor.SeekToClosestSync);
        }

        // Copy the samples from MediaExtractor to MediaMuxer. We loop, copying
        // each sample, and stop at end-of-source or once past the trim end time.
        int offset = 0;
        int trackIndex = -1;
        ByteBuffer dstBuf = ByteBuffer.Allocate(bufferSize);
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        try
        {
            muxer.Start();
            while (true)
            {
                bufferInfo.Offset = offset;
                bufferInfo.Size = extractor.ReadSampleData(dstBuf, offset);
                if (bufferInfo.Size < 0)
                {
                    Log.Info(LOGTAG, "Saw input EOS.");
                    bufferInfo.Size = 0;
                    break;
                }

                bufferInfo.PresentationTimeUs = extractor.SampleTime;
                if (endMicroSeconds > 0 && bufferInfo.PresentationTimeUs > endMicroSeconds)
                {
                    Log.Info(LOGTAG, "The current sample is over the trim end time.");
                    break;
                }

                // Map the extractor's sample flags onto codec buffer flags
                // (the two enums are not the same binding type).
                bufferInfo.Flags = GetSyncsampleflags(extractor.SampleFlags);
                trackIndex = extractor.SampleTrackIndex;
                muxer.WriteSampleData(indexMap[trackIndex], dstBuf, bufferInfo);
                extractor.Advance();
            }

            muxer.Stop();
        }
        catch (IllegalStateException)
        {
            // Swallow the exception due to malformed source.
            Log.Info(LOGTAG, "The source video file is malformed");
            return false;
        }
        finally
        {
            muxer.Release();
        }

        return true;
    }
    finally
    {
        // BUG FIX: release the extractor's native resources (previously leaked).
        extractor.Release();
    }
}
/// <summary>
/// Decodes the first audio track of the media at [offset, offset + length)
/// in the given file descriptor into a raw PCM byte array.
/// Throws ContentLoadException when no audio track exists, when the stream
/// ends early, or when decoding does not finish in time.
/// </summary>
public static Result DecodeAudio(FileDescriptor descriptor, long offset, long length)
{
    using (var extractor = new MediaExtractor())
    {
        extractor.SetDataSource(descriptor, offset, length);

        // Find and select the first audio track.
        // BUG FIX: stop scanning once an audio track is found. Previously the
        // loop kept running, so 'format'/'mime' described the LAST track and
        // the guard below could wrongly throw even though an audio track had
        // been selected.
        MediaFormat format = null;
        string mime = null;
        for (int i = 0; i < extractor.TrackCount; i++)
        {
            format = extractor.GetTrackFormat(i);
            mime = format.GetString(MediaFormat.KeyMime);
            if (mime.StartsWith("audio/"))
            {
                extractor.SelectTrack(i);
                break;
            }
        }

        if (format == null || !mime.StartsWith("audio/"))
        {
            throw new ContentLoadException("Could not find any audio track.");
        }

        int sampleRate = format.GetInteger(MediaFormat.KeySampleRate);
        long duration = format.GetLong(MediaFormat.KeyDuration);
        int channels = format.GetInteger(MediaFormat.KeyChannelCount);

        // Expected output size from the track duration (µs) at 2 bytes/sample.
        // NOTE(review): assumes the decoder output matches this estimate
        // exactly — verify for variable-bitrate sources.
        int samples = (int)(sampleRate * duration / 1000000d);
        var output = new byte[samples * 2];
        int timeoutsLeft = 1000;

        var decoder = MediaCodecPool.RentDecoder(mime);
        try
        {
            decoder.Configure(format, null, null, MediaCodecConfigFlags.None);
            decoder.Start();

            ByteBuffer[] inputBuffers = decoder.GetInputBuffers();
            ByteBuffer[] outputBuffers = decoder.GetOutputBuffers();
            var bufferInfo = new MediaCodec.BufferInfo();
            int totalOffset = 0;
            bool endOfStream = false;

            while (true)
            {
                // we dont need to have a endOfStream local,
                // but it saves us a few calls to the decoder
                if (!endOfStream)
                {
                    int inputBufIndex = decoder.DequeueInputBuffer(5000);
                    if (inputBufIndex >= 0)
                    {
                        int size = extractor.ReadSampleData(inputBuffers[inputBufIndex], 0);
                        if (size > 0)
                        {
                            decoder.QueueInputBuffer(
                                inputBufIndex, 0, size, extractor.SampleTime, MediaCodecBufferFlags.None);
                            extractor.Advance();
                        }
                        else
                        {
                            // BUG FIX: the old code queued the EOS marker on a
                            // buffer index it had just queued with data (an
                            // illegal double-queue). Queue EOS on its own
                            // buffer once the extractor has no more samples.
                            decoder.QueueInputBuffer(
                                inputBufIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                            endOfStream = true;
                        }
                    }
                }

                int decoderStatus = decoder.DequeueOutputBuffer(bufferInfo, 5000);
                if (decoderStatus >= 0)
                {
                    IntPtr bufferPtr = outputBuffers[decoderStatus].GetDirectBufferAddress();
                    IntPtr offsetPtr = bufferPtr + bufferInfo.Offset;
                    int size = bufferInfo.Size;
                    Marshal.Copy(offsetPtr, output, totalOffset, size);
                    decoder.ReleaseOutputBuffer(decoderStatus, render: false);
                    totalOffset += size;

                    // BUG FIX: Flags is a bit field; test the EOS bit instead
                    // of comparing the whole value (EOS can arrive combined
                    // with other flags).
                    if ((bufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                    {
                        if (totalOffset != output.Length)
                        {
                            throw new ContentLoadException(
                                "Reached end of stream before reading expected amount of samples.");
                        }
                        break;
                    }
                }
                else if (decoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                {
                    outputBuffers = decoder.GetOutputBuffers();
                }
                else if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                {
                    if (timeoutsLeft-- <= 0)
                    {
                        break;
                    }
                }
            }
        }
        finally
        {
            decoder.Stop();
            MediaCodecPool.ReturnDecoder(mime, decoder);
        }

        if (timeoutsLeft <= 0)
        {
            throw new ContentLoadException("Could not load sound effect in designated time frame.");
        }

        return new Result(output, sampleRate, channels, mime);
    }
}
/// <summary>
/// If both the inputPath string and inputUri are non-null, this method uses
/// the Uri; otherwise set one or the other. They cannot both be null.
/// Copies (trims) audio samples between startMs and endMs into the muxer.
/// </summary>
/// <param name="startMs">the start ms for trimming</param>
/// <param name="endMs">the final ms for trimming</param>
/// <param name="inputPath">optional input path string</param>
/// <param name="muxer">the muxer to use for writing bytes</param>
/// <param name="trackIndexOverride">the track index for muxer read/write to</param>
/// <param name="bufferInfo">an input bufferinfo to get properties from</param>
/// <param name="outputPath">the output path for method to check after finished encoding</param>
/// <param name="ptOffset">the presentation time offset for audio, used in syncing audio and video</param>
/// <param name="inputUri">optional inputUri to read from</param>
/// <returns>the output path when the output file exists, otherwise null</returns>
public async Task<string> HybridMuxingTrimmer(int startMs, int endMs, string inputPath, MediaMuxer muxer, int trackIndexOverride = -1, BufferInfo bufferInfo = null, string outputPath = null, long ptOffset = 0, Android.Net.Uri inputUri = null)
{
    var tio = trackIndexOverride;
    await Task.Run(() =>
    {
        if (outputPath == null)
        {
            outputPath = FileToMp4.LatestOutputPath;
        }

        MediaExtractor ext = new MediaExtractor();
        try
        {
            if (inputUri != null)
            {
                ext.SetDataSource(Android.App.Application.Context, inputUri, null);
            }
            else
            {
                ext.SetDataSource(inputPath);
            }

            // Select audio tracks only; video is rerouted to the GL video encoder.
            int trackCount = ext.TrackCount;
            Dictionary<int, int> indexDict = new Dictionary<int, int>(trackCount);
            int bufferSize = -1;
            for (int i = 0; i < trackCount; i++)
            {
                MediaFormat format = ext.GetTrackFormat(i);
                string mime = format.GetString(MediaFormat.KeyMime);
                bool selectCurrentTrack = false;
                if (mime.StartsWith("audio/"))
                {
                    selectCurrentTrack = true;
                }
                else if (mime.StartsWith("video/"))
                {
                    selectCurrentTrack = false; /*rerouted to gl video encoder*/
                }

                if (selectCurrentTrack)
                {
                    ext.SelectTrack(i);
                    if (tio != -1)
                    {
                        indexDict.Add(i, i);
                    }
                    if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                    {
                        int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                        bufferSize = newSize > bufferSize ? newSize : bufferSize;
                    }
                }
            }

            // BUG FIX: if no selected track declared KeyMaxInputSize, bufferSize
            // stayed -1 and ByteBuffer.Allocate(-1) below threw. Fall back to 1 MiB.
            if (bufferSize < 0)
            {
                bufferSize = 1024 * 1024;
            }

            MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
            if (!System.String.IsNullOrWhiteSpace(inputPath))
            {
                retrieverSrc.SetDataSource(inputPath);
            }
            else
            {
                retrieverSrc.SetDataSource(Android.App.Application.Context, inputUri);
            }

            string degreesString = retrieverSrc.ExtractMetadata(MetadataKey.VideoRotation);
            if (degreesString != null) // unused ATM but will be useful for stabilized videoview in streaming
            {
                int degrees = int.Parse(degreesString);
                if (degrees >= 0)
                {
                    // muxer.SetOrientationHint(degrees);
                    // muxer won't accept this param once started
                }
            }

            if (startMs > 0)
            {
                ext.SeekTo(startMs * 1000, MediaExtractorSeekTo.ClosestSync);
            }

            int offset = 0;
            if (bufferInfo == null)
            {
                bufferInfo = new MediaCodec.BufferInfo();
            }

            ByteBuffer dstBuf = ByteBuffer.Allocate(bufferSize);
            long us = endMs * 1000;
            long uo = us + ptOffset;
            int cf = 0;
            try
            {
                FileToMp4.AudioEncodingInProgress = true;
                while (true)
                {
                    bufferInfo.Offset = offset;
                    bufferInfo.Size = ext.ReadSampleData(dstBuf, offset);
                    if (bufferInfo.Size < 0)
                    {
                        bufferInfo.Size = 0;
                        break; // end of source
                    }

                    cf++;
                    bufferInfo.PresentationTimeUs = ext.SampleTime + ptOffset;
                    if (ext.SampleTime >= us)
                    {
                        break; // reached the trim end time
                    }

                    bufferInfo.Flags = MFlags2MCodecBuff(ext.SampleFlags);
                    if (tio == -1)
                    {
                        muxer.WriteSampleData(FileToMp4.LatestAudioTrackIndex, dstBuf, bufferInfo);
                    }
                    else
                    {
                        muxer.WriteSampleData(tio, dstBuf, bufferInfo);
                    }

                    // only send the muxer eventargs once every x frames to reduce CPU load
                    if (cf >= 240)
                    {
                        Notify(ext.SampleTime, us);
                        cf = 0;
                    }

                    ext.Advance();
                }
            }
            catch (Java.Lang.IllegalStateException)
            {
                this.Progress.Invoke(new MuxerEventArgs(ext.SampleTime, us, null, true, true));
                Console.WriteLine("The source video file is malformed");
            }
            catch (Java.Lang.Exception ex)
            {
                this.Progress.Invoke(new MuxerEventArgs(ext.SampleTime, us, null, true, true));
                Console.WriteLine(ex.Message);
            }

            if (AppSettings.Logging.SendToConsole)
            {
                System.Console.WriteLine($"DrainEncoder audio finished @ {bufferInfo.PresentationTimeUs}");
            }
        }
        finally
        {
            // BUG FIX: release the extractor's native resources (previously leaked).
            ext.Release();
        }
    });

    FileToMp4.AudioEncodingInProgress = false;
    try
    {
        // Only finalize the muxer once the video side has also finished.
        if (!FileToMp4.VideoEncodingInProgress)
        {
            muxer.Stop();
            muxer.Release();
            muxer = null;
        }
    }
    catch (Java.Lang.Exception ex)
    {
        Log.Debug("MuxingEncoder", ex.Message);
    }

    if (outputPath != null)
    {
        var success = System.IO.File.Exists(outputPath);
        if (success)
        {
            this.Progress.Invoke(new MuxerEventArgs(endMs * 1000, endMs, outputPath, true));
            return outputPath;
        }
    }

    return null; //nothing to look for
}
/// <summary>
/// Trims lengthMS milliseconds starting at startMS from inputPath into
/// outputPath by remuxing samples (no re-encode).
/// Returns true on success, false on any failure.
/// </summary>
public Task<bool> TrimAsync(int startMS, int lengthMS, string inputPath, string outputPath)
{
    return Task.Run<bool>(() =>
    {
        try
        {
            bool didOperationSucceed = false;

            // Set up MediaExtractor to read from the source.
            MediaExtractor extractor = new MediaExtractor();
            try
            {
                extractor.SetDataSource(inputPath);
                int trackCount = extractor.TrackCount;

                // Set up MediaMuxer for the destination.
                MediaMuxer muxer;
                muxer = new MediaMuxer(outputPath, MuxerOutputType.Mpeg4);

                // Select all audio/video tracks, map source index -> muxer index,
                // and retrieve the max declared input size for the copy buffer.
                Dictionary<int, int> indexDict = new Dictionary<int, int>(trackCount);
                int bufferSize = -1;
                for (int i = 0; i < trackCount; i++)
                {
                    MediaFormat format = extractor.GetTrackFormat(i);
                    string mime = format.GetString(MediaFormat.KeyMime);
                    bool selectCurrentTrack = false;
                    if (mime.StartsWith("audio/"))
                    {
                        selectCurrentTrack = true;
                    }
                    else if (mime.StartsWith("video/"))
                    {
                        selectCurrentTrack = true;
                    }

                    if (selectCurrentTrack)
                    {
                        extractor.SelectTrack(i);
                        int dstIndex = muxer.AddTrack(format);
                        indexDict.Add(i, dstIndex);
                        if (format.ContainsKey(MediaFormat.KeyMaxInputSize))
                        {
                            int newSize = format.GetInteger(MediaFormat.KeyMaxInputSize);
                            bufferSize = newSize > bufferSize ? newSize : bufferSize;
                        }
                    }
                }

                if (bufferSize < 0)
                {
                    bufferSize = 1337; //TODO: I don't know what to put here tbh, it will most likely be above 0 at this point anyways :)
                }

                // Copy the source rotation so the output is not re-oriented.
                MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever();
                retrieverSrc.SetDataSource(inputPath);
                string degreesString = retrieverSrc.ExtractMetadata(MetadataKey.VideoRotation);
                if (degreesString != null)
                {
                    int degrees = int.Parse(degreesString);
                    if (degrees >= 0)
                    {
                        muxer.SetOrientationHint(degrees);
                    }
                }

                // Seek to the requested start; ClosestSync lands on a keyframe.
                if (startMS > 0)
                {
                    extractor.SeekTo(startMS * 1000, MediaExtractorSeekTo.ClosestSync);
                }

                // Copy the samples from MediaExtractor to MediaMuxer. We will loop
                // for copying each sample and stop when we get to the end of the
                // source file or exceed the end time of the trimming.
                int offset = 0;
                int trackIndex = -1;
                ByteBuffer dstBuf = ByteBuffer.Allocate(bufferSize);
                MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                try
                {
                    muxer.Start();
                    while (true)
                    {
                        bufferInfo.Offset = offset;
                        bufferInfo.Size = extractor.ReadSampleData(dstBuf, offset);
                        if (bufferInfo.Size < 0)
                        {
                            bufferInfo.Size = 0;
                            break;
                        }

                        bufferInfo.PresentationTimeUs = extractor.SampleTime;
                        if (lengthMS > 0 && bufferInfo.PresentationTimeUs > ((startMS + lengthMS - 1) * 1000))
                        {
                            Console.WriteLine("The current sample is over the trim end time.");
                            break;
                        }

                        bufferInfo.Flags = ConvertMediaExtractorSampleFlagsToMediaCodecBufferFlags(extractor.SampleFlags);
                        trackIndex = extractor.SampleTrackIndex;
                        muxer.WriteSampleData(indexDict[trackIndex], dstBuf, bufferInfo);
                        extractor.Advance();
                    }

                    muxer.Stop();
                    didOperationSucceed = true;
                }
                catch (IllegalStateException)
                {
                    // Swallow the exception due to malformed source.
                    Console.WriteLine("The source video file is malformed");
                }
                finally
                {
                    muxer.Release();
                }
            }
            finally
            {
                // BUG FIX: release the extractor's native resources on every
                // path (they were previously leaked).
                extractor.Release();
            }

            return didOperationSucceed;
        }
        catch (System.Exception xx)
        {
            // BUG FIX: was a silent swallow; log so failures are diagnosable
            // while still reporting failure to the caller as before.
            Console.WriteLine("TrimAsync failed: " + xx);
            return false;
        }
    });
}
/// <summary>
/// Reads every encoded sample of the first video track into its own
/// ByteBuffer, keyed by presentation time (µs). Returns null on any failure.
/// </summary>
public static SortedList<long, ByteBuffer> GetFrames(Context context, String path)
{
    MediaExtractor extractor = null;
    try
    {
        var bArray = new SortedList<long, ByteBuffer>();
        extractor = new MediaExtractor();
        extractor.SetDataSource(path);

        int trackIndex = selectTrack(extractor);
        if (trackIndex < 0)
        {
            // BUG FIX: replaced the abusive exception message with a clear one.
            throw new InvalidParameterException("No video track found in " + path);
        }
        extractor.SelectTrack(trackIndex);

        // NOTE(review): each sample gets a fixed 1280*720-byte buffer; samples
        // larger than that will fail to read. Consider sizing the buffers from
        // the track's KeyMaxInputSize instead.
        List<ByteBuffer> buffers = new List<ByteBuffer>();
        buffers.Add(ByteBuffer.Allocate(1280 * 720));

        int i = 0;
        // NOTE(review): this initial Advance() skips the first sample of the
        // track before anything is read — confirm this is intentional.
        extractor.Advance();
        while (extractor.ReadSampleData(buffers[i], 0) >= 0)
        {
            // Duplicate so the stored view keeps its own position/limit while
            // the backing buffer stays alive in 'buffers'.
            var buffcopy = buffers[i].Duplicate();
            bArray.Add(extractor.SampleTime, buffcopy);
            buffers.Add(ByteBuffer.Allocate(1280 * 720));
            i++;
            extractor.Advance();
        }

        return bArray;
    }
    catch (Exception)
    {
        // Preserve the original best-effort contract: any failure yields null.
        return null;
    }
    finally
    {
        // BUG FIX: release the extractor's native resources (previously leaked).
        if (extractor != null)
        {
            extractor.Release();
        }
    }
}
public void PrepareEncoder(string path, File _downloaddir)
{
    // Opens the video at 'path', selects its video track, creates and starts a
    // decoder for it, then runs Decode(). Codec and extractor resources are
    // released in 'finally'.
    MediaCodec _Decoder = null;
    MediaExtractor extractor = null;
    _downloadsfilesdir = _downloaddir;

    try
    {
        extractor = new MediaExtractor();
        extractor.SetDataSource(path);

        int trackIndex = selectTrack(extractor);
        // BUG FIX: the negative-index check was commented out, so a file with
        // no video track crashed inside SelectTrack with an obscure error
        // instead of a clear one.
        if (trackIndex < 0)
        {
            throw new RuntimeException("No video track found in " + path);
        }
        extractor.SelectTrack(trackIndex);

        MediaFormat format = extractor.GetTrackFormat(trackIndex);
        _Width = format.GetInteger(MediaFormat.KeyWidth);
        _Height = format.GetInteger(MediaFormat.KeyHeight);

        // Create a MediaCodec decoder, and configure it with the MediaFormat from the
        // extractor. It's very important to use the format from the extractor because
        // it contains a copy of the CSD-0/CSD-1 codec-specific data chunks.
        String mime = format.GetString(MediaFormat.KeyMime);
        _Decoder = MediaCodec.CreateDecoderByType(mime);
        _Decoder.Configure(format, null, null, 0);
        _Decoder.Start();

        Decode(_Decoder, extractor);
    }
    catch (Exception e)
    {
        Log.Error(TAG, e.Message, e);
        throw;
    }
    finally
    {
        // release everything we grabbed
        if (_Decoder != null)
        {
            _Decoder.Stop();
            _Decoder.Release();
            _Decoder = null;
        }
        if (extractor != null)
        {
            extractor.Release();
            extractor = null;
        }
    }

    _TrackIndex = -1;
}