/// <summary>
/// Returns true when the given media file contains at least one track whose
/// MIME type contains "video"; false otherwise, or on any error (the error is
/// reported via <c>LogManager.Shared.Report</c>).
/// </summary>
/// <param name="file">Path to the media file to inspect.</param>
/// <returns>True if a video track is present; false if not, or on failure.</returns>
public static bool VerifyIsMovie(string file)
{
    try
    {
        // MediaExtractor wraps native resources; the original leaked it.
        // Dispose it so the underlying file handle is released promptly.
        using (var extractor = new Android.Media.MediaExtractor())
        {
            extractor.SetDataSource(file);
            var trackCount = extractor.TrackCount;
            for (int i = 0; i < trackCount; i++)
            {
                var trackFormat = extractor.GetTrackFormat(i);
                var mime = trackFormat.GetString(MediaFormat.KeyMime);
                // GetString can return null when the key is absent; guard
                // against an NRE (the original dereferenced it unconditionally).
                if (mime != null && mime.Contains("video"))
                {
                    return true;
                }
                // Debug trace of non-video track MIME types (kept from original).
                Console.WriteLine(mime);
            }
        }
        return false;
    }
    catch (Exception ex)
    {
        LogManager.Shared.Report(ex);
    }
    return false;
}
/// <summary>
/// Spike playback loop: opens SAMPLE with a MediaExtractor, selects the first
/// "video/" track, then pumps compressed samples into the decoder (set up by
/// InitializeDecoder) while pacing rendered output against a stopwatch.
/// Fields referenced but declared elsewhere in the class: SAMPLE, format,
/// decoder, inputBuffers, outputBuffers, info, running.
/// NOTE(review): this is an async-void override — any exception thrown after
/// the first await is unobservable to the caller and can crash the process.
/// </summary>
override protected async void Run()
{
    //Android.Media.MediaExtractor extractor;
    //Android.Media.MediaCodec decoder = null;
    using (var extractor = new Android.Media.MediaExtractor())
    //using (Android.Media.MediaCodec decoder = null)
    {
        //extractor = new Android.Media.MediaExtractor();
        try
        {
            await extractor.SetDataSourceAsync(SAMPLE).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // Swallow and bail: if the source can't be opened there is nothing to play.
            var s = ex.ToString();
            return;
        }
        //var mf = new MediaFormat();
        //mf.SetString(MediaFormat.KeyMime, "video/avc");
        //mf.SetInteger(MediaFormat.KeyMaxInputSize, 144154);// 1024 * 100);
        //mf.SetInteger(MediaFormat.KeyWidth, 720);
        //mf.SetInteger(MediaFormat.KeyHeight, 480);
        //mf.SetInteger("max-width", 720);
        //mf.SetInteger("max-height", 480);
        ////mf.SetInteger("push-blank-buffers-on-shudown", 1);
        //Log.Debug("Format info: ", mf.ToString());
        // Find and select the first video track; the decoder is created for it
        // inside InitializeDecoder() (the inline creation below was abandoned
        // because it crashed the Xamarin Android VM — see comments).
        for (int i = 0; i < extractor.TrackCount; i++)
        {
            format = extractor.GetTrackFormat(i);
            Log.Debug("Format info: ", format.ToString());
            String mime = format.GetString(Android.Media.MediaFormat.KeyMime);
            if (mime.StartsWith("video/"))
            {
                Log.Debug("Format mime: ", mime);
                //Log.Debug("Format " + MediaFormat.KeyMaxInputSize + ": ",
                //    format.GetInteger(MediaFormat.KeyMaxInputSize).ToString());
                Log.Debug("Format " + MediaFormat.KeyWidth + ": ", format.GetInteger(MediaFormat.KeyWidth).ToString());
                Log.Debug("Format " + MediaFormat.KeyHeight + ": ", format.GetInteger(MediaFormat.KeyHeight).ToString());
                PrintFormatInfo(format);
                extractor.SelectTrack(i);
                InitializeDecoder();
                //decoder = Android.Media.MediaCodec.CreateDecoderByType(mime);
                ////this is where the Xamarin Android VM dies.
                //decoder.Configure(format, surface, null, 0);
                break;
            }
        }
        if (decoder == null)
        {
            Android.Util.Log.Error("DecodeActivity", "Can't find video info!");
            return;//can't continue...
        }
        using (decoder)
        {
            bool isEOS = false;
            // Stopwatch acts as the playback clock for frame pacing below.
            var sw = new System.Diagnostics.Stopwatch();
            sw.Start();
            // 188-byte scratch buffer (MPEG-TS packet size, presumably) used to
            // peek at each sample for H.264 start codes before queueing it.
            byte[] peekBuf = new byte[188];
            while (running)
            {
                //sw.Restart();
                // --- Input side: feed one compressed sample to the decoder ---
                if (!isEOS)
                {
                    // Timeout argument is in microseconds (MediaCodec API).
                    int inIndex = decoder.DequeueInputBuffer(10000);
                    if (inIndex >= 0)
                    {
                        var buffer = inputBuffers[inIndex];// decoder.GetInputBuffer(inIndex);
                        if (buffer.Position() != 0)
                            Log.Debug("inBuff.Position: ", buffer.Position().ToString());
                        Log.Debug("inBuff: ", buffer.ToString());
                        int sampleSize = extractor.ReadSampleData(buffer, buffer.Position());
                        if (sampleSize < 0)
                        {
                            // We shouldn't stop the playback at this point, just pass the EOS
                            // flag to decoder, we will get it again from the
                            // dequeueOutputBuffer
                            Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                            decoder.QueueInputBuffer(inIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                            isEOS = true;
                        }
                        else
                        {
                            // Copy the sample out so we can scan it, then rewind the
                            // buffer so the decoder still sees the full sample.
                            if (peekBuf.Length < sampleSize)
                                peekBuf = new byte[sampleSize];
                            peekBuf.Initialize();//clear old data.
                            buffer.Get(peekBuf);
                            buffer.Position(0);//reset for the decoder
                            // Scan for 00 00 00 01 Annex-B start codes (debug only).
                            for (int i = 4; i < peekBuf.Length; ++i)
                            {
                                if (peekBuf[i] == 0x01 && peekBuf[i - 1] == 0x00 && peekBuf[i - 2] == 0x00 && peekBuf[i - 3] == 0x00)
                                    Log.Debug("Found h264 start code: ", string.Format("i={0} of {1}", i, sampleSize));
                            }
                            Log.Debug("ExtractorActivity, sampleSize: ", sampleSize.ToString());
                            decoder.QueueInputBuffer(inIndex, 0, sampleSize, extractor.SampleTime, 0);
                            //await extractor.AdvanceAsync().ConfigureAwait(false);
                            // NOTE(review): AdvanceAsync is fire-and-forget here (the
                            // awaited form above is commented out); the next
                            // ReadSampleData could race the advance — confirm intended.
                            extractor.AdvanceAsync();
                        }
                    }
                }
                // --- Output side: drain one decoded buffer and render it ---
                int outIndex = decoder.DequeueOutputBuffer(info, 10000);
                switch ((Android.Media.MediaCodecInfoState)outIndex)
                {
                    case MediaCodecInfoState.OutputBuffersChanged:
                        Android.Util.Log.Debug("DecodeActivity", MediaCodecInfoState.OutputBuffersChanged.ToString());
                        // Output buffer array was invalidated; re-fetch it.
                        outputBuffers = decoder.GetOutputBuffers();
                        break;
                    case MediaCodecInfoState.OutputFormatChanged:
                        PrintFormatInfo(decoder.OutputFormat);
                        Android.Util.Log.Debug("DecodeActivity", "New format " + decoder.OutputFormat);//.GetOutputFormat(outIndex));
                        break;
                    case MediaCodecInfoState.TryAgainLater:
                        Android.Util.Log.Debug("DecodeActivity", "dequeueOutputBuffer timed out!");
                        break;
                    default:
                        // outIndex >= 0: a real output buffer is available.
                        var buffer = outputBuffers[outIndex];// decoder.GetOutputBuffer(outIndex);
                        Android.Util.Log.Verbose("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);
                        // We use a very simple clock to keep the video FPS, or the video
                        // playback will be too fast
                        //This causes the next frame to not be rendered too quickly.
                        // PresentationTimeUs is microseconds; compare in milliseconds
                        // against the stopwatch started when playback began.
                        while (info.PresentationTimeUs / 1000 > sw.ElapsedMilliseconds)
                        {
                            try
                            {
                                await Task.Delay(10).ConfigureAwait(false);
                                //sleep(10);
                            }
                            catch (Exception e)
                            {
                                //e.printStackTrace();
                                System.Diagnostics.Debug.WriteLine(e.StackTrace);
                                break;
                            }
                        }
                        //the decoder won't advance without this...
                        //must be called before the next decoder.dequeue call
                        decoder.ReleaseOutputBuffer(outIndex, true);
                        break;
                }
                // All decoded frames have been rendered, we can stop playing now
                if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                {
                    Android.Util.Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                    break;
                }
            }
            decoder.Stop();
        }
    }
}
/// <summary>
/// Spike playback loop (braced/reformatted twin of the other Run overload in
/// this file): opens SAMPLE with a MediaExtractor, selects the first "video/"
/// track, then pumps compressed samples into the decoder (set up by
/// InitializeDecoder) while pacing rendered output against a stopwatch.
/// Fields referenced but declared elsewhere in the class: SAMPLE, format,
/// decoder, inputBuffers, outputBuffers, info, running.
/// NOTE(review): async-void override — exceptions after the first await are
/// unobservable to the caller and can crash the process.
/// </summary>
override protected async void Run()
{
    //Android.Media.MediaExtractor extractor;
    //Android.Media.MediaCodec decoder = null;
    using (var extractor = new Android.Media.MediaExtractor())
    //using (Android.Media.MediaCodec decoder = null)
    {
        //extractor = new Android.Media.MediaExtractor();
        try
        {
            await extractor.SetDataSourceAsync(SAMPLE).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // Swallow and bail: if the source can't be opened there is nothing to play.
            var s = ex.ToString();
            return;
        }
        //var mf = new MediaFormat();
        //mf.SetString(MediaFormat.KeyMime, "video/avc");
        //mf.SetInteger(MediaFormat.KeyMaxInputSize, 144154);// 1024 * 100);
        //mf.SetInteger(MediaFormat.KeyWidth, 720);
        //mf.SetInteger(MediaFormat.KeyHeight, 480);
        //mf.SetInteger("max-width", 720);
        //mf.SetInteger("max-height", 480);
        ////mf.SetInteger("push-blank-buffers-on-shudown", 1);
        //Log.Debug("Format info: ", mf.ToString());
        // Find and select the first video track; the decoder is created inside
        // InitializeDecoder() (the inline creation below was abandoned because
        // it crashed the Xamarin Android VM — see comments).
        for (int i = 0; i < extractor.TrackCount; i++)
        {
            format = extractor.GetTrackFormat(i);
            Log.Debug("Format info: ", format.ToString());
            String mime = format.GetString(Android.Media.MediaFormat.KeyMime);
            if (mime.StartsWith("video/"))
            {
                Log.Debug("Format mime: ", mime);
                //Log.Debug("Format " + MediaFormat.KeyMaxInputSize + ": ",
                //    format.GetInteger(MediaFormat.KeyMaxInputSize).ToString());
                Log.Debug("Format " + MediaFormat.KeyWidth + ": ", format.GetInteger(MediaFormat.KeyWidth).ToString());
                Log.Debug("Format " + MediaFormat.KeyHeight + ": ", format.GetInteger(MediaFormat.KeyHeight).ToString());
                PrintFormatInfo(format);
                extractor.SelectTrack(i);
                InitializeDecoder();
                //decoder = Android.Media.MediaCodec.CreateDecoderByType(mime);
                ////this is where the Xamarin Android VM dies.
                //decoder.Configure(format, surface, null, 0);
                break;
            }
        }
        if (decoder == null)
        {
            Android.Util.Log.Error("DecodeActivity", "Can't find video info!");
            return;//can't continue...
        }
        using (decoder)
        {
            bool isEOS = false;
            // Stopwatch acts as the playback clock for frame pacing below.
            var sw = new System.Diagnostics.Stopwatch();
            sw.Start();
            // 188-byte scratch buffer (MPEG-TS packet size, presumably) used to
            // peek at each sample for H.264 start codes before queueing it.
            byte[] peekBuf = new byte[188];
            while (running)
            {
                //sw.Restart();
                // --- Input side: feed one compressed sample to the decoder ---
                if (!isEOS)
                {
                    // Timeout argument is in microseconds (MediaCodec API).
                    int inIndex = decoder.DequeueInputBuffer(10000);
                    if (inIndex >= 0)
                    {
                        var buffer = inputBuffers[inIndex];// decoder.GetInputBuffer(inIndex);
                        if (buffer.Position() != 0)
                        {
                            Log.Debug("inBuff.Position: ", buffer.Position().ToString());
                        }
                        Log.Debug("inBuff: ", buffer.ToString());
                        int sampleSize = extractor.ReadSampleData(buffer, buffer.Position());
                        if (sampleSize < 0)
                        {
                            // We shouldn't stop the playback at this point, just pass the EOS
                            // flag to decoder, we will get it again from the
                            // dequeueOutputBuffer
                            Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                            decoder.QueueInputBuffer(inIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                            isEOS = true;
                        }
                        else
                        {
                            // Copy the sample out so we can scan it, then rewind the
                            // buffer so the decoder still sees the full sample.
                            if (peekBuf.Length < sampleSize)
                            {
                                peekBuf = new byte[sampleSize];
                            }
                            peekBuf.Initialize(); //clear old data.
                            buffer.Get(peekBuf);
                            buffer.Position(0); //reset for the decoder
                            // Scan for 00 00 00 01 Annex-B start codes (debug only).
                            for (int i = 4; i < peekBuf.Length; ++i)
                            {
                                if (peekBuf[i] == 0x01 && peekBuf[i - 1] == 0x00 && peekBuf[i - 2] == 0x00 && peekBuf[i - 3] == 0x00)
                                {
                                    Log.Debug("Found h264 start code: ", string.Format("i={0} of {1}", i, sampleSize));
                                }
                            }
                            Log.Debug("ExtractorActivity, sampleSize: ", sampleSize.ToString());
                            decoder.QueueInputBuffer(inIndex, 0, sampleSize, extractor.SampleTime, 0);
                            //await extractor.AdvanceAsync().ConfigureAwait(false);
                            // NOTE(review): AdvanceAsync is fire-and-forget here (the
                            // awaited form above is commented out); the next
                            // ReadSampleData could race the advance — confirm intended.
                            extractor.AdvanceAsync();
                        }
                    }
                }
                // --- Output side: drain one decoded buffer and render it ---
                int outIndex = decoder.DequeueOutputBuffer(info, 10000);
                switch ((Android.Media.MediaCodecInfoState)outIndex)
                {
                    case MediaCodecInfoState.OutputBuffersChanged:
                        Android.Util.Log.Debug("DecodeActivity", MediaCodecInfoState.OutputBuffersChanged.ToString());
                        // Output buffer array was invalidated; re-fetch it.
                        outputBuffers = decoder.GetOutputBuffers();
                        break;
                    case MediaCodecInfoState.OutputFormatChanged:
                        PrintFormatInfo(decoder.OutputFormat);
                        Android.Util.Log.Debug("DecodeActivity", "New format " + decoder.OutputFormat); //.GetOutputFormat(outIndex));
                        break;
                    case MediaCodecInfoState.TryAgainLater:
                        Android.Util.Log.Debug("DecodeActivity", "dequeueOutputBuffer timed out!");
                        break;
                    default:
                        // outIndex >= 0: a real output buffer is available.
                        var buffer = outputBuffers[outIndex]; // decoder.GetOutputBuffer(outIndex);
                        Android.Util.Log.Verbose("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);
                        // We use a very simple clock to keep the video FPS, or the video
                        // playback will be too fast
                        //This causes the next frame to not be rendered too quickly.
                        // PresentationTimeUs is microseconds; compare in milliseconds
                        // against the stopwatch started when playback began.
                        while (info.PresentationTimeUs / 1000 > sw.ElapsedMilliseconds)
                        {
                            try
                            {
                                await Task.Delay(10).ConfigureAwait(false);
                                //sleep(10);
                            }
                            catch (Exception e)
                            {
                                //e.printStackTrace();
                                System.Diagnostics.Debug.WriteLine(e.StackTrace);
                                break;
                            }
                        }
                        //the decoder won't advance without this...
                        //must be called before the next decoder.dequeue call
                        decoder.ReleaseOutputBuffer(outIndex, true);
                        break;
                }
                // All decoded frames have been rendered, we can stop playing now
                if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                {
                    Android.Util.Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                    break;
                }
            }
            decoder.Stop();
        }
    }
}
/// <summary>
/// Spike comparison harness (no actual decoding — the MediaCodec path is fully
/// commented out): reads SAMPLE with Android's MediaExtractor and, in parallel,
/// feeds the same file through this project's BufferExtractor in 188-byte
/// chunks (MPEG-TS packet size, presumably). Each extracted sample is written
/// to "decode.out" and each BufferExtractor payload to "decode2.out" under
/// 'dir', so the two extractors' output can be diffed offline.
/// Fields referenced but declared elsewhere: SAMPLE, dir, interrupted.
/// NOTE(review): async-void override — exceptions after the first await are
/// unobservable to the caller.
/// </summary>
override public async void Run()
{
    //Android.Media.MediaExtractor extractor;
    // NOTE(review): 'decoder' is assigned null and never used — leftover from
    // the (commented-out) decode path.
    Android.Media.MediaCodec decoder = null;
    using (var extractor = new Android.Media.MediaExtractor())
    //using (Android.Media.MediaCodec decoder = null)
    {
        //extractor = new Android.Media.MediaExtractor();
        try
        {
            await extractor.SetDataSourceAsync(SAMPLE).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // Swallow and bail: if the source can't be opened there is nothing to do.
            var s = ex.ToString();
            return;
        }
        // Find and select the first video track; only the extractor is used here.
        for (int i = 0; i < extractor.TrackCount; i++)
        {
            var format = extractor.GetTrackFormat(i);
            Log.Debug("Format info: ", format.ToString());
            String mime = format.GetString(Android.Media.MediaFormat.KeyMime);
            if (mime.StartsWith("video/"))
            {
                Log.Debug("Format mime: ", mime);
                //Log.Debug("Format " + MediaFormat.KeyMaxInputSize + ": ",
                //    format.GetInteger(MediaFormat.KeyMaxInputSize).ToString());
                Log.Debug("Format " + MediaFormat.KeyWidth + ": ", format.GetInteger(MediaFormat.KeyWidth).ToString());
                Log.Debug("Format " + MediaFormat.KeyHeight + ": ", format.GetInteger(MediaFormat.KeyHeight).ToString());
                PrintFormatInfo(format);
                extractor.SelectTrack(i);
                //decoder = Android.Media.MediaCodec.CreateDecoderByType(mime);
                //this is where the Xamarin Android VM dies.
                //decoder.Configure(format, surface, null, 0);
                break;
            }
        }
        //if (decoder == null)
        //{
        //    Android.Util.Log.Error("DecodeActivity", "Can't find video info!");
        //    return;//can't continue...
        //}
        // Recreate the two comparison output files from scratch each run.
        var f = new Java.IO.File(dir + "decode.out");
        if (f.Exists())
            f.Delete();
        f.CreateNewFile();
        var f2 = new Java.IO.File(dir + "decode2.out");
        if (f2.Exists())
            f2.Delete();
        f2.CreateNewFile();
        //open the file for our custom extractor
        var inInfo = new System.IO.FileInfo(SAMPLE);
        if (!inInfo.Exists)
        {
            Log.Error("input file not found!", inInfo.FullName);
            return;
        }
        using (var inStream = inInfo.OpenRead())
        using (var fs2 = new Java.IO.FileOutputStream(f2))//get an output stream
        using (var fs = new Java.IO.FileOutputStream(f))//get an output stream
        {
            //var inputBuffers = decoder.GetInputBuffers();
            //var outputBuffers = decoder.GetOutputBuffers();
            // info is never written here, so its Flags EOS check below can only
            // ever see the default value — loop really exits via sampleSize < 0.
            var info = new Android.Media.MediaCodec.BufferInfo();
            bool started = false, isEOS = false;
            var sw = new System.Diagnostics.Stopwatch();
            // NOTE(review): startMs is captured before Start() and never read.
            long startMs = sw.ElapsedMilliseconds;
            sw.Start();
            byte[] peekBuf = new byte[188];
            //for dumping the sample into instead of the decoder.
            var buffer = Java.Nio.ByteBuffer.Allocate(165000);// decoder.GetInputBuffer(inIndex);
            var buffEx = new BufferExtractor();
            // NOTE(review): fixed 20000-byte scratch with the resize guard
            // commented out below — ByteBuffer.Get(byte[]) needs at least
            // tmpB.Length bytes remaining, so samples smaller than 20000 bytes
            // would make that Get throw. Confirm sample sizing.
            var tmpB = new byte[20000];
            while (!interrupted)
            {
                //sw.Restart();
                if (!isEOS)
                {
                    // Hard-wired "input buffer index" — the decoder dequeue is
                    // commented out; kept so the original control flow survives.
                    int inIndex = 1;// decoder.DequeueInputBuffer(10000);
                    if (inIndex >= 0)
                    {
                        buffer.Position(0);//reset the buffer
                        if (buffer.Position() != 0)
                            Log.Debug("inBuff.Position: ", buffer.Position().ToString());
                        Log.Debug("inBuff: ", buffer.ToString());
                        int sampleSize = extractor.ReadSampleData(buffer, 0);
                        if (sampleSize < 0)
                        {
                            // We shouldn't stop the playback at this point, just pass the EOS
                            // flag to decoder, we will get it again from the
                            // dequeueOutputBuffer
                            Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                            //decoder.QueueInputBuffer(inIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                            isEOS = true;
                        }
                        else
                        {
                            // Copy the sample out so we can scan it, then rewind the buffer.
                            if (peekBuf.Length < sampleSize)
                                peekBuf = new byte[sampleSize];
                            peekBuf.Initialize();//clear old data.
                            buffer.Get(peekBuf);
                            buffer.Position(0);//reset for the decoder
                            // Scan for 00 00 00 01 Annex-B start codes (debug only).
                            for (int i = 4; i < peekBuf.Length; ++i)
                            {
                                if (peekBuf[i] == 0x01 && peekBuf[i - 1] == 0x00 && peekBuf[i - 2] == 0x00 && peekBuf[i - 3] == 0x00)
                                    Log.Debug("Found h264 start code: ", string.Format("i={0} of {1}", i, sampleSize));
                            }
                            Log.Debug("ExtractorActivity, sampleSize: ", sampleSize.ToString());
                            // Feed the custom extractor 188-byte chunks until it emits a
                            // buffer. First time through, discard payloads whose length
                            // doesn't match the platform extractor's sample to sync up.
                            // NOTE(review): ReadAsync's return value is ignored — a short
                            // read or EOF leaves peekBuf partially stale; confirm the
                            // input length is a multiple of 188.
                            if (!started)//get your parser synced with theirs
                            {
                                do
                                {
                                    peekBuf = new byte[188];
                                    await inStream.ReadAsync(peekBuf, 0, peekBuf.Length)
                                        .ConfigureAwait(false);
                                    buffEx.AddRaw(peekBuf);
                                    if (buffEx.outBuffers.Count > 0 && buffEx.outBuffers.Peek().GetPayload().Length != sampleSize)
                                    {
                                        buffEx.outBuffers.Dequeue();//throw this one away
                                    }
                                } while (buffEx.outBuffers.Count == 0);
                                started = true;
                            }
                            else
                            {
                                do
                                {
                                    peekBuf = new byte[188];
                                    await inStream.ReadAsync(peekBuf, 0, peekBuf.Length)
                                        .ConfigureAwait(false);
                                    buffEx.AddRaw(peekBuf);
                                } while (buffEx.outBuffers.Count == 0);
                                started = true;
                            }
                            //write out the vid data.
                            buffer.Limit(sampleSize);
                            buffer.Position(0);
                            //if (tmpB.Length < sampleSize) tmpB = new byte[sampleSize];
                            buffer.Get(tmpB);
                            fs.Write(tmpB);
                            buffer.Limit(buffer.Capacity());//reset the limit for next sample
                            buffer.Position(0);
                            fs2.Write(buffEx.outBuffers.Dequeue().GetPayload());
                            // NOTE(review): FileStream.CanRead stays true until the stream
                            // is disposed — this never detects EOF; the loop actually ends
                            // via sampleSize < 0 above. Confirm intent.
                            if (!inStream.CanRead)
                                isEOS = true;//end of stream.
                            //decoder.QueueInputBuffer(inIndex, 0, sampleSize, extractor.SampleTime, 0);
                            await extractor.AdvanceAsync().ConfigureAwait(false);
                            //extractor.AdvanceAsync();
                        }
                    }
                }
                // All decoded frames have been rendered, we can stop playing now
                if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                {
                    Android.Util.Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                    break;
                }
            }
            //decoder.Stop();
        }
    }
}
/// <summary>
/// Spike comparison harness (braced/reformatted twin of the other extractor
/// Run in this file; no actual decoding — the MediaCodec path is commented
/// out): reads SAMPLE with Android's MediaExtractor and, in parallel, feeds
/// the same file through this project's BufferExtractor in 188-byte chunks
/// (MPEG-TS packet size, presumably). Each extracted sample is written to
/// "decode.out" and each BufferExtractor payload to "decode2.out" under 'dir',
/// so the two extractors' output can be diffed offline.
/// Fields referenced but declared elsewhere: SAMPLE, dir, interrupted.
/// NOTE(review): async-void override — exceptions after the first await are
/// unobservable to the caller.
/// </summary>
override public async void Run()
{
    //Android.Media.MediaExtractor extractor;
    // NOTE(review): 'decoder' is assigned null and never used — leftover from
    // the (commented-out) decode path.
    Android.Media.MediaCodec decoder = null;
    using (var extractor = new Android.Media.MediaExtractor())
    //using (Android.Media.MediaCodec decoder = null)
    {
        //extractor = new Android.Media.MediaExtractor();
        try
        {
            await extractor.SetDataSourceAsync(SAMPLE).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // Swallow and bail: if the source can't be opened there is nothing to do.
            var s = ex.ToString();
            return;
        }
        // Find and select the first video track; only the extractor is used here.
        for (int i = 0; i < extractor.TrackCount; i++)
        {
            var format = extractor.GetTrackFormat(i);
            Log.Debug("Format info: ", format.ToString());
            String mime = format.GetString(Android.Media.MediaFormat.KeyMime);
            if (mime.StartsWith("video/"))
            {
                Log.Debug("Format mime: ", mime);
                //Log.Debug("Format " + MediaFormat.KeyMaxInputSize + ": ",
                //    format.GetInteger(MediaFormat.KeyMaxInputSize).ToString());
                Log.Debug("Format " + MediaFormat.KeyWidth + ": ", format.GetInteger(MediaFormat.KeyWidth).ToString());
                Log.Debug("Format " + MediaFormat.KeyHeight + ": ", format.GetInteger(MediaFormat.KeyHeight).ToString());
                PrintFormatInfo(format);
                extractor.SelectTrack(i);
                //decoder = Android.Media.MediaCodec.CreateDecoderByType(mime);
                //this is where the Xamarin Android VM dies.
                //decoder.Configure(format, surface, null, 0);
                break;
            }
        }
        //if (decoder == null)
        //{
        //    Android.Util.Log.Error("DecodeActivity", "Can't find video info!");
        //    return;//can't continue...
        //}
        // Recreate the two comparison output files from scratch each run.
        var f = new Java.IO.File(dir + "decode.out");
        if (f.Exists())
        {
            f.Delete();
        }
        f.CreateNewFile();
        var f2 = new Java.IO.File(dir + "decode2.out");
        if (f2.Exists())
        {
            f2.Delete();
        }
        f2.CreateNewFile();
        //open the file for our custom extractor
        var inInfo = new System.IO.FileInfo(SAMPLE);
        if (!inInfo.Exists)
        {
            Log.Error("input file not found!", inInfo.FullName);
            return;
        }
        using (var inStream = inInfo.OpenRead())
        using (var fs2 = new Java.IO.FileOutputStream(f2)) //get an output stream
        using (var fs = new Java.IO.FileOutputStream(f)) //get an output stream
        {
            //var inputBuffers = decoder.GetInputBuffers();
            //var outputBuffers = decoder.GetOutputBuffers();
            // info is never written here, so its Flags EOS check below can only
            // ever see the default value — loop really exits via sampleSize < 0.
            var info = new Android.Media.MediaCodec.BufferInfo();
            bool started = false, isEOS = false;
            var sw = new System.Diagnostics.Stopwatch();
            // NOTE(review): startMs is captured before Start() and never read.
            long startMs = sw.ElapsedMilliseconds;
            sw.Start();
            byte[] peekBuf = new byte[188];
            //for dumping the sample into instead of the decoder.
            var buffer = Java.Nio.ByteBuffer.Allocate(165000);// decoder.GetInputBuffer(inIndex);
            var buffEx = new BufferExtractor();
            // NOTE(review): fixed 20000-byte scratch with the resize guard
            // commented out below — ByteBuffer.Get(byte[]) needs at least
            // tmpB.Length bytes remaining, so samples smaller than 20000 bytes
            // would make that Get throw. Confirm sample sizing.
            var tmpB = new byte[20000];
            while (!interrupted)
            {
                //sw.Restart();
                if (!isEOS)
                {
                    // Hard-wired "input buffer index" — the decoder dequeue is
                    // commented out; kept so the original control flow survives.
                    int inIndex = 1;// decoder.DequeueInputBuffer(10000);
                    if (inIndex >= 0)
                    {
                        buffer.Position(0);//reset the buffer
                        if (buffer.Position() != 0)
                        {
                            Log.Debug("inBuff.Position: ", buffer.Position().ToString());
                        }
                        Log.Debug("inBuff: ", buffer.ToString());
                        int sampleSize = extractor.ReadSampleData(buffer, 0);
                        if (sampleSize < 0)
                        {
                            // We shouldn't stop the playback at this point, just pass the EOS
                            // flag to decoder, we will get it again from the
                            // dequeueOutputBuffer
                            Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                            //decoder.QueueInputBuffer(inIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                            isEOS = true;
                        }
                        else
                        {
                            // Copy the sample out so we can scan it, then rewind the buffer.
                            if (peekBuf.Length < sampleSize)
                            {
                                peekBuf = new byte[sampleSize];
                            }
                            peekBuf.Initialize(); //clear old data.
                            buffer.Get(peekBuf);
                            buffer.Position(0); //reset for the decoder
                            // Scan for 00 00 00 01 Annex-B start codes (debug only).
                            for (int i = 4; i < peekBuf.Length; ++i)
                            {
                                if (peekBuf[i] == 0x01 && peekBuf[i - 1] == 0x00 && peekBuf[i - 2] == 0x00 && peekBuf[i - 3] == 0x00)
                                {
                                    Log.Debug("Found h264 start code: ", string.Format("i={0} of {1}", i, sampleSize));
                                }
                            }
                            Log.Debug("ExtractorActivity, sampleSize: ", sampleSize.ToString());
                            // Feed the custom extractor 188-byte chunks until it emits a
                            // buffer. First time through, discard payloads whose length
                            // doesn't match the platform extractor's sample to sync up.
                            // NOTE(review): ReadAsync's return value is ignored — a short
                            // read or EOF leaves peekBuf partially stale; confirm the
                            // input length is a multiple of 188.
                            if (!started)//get your parser synced with theirs
                            {
                                do
                                {
                                    peekBuf = new byte[188];
                                    await inStream.ReadAsync(peekBuf, 0, peekBuf.Length)
                                        .ConfigureAwait(false);
                                    buffEx.AddRaw(peekBuf);
                                    if (buffEx.outBuffers.Count > 0 && buffEx.outBuffers.Peek().GetPayload().Length != sampleSize)
                                    {
                                        buffEx.outBuffers.Dequeue();//throw this one away
                                    }
                                } while (buffEx.outBuffers.Count == 0);
                                started = true;
                            }
                            else
                            {
                                do
                                {
                                    peekBuf = new byte[188];
                                    await inStream.ReadAsync(peekBuf, 0, peekBuf.Length)
                                        .ConfigureAwait(false);
                                    buffEx.AddRaw(peekBuf);
                                } while (buffEx.outBuffers.Count == 0);
                                started = true;
                            }
                            //write out the vid data.
                            buffer.Limit(sampleSize);
                            buffer.Position(0);
                            //if (tmpB.Length < sampleSize) tmpB = new byte[sampleSize];
                            buffer.Get(tmpB);
                            fs.Write(tmpB);
                            buffer.Limit(buffer.Capacity());//reset the limit for next sample
                            buffer.Position(0);
                            fs2.Write(buffEx.outBuffers.Dequeue().GetPayload());
                            // NOTE(review): FileStream.CanRead stays true until the stream
                            // is disposed — this never detects EOF; the loop actually ends
                            // via sampleSize < 0 above. Confirm intent.
                            if (!inStream.CanRead)
                            {
                                isEOS = true;//end of stream.
                            }
                            //decoder.QueueInputBuffer(inIndex, 0, sampleSize, extractor.SampleTime, 0);
                            await extractor.AdvanceAsync().ConfigureAwait(false);
                            //extractor.AdvanceAsync();
                        }
                    }
                }
                // All decoded frames have been rendered, we can stop playing now
                if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                {
                    Android.Util.Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                    break;
                }
            }
            //decoder.Stop();
        }
    }
}