/// <summary>
/// Prepares a fresh H.264 ("video/avc") decoder and buffer info, flags the
/// player as running, and launches <see cref="Run"/> on a thread-pool task so
/// that playback can be restarted after a previous stop.
/// </summary>
/// <returns>
/// The task hosting the decode loop (also stored in <c>DecodeThread</c>).
/// NOTE(review): <c>Run</c> is declared <c>async void</c> elsewhere in this
/// file, so this task most likely completes at <c>Run</c>'s first await, not
/// when decoding finishes — confirm callers do not await it for completion.
/// </returns>
public Task RunAsync()
{
    // Recreate the codec state on every start so the player can be restarted.
    decoder = MediaCodec.CreateDecoderByType("video/avc");
    info = new Android.Media.MediaCodec.BufferInfo();
    running = true;

    DecodeThread = Task.Run(() => Run());
    return DecodeThread;
}
/// <summary>
/// Diagnostic loop that compares two TS parsers against the same input file:
/// Android's MediaExtractor reads SAMPLE and each video sample is written to
/// "decode.out", while the file is simultaneously read as raw 188-byte TS
/// packets through the custom BufferExtractor, whose reassembled payloads are
/// written to "decode2.out" so the two outputs can be diffed. The actual
/// MediaCodec decode path is commented out throughout.
/// NOTE(review): async void override — exceptions thrown after the first await
/// are unobservable to callers; the signature is fixed by the base class.
/// </summary>
override public async void Run()
{
    //Android.Media.MediaExtractor extractor;
    Android.Media.MediaCodec decoder = null; // never assigned — decoder creation is disabled below
    using (var extractor = new Android.Media.MediaExtractor())
    //using (Android.Media.MediaCodec decoder = null)
    {
        //extractor = new Android.Media.MediaExtractor();
        try
        {
            await extractor.SetDataSourceAsync(SAMPLE).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // NOTE(review): failure is silent — the exception text is captured
            // into a local but never logged; consider Log.Error here.
            var s = ex.ToString();
            return;
        }
        // Select the first video track; all other tracks are ignored.
        for (int i = 0; i < extractor.TrackCount; i++)
        {
            var format = extractor.GetTrackFormat(i);
            Log.Debug("Format info: ", format.ToString());
            String mime = format.GetString(Android.Media.MediaFormat.KeyMime);
            if (mime.StartsWith("video/"))
            {
                Log.Debug("Format mime: ", mime);
                //Log.Debug("Format " + MediaFormat.KeyMaxInputSize + ": ",
                //    format.GetInteger(MediaFormat.KeyMaxInputSize).ToString());
                Log.Debug("Format " + MediaFormat.KeyWidth + ": ", format.GetInteger(MediaFormat.KeyWidth).ToString());
                Log.Debug("Format " + MediaFormat.KeyHeight + ": ", format.GetInteger(MediaFormat.KeyHeight).ToString());
                PrintFormatInfo(format);
                extractor.SelectTrack(i);
                //decoder = Android.Media.MediaCodec.CreateDecoderByType(mime); //this is where the Xamarin Android VM dies.
                //decoder.Configure(format, surface, null, 0);
                break;
            }
        }
        //if (decoder == null)
        //{
        //    Android.Util.Log.Error("DecodeActivity", "Can't find video info!");
        //    return;//can't continue...
        //}
        // Recreate both dump files from scratch on every run.
        var f = new Java.IO.File(dir+"decode.out");
        if (f.Exists()) f.Delete();
        f.CreateNewFile();
        var f2 = new Java.IO.File(dir + "decode2.out");
        if (f2.Exists()) f2.Delete();
        f2.CreateNewFile();
        //open the file for our custom extractor
        var inInfo = new System.IO.FileInfo(SAMPLE);
        if (!inInfo.Exists)
        {
            Log.Error("input file not found!", inInfo.FullName);
            return;
        }
        using (var inStream = inInfo.OpenRead())
        using (var fs2 = new Java.IO.FileOutputStream(f2))//get an output stream
        using (var fs = new Java.IO.FileOutputStream(f))//get an output stream
        {
            //var inputBuffers = decoder.GetInputBuffers();
            //var outputBuffers = decoder.GetOutputBuffers();
            var info = new Android.Media.MediaCodec.BufferInfo();
            bool started = false, isEOS = false;
            var sw = new System.Diagnostics.Stopwatch();
            // NOTE(review): startMs is read before sw.Start() (always 0) and never used again.
            long startMs = sw.ElapsedMilliseconds;
            sw.Start();
            byte[] peekBuf = new byte[188];
            //for dumping the sample into instead of the decoder.
            var buffer = Java.Nio.ByteBuffer.Allocate(165000);// decoder.GetInputBuffer(inIndex);
            var buffEx = new BufferExtractor();
            var tmpB = new byte[20000];
            while (!interrupted)
            {
                //sw.Restart();
                if (!isEOS)
                {
                    // Decoder is disabled; inIndex is hard-coded so the branch below always runs.
                    int inIndex = 1;// decoder.DequeueInputBuffer(10000);
                    if (inIndex >= 0)
                    {
                        buffer.Position(0);//reset the buffer
                        if (buffer.Position() != 0)
                            Log.Debug("inBuff.Position: ", buffer.Position().ToString());
                        Log.Debug("inBuff: ", buffer.ToString());
                        int sampleSize = extractor.ReadSampleData(buffer, 0);
                        if (sampleSize < 0)
                        {
                            // We shouldn't stop the playback at this point, just pass the EOS
                            // flag to decoder, we will get it again from the
                            // dequeueOutputBuffer
                            Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                            //decoder.QueueInputBuffer(inIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                            isEOS = true;
                        }
                        else
                        {
                            // Grow the peek buffer if this sample is larger than any seen so far.
                            if (peekBuf.Length < sampleSize)
                                peekBuf = new byte[sampleSize];
                            // NOTE(review): Array.Initialize is a no-op for primitive element
                            // types, so old bytes are NOT actually cleared here.
                            peekBuf.Initialize();//clear old data.
                            buffer.Get(peekBuf);
                            buffer.Position(0);//reset for the decoder
                            // Scan the sample for H.264 Annex-B start codes (00 00 00 01).
                            for (int i = 4; i < peekBuf.Length; ++i)
                            {
                                if (peekBuf[i] == 0x01 && peekBuf[i - 1] == 0x00 && peekBuf[i - 2] == 0x00 && peekBuf[i - 3] == 0x00)
                                    Log.Debug("Found h264 start code: ", string.Format("i={0} of {1}", i, sampleSize));
                            }
                            Log.Debug("ExtractorActivity, sampleSize: ", sampleSize.ToString());
                            if (!started)//get your parser synced with theirs
                            {
                                // First sample: pump TS packets into BufferExtractor, discarding
                                // any payload whose length differs from MediaExtractor's sample,
                                // until both parsers line up on the same sample.
                                do
                                {
                                    // NOTE(review): ReadAsync's return value is ignored — a short
                                    // read would feed a partial/zero-padded TS packet to AddRaw.
                                    peekBuf = new byte[188];
                                    await inStream.ReadAsync(peekBuf, 0, peekBuf.Length)
                                        .ConfigureAwait(false);
                                    buffEx.AddRaw(peekBuf);
                                    if (buffEx.outBuffers.Count > 0 && buffEx.outBuffers.Peek().GetPayload().Length != sampleSize)
                                    {
                                        buffEx.outBuffers.Dequeue();//throw this one away
                                    }
                                } while (buffEx.outBuffers.Count == 0);
                                started = true;
                            }
                            else
                            {
                                // Already synced: just pump packets until the next payload is ready.
                                do
                                {
                                    peekBuf = new byte[188];
                                    await inStream.ReadAsync(peekBuf, 0, peekBuf.Length)
                                        .ConfigureAwait(false);
                                    buffEx.AddRaw(peekBuf);
                                } while (buffEx.outBuffers.Count == 0);
                                started = true;
                            }
                            //write out the vid data.
                            buffer.Limit(sampleSize);
                            buffer.Position(0);
                            // NOTE(review): tmpB is fixed at 20000 bytes; with limit==sampleSize,
                            // Get(tmpB) throws BufferUnderflowException when sampleSize < 20000
                            // and truncates when larger — the commented resize below looks like
                            // the intended fix. TODO confirm actual sample sizes.
                            //if (tmpB.Length < sampleSize) tmpB = new byte[sampleSize];
                            buffer.Get(tmpB);
                            fs.Write(tmpB);
                            buffer.Limit(buffer.Capacity());//reset the limit for next sample
                            buffer.Position(0);
                            // Write the BufferExtractor's matching payload to the second dump file.
                            fs2.Write(buffEx.outBuffers.Dequeue().GetPayload());
                            if (!inStream.CanRead)
                                isEOS = true;//end of stream.
                            //decoder.QueueInputBuffer(inIndex, 0, sampleSize, extractor.SampleTime, 0);
                            await extractor.AdvanceAsync().ConfigureAwait(false);
                            //extractor.AdvanceAsync();
                        }
                    }
                }
                // All decoded frames have been rendered, we can stop playing now
                // NOTE(review): info.Flags is never written (decoder path disabled), so this
                // break can never fire — the loop only exits via `interrupted`.
                if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                {
                    Android.Util.Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                    break;
                }
            }
            //decoder.Stop();
        }
    }
}
/// <summary>
/// Diagnostic loop (brace-per-statement variant of the same routine) that
/// compares two TS parsers against the same input file: Android's
/// MediaExtractor reads SAMPLE and each video sample is written to
/// "decode.out", while the file is simultaneously read as raw 188-byte TS
/// packets through the custom BufferExtractor, whose reassembled payloads are
/// written to "decode2.out" for diffing. The MediaCodec decode path is
/// commented out throughout.
/// NOTE(review): async void override — exceptions thrown after the first await
/// are unobservable to callers; the signature is fixed by the base class.
/// </summary>
override public async void Run()
{
    //Android.Media.MediaExtractor extractor;
    Android.Media.MediaCodec decoder = null; // never assigned — decoder creation is disabled below
    using (var extractor = new Android.Media.MediaExtractor())
    //using (Android.Media.MediaCodec decoder = null)
    {
        //extractor = new Android.Media.MediaExtractor();
        try
        {
            await extractor.SetDataSourceAsync(SAMPLE).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // NOTE(review): failure is silent — the exception text is captured
            // into a local but never logged; consider Log.Error here.
            var s = ex.ToString();
            return;
        }
        // Select the first video track; all other tracks are ignored.
        for (int i = 0; i < extractor.TrackCount; i++)
        {
            var format = extractor.GetTrackFormat(i);
            Log.Debug("Format info: ", format.ToString());
            String mime = format.GetString(Android.Media.MediaFormat.KeyMime);
            if (mime.StartsWith("video/"))
            {
                Log.Debug("Format mime: ", mime);
                //Log.Debug("Format " + MediaFormat.KeyMaxInputSize + ": ",
                //    format.GetInteger(MediaFormat.KeyMaxInputSize).ToString());
                Log.Debug("Format " + MediaFormat.KeyWidth + ": ", format.GetInteger(MediaFormat.KeyWidth).ToString());
                Log.Debug("Format " + MediaFormat.KeyHeight + ": ", format.GetInteger(MediaFormat.KeyHeight).ToString());
                PrintFormatInfo(format);
                extractor.SelectTrack(i);
                //decoder = Android.Media.MediaCodec.CreateDecoderByType(mime); //this is where the Xamarin Android VM dies.
                //decoder.Configure(format, surface, null, 0);
                break;
            }
        }
        //if (decoder == null)
        //{
        //    Android.Util.Log.Error("DecodeActivity", "Can't find video info!");
        //    return;//can't continue...
        //}
        // Recreate both dump files from scratch on every run.
        var f = new Java.IO.File(dir + "decode.out");
        if (f.Exists())
        {
            f.Delete();
        }
        f.CreateNewFile();
        var f2 = new Java.IO.File(dir + "decode2.out");
        if (f2.Exists())
        {
            f2.Delete();
        }
        f2.CreateNewFile();
        //open the file for our custom extractor
        var inInfo = new System.IO.FileInfo(SAMPLE);
        if (!inInfo.Exists)
        {
            Log.Error("input file not found!", inInfo.FullName);
            return;
        }
        using (var inStream = inInfo.OpenRead())
        using (var fs2 = new Java.IO.FileOutputStream(f2)) //get an output stream
        using (var fs = new Java.IO.FileOutputStream(f)) //get an output stream
        {
            //var inputBuffers = decoder.GetInputBuffers();
            //var outputBuffers = decoder.GetOutputBuffers();
            var info = new Android.Media.MediaCodec.BufferInfo();
            bool started = false, isEOS = false;
            var sw = new System.Diagnostics.Stopwatch();
            // NOTE(review): startMs is read before sw.Start() (always 0) and never used again.
            long startMs = sw.ElapsedMilliseconds;
            sw.Start();
            byte[] peekBuf = new byte[188];
            //for dumping the sample into instead of the decoder.
            var buffer = Java.Nio.ByteBuffer.Allocate(165000);// decoder.GetInputBuffer(inIndex);
            var buffEx = new BufferExtractor();
            var tmpB = new byte[20000];
            while (!interrupted)
            {
                //sw.Restart();
                if (!isEOS)
                {
                    // Decoder is disabled; inIndex is hard-coded so the branch below always runs.
                    int inIndex = 1;// decoder.DequeueInputBuffer(10000);
                    if (inIndex >= 0)
                    {
                        buffer.Position(0);//reset the buffer
                        if (buffer.Position() != 0)
                        {
                            Log.Debug("inBuff.Position: ", buffer.Position().ToString());
                        }
                        Log.Debug("inBuff: ", buffer.ToString());
                        int sampleSize = extractor.ReadSampleData(buffer, 0);
                        if (sampleSize < 0)
                        {
                            // We shouldn't stop the playback at this point, just pass the EOS
                            // flag to decoder, we will get it again from the
                            // dequeueOutputBuffer
                            Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                            //decoder.QueueInputBuffer(inIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                            isEOS = true;
                        }
                        else
                        {
                            // Grow the peek buffer if this sample is larger than any seen so far.
                            if (peekBuf.Length < sampleSize)
                            {
                                peekBuf = new byte[sampleSize];
                            }
                            // NOTE(review): Array.Initialize is a no-op for primitive element
                            // types, so old bytes are NOT actually cleared here.
                            peekBuf.Initialize(); //clear old data.
                            buffer.Get(peekBuf);
                            buffer.Position(0); //reset for the decoder
                            // Scan the sample for H.264 Annex-B start codes (00 00 00 01).
                            for (int i = 4; i < peekBuf.Length; ++i)
                            {
                                if (peekBuf[i] == 0x01 && peekBuf[i - 1] == 0x00 && peekBuf[i - 2] == 0x00 && peekBuf[i - 3] == 0x00)
                                {
                                    Log.Debug("Found h264 start code: ", string.Format("i={0} of {1}", i, sampleSize));
                                }
                            }
                            Log.Debug("ExtractorActivity, sampleSize: ", sampleSize.ToString());
                            if (!started)//get your parser synced with theirs
                            {
                                // First sample: pump TS packets into BufferExtractor, discarding
                                // any payload whose length differs from MediaExtractor's sample,
                                // until both parsers line up on the same sample.
                                do
                                {
                                    // NOTE(review): ReadAsync's return value is ignored — a short
                                    // read would feed a partial/zero-padded TS packet to AddRaw.
                                    peekBuf = new byte[188];
                                    await inStream.ReadAsync(peekBuf, 0, peekBuf.Length)
                                        .ConfigureAwait(false);
                                    buffEx.AddRaw(peekBuf);
                                    if (buffEx.outBuffers.Count > 0 && buffEx.outBuffers.Peek().GetPayload().Length != sampleSize)
                                    {
                                        buffEx.outBuffers.Dequeue();//throw this one away
                                    }
                                } while (buffEx.outBuffers.Count == 0);
                                started = true;
                            }
                            else
                            {
                                // Already synced: just pump packets until the next payload is ready.
                                do
                                {
                                    peekBuf = new byte[188];
                                    await inStream.ReadAsync(peekBuf, 0, peekBuf.Length)
                                        .ConfigureAwait(false);
                                    buffEx.AddRaw(peekBuf);
                                } while (buffEx.outBuffers.Count == 0);
                                started = true;
                            }
                            //write out the vid data.
                            buffer.Limit(sampleSize);
                            buffer.Position(0);
                            // NOTE(review): tmpB is fixed at 20000 bytes; with limit==sampleSize,
                            // Get(tmpB) throws BufferUnderflowException when sampleSize < 20000
                            // and truncates when larger — the commented resize below looks like
                            // the intended fix. TODO confirm actual sample sizes.
                            //if (tmpB.Length < sampleSize) tmpB = new byte[sampleSize];
                            buffer.Get(tmpB);
                            fs.Write(tmpB);
                            buffer.Limit(buffer.Capacity());//reset the limit for next sample
                            buffer.Position(0);
                            // Write the BufferExtractor's matching payload to the second dump file.
                            fs2.Write(buffEx.outBuffers.Dequeue().GetPayload());
                            if (!inStream.CanRead)
                            {
                                isEOS = true;//end of stream.
                            }
                            //decoder.QueueInputBuffer(inIndex, 0, sampleSize, extractor.SampleTime, 0);
                            await extractor.AdvanceAsync().ConfigureAwait(false);
                            //extractor.AdvanceAsync();
                        }
                    }
                }
                // All decoded frames have been rendered, we can stop playing now
                // NOTE(review): info.Flags is never written (decoder path disabled), so this
                // break can never fire — the loop only exits via `interrupted`.
                if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                {
                    Android.Util.Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
                    break;
                }
            }
            //decoder.Stop();
        }
    }
}
/// <summary>
/// Activity startup: creates an AVC decoder and a SurfaceView, and — once the
/// surface exists (SurfsUp.created callback) — configures the codec onto it and
/// starts three cooperating threads: a network reader that frames packets off
/// `connection` into pendingPackets, a decode thread that feeds those packets
/// to the MediaCodec and queues output-buffer indices into pendingFrames, and a
/// render thread that releases output buffers to the surface. Queues are
/// guarded by lock(); all three threads rendezvous on a single AutoResetEvent.
/// NOTE(review): one AutoResetEvent wakes exactly one waiter, but both the
/// render and decode threads wait on it — a Set may wake the "wrong" thread and
/// stall the other until the next Set. Verify under load.
/// </summary>
/// <param name="savedInstanceState">Standard Android saved-state bundle, passed to base.</param>
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);
    codec = Android.Media.MediaCodec.CreateDecoderByType("video/avc");
    sview = new SurfaceView(this);
    SetContentView(sview);
    sview.Touch += Sview_Touch;
    sview.Holder.AddCallback(new SurfsUp()
    {
        created = () =>
        {
            // NOTE(review): resolution is hard-coded to 1920x1080, and the same surface
            // is passed both to Configure and to SetOutputSurface.
            codec.Configure(Android.Media.MediaFormat.CreateVideoFormat("video/avc", 1920, 1080), sview.Holder.Surface, null, Android.Media.MediaCodecConfigFlags.None);
            codec.SetOutputSurface(sview.Holder.Surface);
            codec.Start();
            Queue<PendingPacket> pendingPackets = new Queue<PendingPacket>(); // net -> decode
            Queue<PendingSurface> pendingFrames = new Queue<PendingSurface>(); // decode -> render
            AutoResetEvent evt = new AutoResetEvent(false);
            // Render thread: pops decoded-frame indices and releases them to the surface.
            System.Threading.Thread renderThread = new Thread(delegate ()
            {
                long currentTimestamp = 0;
                //Time since last frame
                // NOTE(review): mwatch is only used by the commented-out pacing code below.
                System.Diagnostics.Stopwatch mwatch = new System.Diagnostics.Stopwatch();
                while (running)
                {
                    PendingSurface surface = null;
                    lock (pendingFrames)
                    {
                        if (pendingFrames.Count > 0)
                        {
                            surface = pendingFrames.Dequeue();
                        }
                    }
                    if (surface == null)
                    {
                        evt.WaitOne();
                    }
                    else
                    {
                        if (currentTimestamp != 0)
                        {
                            // Frame pacing is disabled — frames render as fast as they arrive.
                            /*int sleeptime = (int)(surface.presentationTimestamp-currentTimestamp);
                             * sleeptime -= (int)mwatch.ElapsedMilliseconds;
                             * mwatch.Reset();
                             * mwatch.Start();
                             * if (sleeptime>0)
                             * {
                             *     System.Threading.Thread.Sleep(sleeptime);
                             * }*/
                        }
                        currentTimestamp = surface.presentationTimestamp;
                        codec.ReleaseOutputBuffer(surface.ID, true);
                    }
                }
            });
            renderThread.Start();
            // Decode thread: feeds queued packets into the codec and drains its output.
            System.Threading.Thread decodeThread = new Thread(delegate ()
            {
                while (running)
                {
                    PendingPacket _packet = null;
                    lock (pendingPackets)
                    {
                        if (pendingPackets.Any())
                        {
                            _packet = pendingPackets.Dequeue();
                        }
                    }
                    if (_packet == null)
                    {
                        evt.WaitOne();
                        continue;
                    }
                    byte[] packet = _packet.data;
                    long timestamp = _packet.timestamp;
                    // NOTE(review): -1 blocks indefinitely waiting for an input buffer.
                    int id = codec.DequeueInputBuffer(-1);
                    using (var buffy = codec.GetInputBuffer(id))
                    {
                        // NOTE(review): if the packet is larger than the buffer, `id` is
                        // neither queued nor returned to the codec — that input buffer
                        // slot appears to be leaked. Confirm against MediaCodec docs.
                        if (buffy.Capacity() >= packet.Length)
                        {
                            Marshal.Copy(packet, 0, buffy.GetDirectBufferAddress(), packet.Length);
                            codec.QueueInputBuffer(id, 0, packet.Length, timestamp, Android.Media.MediaCodecBufferFlags.None);
                            using (var info = new Android.Media.MediaCodec.BufferInfo())
                            {
                                // Drain all currently-ready output buffers (0 timeout = non-blocking).
                                while (true)
                                {
                                    int idx = codec.DequeueOutputBuffer(info, 0);
                                    if (idx >= 0)
                                    {
                                        // PresentationTimeUs is microseconds; stored as milliseconds.
                                        var sval = new PendingSurface() { ID = idx, presentationTimestamp = info.PresentationTimeUs / 1000 };
                                        lock (pendingFrames)
                                        {
                                            pendingFrames.Enqueue(sval);
                                            evt.Set();
                                        }
                                    }
                                    else
                                    {
                                        break;
                                    }
                                }
                            }
                        }
                    }
                }
            });
            decodeThread.Start();
            BinaryReader mreader = new BinaryReader(connection);
            // Network thread: parses a simple tagged wire format off `connection`.
            System.Threading.Thread netThread = new Thread(delegate ()
            {
                while (running)
                {
                    try
                    {
                        switch (mreader.ReadByte())
                        {
                            case 0: // video packet: int64 timestamp, int32 length, payload
                                {
                                    long timestamp = mreader.ReadInt64();
                                    int length = mreader.ReadInt32();
                                    byte[] packet = mreader.ReadBytes(length);
                                    PendingPacket mpacket = new PendingPacket() { data = packet, timestamp = timestamp };
                                    lock (pendingPackets)
                                    {
                                        pendingPackets.Enqueue(mpacket);
                                    }
                                }
                                break;
                            case 14: // uint64 read and discarded
                                ulong forme = mreader.ReadUInt64();
                                break;
                            case 15: // cursor update: uint64 id + length-prefixed blob, discarded
                                {
                                    ulong foranothercursor = mreader.ReadUInt64();
                                    MemoryStream cstr = new MemoryStream(mreader.ReadBytes(mreader.ReadInt32()));
                                    //Android doesn't support mouse cursors (at least not to my knowledge)
                                }
                                break;
                        }
                    }
                    catch (Exception er)
                    {
                        // NOTE(review): all exceptions (including end-of-stream from the
                        // BinaryReader) are swallowed unlogged; on a dead connection this
                        // loop spins, throwing, until `running` goes false.
                    }
                    evt.Set();
                }
            });
            netThread.Start();
        }
    });
}
/// <summary>
/// Background decode loop: opens the sample TS file, pushes it through the
/// custom BufferExtractor one 188-byte TS packet at a time (assembled samples
/// are delivered via the SampleReady event to SampleReadyCallbackHandler), and
/// resynchronizes on the TS sync byte when a bad packet is seen. The direct
/// MediaCodec feed/drain path is commented out; only InitializeDecoder() and a
/// final end-of-stream attempt remain active.
/// NOTE(review): async void override — exceptions after the first await are
/// unobservable to callers; the signature is fixed by the base class.
/// </summary>
protected override async void Run()
{
    //so we can restart the player.
    decoder = MediaCodec.CreateDecoderByType("video/avc");
    running = true;
    var finfo = new System.IO.FileInfo(FilePlayer.SAMPLE);
    var fs = finfo.OpenRead();
    // NOTE(review): inIndex is never reassigned (the DequeueInputBuffer calls are
    // commented out), so the EOS QueueInputBuffer below always uses index 0.
    int bytes, count, inIndex =0;
    int buffSize = 188; // MPEG transport-stream packet size
    var buff = new byte[buffSize];
    // NOTE(review): ts is only referenced by the commented-out format probe below.
    var ts = new MpegTS.TsPacket(buff);
    buffEx = new BufferExtractor();
    buffEx.SampleReady += SampleReadyCallbackHandler;
    //buffEx.Callback = this;
    bool eof = false;
    info = new Android.Media.MediaCodec.BufferInfo();
    // Dispose codec objects and the file stream when the loop finishes.
    using(info)
    using(decoder)
    using (fs)
    {
        count = 0;
        Log.Debug(TAG, "looking for format info");
        ////look for the format info.
        //do
        //{
        //    bytes = await fs.ReadAsync(buff, 0, buff.Length).ConfigureAwait(false);
        //    Log.Debug(TAG, "PID: " + string.Format("{0}", ts.PID));
        //} while (BitConverter.ToInt32(buff, formatStartI) != formatStartVal);
        //Log.Debug(TAG, "found format info");
        //var tmpB = new byte[23 + 8];
        //System.Buffer.BlockCopy(buff, formatStartI - 4, tmpB, 0, tmpB.Length);
        InitializeDecoder();
        // decoder.Configure(format, surface, null, MediaCodecConfigFlags.None);
        // decoder.Start();
        //
        // inputBuffers = decoder.GetInputBuffers();
        //
        // fs.Position = 0;//reset
        sw.Restart();
        //bool started = false;
        //do
        {
            ++count;
            try
            {
                // Pump the whole file through the extractor, one TS packet per iteration.
                while ( fs.CanRead && running)//buffEx.SampleCount == 0)
                {
                    // Stop when less than one full TS packet remains.
                    if (fs.Length - fs.Position < 188)
                    {
                        eof = true;
                        break;//we're @ EOF
                    }
                    //we need a new buffer every loop!
                    // NOTE(review): ReadAsync's return value (bytes) is never checked —
                    // a short read would hand AddRaw a partial packet.
                    buff = new byte[188];
                    bytes = await fs.ReadAsync(buff, 0, buff.Length)
                        .ConfigureAwait(false);
                    //push the raw data to our custom extractor
                    if (!buffEx.AddRaw(buff))
                    {
                        Log.Debug("ExtractorActivity, "," ----------bad TS packet!");
                        //find next sync byte and try again
                        // NOTE(review): if no sync byte exists in buff, IndexOf returns -1
                        // and this rewinds by length+1 — verify it cannot go negative or
                        // loop on the same bad data.
                        fs.Position -= buff.Length - buff.ToList().IndexOf(MpegTS.TsPacket.SyncByte);
                    }
                    // Throttle when the extractor is ahead of the consumer.
                    if (buffEx.SampleCount > 1)
                        await Task.Delay(10);
                }
                //if (!fs.CanRead || eof)
                //    break;
            }
            catch (Exception ex)
            {
                Log.Error("ExtractorActivity error: ", ex.ToString());
            }
            ////get the raw video stream, stripped of Mpeg TS headers
            //var buf = buffEx.DequeueNextSample();
            //Log.Debug("ExtractorActivity, sampleSize: ", buf.Length.ToString());
            //var outputBuffers = decoder.GetOutputBuffers();
            ////get a input buffer index from the decoder for input
            //inIndex = decoder.DequeueInputBuffer(10000);
            //if (inIndex >= 0)
            //{
            //    //get the re-assembled video data from the extractor
            //    using (var b = Java.Nio.ByteBuffer.Wrap(buf.Buffer))
            //    {
            //        var inB = inputBuffers[inIndex];
            //        //*************
            //        //THE BUFFER *******MUST********* be CLEARED before each write,
            //        //else when the buffers start getting recycled, the decoder will
            //        //read past the end of the current data into old data!
            //        //This may cause tearing of the picture, or even a complete
            //        //crash of the app from internal errors in native decoder code!!!!!
            //        inB.Clear();
            //        inB.Put(b);
            //        decoder.QueueInputBuffer(inIndex, 0, b.Limit(), 0, MediaCodecBufferFlags.None);
            //    }// b.Dispose();//clean up
            //}
            ////else
            ////    continue;//we don't have a full video frame, look for more.
            ////check decoder output/state
            //int outIndex = decoder.DequeueOutputBuffer(info, 10000);
            //switch ((Android.Media.MediaCodecInfoState)outIndex)
            //{
            //    case MediaCodecInfoState.OutputBuffersChanged:
            //        Android.Util.Log.Debug("DecodeActivity",
            //            MediaCodecInfoState.OutputBuffersChanged.ToString());
            //        outputBuffers = decoder.GetOutputBuffers();
            //        break;
            //    case MediaCodecInfoState.OutputFormatChanged:
            //        Android.Util.Log.Debug("DecodeActivity", "New format " + decoder.OutputFormat);//.GetOutputFormat(outIndex));
            //        break;
            //    case MediaCodecInfoState.TryAgainLater:
            //        Android.Util.Log.Debug("DecodeActivity", "dequeueOutputBuffer timed out!");
            //        break;
            //    default:
            //        var buffer = outputBuffers[outIndex];// decoder.GetOutputBuffer(outIndex);
            //        Android.Util.Log.Verbose("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);
            //        //bool gcDone = false;
            //        // We use a very simple clock to keep the video FPS, or the video
            //        // playback will be too fast
            //        //This causes the next frame to not be rendered too quickly.
            //        while (info.PresentationTimeUs / 1000 > sw.ElapsedMilliseconds)
            //        {
            //            await Task.Delay(10).ConfigureAwait(false);
            //        }
            //        //the decoder won't advance without this...
            //        //must be called before the next decoder.dequeue call
            //        decoder.ReleaseOutputBuffer(outIndex, true);
            //        break;
            //}
        }//while (fs.CanRead && running);
        Log.Debug("DecodeActivity", MediaCodecBufferFlags.EndOfStream.ToString());
        try
        {
            // NOTE(review): index 0 was never obtained from DequeueInputBuffer, so this
            // likely throws — the catch below only logs a fixed message (ex unused).
            decoder.QueueInputBuffer(inIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
        }
        catch(Exception ex)
        {
            Log.Debug("DecodeActivity", "error closing decoder!");
        }
    }//dispose filestream,decoder, info
}