Example #1
0
        /// <summary>
        /// Configures and starts the decoder, then caches its input/output buffer arrays.
        /// Expects <c>decoder</c>, <c>format</c> and <c>surface</c> to already be set up.
        /// </summary>
        protected void InitializeDecoder()
        {
            // Decode straight to the supplied surface; no crypto session, not an encoder.
            // (Was "async void" with no await — the method runs synchronously anyway, and
            // async void would let exceptions escape unobserved.)
            decoder.Configure(format, surface, null, MediaCodecConfigFlags.None);
            decoder.Start();

            inputBuffers  = decoder.GetInputBuffers();
            outputBuffers = decoder.GetOutputBuffers();
        }
 /// <summary>
 /// Wraps an already-configured <see cref="MediaCodec"/>: starts it and caches its
 /// buffer arrays plus the bookkeeping queues of available buffer indices.
 /// </summary>
 /// <param name="codec">A configured (but not yet started) codec instance.</param>
 private MediaCodecWrapper(MediaCodec codec)
 {
     mDecoder = codec;
     codec.Start();
     mInputBuffers           = codec.GetInputBuffers();
     mOutputBuffers          = codec.GetOutputBuffers();
     mOutputBufferInfo       = new MediaCodec.BufferInfo[mOutputBuffers.Length];
     // Size each queue after the buffer set it actually tracks — the capacities were
     // previously swapped (input queue sized by output count and vice versa). Queue
     // capacity is only a growth hint, but keep them consistent.
     mAvailableInputBuffers  = new Queue<int>(mInputBuffers.Length);
     mAvailableOutputBuffers = new Queue<int>(mOutputBuffers.Length);
 }
        /// <summary>
        /// Feeds the next chunk of raw audio data to the audio encoder, flagging
        /// end-of-stream on the chunk that exhausts the source array.
        /// </summary>
        /// <returns><c>true</c> while there is still audio data left to submit.</returns>
        private bool WriteAudio()
        {
            int bufferIndex = audioEncoder.DequeueInputBuffer(-1);

            if (bufferIndex >= 0)
            {
                ByteBuffer target = audioEncoder.GetInputBuffers()[bufferIndex];

                // Never write more than the buffer can hold or than remains in the source.
                var chunkLen = Utils.Clamp(audioData.Length - audioDataIdx, 0, target.Remaining());
                target.Clear();
                target.Put(audioData, audioDataIdx, chunkLen);

                // Timestamp derived from byte offset; divisor assumes 44.1 kHz with
                // 2 bytes per sample — NOTE(review): confirm against the audio format.
                long presentationTime = (audioDataIdx * SecondsToMicroSeconds) / (44100 * 2);
                audioDataIdx += chunkLen;

                bool reachedEnd = audioDataIdx == audioData.Length;
                audioEncoder.QueueInputBuffer(
                    bufferIndex, 0, chunkLen, presentationTime,
                    reachedEnd ? MediaCodecBufferFlags.EndOfStream : MediaCodecBufferFlags.None);
            }

            return audioDataIdx < audioData.Length;
        }
Example #4
0
/**
 * Work loop: feeds samples for the given track into the decoder, renders each
 * decoded frame to the output surface, and runs face detection on the rendered
 * bitmap. Blocks until the decoder signals end-of-stream.
 */
        private void doExtract(MediaExtractor extractor, int trackIndex, MediaCodec decoder, CodecOutputSurface outputSurface)
        {
            Stopwatch stopWatch    = new Stopwatch();
            const int TIMEOUT_USEC = 10000;

            ByteBuffer[]          decoderInputBuffers = decoder.GetInputBuffers();
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int inputChunk             = 0;
            int decodeCount            = 0;
            var frameTimestamps        = new List<long>();

            bool outputDone = false;
            bool inputDone  = false;

            // Speed vs accuracy tradeoffs:
            // https://stackoverflow.com/questions/34132444/google-mobile-vision-poor-facedetector-performance-without-camerasource
            // Reducing bitmap resolution helps the most; acceptable since the bitmaps
            // are not reused afterwards. Tracking disabled was observed to be slower.
            var detector = new FaceDetector.Builder(Application.Context)
                           .SetTrackingEnabled(true)
                           .SetClassificationType(ClassificationType.All)
                           .SetProminentFaceOnly(true)      // no measurable difference
                           .SetMode(FaceDetectionMode.Fast) // tiny performance gain
                           .Build();

            // Start once, outside the loop (Start() on a running stopwatch is a no-op,
            // so the original per-iteration call only added noise).
            stopWatch.Start();

            try
            {
                while (!outputDone)
                {
                    // Feed more data to the decoder.
                    if (!inputDone)
                    {
                        int inputBufIndex = decoder.DequeueInputBuffer(TIMEOUT_USEC);
                        if (inputBufIndex >= 0)
                        {
                            ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                            // Read the sample data into the ByteBuffer.  This neither respects nor
                            // updates inputBuf's position, limit, etc.
                            int chunkSize = extractor.ReadSampleData(inputBuf, 0);
                            if (chunkSize < 0)
                            {
                                // End of stream -- send empty frame with EOS flag set.
                                decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BufferFlagEndOfStream);
                                inputDone = true;
                            }
                            else
                            {
                                // Samples from an unexpected track are still queued; the
                                // original logged a warning here and carried on.
                                // Timestamp recorded so each rendered frame can be matched
                                // back to its presentation time below.
                                frameTimestamps.Add(extractor.SampleTime);
                                decoder.QueueInputBuffer(inputBufIndex, 0, chunkSize, extractor.SampleTime, 0 /*flags*/);
                                inputChunk++;
                                extractor.Advance();
                            }
                        }
                        // else: input buffer not available yet; retry next iteration.
                    }

                    if (!outputDone)
                    {
                        int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                        if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                        {
                            // no output available yet
                        }
                        else if (decoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                        {
                            // not important for us, since we're using Surface output
                        }
                        else if (decoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                        {
                            // expected once before the first buffer of data
                        }
                        else if (decoderStatus < 0)
                        {
                            throw new InvalidOperationException(
                                "unexpected result from decoder.DequeueOutputBuffer: " + decoderStatus);
                        }
                        else
                        {
                            if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                            {
                                outputDone = true;
                            }

                            bool doRender = (info.Size != 0);

                            // As soon as we call ReleaseOutputBuffer, the buffer is forwarded
                            // to SurfaceTexture to convert to a texture.  The API doesn't
                            // guarantee the texture is available before the call returns;
                            // the onFrameAvailable callback could not be made to work here,
                            // so UpdateTexImage is called directly without waiting.
                            decoder.ReleaseOutputBuffer(decoderStatus, doRender);

                            if (doRender)
                            {
                                outputSurface.mTextureRender.checkGlError("before updateTexImage");
                                outputSurface.mSurfaceTexture.UpdateTexImage();
                                outputSurface.drawImage(true);
                                // Detection runs synchronously: offloading to tasks was too
                                // slow in practice (see original notes).
                                CreateFaceframes(detector, outputSurface.GetFramebitmap(), frameTimestamps[decodeCount]);
                                decodeCount++;
                            }
                        }
                    }
                }
            }
            finally
            {
                stopWatch.Stop();
                Log.Info("inner STOPWATCH!!!!:", string.Format("numberofframes = {0}, totaltime = {1}", decodeCount, stopWatch.ElapsedMilliseconds));
                // Always release the native detector, even if decoding throws,
                // otherwise its underlying resources leak.
                detector.Release();
            }
        }
Example #5
0
        /// <summary>
        /// Decodes one H.264 NAL unit (Annex-B framed: 4-byte start code + header byte).
        /// SPS NALs (type 7) reconfigure the codec when they change, PPS NALs (type 8)
        /// are ignored, and decoding is held back until the first keyframe (type 5).
        /// </summary>
        /// <param name="array">A single Annex-B framed NAL unit.</param>
        public void decode(byte[] array)
        {
            // Need at least the 4-byte start code plus the NAL header byte,
            // otherwise array[4] below throws IndexOutOfRangeException.
            if (array == null || array.Length < 5)
            {
                return;
            }

            if (bConfigured == false)
            {
                Init();
            }

            var nalType = array[4] & 0x1f;

            if (nalType == 7) // SPS
            {
                // Re-init only when the SPS changed. NOTE(review): length comparison is
                // a cheap proxy for content equality — confirm it is sufficient here.
                if (array.Length != sps.Length)
                {
                    stop();
                    sps = array.ToArray();
                    Init();
                }
                return;
            }
            if (nalType == 8) // PPS — handled via the SPS/Init path, nothing to do.
            {
                return;
            }
            if (bConfigured == false)
            {
                return;
            }

            // Make sure a keyframe arrives before feeding non-key frames.
            if (nalType == 5)
            {
                bWaitForKeyframe = false;
            }
            if (bWaitForKeyframe)
            {
                return;
            }

            try
            {
                ByteBuffer[] inputBuffers       = codec.GetInputBuffers();
                int          dequeueInputBuffer = codec.DequeueInputBuffer(-1L);
                if (dequeueInputBuffer >= 0)
                {
                    // Send data to decoder.
                    ByteBuffer byteBuffer = inputBuffers[dequeueInputBuffer];
                    byteBuffer.Clear();
                    byteBuffer.Put(array);
                    codec.QueueInputBuffer(dequeueInputBuffer, 0, array.Length, 0L, 0);
                }

                // Drain and render every frame the decoder has ready.
                MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                int outIndex = codec.DequeueOutputBuffer(bufferInfo, 0L);
                while (outIndex >= 0)
                {
                    codec.ReleaseOutputBuffer(outIndex, true);
                    codec.SetVideoScalingMode(VideoScalingMode.ScaleToFit);

                    outIndex = codec.DequeueOutputBuffer(bufferInfo, 0L);
                }
            }
            catch (Exception ex)
            {
                MainActivity.getActivity().notifyUser("VideoDecoder decode exception " + ex.Message, false);

                // Attempt to recover: tear the codec down; the next call re-inits it.
                stop();
            }
        }
        /// <summary>
        /// Decodes one H.264 NAL unit (Annex-B framed: 4-byte start code + header byte).
        /// SPS NALs (type 7) reconfigure the codec when they change; PPS NALs (type 8)
        /// are ignored; everything else is queued and decoded frames are rendered.
        /// </summary>
        /// <param name="array">A single Annex-B framed NAL unit.</param>
        public void decode(byte[] array)
        {
            // Need at least the 4-byte start code plus the NAL header byte,
            // otherwise array[4] below throws IndexOutOfRangeException.
            if (array == null || array.Length < 5)
            {
                return;
            }

            if (bConfigured == false)
            {
                Init();
            }

            var nalType = array[4] & 0x1f;
            if (nalType == 7) // SPS
            {
                // Re-init only when the SPS changed. NOTE(review): length comparison is
                // a cheap proxy for content equality — confirm it is sufficient here.
                if (array.Length != sps.Length)
                {
                    stop();
                    sps = array.ToArray();
                    Init();
                }
                return;
            }
            if (nalType == 8) // PPS — handled via the SPS/Init path, nothing to do.
            {
                return;
            }
            if (bConfigured == false)
            {
                return;
            }

            try
            {
                ByteBuffer[] inputBuffers = codec.GetInputBuffers();
                int dequeueInputBuffer = codec.DequeueInputBuffer(-1L);
                if (dequeueInputBuffer >= 0)
                {
                    // Send data to decoder.
                    ByteBuffer byteBuffer = inputBuffers[dequeueInputBuffer];
                    byteBuffer.Clear();
                    byteBuffer.Put(array);
                    codec.QueueInputBuffer(dequeueInputBuffer, 0, array.Length, 0L, 0);
                }

                // Drain and render every frame the decoder has ready.
                MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                int outIndex = codec.DequeueOutputBuffer(bufferInfo, 0L);
                while (outIndex >= 0)
                {
                    codec.ReleaseOutputBuffer(outIndex, true);
                    codec.SetVideoScalingMode(VideoScalingMode.ScaleToFit);

                    outIndex = codec.DequeueOutputBuffer(bufferInfo, 0L);
                }
            }
            catch (Exception)
            {
                // Best-effort recovery: tear the codec down; the next call re-inits it.
                stop();
            }
        }
Example #7
0
        /// <summary>
        /// Feeds each compressed image in <c>_ByteBuffers</c> to the video encoder as a
        /// YUV420SP frame and writes the encoded output to the muxer until end-of-stream.
        /// The muxer track is added/started on the encoder's OutputFormatChanged event.
        /// </summary>
        /// <exception cref="RuntimeException">On format-change/muxer protocol violations.</exception>
        private void EncodeMux()
        {
            const int TIMEOUT_USEC = 10000;

            // Hoisted out of the loop: the buffer arrays and BufferInfo are reusable;
            // the OutputBuffersChanged branch below refreshes the output array if needed.
            ByteBuffer[] encoderInputBuffers  = _Encoder.GetInputBuffers();
            ByteBuffer[] encoderOutputBuffers = _Encoder.GetOutputBuffers();
            var          mBufferInfo          = new MediaCodec.BufferInfo();

            bool inputDone  = false;
            int  frameIndex = 0;

            try
            {
                while (true)
                {
                    if (!inputDone)
                    {
                        int inputBufIndex = _Encoder.DequeueInputBuffer(TIMEOUT_USEC);
                        if (inputBufIndex >= 0)
                        {
                            long ptsUsec = computePresentationTime(frameIndex);
                            if (frameIndex == _ByteBuffers.Count)
                            {
                                //  Send an empty frame with the end-of-stream flag set.  If we set EOS on a frame with data, that frame data will be ignored, and the output will be short one frame.
                                _Encoder.QueueInputBuffer(inputBufIndex, 0, 0, ptsUsec, MediaCodec.BufferFlagEndOfStream);
                                inputDone = true;
                                Log.Info(TAG, "sent input EOS (with zero-length frame)");
                            }
                            else
                            {
                                Log.Info(TAG, string.Format("Adding _ByteBuffers image index {0} to encoder", frameIndex));
                                ByteBuffer inputBuf  = encoderInputBuffers[inputBufIndex];
                                var        imagedata = _ByteBuffers[frameIndex];
                                int        chunkSize = 0;

                                // Reset position/limit BEFORE writing. The original cleared
                                // AFTER Put, which only worked because QueueInputBuffer is
                                // given an explicit offset of 0; a buffer left at a nonzero
                                // position would have been filled at the wrong offset.
                                inputBuf.Clear();

                                if (imagedata == null)
                                {
                                    Log.Warn(TAG, string.Format("_ByteBuffers image index {0} was null; sending empty frame", frameIndex));
                                }
                                else
                                {
                                    // Compressed bytes -> Bitmap -> ARGB -> YUV420SP.
                                    Bitmap b    = BitmapFactory.DecodeByteArray(imagedata, 0, imagedata.Length);
                                    byte[] yuv  = new byte[b.Width * b.Height * 3 / 2];
                                    int[]  argb = new int[b.Width * b.Height];
                                    b.GetPixels(argb, 0, b.Width, 0, 0, b.Width, b.Height);
                                    encodeYUV420SP(yuv, argb, b.Width, b.Height);
                                    var yuvimage = new YuvImage(yuv, _CameraColorFormat, _Width, _Height, null);
                                    var yuvarray = yuvimage.GetYuvData();
                                    // Fixes common color matching issues between camera and encoder.
                                    colorcorrection(ref yuvarray, b.Width, b.Height);

                                    inputBuf.Put(yuvarray);
                                    chunkSize = yuvarray.Length;
                                    b.Recycle();
                                }

                                //  the buffer should be sized to hold one full frame
                                _Encoder.QueueInputBuffer(inputBufIndex, 0, chunkSize, ptsUsec, 0);
                                frameIndex++;
                            }
                        }
                        else
                        {
                            //  either all in use, or we timed out during initial setup
                            Log.Warn(TAG, "input buffer not available");
                        }
                    }

                    int encoderStatus = _Encoder.DequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);

                    if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                    {
                        Log.Info(TAG, "no output available, spinning to await EOS");
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                    {
                        //  not expected for an encoder
                        Log.Warn(TAG, "not expected OutputBuffersChanged happened");
                        encoderOutputBuffers = _Encoder.GetOutputBuffers();
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                    {
                        //  should happen before receiving buffers, and should only happen once
                        if (_MuxerStarted)
                        {
                            Log.Error(TAG, "format changed twice and should never happen");
                            throw new RuntimeException("format changed twice");
                        }

                        MediaFormat newFormat = _Encoder.OutputFormat;

                        Log.Info(TAG, "format changed and starting MUX");
                        _TrackIndex = _Muxer.AddTrack(newFormat);
                        _Muxer.Start();
                        _MuxerStarted = true;
                    }
                    else if (encoderStatus < 0)
                    {
                        Log.Warn(TAG, "unexpected but lets ignore");
                    }
                    else
                    {
                        ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                        if (encodedData == null)
                        {
                            Log.Error(TAG, string.Format("encoderOutputBuffer {0} was null!!", encoderStatus));
                            throw new RuntimeException(string.Format("encoderOutputBuffer {0} was null!!", encoderStatus));
                        }

                        if ((mBufferInfo.Flags & MediaCodecBufferFlags.CodecConfig) != 0)
                        {
                            //  The codec config data was pulled out and fed to the muxer when we got
                            //  the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                            mBufferInfo.Size = 0;
                        }

                        if (mBufferInfo.Size != 0)
                        {
                            if (!_MuxerStarted)
                            {
                                Log.Error(TAG, "muxer hasnt started!!");
                                throw new RuntimeException("muxer hasnt started");
                            }

                            _Muxer.WriteSampleData(_TrackIndex, encodedData, mBufferInfo);
                            Log.Info(TAG, string.Format("{0} bytes to muxer", mBufferInfo.Size));
                        }

                        _Encoder.ReleaseOutputBuffer(encoderStatus, false);
                        if ((mBufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                        {
                            Log.Info(TAG, "End of Stream Reached!!");
                            break;
                        }
                    }
                }
            }
            catch (Exception e)
            {
                Log.Error(TAG, "Decode or Muxer failed", e, e.Message);
                throw;
            }
        }
        /// <summary>
        /// Transcodes one clip: drains samples from <paramref name="extractor"/> into
        /// <paramref name="decoder"/>, renders decoded frames through the shared
        /// surfaces into <c>mEncoder</c>, and muxes the encoded output. Presentation
        /// times are rebased against the clip's start time. Inner loop drains the
        /// encoder before the decoder, so statement order here is load-bearing.
        /// </summary>
        private void ResampleVideo(MediaExtractor extractor, MediaCodec decoder, SamplerClip clip)
        {
            ByteBuffer[]          decoderInputBuffers  = decoder.GetInputBuffers();
            ByteBuffer[]          encoderOutputBuffers = mEncoder.GetOutputBuffers();
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int inputChunk             = 0;
            int outputCount            = 0;

            // Clip end time in ms; -1 means "to the end of the video".
            long endTime = clip.getEndTime();

            if (endTime == -1)
            {
                endTime = clip.getVideoDuration();
            }

            // Set when the decoder sees EOS; gives the encoder one more drain pass
            // before the outer loop is allowed to finish.
            bool outputDoneNextTimeWeCheck = false;

            bool outputDone  = false;
            bool inputDone   = false;
            bool decoderDone = false;

            while (!outputDone)
            {
                // Feed more data to the decoder.
                if (!inputDone)
                {
                    int inputBufIndex = decoder.DequeueInputBuffer(TIMEOUT_USEC);
                    if (inputBufIndex >= 0)
                    {
                        // SampleTime is in us, endTime in ms.
                        if (extractor.SampleTime / 1000 >= endTime)
                        {
                            // End of stream -- send empty frame with EOS flag set.
                            decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodecBufferFlags.EndOfStream);
                            inputDone = true;
                        }
                        else
                        {
                            // Copy a chunk of input to the decoder. The first chunk should have
                            // the BUFFER_FLAG_CODEC_CONFIG flag set.
                            ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                            inputBuf.Clear();

                            int sampleSize = extractor.ReadSampleData(inputBuf, 0);
                            if (sampleSize < 0)
                            {
                                // NOTE(review): inputDone is NOT set here, so if this path is
                                // taken, EOS may be queued again on later iterations — confirm
                                // whether that is intended or relies on endTime firing first.
                                decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0, MediaCodecBufferFlags.EndOfStream);
                            }
                            else
                            {
                                decoder.QueueInputBuffer(inputBufIndex, 0, sampleSize, extractor.SampleTime, 0);
                                extractor.Advance();
                            }

                            inputChunk++;
                        }
                    }
                }

                // Assume output is available. Loop until both assumptions are false.
                bool decoderOutputAvailable = !decoderDone;
                bool encoderOutputAvailable = true;
                while (decoderOutputAvailable || encoderOutputAvailable)
                {
                    // Start by draining any pending output from the encoder. It's important to
                    // do this before we try to stuff any more data in.
                    int encoderStatus = mEncoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                    {
                        encoderOutputAvailable = false;
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                    {
                        encoderOutputBuffers = mEncoder.GetOutputBuffers();
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                    {
                        // Expected exactly once, before any encoded buffers: start the muxer.
                        MediaFormat newFormat = mEncoder.OutputFormat;

                        mTrackIndex = mMuxer.AddTrack(newFormat);
                        mMuxer.Start();
                        mMuxerStarted = true;
                    }
                    else if (encoderStatus < 0)
                    {
                        // fail( "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus );
                    }
                    else
                    { // encoderStatus >= 0
                        ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                        if (encodedData == null)
                        {
                            // fail( "encoderOutputBuffer " + encoderStatus + " was null" );
                        }
                        // Write the data to the output "file".
                        if (info.Size != 0)
                        {
                            encodedData.Position(info.Offset);
                            encodedData.Limit(info.Offset + info.Size);
                            outputCount++;

                            mMuxer.WriteSampleData(mTrackIndex, encodedData, info);
                        }
                        outputDone = (info.Flags & MediaCodecBufferFlags.EndOfStream) != 0;

                        mEncoder.ReleaseOutputBuffer(encoderStatus, false);
                    }

                    if (outputDoneNextTimeWeCheck)
                    {
                        outputDone = true;
                    }

                    if (encoderStatus != (int)MediaCodecInfoState.TryAgainLater)
                    {
                        // Continue attempts to drain output.
                        continue;
                    }
                    // Encoder is drained, check to see if we've got a new frame of output from
                    // the decoder. (The output is going to a Surface, rather than a ByteBuffer,
                    // but we still get information through BufferInfo.)
                    if (!decoderDone)
                    {
                        int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                        if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                        {
                            decoderOutputAvailable = false;
                        }
                        else if (decoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                        {
                            // decoderOutputBuffers = decoder.GetOutputBuffers();
                        }
                        else if (decoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                        {
                            // expected before first buffer of data
                            MediaFormat newFormat = decoder.OutputFormat;
                        }
                        else if (decoderStatus < 0)
                        {
                            // fail( "unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus );
                        }
                        else
                        { // decoderStatus >= 0
                            // The ByteBuffers are null references, but we still get a nonzero
                            // size for the decoded data.
                            bool doRender = (info.Size != 0);
                            // As soon as we call ReleaseOutputBuffer, the buffer will be forwarded
                            // to SurfaceTexture to convert to a texture. The API doesn't
                            // guarantee that the texture will be available before the call
                            // returns, so we need to wait for the onFrameAvailable callback to
                            // fire. If we don't wait, we risk rendering from the previous frame.
                            decoder.ReleaseOutputBuffer(decoderStatus, doRender);
                            if (doRender)
                            {
                                mOutputSurface.AwaitNewImage(true);
                                mOutputSurface.DrawImage();
                                // Send it to the encoder.

                                // Rebase presentation time to the clip start (us -> ns).
                                long nSecs = info.PresentationTimeUs * 1000;

                                if (clip.getStartTime() != -1)
                                {
                                    nSecs = (info.PresentationTimeUs - (clip.getStartTime() * 1000)) * 1000;
                                }

                                // Clamp so times before the clip start don't go negative.
                                nSecs = Java.Lang.Math.Max(0, nSecs);

                                mEncoderPresentationTimeUs += (nSecs - mLastSampleTime);

                                mLastSampleTime = nSecs;

                                mInputSurface.SetPresentationTime(mEncoderPresentationTimeUs);
                                mInputSurface.SwapBuffers();
                            }
                            if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                            {
                                // mEncoder.signalEndOfInputStream();
                                outputDoneNextTimeWeCheck = true;
                            }
                        }
                    }
                }
            }
            // NOTE(review): a mismatch here means frames were lost in transit; the
            // original RuntimeException was deliberately disabled.
            if (inputChunk != outputCount)
            {
                // throw new RuntimeException( "frame lost: " + inputChunk + " in, " + outputCount + " out" );
            }
        }
        /**
         * Tries to obtain the SPS and the PPS for the encoder.
         *
         * Feeds dummy frames (mData) to the encoder until either the codec reports
         * an output format change carrying the "csd-0" (SPS) and "csd-1" (PPS)
         * buffers, or a NAL unit of type 7 (SPS) or 8 (PPS) is found in the raw
         * byte stream produced by the encoder. Gives up after ~3 seconds.
         *
         * On success the raw parameter sets are stored in mSPS/mPPS and their
         * Base64 encodings in mB64SPS/mB64PPS; otherwise the final check() fails.
         *
         * @return the elapsed search time in microseconds.
         */
        private long searchSPSandPPS()
        {
            ByteBuffer[] inputBuffers  = mEncoder.GetInputBuffers();
            ByteBuffer[] outputBuffers = mEncoder.GetOutputBuffers();
            BufferInfo   info          = new BufferInfo();

            // csd: scratch copy of the encoder output; p and q are scan cursors used
            // to split it into 0x00000001-delimited NAL units (both start just past
            // the leading 4-byte start code).
            byte[] csd = new byte[128];
            int    len = 0, p = 4, q = 4;
            long   elapsed = 0, now = timestamp();

            // Stop after 3,000,000 us (3 s) or as soon as both parameter sets are known.
            while (elapsed < 3000000 && (mSPS == null || mPPS == null))
            {
                // Some encoders won't give us the SPS and PPS unless they receive something to encode first...
                int bufferIndex = mEncoder.DequeueInputBuffer(1000000 / FRAMERATE);
                if (bufferIndex >= 0)
                {
                    check(inputBuffers[bufferIndex].Capacity() >= mData.Length, "The input buffer is not big enough.");
                    inputBuffers[bufferIndex].Clear();
                    inputBuffers[bufferIndex].Put(mData, 0, mData.Length);
                    mEncoder.QueueInputBuffer(bufferIndex, 0, mData.Length, timestamp(), 0);
                }
                else
                {
                    if (VERBOSE)
                    {
                        // NOTE(review): Log.e looks like an unported Java call; other code in
                        // this file uses Log.Error/Log.Debug — confirm which wrapper exists here.
                        Log.e(TAG, "No buffer available !");
                    }
                }

                // We are looking for the SPS and the PPS here. As always, Android is very inconsistent, I have observed that some
                // encoders will give those parameters through the MediaFormat object (that is the normal behaviour).
                // But some other will not, in that case we try to find a NAL unit of type 7 or 8 in the byte stream outputed by the encoder...

                int index = mEncoder.DequeueOutputBuffer(info, 1000000 / FRAMERATE);

                if (index == (int)MediaCodecInfoState.OutputFormatChanged)
                {
                    // The SPS and PPS should be there, each prefixed by a 4-byte start code we strip off.
                    MediaFormat format = mEncoder.OutputFormat;
                    ByteBuffer  spsb   = format.GetByteBuffer("csd-0");
                    ByteBuffer  ppsb   = format.GetByteBuffer("csd-1");
                    mSPS = new byte[spsb.Capacity() - 4];
                    spsb.Position(4);
                    spsb.Get(mSPS, 0, mSPS.Length);
                    mPPS = new byte[ppsb.Capacity() - 4];
                    ppsb.Position(4);
                    ppsb.Get(mPPS, 0, mPPS.Length);
                    break;
                }
                else if (index == (int)MediaCodecInfoState.OutputBuffersChanged)
                {
                    outputBuffers = mEncoder.GetOutputBuffers();
                }
                else if (index >= 0)
                {
                    len = info.Size;
                    if (len < 128)
                    {
                        outputBuffers[index].Get(csd, 0, len);
                        if (len > 0 && csd[0] == 0 && csd[1] == 0 && csd[2] == 0 && csd[3] == 1)
                        {
                            // Parses the SPS and PPS, they could be in two different packets and in a different order
                            // depending on the phone so we don't make any assumption about that
                            while (p < len)
                            {
                                // Advance p to the next 0x00000001 start code. The bounds check
                                // comes FIRST so csd[p + 3] is never read past the valid data
                                // (the previous order could index up to csd[len + 2], out of
                                // range when len was 126 or 127).
                                while (p + 3 < len && !(csd[p + 0] == 0 && csd[p + 1] == 0 && csd[p + 2] == 0 && csd[p + 3] == 1))
                                {
                                    p++;
                                }
                                if (p + 3 >= len)
                                {
                                    p = len;
                                }
                                // NAL type is the low 5 bits of the first payload byte:
                                // 7 = SPS; anything else found here is taken as the PPS.
                                if ((csd[q] & 0x1F) == 7)
                                {
                                    mSPS = new byte[p - q];
                                    JavaSystem.Arraycopy(csd, q, mSPS, 0, p - q);
                                }
                                else
                                {
                                    mPPS = new byte[p - q];
                                    JavaSystem.Arraycopy(csd, q, mPPS, 0, p - q);
                                }
                                // Skip the 4-byte start code and resume scanning after it.
                                p += 4;
                                q  = p;
                            }
                        }
                    }
                    mEncoder.ReleaseOutputBuffer(index, false);
                }

                elapsed = timestamp() - now;
            }

            check(mPPS != null && mSPS != null, "Could not determine the SPS & PPS.");
            mB64PPS = Base64.EncodeToString(mPPS, 0, mPPS.Length, Base64Flags.NoWrap);
            mB64SPS = Base64.EncodeToString(mSPS, 0, mSPS.Length, Base64Flags.NoWrap);

            return(elapsed);
        }
        /**
         * Checks the video data.
         *
         * Feeds every chunk of inputData to the decoder, renders each decoded frame
         * to the given output surface, asserts that its presentation timestamp
         * matches computePresentationTime(), and validates the pixels with
         * checkSurfaceFrame(). Runs until the decoder signals end-of-stream.
         *
         * @return the number of bad frames
         */
        private int checkVideoData(VideoChunks inputData, MediaCodec decoder, OutputSurface surface)
        {
            const int TIMEOUT_USEC = 1000;

            // inputChunk counts chunks fed in; checkIndex counts rendered frames checked.
            ByteBuffer[]          decoderInputBuffers  = decoder.GetInputBuffers();
            ByteBuffer[]          decoderOutputBuffers = decoder.GetOutputBuffers();
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int  inputChunk            = 0;
            int  checkIndex            = 0;
            int  badFrames             = 0;
            bool outputDone            = false;
            bool inputDone             = false;

            while (!outputDone)
            {
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "check loop");
                }
                // Feed more data to the decoder.
                if (!inputDone)
                {
                    int inputBufIndex = decoder.DequeueInputBuffer(TIMEOUT_USEC);
                    if (inputBufIndex >= 0)
                    {
                        if (inputChunk == inputData.NumChunks)
                        {
                            // End of stream -- send empty frame with EOS flag set.
                            decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L,
                                                     MediaCodec.BufferFlagEndOfStream);
                            inputDone = true;
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "sent input EOS");
                            }
                        }
                        else
                        {
                            // Copy a chunk of input to the decoder. The first chunk should have
                            // the BUFFER_FLAG_CODEC_CONFIG flag set.
                            ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                            inputBuf.Clear();
                            inputData.getChunkData(inputChunk, inputBuf);
                            int  flags = inputData.getChunkFlags(inputChunk);
                            long time  = inputData.getChunkTime(inputChunk);
                            decoder.QueueInputBuffer(inputBufIndex, 0, inputBuf.Position(),
                                                     time, (MediaCodecBufferFlags)flags);
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                          inputBuf.Position() + " flags=" + flags);
                            }
                            inputChunk++;
                        }
                    }
                    else
                    {
                        // No input buffer free yet; fall through and try to drain output instead.
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "input buffer not available");
                        }
                    }
                }
                if (!outputDone)
                {
                    int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (decoderStatus == (int)MediaCodec.InfoTryAgainLater)
                    {
                        // no output available yet
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "no output from decoder available");
                        }
                    }
                    else if (decoderStatus == (int)MediaCodec.InfoOutputBuffersChanged)
                    {
                        decoderOutputBuffers = decoder.GetOutputBuffers();
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "decoder output buffers changed");
                        }
                    }
                    else if (decoderStatus == (int)MediaCodec.InfoOutputFormatChanged)
                    {
                        // Expected once, before the first buffer of decoded data.
                        MediaFormat newFormat = decoder.OutputFormat;
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "decoder output format changed: " + newFormat);
                        }
                    }
                    else if (decoderStatus < 0)
                    {
                        fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                    }
                    else                                                                         // decoderStatus >= 0
                    {
                        ByteBuffer decodedData = decoderOutputBuffers[decoderStatus];
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "surface decoder given buffer " + decoderStatus +
                                      " (size=" + info.Size + ")");
                        }
                        // EOS may arrive on an empty buffer; note it but still release below.
                        if ((info.Flags & MediaCodec.BufferFlagEndOfStream) != 0)
                        {
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "output EOS");
                            }
                            outputDone = true;
                        }
                        bool doRender = (info.Size != 0);
                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                        // to SurfaceTexture to convert to a texture. The API doesn't guarantee
                        // that the texture will be available before the call returns, so we
                        // need to wait for the onFrameAvailable callback to fire.
                        decoder.ReleaseOutputBuffer(decoderStatus, doRender);
                        if (doRender)
                        {
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "awaiting frame " + checkIndex);
                            }
                            // Each rendered frame must carry the timestamp generated for its index.
                            assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                         info.PresentationTimeUs);
                            surface.AwaitNewImage();
                            surface.DrawImage();
                            if (!checkSurfaceFrame(checkIndex++))
                            {
                                badFrames++;
                            }
                        }
                    }
                }
            }
            return(badFrames);
        }
        /**
         * Edits a stream of video data.
         *
         * Decodes inputData onto outputSurface, re-renders every frame through
         * inputSurface into the encoder, and collects the encoded chunks into
         * outputData. The decoder's EOS is forwarded to the encoder (unless
         * WORK_AROUND_BUGS bails early), and the loop finishes once the encoder
         * emits its own EOS.
         */
        private void editVideoData(VideoChunks inputData, MediaCodec decoder,
                                   OutputSurface outputSurface, InputSurface inputSurface, MediaCodec encoder,
                                   VideoChunks outputData)
        {
            const int TIMEOUT_USEC = 10000;

            ByteBuffer[]          decoderInputBuffers  = decoder.GetInputBuffers();
            ByteBuffer[]          encoderOutputBuffers = encoder.GetOutputBuffers();
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int  inputChunk            = 0;
            int  outputCount           = 0;
            bool outputDone            = false;
            bool inputDone             = false;
            bool decoderDone           = false;

            while (!outputDone)
            {
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "edit loop");
                }
                // Feed more data to the decoder.
                if (!inputDone)
                {
                    int inputBufIndex = decoder.DequeueInputBuffer(TIMEOUT_USEC);
                    if (inputBufIndex >= 0)
                    {
                        if (inputChunk == inputData.NumChunks)
                        {
                            // End of stream -- send empty frame with EOS flag set.
                            decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L,
                                                     MediaCodecBufferFlags.EndOfStream);
                            inputDone = true;
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "sent input EOS (with zero-length frame)");
                            }
                        }
                        else
                        {
                            // Copy a chunk of input to the decoder. The first chunk should have
                            // the BUFFER_FLAG_CODEC_CONFIG flag set.
                            ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                            inputBuf.Clear();
                            inputData.getChunkData(inputChunk, inputBuf);
                            int  flags = inputData.getChunkFlags(inputChunk);
                            long time  = inputData.getChunkTime(inputChunk);
                            decoder.QueueInputBuffer(inputBufIndex, 0, inputBuf.Position(),
                                                     time, (MediaCodecBufferFlags)flags);             // TODO: Not sure if it's MediaCodecBufferFlags, verify.
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                          inputBuf.Position() + " flags=" + flags);
                            }
                            inputChunk++;
                        }
                    }
                    else
                    {
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "input buffer not available");
                        }
                    }
                }
                // Assume output is available. Loop until both assumptions are false.
                bool decoderOutputAvailable = !decoderDone;
                bool encoderOutputAvailable = true;
                while (decoderOutputAvailable || encoderOutputAvailable)
                {
                    // Start by draining any pending output from the encoder. It's important to
                    // do this before we try to stuff any more data in.
                    int encoderStatus = encoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                    {
                        // no output available yet
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "no output from encoder available");
                        }
                        encoderOutputAvailable = false;
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                    {
                        encoderOutputBuffers = encoder.GetOutputBuffers();
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "encoder output buffers changed");
                        }
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                    {
                        MediaFormat newFormat = encoder.OutputFormat;
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "encoder output format changed: " + newFormat);
                        }
                    }
                    else if (encoderStatus < 0)
                    {
                        fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                    }
                    else                         // encoderStatus >= 0
                    {
                        ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                        if (encodedData == null)
                        {
                            fail("encoderOutputBuffer " + encoderStatus + " was null");
                        }
                        // Write the data to the output "file".
                        if (info.Size != 0)
                        {
                            encodedData.Position(info.Offset);
                            encodedData.Limit(info.Offset + info.Size);
                            outputData.addChunk(encodedData, (int)info.Flags, info.PresentationTimeUs);
                            outputCount++;
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "encoder output " + info.Size + " bytes");
                            }
                        }
                        outputDone = (info.Flags & MediaCodec.BufferFlagEndOfStream) != 0;
                        encoder.ReleaseOutputBuffer(encoderStatus, false);
                    }
                    if (encoderStatus != (int)MediaCodec.InfoTryAgainLater)
                    {
                        // Continue attempts to drain output.
                        continue;
                    }
                    // Encoder is drained, check to see if we've got a new frame of output from
                    // the decoder. (The output is going to a Surface, rather than a ByteBuffer,
                    // but we still get information through BufferInfo.)
                    if (!decoderDone)
                    {
                        int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                        if (decoderStatus == (int)MediaCodec.InfoTryAgainLater)
                        {
                            // no output available yet
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "no output from decoder available");
                            }
                            decoderOutputAvailable = false;
                        }
                        else if (decoderStatus == (int)MediaCodec.InfoOutputBuffersChanged)
                        {
                            //decoderOutputBuffers = decoder.getOutputBuffers();
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "decoder output buffers changed (we don't care)");
                            }
                        }
                        else if (decoderStatus == (int)MediaCodec.InfoOutputFormatChanged)
                        {
                            // expected before first buffer of data
                            MediaFormat newFormat = decoder.OutputFormat;
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "decoder output format changed: " + newFormat);
                            }
                        }
                        else if (decoderStatus < 0)
                        {
                            fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                        }
                        else                             // decoderStatus >= 0
                        {
                            if (AppSettings.Logging.SendToConsole)
                            {
                                // Fixed malformed log message: the size was previously followed
                                // by a stray "(" instead of the closing ")".
                                Log.Debug(TAG, "surface decoder given buffer " + decoderStatus + " (size=" + info.Size + ")");
                            }
                            // The ByteBuffers are null references, but we still get a nonzero
                            // size for the decoded data.
                            bool doRender = (info.Size != 0);
                            // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                            // to SurfaceTexture to convert to a texture. The API doesn't
                            // guarantee that the texture will be available before the call
                            // returns, so we need to wait for the onFrameAvailable callback to
                            // fire. If we don't wait, we risk rendering from the previous frame.
                            decoder.ReleaseOutputBuffer(decoderStatus, doRender);
                            if (doRender)
                            {
                                // This waits for the image and renders it after it arrives.
                                if (AppSettings.Logging.SendToConsole)
                                {
                                    Log.Debug(TAG, "awaiting frame");
                                }
                                outputSurface.AwaitNewImage();
                                outputSurface.DrawImage();
                                // Send it to the encoder.
                                inputSurface.SetPresentationTime(info.PresentationTimeUs * 1000);
                                if (AppSettings.Logging.SendToConsole)
                                {
                                    Log.Debug(TAG, "swapBuffers");
                                }
                                inputSurface.SwapBuffers();
                            }
                            if ((info.Flags & MediaCodec.BufferFlagEndOfStream) != 0)
                            {
                                // forward decoder EOS to encoder
                                if (AppSettings.Logging.SendToConsole)
                                {
                                    Log.Debug(TAG, "signaling input EOS");
                                }
                                if (WORK_AROUND_BUGS)
                                {
                                    // Bail early, possibly dropping a frame.
                                    return;
                                }
                                else
                                {
                                    encoder.SignalEndOfInputStream();
                                }
                            }
                        }
                    }
                }
            }
            // Every chunk fed to the decoder should have produced one encoded chunk.
            if (inputChunk != outputCount)
            {
                throw new RuntimeException("frame lost: " + inputChunk + " in, " +
                                           outputCount + " out");
            }
        }
Exemple #12
0
        public void Decode(MediaCodec _Decoder, MediaExtractor extractor)
        {
            Stopwatch s = new Stopwatch();

            s.Start();
            int TIMEOUT_USEC = 10000;

            ByteBuffer[] encoderInputBuffers = _Decoder.GetInputBuffers();
            ByteBuffer[] outputBuffers       = _Decoder.GetOutputBuffers();
            var          mBufferInfo         = new MediaCodec.BufferInfo();

            bool inputDone = false;
            var  index     = 0;

            try
            {
                while (true)
                {
                    if (!inputDone)
                    {
                        int inputBufIndex = _Decoder.DequeueInputBuffer(TIMEOUT_USEC);
                        if (inputBufIndex >= 0)
                        {
                            ByteBuffer buffer = encoderInputBuffers[inputBufIndex];
                            //long ptsUsec = computePresentationTime(frameIndex);

                            int sampleSize = extractor.ReadSampleData(buffer, 0);

                            if (sampleSize < 0)
                            {
                                //  Send an empty frame with the end-of-stream flag set.  If we set EOS on a frame with data, that frame data will be ignored, and the output will be short one frame.
                                _Decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0, MediaCodec.BufferFlagEndOfStream);
                                inputDone = true;
                                Log.Info(TAG, "sent input EOS (with zero-length frame)");
                            }
                            else
                            {
                                Log.Info(TAG, "adding encoded video to decoder input ");
                                _Decoder.QueueInputBuffer(inputBufIndex, 0, sampleSize, extractor.SampleTime, 0);
                                extractor.Advance();
                            }
                        }
                        else
                        {
                            //  either all in use, or we timed out during initial setup
                            Log.Warn(TAG, "input buffer not available");
                        }
                    }

                    //ByteBuffer[] encoderOutputBuffers = _Decoder.GetOutputBuffers();


                    int encoderStatus = _Decoder.DequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);

                    if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                    {
                        Log.Info(TAG, "no output available, spinning to await EOS");
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                    {
                        //  not expected for an encoder
                        Log.Warn(TAG, "not expected OutputBuffersChanged happened");
                        outputBuffers = _Decoder.GetOutputBuffers();
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                    {
                        //  should happen before receiving buffers, and should only happen once
                        //if (_MuxerStarted)
                        //{
                        //    Log.Error(TAG, "format changed twice and should never happen");
                        //    throw new RuntimeException("format changed twice");
                        //}

                        //MediaFormat newFormat = _Decoder.OutputFormat;

                        //Log.Info(TAG, "format changed and starting MUX");
                        //_TrackIndex = _Muxer.AddTrack(newFormat);
                        //_Muxer.Start();
                        //_MuxerStarted = true;
                    }
                    else if (encoderStatus < 0)
                    {
                        Log.Warn(TAG, "unexpected but lets ignore");
                        //  let's ignore it
                    }
                    else
                    {
                        ByteBuffer encodedData = outputBuffers[encoderStatus];
                        if (encodedData == null)
                        {
                            Log.Error(TAG, string.Format("encoderOutputBuffer {0} was null!!", encoderStatus));
                            throw new RuntimeException(string.Format("encoderOutputBuffer {0} was null!!", encoderStatus));
                        }

                        // Codec-config output buffers carry stream metadata (e.g. SPS/PPS), not
                        // frame data. Per the comment below they were already handled on the
                        // format-changed event, so Size is zeroed to skip the frame branch.
                        if ((mBufferInfo.Flags & MediaCodecBufferFlags.CodecConfig) != 0)
                        {
                            //  The codec config data was pulled out and fed to the muxer when we got
                            //  the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                            mBufferInfo.Size = 0;
                        }

                        // Non-empty buffer: treat it as one decoded frame.
                        if (mBufferInfo.Size != 0)
                        {
                            //if (!_MuxerStarted)
                            //{
                            //    Log.Error(TAG, "muxer hasnt started!!");
                            //    throw new RuntimeException("muxer hasnt started");
                            //}

                            //  adjust the ByteBuffer values to match BufferInfo (not needed?) old
                            //encodedData.Position(mBufferInfo.Offset);
                            //encodedData.Limit(mBufferInfo.Offset + this.mBufferInfo.Size);

                            try
                            {
                                //byte[] dst = new byte[outputBuffers[encoderStatus].Capacity()];
                                //outputBuffers[encoderStatus].Get(dst);

                                //ByteBuffer buffer = outputBuffers[encoderStatus];
                                //byte[] ba = new byte[encodedData.Remaining()];
                                //encodedData.Get(ba);
                                //ByteBuffer buffer = outputBuffers[encoderStatus];
                                //buffer.Position(mBufferInfo.Offset);
                                //buffer.Limit(mBufferInfo.Offset + mBufferInfo.Size);
                                //byte[] ba = new byte[buffer.Remaining()];
                                //buffer.Get(ba);
                                //if (index < 10)
                                //{
                                // Wrap the raw decoder output as a YuvImage using the camera's
                                // colour format and the configured frame dimensions.
                                // NOTE(review): mBufferInfo.Offset/Size are not applied to
                                // encodedData first (the adjustment above is commented out) —
                                // confirm the buffer position is correct for Utils.GetYUVImage.
                                YuvImage yuv = Utils.GetYUVImage(encodedData, _CameraColorFormat, _Width, _Height);
                                //var imagedata = yuv.GetYuvData();

                                //Utils.swapNV21_NV12(ref imagedata, _Width, _Height);
                                //Image might need to be corrected later

                                //Bitmap b = Utils.GetBitmap(yuv, _Width, _Height);
                                //Bitmap bmp = BitmapFactory.DecodeByteArray(ba, 0, ba.Length);// this return null
                                //var createfilepath = new File(_downloadsfilesdir, DateTime.Now.Ticks + ".png").AbsolutePath;
                                //using (FileStream bos = new FileStream(createfilepath, FileMode.CreateNew))
                                //{
                                //    b.Compress(Bitmap.CompressFormat.Png, 100, bos);
                                //}
                                //b.Recycle();
                                //}
                                // Frame counter reported in the stopwatch summary after the loop.
                                index++;
                                //writeFrameToSDCard(dst, i, dst.length);
                                //i++;
                            }
                            catch (Exception e)
                            {
                                // NOTE(review): exception is silently swallowed — conversion
                                // failures are invisible at runtime. At minimum log e here.
                                //Log("iDecodeActivity", "Error while creating bitmap with: ");
                            }

                            // NOTE(review): the release is guarded by (Size != 0), so zero-size
                            // buffers — including codec-config buffers zeroed above — are never
                            // returned to the codec. That leaks output buffers and can stall the
                            // decoder; ReleaseOutputBuffer should run for every dequeued buffer.
                            _Decoder.ReleaseOutputBuffer(encoderStatus, false);
                        }


                        // EOS flag on the output buffer terminates the drain loop.
                        if ((mBufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                        {
                            Log.Info(TAG, "End of Stream Reached!!");
                            break;
                        }
                    }
                }

            s.Stop();
            Log.Info("inner STOPWATCH!!!!:", string.Format("numberofframes = {0}, totaltime = {1}", index, s.ElapsedMilliseconds));
            }
            catch (Exception e)
            {
                Log.Error(TAG, "Decode or Muxer failed", e, e.Message);
                // Correct rethrow: bare `throw;` preserves the original stack trace.
                throw;
            }
        }