Example #1
        // For audio: http://stackoverflow.com/questions/22673011/how-to-extract-pcm-samples-from-mediacodec-decoders-output
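
        // The question linked above covers pulling raw PCM out of an audio decoder.
        // A minimal illustrative helper in that spirit (the decoder instance, the
        // timeout, and this helper itself are assumptions, not part of this example):
        private static byte[] ReadPcmChunk(MediaCodec audioDecoder, MediaCodec.BufferInfo info)
        {
            int outIndex = audioDecoder.DequeueOutputBuffer(info, 10000);
            if (outIndex < 0)
            {
                return null;    // try-again-later / format-changed cases not handled here
            }
            ByteBuffer pcmBuf = audioDecoder.GetOutputBuffer(outIndex);
            pcmBuf.Position(info.Offset);
            pcmBuf.Limit(info.Offset + info.Size);
            byte[] pcm = new byte[info.Size];
            pcmBuf.Get(pcm);    // raw PCM samples, ready for an AudioTrack or a WAV writer
            audioDecoder.ReleaseOutputBuffer(outIndex, false);
            return pcm;
        }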

        private void EncodeCameraToMp4()
        {
            try {
                prepareMediaPlayer();
                prepareEncoder();
                _inputSurface.MakeCurrent();
                prepareSurfaceTexture();

                _mediaPlayer.Start();

                var st         = _outputSurface.SurfaceTexture;
                int frameCount = 0;

                bool curShad     = false;
                bool isCompleted = false;
                _mediaPlayer.Completion += (object sender, System.EventArgs e) =>
                {
                    isCompleted = true;
                };
                while (!isCompleted)
                {
                    // Feed any pending encoder output into the muxer.

                    drainEncoder(false);

                    if ((frameCount % _fps) == 0)
                    {
                        curShad = !curShad;
                    }

                    // Alternate between RGB and BGR fragment shaders to quickly demonstrate that shading is working.
                    if (curShad)
                    {
                        _outputSurface.ChangeFragmentShader(FRAGMENT_SHADER1);
                    }
                    else
                    {
                        _outputSurface.ChangeFragmentShader(FRAGMENT_SHADER2);
                    }

                    frameCount++;

                    // Acquire a new frame of input, and render it to the Surface.  If we had a
                    // GLSurfaceView we could switch EGL contexts and call drawImage() a second
                    // time to render it on screen.  The texture can be shared between contexts by
                    // passing the GLSurfaceView's EGLContext as eglCreateContext()'s share_context
                    // argument.
                    if (!_outputSurface.AwaitNewImage())
                    {
                        break;
                    }
                    _outputSurface.DrawImage();

                    // Set the presentation time stamp from the SurfaceTexture's time stamp.  This
                    // will be used by MediaMuxer to set the PTS in the video.

                    _inputSurface.SetPresentationTime(st.Timestamp);

                    // Submit it to the encoder.  The eglSwapBuffers call will block if the input
                    // is full, which would be bad if it stayed full until we dequeued an output
                    // buffer (which we can't do, since we're stuck here).  So long as we fully drain
                    // the encoder before supplying additional input, the system guarantees that we
                    // can supply another frame without blocking.
                    if (VERBOSE)
                    {
                        Log.Debug(TAG, "sending frame to encoder");
                    }
                    _inputSurface.SwapBuffers();
                }

                // send end-of-stream to encoder, and drain remaining output
                drainEncoder(true);
            } finally {
                // release everything we grabbed
                releaseMediaPlayer();
                releaseEncoder();
                releaseSurfaceTexture();
            }
        }
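
Both camera examples on this page call a drainEncoder(bool) helper that is not shown. A minimal sketch of the usual shape of that helper follows; the field names (_encoder, _muxer, _bufferInfo, _trackIndex, _muxerStarted) are assumptions, and the pattern mirrors the comments in the loop above: fully drain the encoder's output before feeding more input.

        private void drainEncoder(bool endOfStream)
        {
            const int TIMEOUT_USEC = 10000;
            if (endOfStream)
            {
                _encoder.SignalEndOfInputStream();
            }
            ByteBuffer[] encoderOutputBuffers = _encoder.GetOutputBuffers();
            while (true)
            {
                int encoderStatus = _encoder.DequeueOutputBuffer(_bufferInfo, TIMEOUT_USEC);
                if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                {
                    if (!endOfStream)
                    {
                        break;      // no output yet; go feed more input
                    }
                    // else: keep spinning until EOS flows through the codec
                }
                else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                {
                    encoderOutputBuffers = _encoder.GetOutputBuffers();
                }
                else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                {
                    // The muxer can only be started once the encoder reports its
                    // actual output format.
                    _trackIndex   = _muxer.AddTrack(_encoder.OutputFormat);
                    _muxer.Start();
                    _muxerStarted = true;
                }
                else if (encoderStatus >= 0)
                {
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if ((_bufferInfo.Flags & MediaCodecBufferFlags.CodecConfig) != 0)
                    {
                        _bufferInfo.Size = 0;   // config data was consumed by AddTrack above
                    }
                    if (_bufferInfo.Size != 0 && _muxerStarted)
                    {
                        encodedData.Position(_bufferInfo.Offset);
                        encodedData.Limit(_bufferInfo.Offset + _bufferInfo.Size);
                        _muxer.WriteSampleData(_trackIndex, encodedData, _bufferInfo);
                    }
                    _encoder.ReleaseOutputBuffer(encoderStatus, false);
                    if ((_bufferInfo.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                    {
                        break;      // EOS reached; remaining output drained
                    }
                }
            }
        }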
Example #2
        private string EncodeFileToMp4(string inputPath, string outputPath, bool encodeAudio = true, Android.Net.Uri inputUri = null)
        {
            LatestInputVideoLength = AudioEncoding.GetVideoLength(inputPath, inputUri);
            LatestAudioInputFormat = AudioEncoding.GetAudioTrackFormat(inputPath, inputUri);
            EstimateTotalSize(LatestInputVideoLength, _bitRate);
            try
            {
                prepareMediaPlayer(inputPath, inputUri);
                prepareEncoder(outputPath);
                _inputSurface.MakeCurrent();
                prepareWeakSurfaceTexture();
                _mediaPlayer.Start();
                _mediaPlayer.SetAudioStreamType(Android.Media.Stream.VoiceCall);
                _mediaPlayer.SetVolume(0, 0);   // mute playback; audio is encoded separately
                _frameCount = 0;
            }
            catch (System.Exception ex)
            {
                Log.Debug("VideoEncoder", ex.Message);
            }
            VideoEncodingInProgress = true;
            while (true)
            {
                D(false);   // drain pending encoder output (the D helper plays the role of drainEncoder above)
                _frameCount++;

                /*
                 * Disable this to make it faster when not debugging
                 */
#if DEBUG
                if (_frameCount >= 120 && AppSettings.Logging.SendToConsole)
                {
                    System.Console.WriteLine($"FileToMp4 exited @ {_outputSurface.WeakSurfaceTexture.Timestamp} " +
                                             $" | encoded bits {_bitsEncodedSoFar} of estimated {_estimatedTotalSize}");
                }
#endif
                // Acquire a new frame of input, and render it to the Surface.  If we had a
                // GLSurfaceView we could switch EGL contexts and call drawImage() a second
                // time to render it on screen.  The texture can be shared between contexts by
                // passing the GLSurfaceView's EGLContext as eglCreateContext()'s share_context
                // argument.
                if (!_outputSurface.AwaitNewImage(true))
                {
                    break;
                }
                _outputSurface.DrawImage();

                // Set the presentation time stamp from the WeakSurfaceTexture's time stamp.  This
                // will be used by MediaMuxer to set the PTS in the video.

                _inputSurface.SetPresentationTime(_outputSurface.WeakSurfaceTexture.Timestamp);

                //if (AppSettings.Logging.SendToConsole) Log.Debug("MediaLoop", "Set Time " + st.Timestamp);
                // Submit it to the encoder.  The eglSwapBuffers call will block if the input
                // is full, which would be bad if it stayed full until we dequeued an output
                // buffer (which we can't do, since we're stuck here).  So long as we fully drain
                // the encoder before supplying additional input, the system guarantees that we
                // can supply another frame without blocking.
                //if (AppSettings.Logging.SendToConsole) Log.Debug(TAG, "sending frame to encoder:");
                _inputSurface.SwapBuffers();
                if (_bitsEncodedSoFar >= _estimatedTotalSize)
                {
                    break;
                }
            }
            D(true);    // send end-of-stream and drain remaining output
            VideoEncodingInProgress = false;
#if DEBUG
            if (AppSettings.Logging.SendToConsole)
            {
                System.Console.WriteLine($"DrainEncoder started @ {_firstKnownBuffer} exited @ " +
                                         $"{_outputSurface.WeakSurfaceTexture.Timestamp}  " +
                                         $"| encoded bits {_bitsEncodedSoFar} of estimated {_estimatedTotalSize}");
            }
#endif
            try
            {
                releaseMediaPlayer();
                releaseEncoder();
                releaseWeakSurfaceTexture();
            }
            catch { }
            _firstKnownBuffer   = 0;
            _estimatedTotalSize = 0;
            _frameCount         = 0;
            _bitsEncodedSoFar   = 0;
            _bfi = new BufferInfo();
            if (!AudioEncodingInProgress)
            {
                _muxer.Stop(); // if the audio encoding isn't still running then we'll stop everything and return
                _muxer.Release();
                _muxer = null;
                if (File.Exists(outputPath))
                {
                    this.Progress.Invoke(new EncoderMinArgs(EncodedBits(_bfi.Size), _estimatedTotalSize, true, false, outputPath));
                    return outputPath;
                }
            }
            this.Progress.Invoke(new EncoderMinArgs(EncodedBits(_bfi.Size), _estimatedTotalSize, false, false, null));
            return null;    // file isn't finished processing yet
        }
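
The loop in Example #2 exits once _bitsEncodedSoFar reaches _estimatedTotalSize, so EstimateTotalSize only needs a rough upper bound. A sketch, assuming GetVideoLength returns milliseconds and that the estimate lands in the _estimatedTotalSize field:

        private void EstimateTotalSize(long videoLengthMs, int bitRate)
        {
            // bits ≈ bits-per-second × seconds; a small safety margin avoids
            // cutting the stream short on variable-bit-rate content.
            _estimatedTotalSize = (long)(bitRate * (videoLengthMs / 1000.0) * 1.1);
        }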
Example #3
        /**
         * Edits a stream of video data.
         */
        private void editVideoData(VideoChunks inputData, MediaCodec decoder,
                                   OutputSurface outputSurface, InputSurface inputSurface, MediaCodec encoder,
                                   VideoChunks outputData)
        {
            const int TIMEOUT_USEC = 10000;

            ByteBuffer[]          decoderInputBuffers  = decoder.GetInputBuffers();
            ByteBuffer[]          encoderOutputBuffers = encoder.GetOutputBuffers();
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int  inputChunk            = 0;
            int  outputCount           = 0;
            bool outputDone            = false;
            bool inputDone             = false;
            bool decoderDone           = false;

            while (!outputDone)
            {
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "edit loop");
                }
                // Feed more data to the decoder.
                if (!inputDone)
                {
                    int inputBufIndex = decoder.DequeueInputBuffer(TIMEOUT_USEC);
                    if (inputBufIndex >= 0)
                    {
                        if (inputChunk == inputData.NumChunks)
                        {
                            // End of stream -- send empty frame with EOS flag set.
                            decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L,
                                                     MediaCodecBufferFlags.EndOfStream);
                            inputDone = true;
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "sent input EOS (with zero-length frame)");
                            }
                        }
                        else
                        {
                            // Copy a chunk of input to the decoder. The first chunk should have
                            // the BUFFER_FLAG_CODEC_CONFIG flag set.
                            ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                            inputBuf.Clear();
                            inputData.getChunkData(inputChunk, inputBuf);
                            int  flags = inputData.getChunkFlags(inputChunk);
                            long time  = inputData.getChunkTime(inputChunk);
                            decoder.QueueInputBuffer(inputBufIndex, 0, inputBuf.Position(),
                                                     time, (MediaCodecBufferFlags)flags);             // TODO: Not sure if it's MediaCodecBufferFlags, verify.
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                          inputBuf.Position() + " flags=" + flags);
                            }
                            inputChunk++;
                        }
                    }
                    else
                    {
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "input buffer not available");
                        }
                    }
                }
                // Assume output is available. Loop until both assumptions are false.
                bool decoderOutputAvailable = !decoderDone;
                bool encoderOutputAvailable = true;
                while (decoderOutputAvailable || encoderOutputAvailable)
                {
                    // Start by draining any pending output from the encoder. It's important to
                    // do this before we try to stuff any more data in.
                    int encoderStatus = encoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                    {
                        // no output available yet
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "no output from encoder available");
                        }
                        encoderOutputAvailable = false;
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                    {
                        encoderOutputBuffers = encoder.GetOutputBuffers();
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "encoder output buffers changed");
                        }
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                    {
                        MediaFormat newFormat = encoder.OutputFormat;
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "encoder output format changed: " + newFormat);
                        }
                    }
                    else if (encoderStatus < 0)
                    {
                        fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                    }
                    else                         // encoderStatus >= 0
                    {
                        ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                        if (encodedData == null)
                        {
                            fail("encoderOutputBuffer " + encoderStatus + " was null");
                        }
                        // Write the data to the output "file".
                        if (info.Size != 0)
                        {
                            encodedData.Position(info.Offset);
                            encodedData.Limit(info.Offset + info.Size);
                            outputData.addChunk(encodedData, (int)info.Flags, info.PresentationTimeUs);
                            outputCount++;
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "encoder output " + info.Size + " bytes");
                            }
                        }
                        outputDone = (info.Flags & MediaCodec.BufferFlagEndOfStream) != 0;
                        encoder.ReleaseOutputBuffer(encoderStatus, false);
                    }
                    if (encoderStatus != (int)MediaCodecInfoState.TryAgainLater)
                    {
                        // Continue attempts to drain output.
                        continue;
                    }
                    // Encoder is drained, check to see if we've got a new frame of output from
                    // the decoder. (The output is going to a Surface, rather than a ByteBuffer,
                    // but we still get information through BufferInfo.)
                    if (!decoderDone)
                    {
                        int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                        if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                        {
                            // no output available yet
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "no output from decoder available");
                            }
                            decoderOutputAvailable = false;
                        }
                        else if (decoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                        {
                            //decoderOutputBuffers = decoder.getOutputBuffers();
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "decoder output buffers changed (we don't care)");
                            }
                        }
                        else if (decoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                        {
                            // expected before first buffer of data
                            MediaFormat newFormat = decoder.OutputFormat;
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "decoder output format changed: " + newFormat);
                            }
                        }
                        else if (decoderStatus < 0)
                        {
                            fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                        }
                        else                             // decoderStatus >= 0
                        {
                            if (AppSettings.Logging.SendToConsole)
                            {
                                Log.Debug(TAG, "surface decoder given buffer " + decoderStatus + " (size=" + info.Size + "(");
                            }
                            // The ByteBuffers are null references, but we still get a nonzero
                            // size for the decoded data.
                            bool doRender = (info.Size != 0);
                            // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                            // to SurfaceTexture to convert to a texture. The API doesn't
                            // guarantee that the texture will be available before the call
                            // returns, so we need to wait for the onFrameAvailable callback to
                            // fire. If we don't wait, we risk rendering from the previous frame.
                            decoder.ReleaseOutputBuffer(decoderStatus, doRender);
                            if (doRender)
                            {
                                // This waits for the image and renders it after it arrives.
                                if (AppSettings.Logging.SendToConsole)
                                {
                                    Log.Debug(TAG, "awaiting frame");
                                }
                                outputSurface.AwaitNewImage();
                                outputSurface.DrawImage();
                                // Send it to the encoder.
                                inputSurface.SetPresentationTime(info.PresentationTimeUs * 1000);
                                if (AppSettings.Logging.SendToConsole)
                                {
                                    Log.Debug(TAG, "swapBuffers");
                                }
                                inputSurface.SwapBuffers();
                            }
                            if ((info.Flags & MediaCodec.BufferFlagEndOfStream) != 0)
                            {
                                // forward decoder EOS to encoder
                                if (AppSettings.Logging.SendToConsole)
                                {
                                    Log.Debug(TAG, "signaling input EOS");
                                }
                                if (WORK_AROUND_BUGS)
                                {
                                    // Bail early, possibly dropping a frame.
                                    return;
                                }
                                else
                                {
                                    encoder.SignalEndOfInputStream();
                                }
                            }
                        }
                    }
                }
            }
            if (inputChunk != outputCount)
            {
                throw new RuntimeException("frame lost: " + inputChunk + " in, " +
                                           outputCount + " out");
            }
        }
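
editVideoData is only half the story; its decoder, encoder, and surface arguments have to be wired together first. A condensed sketch of that setup, in the spirit of the DecodeEditEncodeTest this code derives from (MIME_TYPE, FRAGMENT_SHADER, the VideoChunks format accessors, and the InputSurface/OutputSurface wrapper APIs are assumptions):

        private VideoChunks editVideoFile(VideoChunks inputData)
        {
            VideoChunks outputData  = new VideoChunks();
            MediaFormat inputFormat = inputData.getMediaFormat();

            // Encode to the same dimensions, with the encoder reading its input
            // from a Surface.
            MediaFormat outputFormat = MediaFormat.CreateVideoFormat(MIME_TYPE,
                inputFormat.GetInteger(MediaFormat.KeyWidth),
                inputFormat.GetInteger(MediaFormat.KeyHeight));
            outputFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
            outputFormat.SetInteger(MediaFormat.KeyBitRate, inputFormat.GetInteger(MediaFormat.KeyBitRate));
            outputFormat.SetInteger(MediaFormat.KeyFrameRate, inputFormat.GetInteger(MediaFormat.KeyFrameRate));
            outputFormat.SetInteger(MediaFormat.KeyIFrameInterval, inputFormat.GetInteger(MediaFormat.KeyIFrameInterval));
            outputData.setMediaFormat(outputFormat);

            MediaCodec encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
            encoder.Configure(outputFormat, null, null, MediaCodecConfigFlags.Encode);
            InputSurface inputSurface = new InputSurface(encoder.CreateInputSurface());
            inputSurface.MakeCurrent();
            encoder.Start();

            // The decoder renders onto the OutputSurface, which owns the
            // SurfaceTexture and the (replaceable) fragment shader.
            OutputSurface outputSurface = new OutputSurface();
            outputSurface.ChangeFragmentShader(FRAGMENT_SHADER);
            MediaCodec decoder = MediaCodec.CreateDecoderByType(MIME_TYPE);
            decoder.Configure(inputFormat, outputSurface.Surface, null, 0);
            decoder.Start();

            editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);

            // Release in reverse order of creation.
            outputSurface.Release();
            inputSurface.Release();
            encoder.Stop();
            encoder.Release();
            decoder.Stop();
            decoder.Release();
            return outputData;
        }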
Example #4
        private void encodeCameraToMpeg()
        {
            // arbitrary but popular values
            int encWidth   = 640;
            int encHeight  = 480;
            int encBitRate = 6000000;                  // 6 Mbps

            Log.Debug(TAG, MIME_TYPE + " output " + encWidth + "x" + encHeight + " @" + encBitRate);

            try {
                prepareCamera(encWidth, encHeight);
                prepareEncoder(encWidth, encHeight, encBitRate);
                _inputSurface.MakeCurrent();
                prepareSurfaceTexture();

                _camera.StartPreview();

                long startWhen  = JavaSystem.NanoTime();
                long desiredEnd = startWhen + DURATION_SEC * 1000000000L;
                var  st         = _outputSurface.SurfaceTexture;
                int  frameCount = 0;

                var curShad = false;

                while (JavaSystem.NanoTime() < desiredEnd)
                {
                    // Feed any pending encoder output into the muxer.
                    drainEncoder(false);

                    if ((frameCount % 24) == 0)
                    {
                        curShad = !curShad;
                    }

                    if (curShad)
                    {
                        _outputSurface.ChangeFragmentShader(FRAGMENT_SHADER1);
                    }
                    else
                    {
                        _outputSurface.ChangeFragmentShader(FRAGMENT_SHADER2);
                    }

                    frameCount++;

                    // Acquire a new frame of input, and render it to the Surface.  If we had a
                    // GLSurfaceView we could switch EGL contexts and call drawImage() a second
                    // time to render it on screen.  The texture can be shared between contexts by
                    // passing the GLSurfaceView's EGLContext as eglCreateContext()'s share_context
                    // argument.
                    _outputSurface.AwaitNewImage();
                    _outputSurface.DrawImage();

                    // Set the presentation time stamp from the SurfaceTexture's time stamp.  This
                    // will be used by MediaMuxer to set the PTS in the video.
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "present: " +
                                  ((st.Timestamp - startWhen) / 1000000.0) + "ms");
                    }
                    _inputSurface.SetPresentationTime(st.Timestamp);

                    // Submit it to the encoder.  The eglSwapBuffers call will block if the input
                    // is full, which would be bad if it stayed full until we dequeued an output
                    // buffer (which we can't do, since we're stuck here).  So long as we fully drain
                    // the encoder before supplying additional input, the system guarantees that we
                    // can supply another frame without blocking.
                    if (AppSettings.Logging.SendToConsole)
                    {
                        Log.Debug(TAG, "sending frame to encoder");
                    }
                    _inputSurface.SwapBuffers();
                }

                // send end-of-stream to encoder, and drain remaining output
                drainEncoder(true);
            } finally {
                // release everything we grabbed
                releaseCamera();
                releaseEncoder();
                releaseSurfaceTexture();
            }
        }
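
A minimal sketch of the prepareEncoder this example calls, following the standard surface-input MediaCodec setup; the fields (_encoder, _inputSurface, _muxer, _outputPath) and the FRAME_RATE and IFRAME_INTERVAL constants are assumptions:

        private void prepareEncoder(int width, int height, int bitRate)
        {
            MediaFormat format = MediaFormat.CreateVideoFormat(MIME_TYPE, width, height);

            // Input comes from a Surface, so request the surface color format.
            format.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
            format.SetInteger(MediaFormat.KeyBitRate, bitRate);
            format.SetInteger(MediaFormat.KeyFrameRate, FRAME_RATE);
            format.SetInteger(MediaFormat.KeyIFrameInterval, IFRAME_INTERVAL);

            _encoder = MediaCodec.CreateEncoderByType(MIME_TYPE);
            _encoder.Configure(format, null, null, MediaCodecConfigFlags.Encode);
            _inputSurface = new InputSurface(_encoder.CreateInputSurface());
            _encoder.Start();

            // The video track is added later, in drainEncoder, once the actual
            // output format is known.
            _muxer = new MediaMuxer(_outputPath, MuxerOutputType.Mpeg4);
        }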
Example #5
        /**
         * Generates video frames, feeds them into the encoder, and writes the output to the
         * VideoChunks instance.
         */
        private void generateVideoData(MediaCodec encoder, InputSurface inputSurface,
                                       VideoChunks output)
        {
            ByteBuffer[]          encoderOutputBuffers = encoder.GetOutputBuffers();
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int generateIndex          = 0;
            int outputCount            = 0;
            // Loop until the output side is done.
            bool inputDone  = false;
            bool outputDone = false;

            while (!outputDone)
            {
                if (AppSettings.Logging.SendToConsole)
                {
                    Log.Debug(TAG, "gen loop");
                }
                // If we're not done submitting frames, generate a new one and submit it. The
                // eglSwapBuffers call will block if the input is full.
                if (!inputDone)
                {
                    if (generateIndex == NUM_FRAMES)
                    {
                        // Send an empty frame with the end-of-stream flag set.
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "signaling input EOS");
                        }
                        if (WORK_AROUND_BUGS)
                        {
                            // Might drop a frame, but at least we won't crash mediaserver.
                            try { Thread.Sleep(500); } catch (InterruptedException) { }
                            outputDone = true;
                        }
                        else
                        {
                            encoder.SignalEndOfInputStream();
                        }
                        inputDone = true;
                    }
                    else
                    {
                        generateSurfaceFrame(generateIndex);
                        inputSurface.SetPresentationTime(computePresentationTime(generateIndex) * 1000);
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "inputSurface swapBuffers");
                        }
                        inputSurface.SwapBuffers();
                    }
                    generateIndex++;
                }
                // Check for output from the encoder. If there's no output yet, we either need to
                // provide more input, or we need to wait for the encoder to work its magic. We
                // can't actually tell which is the case, so if we can't get an output buffer right
                // away we loop around and see if it wants more input.
                //
                // If we do find output, drain it all before supplying more input.
                while (true)
                {
                    int encoderStatus = encoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                    {
                        // no output available yet
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "no output from encoder available");
                        }
                        break;                         // out of while
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                    {
                        // not expected for an encoder
                        encoderOutputBuffers = encoder.GetOutputBuffers();
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "encoder output buffers changed");
                        }
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                    {
                        // not expected for an encoder
                        MediaFormat newFormat = encoder.OutputFormat;
                        if (AppSettings.Logging.SendToConsole)
                        {
                            Log.Debug(TAG, "encoder output format changed: " + newFormat);
                        }
                    }
                    else if (encoderStatus < 0)
                    {
                        fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                    }
                    else                         // encoderStatus >= 0
                    {
                        ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                        if (encodedData == null)
                        {
                            fail("encoderOutputBuffer " + encoderStatus + " was null");
                        }
                        // Codec config flag must be set iff this is the first chunk of output. This
                        // may not hold for all codecs, but it appears to be the case for video/avc.
                        assertTrue((info.Flags & MediaCodec.BufferFlagCodecConfig) != 0 ||
                                   outputCount != 0);
                        if (info.Size != 0)
                        {
                            // Adjust the ByteBuffer values to match BufferInfo.
                            encodedData.Position(info.Offset);
                            encodedData.Limit(info.Offset + info.Size);
                            output.addChunk(encodedData, (int)info.Flags, info.PresentationTimeUs);
                            outputCount++;
                        }
                        encoder.ReleaseOutputBuffer(encoderStatus, false);
                        if ((info.Flags & MediaCodec.BufferFlagEndOfStream) != 0)
                        {
                            outputDone = true;
                            break;                             // out of while
                        }
                    }
                }
            }


            assertEquals("Frame count", NUM_FRAMES + 1, outputCount);
        }
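
generateVideoData relies on two small helpers. Sketches of both follow, after the pattern of the original test code; WIDTH, HEIGHT, FRAME_RATE and the TEST_* color constants are assumptions:

        private static long computePresentationTime(int frameIndex)
        {
            // A 132 µs offset keeps the first frame's timestamp non-zero.
            return 132 + frameIndex * 1000000L / FRAME_RATE;
        }

        private void generateSurfaceFrame(int frameIndex)
        {
            // Clear to one color, then scissor a rectangle of a second color whose
            // position depends on frameIndex, so every frame is distinguishable.
            frameIndex %= 8;
            int startX, startY;
            if (frameIndex < 4)
            {
                startX = frameIndex * (WIDTH / 4);
                startY = HEIGHT / 2;
            }
            else
            {
                startX = (7 - frameIndex) * (WIDTH / 4);
                startY = 0;
            }
            GLES20.GlClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
            GLES20.GlClear(GLES20.GlColorBufferBit);
            GLES20.GlEnable(GLES20.GlScissorTest);
            GLES20.GlScissor(startX, startY, WIDTH / 4, HEIGHT / 2);
            GLES20.GlClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
            GLES20.GlClear(GLES20.GlColorBufferBit);
            GLES20.GlDisable(GLES20.GlScissorTest);
        }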